/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
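/* The fields above are computed by rs6000_stack_info () (declared further
   down in this file) and describe the complete frame layout consumed by
   the prologue/epilogue code and by debug_stack_info ().  */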
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct GTY(()) machine_function
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;
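/* One machine_function is allocated for each function being compiled;
   see rs6000_init_machine_status () among the forward declarations
   below.  */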
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero to use isel instructions.  */
int rs6000_isel;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue. This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool spe_abi;			/* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;		/* True if -mabi=altivec/no-altivec used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;			/* True if -mvrsave was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
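/* Each builtin_description names one target builtin: the target-flag
   mask that enables it, its insn code, its user-visible name, and its
   rs6000_builtins enumerator.  */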
\f
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
  const int cache_line_size;	/* cache line size in bytes.  */
  const int l1_cache_size;	/* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;	/* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
					operations.  */
};

const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */
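/* COSTS_N_INSNS (N), from rtl.h, scales N into the units used by the RTX
   cost hooks, so each entry below is a multiple of the cost of a single
   integer add.  */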
/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  512,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,			/* cache line size */
  256,			/* l1 cache */
  1024,			/* l2 cache */
  0,			/* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,			/* cache line size */
  128,			/* l1 cache */
  2048,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};
/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  4,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  128,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,			/* cache line size */
  8,			/* l1 cache */
  64,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  16,			/* l1 cache */
  512,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};
/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  512,			/* l2 cache */
  6,			/* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  512,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  1,			/* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,			/* cache line size */
  32,			/* l1 cache */
  256,			/* l2 cache */
  1,			/* prefetch streams */
};

/* Instruction costs on E300C2 and E300C3 cores.  */
static const
struct processor_costs ppce300c2c3_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  16,			/* l1 cache */
  16,			/* l2 cache */
  1,			/* prefetch streams */
};

/* Instruction costs on PPCE500MC processors.  */
static const
struct processor_costs ppce500mc_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (14),   /* divsi */
  COSTS_N_INSNS (14),   /* divdi */
  COSTS_N_INSNS (8),    /* fp */
  COSTS_N_INSNS (10),   /* dmul */
  COSTS_N_INSNS (36),   /* sdiv */
  COSTS_N_INSNS (66),   /* ddiv */
  64,			/* cache line size */
  32,			/* l1 cache */
  128,			/* l2 cache */
  1,			/* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,			/* cache line size */
  32,			/* l1 cache */
  1024,			/* l2 cache */
  8,			/* prefetch streams */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,			/* cache line size */
  64,			/* l1 cache */
  2048,			/* l2 cache */
  16,			/* prefetch streams */
};
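/* The rs6000_cost pointer declared before these tables is set to one of
   them according to the processor selected with -mcpu/-mtune; see
   rs6000_override_options () below.  */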
\f
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
	(enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static void * rs6000_alloc_sched_context (void);
static void rs6000_init_sched_context (void *, bool);
static void rs6000_set_sched_context (void *);
static void rs6000_free_sched_context (void *);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);
static tree rs6000_builtin_vec_perm (tree, tree *);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);
static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct GTY(()) toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
\f
/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
\f
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
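/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001.  */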
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
#undef TARGET_SCHED_INIT_SCHED_CONTEXT
#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
#undef TARGET_SCHED_SET_SCHED_CONTEXT
#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
#undef TARGET_SCHED_FREE_SCHED_CONTEXT
#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls

struct gcc_target targetm = TARGET_INITIALIZER;
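/* TARGET_INITIALIZER, from target-def.h, expands to an initializer built
   from the TARGET_* macros #defined above, so targetm carries the
   rs6000-specific hook implementations.  */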
\f

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
	&& (mode != TDmode || (regno % 2) == 0)
	&& FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	   && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, (enum machine_mode) m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}
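/* The table filled in above is what the HARD_REGNO_MODE_OK macro in
   rs6000.h consults, so the register/mode check is paid once at
   start-up rather than on every query.  */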
1352
e4cad568
GK
1353#if TARGET_MACHO
1354/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1355
1356static void
1357darwin_rs6000_override_options (void)
1358{
1359 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1360 off. */
1361 rs6000_altivec_abi = 1;
1362 TARGET_ALTIVEC_VRSAVE = 1;
1363 if (DEFAULT_ABI == ABI_DARWIN)
1364 {
1365 if (MACHO_DYNAMIC_NO_PIC_P)
1366 {
1367 if (flag_pic)
1368 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1369 flag_pic = 0;
1370 }
1371 else if (flag_pic == 1)
1372 {
1373 flag_pic = 2;
1374 }
1375 }
1376 if (TARGET_64BIT && ! TARGET_POWERPC64)
1377 {
1378 target_flags |= MASK_POWERPC64;
1379 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1380 }
1381 if (flag_mkernel)
1382 {
1383 rs6000_default_long_calls = 1;
1384 target_flags |= MASK_SOFT_FLOAT;
1385 }
1386
1387 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1388 Altivec. */
1389 if (!flag_mkernel && !flag_apple_kext
1390 && TARGET_64BIT
1391 && ! (target_flags_explicit & MASK_ALTIVEC))
1392 target_flags |= MASK_ALTIVEC;
1393
1394 /* Unless the user (not the configurer) has explicitly overridden
1395 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to G4
1396 unless targeting the kernel. */
1397 if (!flag_mkernel
1398 && !flag_apple_kext
1399 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1400 && ! (target_flags_explicit & MASK_ALTIVEC)
1401 && ! rs6000_select[1].string)
1402 {
1403 target_flags |= MASK_ALTIVEC;
1404 }
1405}
1406#endif
1407
1408/* If not otherwise specified by a target, make 'long double' equivalent to
1409 'double'. */
1410
1411#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1412#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1413#endif
1414
1415/* Override command line options. Mostly we process the processor
1416 type and sometimes adjust other TARGET_ options. */
1417
1418void
d779d0dc 1419rs6000_override_options (const char *default_cpu)
5248c961 1420{
c4d38ccb 1421 size_t i, j;
8e3f41e7 1422 struct rs6000_cpu_select *ptr;
66188a7e 1423 int set_masks;
5248c961 1424
66188a7e 1425 /* Simplifications for entries below. */
85638c0d 1426
1427 enum {
1428 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1429 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1430 };
85638c0d 1431
1432 /* This table occasionally claims that a processor does not support
1433 a particular feature even though it does, but the feature is slower
1434 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1435 complete description of the processor's support.
1436
1437 Please keep this list in order, and don't forget to update the
1438 documentation in invoke.texi when adding a new processor or
1439 flag. */
1440 static struct ptt
1441 {
1442 const char *const name; /* Canonical processor name. */
1443 const enum processor_type processor; /* Processor type enum value. */
1444 const int target_enable; /* Target flags to enable. */
8b60264b 1445 } const processor_target_table[]
66188a7e 1446 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1447 {"403", PROCESSOR_PPC403,
66188a7e 1448 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1449 {"405", PROCESSOR_PPC405,
1450 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1451 {"405fp", PROCESSOR_PPC405,
1452 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1453 {"440", PROCESSOR_PPC440,
1454 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1455 {"440fp", PROCESSOR_PPC440,
1456 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1457 {"464", PROCESSOR_PPC440,
1458 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1459 {"464fp", PROCESSOR_PPC440,
1460 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1461 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1462 {"601", PROCESSOR_PPC601,
1463 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1464 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1465 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1466 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1467 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1468 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1469 {"620", PROCESSOR_PPC620,
1470 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1471 {"630", PROCESSOR_PPC630,
1472 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1473 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1474 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1475 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1476 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1477 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1478 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1479 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1480 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1481 /* 8548 has a dummy entry for now. */
a45bce6e 1482 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
1483 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1484 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1485 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1486 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1487 {"970", PROCESSOR_POWER4,
66188a7e 1488 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1489 {"cell", PROCESSOR_CELL,
1490 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1491 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1492 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1493 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1494 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1495 {"G5", PROCESSOR_POWER4,
1496 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1497 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1498 {"power2", PROCESSOR_POWER,
1499 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1500 {"power3", PROCESSOR_PPC630,
1501 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1502 {"power4", PROCESSOR_POWER4,
9a8d7941 1503 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1504 | MASK_MFCRF},
ec507f2d 1505 {"power5", PROCESSOR_POWER5,
9a8d7941 1506 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1507 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1508 {"power5+", PROCESSOR_POWER5,
9a8d7941 1509 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1510 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1511 {"power6", PROCESSOR_POWER6,
1512 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1513 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1514 {"power6x", PROCESSOR_POWER6,
1515 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1516 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1517 | MASK_MFPGPR},
1518 {"power7", PROCESSOR_POWER5,
1519 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1520 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
1521 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1522 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1523 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1524 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1525 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1526 {"rios2", PROCESSOR_RIOS2,
1527 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1528 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1529 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1530 {"rs64", PROCESSOR_RS64A,
1531 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1532 };
5248c961 1533
ca7558fc 1534 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1535
1536 /* Some OSs don't support saving the high part of 64-bit registers on
1537 context switch. Other OSs don't support saving Altivec registers.
1538 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1539 settings; if the user wants either, the user must explicitly specify
1540 them and we won't interfere with the user's specification. */
1541
1542 enum {
1543 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1544 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1545 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1546 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1547 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1548 };
0d1fbc8c 1549
c4ad648e 1550 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1551#ifdef OS_MISSING_POWERPC64
1552 if (OS_MISSING_POWERPC64)
1553 set_masks &= ~MASK_POWERPC64;
1554#endif
1555#ifdef OS_MISSING_ALTIVEC
1556 if (OS_MISSING_ALTIVEC)
1557 set_masks &= ~MASK_ALTIVEC;
1558#endif
1559
1560 /* Don't let the processor default override flags given explicitly by the user. */
1561 set_masks &= ~target_flags_explicit;
957211c3 1562
a4f6c312 1563 /* Identify the processor type. */
8e3f41e7 1564 rs6000_select[0].string = default_cpu;
3cb999d8 1565 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1566
b6a1cbae 1567 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1568 {
1569 ptr = &rs6000_select[i];
1570 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1571 {
1572 for (j = 0; j < ptt_size; j++)
1573 if (! strcmp (ptr->string, processor_target_table[j].name))
1574 {
1575 if (ptr->set_tune_p)
1576 rs6000_cpu = processor_target_table[j].processor;
1577
1578 if (ptr->set_arch_p)
1579 {
1580 target_flags &= ~set_masks;
1581 target_flags |= (processor_target_table[j].target_enable
1582 & set_masks);
1583 }
1584 break;
1585 }
1586
4406229e 1587 if (j == ptt_size)
8e3f41e7 1588 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1589 }
1590 }
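/* Worked example (illustrative only): with -mcpu=power5 and no other -m
   options, rs6000_select[1] matches the "power5" entry above, so
   rs6000_cpu becomes PROCESSOR_POWER5 and target_flags gains
   POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
   | MASK_MFCRF | MASK_POPCNTB, minus any bits the user set explicitly
   (already removed from set_masks) or bits cleared via
   OS_MISSING_POWERPC64 / OS_MISSING_ALTIVEC. Adding -mtune=power6 on
   top only changes rs6000_cpu through rs6000_select[2] (set_tune_p),
   leaving target_flags untouched. */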
8a61d227 1591
1592 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1593 && !rs6000_explicit_options.isel)
1594 rs6000_isel = 1;
1595
1596 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1597 || rs6000_cpu == PROCESSOR_PPCE500MC)
1598 {
1599 if (TARGET_ALTIVEC)
1600 error ("AltiVec not supported in this target");
1601 if (TARGET_SPE)
1602 error ("Spe not supported in this target");
1603 }
1604
25696a75 1605 /* Disable Cell microcode if we are optimizing for the Cell
1606 and not optimizing for size. */
1607 if (rs6000_gen_cell_microcode == -1)
1608 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1609 && !optimize_size);
1610
1611 /* If we are optimizing big endian systems for space, and it is OK to
1612 use instructions that would be microcoded on the Cell, use the
1613 load/store multiple and string instructions. */
1614 if (BYTES_BIG_ENDIAN && optimize_size && rs6000_gen_cell_microcode)
1615 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1616
1617 /* Don't allow -mmultiple or -mstring on little endian systems
1618 unless the cpu is a 750, because the hardware doesn't support the
1619 instructions used in little endian mode, and using them causes an
1620 alignment trap. The 750 does not cause an alignment trap (except
1621 when the target is unaligned). */
bef84347 1622
b21fb038 1623 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1624 {
1625 if (TARGET_MULTIPLE)
1626 {
1627 target_flags &= ~MASK_MULTIPLE;
b21fb038 1628 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1629 warning (0, "-mmultiple is not supported on little endian systems");
1630 }
1631
1632 if (TARGET_STRING)
1633 {
1634 target_flags &= ~MASK_STRING;
b21fb038 1635 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1636 warning (0, "-mstring is not supported on little endian systems");
1637 }
1638 }
3933e0e1 1639
38c1f2d7
MM
1640 /* Set debug flags */
1641 if (rs6000_debug_name)
1642 {
bfc79d3b 1643 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1644 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1645 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1646 rs6000_debug_stack = 1;
bfc79d3b 1647 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1648 rs6000_debug_arg = 1;
1649 else
c725bd79 1650 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1651 }
1652
57ac7be9
AM
1653 if (rs6000_traceback_name)
1654 {
1655 if (! strncmp (rs6000_traceback_name, "full", 4))
1656 rs6000_traceback = traceback_full;
1657 else if (! strncmp (rs6000_traceback_name, "part", 4))
1658 rs6000_traceback = traceback_part;
1659 else if (! strncmp (rs6000_traceback_name, "no", 2))
1660 rs6000_traceback = traceback_none;
1661 else
9e637a26 1662 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1663 rs6000_traceback_name);
1664 }
1665
78f5898b
AH
1666 if (!rs6000_explicit_options.long_double)
1667 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1668
602ea4d3 1669#ifndef POWERPC_LINUX
d3603e8c 1670 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1671 rs6000_ieeequad = 1;
1672#endif
1673
0db747be
DE
1674 /* Enable Altivec ABI for AIX -maltivec. */
1675 if (TARGET_XCOFF && TARGET_ALTIVEC)
1676 rs6000_altivec_abi = 1;
1677
a2db2771
JJ
1678 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1679 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1680 be explicitly overridden in either case. */
1681 if (TARGET_ELF)
6d0ef01e 1682 {
a2db2771
JJ
1683 if (!rs6000_explicit_options.altivec_abi
1684 && (TARGET_64BIT || TARGET_ALTIVEC))
1685 rs6000_altivec_abi = 1;
1686
1687 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1688 if (!rs6000_explicit_options.vrsave)
1689 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1690 }
1691
594a51fe
SS
1692 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1693 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1694 {
1695 rs6000_darwin64_abi = 1;
9c7956fd 1696#if TARGET_MACHO
6ac49599 1697 darwin_one_byte_bool = 1;
9c7956fd 1698#endif
d9168963
SS
1699 /* Default to natural alignment, for better performance. */
1700 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1701 }
1702
194c524a
DE
1703 /* Place FP constants in the constant pool instead of TOC
1704 if section anchors enabled. */
1705 if (flag_section_anchors)
1706 TARGET_NO_FP_IN_TOC = 1;
1707
c4501e62
JJ
1708 /* Handle -mtls-size option. */
1709 rs6000_parse_tls_size_option ();
1710
a7ae18e2
AH
1711#ifdef SUBTARGET_OVERRIDE_OPTIONS
1712 SUBTARGET_OVERRIDE_OPTIONS;
1713#endif
1714#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1715 SUBSUBTARGET_OVERRIDE_OPTIONS;
1716#endif
4d4cbc0e
AH
1717#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1718 SUB3TARGET_OVERRIDE_OPTIONS;
1719#endif
a7ae18e2 1720
edae5fe3 1721 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1722 {
edae5fe3 1723 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1724 MASK_STRING above when optimizing for size. */
1725 if ((target_flags & MASK_STRING) != 0)
1726 target_flags = target_flags & ~MASK_STRING;
1727 }
1728 else if (rs6000_select[1].string != NULL)
1729 {
1730 /* For the powerpc-eabispe configuration, we set all these by
1731 default, so let's unset them if we manually set another
1732 CPU that is not the E500. */
a2db2771 1733 if (!rs6000_explicit_options.spe_abi)
5da702b1 1734 rs6000_spe_abi = 0;
78f5898b 1735 if (!rs6000_explicit_options.spe)
5da702b1 1736 rs6000_spe = 0;
78f5898b 1737 if (!rs6000_explicit_options.float_gprs)
5da702b1 1738 rs6000_float_gprs = 0;
78f5898b 1739 if (!rs6000_explicit_options.isel)
5da702b1
AH
1740 rs6000_isel = 0;
1741 }
b5044283 1742
eca0d5e8
JM
1743 /* Detect invalid option combinations with E500. */
1744 CHECK_E500_OPTIONS;
1745
ec507f2d 1746 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1747 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1748 && rs6000_cpu != PROCESSOR_POWER6
1749 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1750 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1751 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1752 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1753 || rs6000_cpu == PROCESSOR_POWER5
1754 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1755
ec507f2d
DE
1756 rs6000_sched_restricted_insns_priority
1757 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1758
569fa502 1759 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1760 rs6000_sched_costly_dep
1761 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1762
569fa502
DN
1763 if (rs6000_sched_costly_dep_str)
1764 {
f676971a 1765 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1766 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1767 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1768 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1769 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1770 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1771 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1772 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1773 else
32e8bb8e
ILT
1774 rs6000_sched_costly_dep = ((enum rs6000_dependence_cost)
1775 atoi (rs6000_sched_costly_dep_str));
cbe26ab8
DN
1776 }
1777
1778 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1779 rs6000_sched_insert_nops
1780 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1781
cbe26ab8
DN
1782 if (rs6000_sched_insert_nops_str)
1783 {
1784 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1785 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1786 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1787 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1788 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1789 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1790 else
32e8bb8e
ILT
1791 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion)
1792 atoi (rs6000_sched_insert_nops_str));
569fa502
DN
1793 }
1794
c81bebd7 1795#ifdef TARGET_REGNAMES
a4f6c312
SS
1796 /* If the user desires alternate register names, copy in the
1797 alternate names now. */
c81bebd7 1798 if (TARGET_REGNAMES)
4e135bdd 1799 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1800#endif
1801
df01da37 1802 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1803 If -maix-struct-return or -msvr4-struct-return was explicitly
1804 used, don't override with the ABI default. */
df01da37
DE
1805 if (!rs6000_explicit_options.aix_struct_ret)
1806 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1807
602ea4d3 1808 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1809 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1810
f676971a 1811 if (TARGET_TOC)
9ebbca7d 1812 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1813
301d03af
RS
1814 /* We can only guarantee the availability of DI pseudo-ops when
1815 assembling for 64-bit targets. */
ae6c1efd 1816 if (!TARGET_64BIT)
301d03af
RS
1817 {
1818 targetm.asm_out.aligned_op.di = NULL;
1819 targetm.asm_out.unaligned_op.di = NULL;
1820 }
1821
1494c534
DE
1822 /* Set branch target alignment, if not optimizing for size. */
1823 if (!optimize_size)
1824 {
d296e02e
AP
1825 /* Cell wants to be aligned 8byte for dual issue. */
1826 if (rs6000_cpu == PROCESSOR_CELL)
1827 {
1828 if (align_functions <= 0)
1829 align_functions = 8;
1830 if (align_jumps <= 0)
1831 align_jumps = 8;
1832 if (align_loops <= 0)
1833 align_loops = 8;
1834 }
44cd321e 1835 if (rs6000_align_branch_targets)
1494c534
DE
1836 {
1837 if (align_functions <= 0)
1838 align_functions = 16;
1839 if (align_jumps <= 0)
1840 align_jumps = 16;
1841 if (align_loops <= 0)
1842 align_loops = 16;
1843 }
1844 if (align_jumps_max_skip <= 0)
1845 align_jumps_max_skip = 15;
1846 if (align_loops_max_skip <= 0)
1847 align_loops_max_skip = 15;
1848 }
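/* For example (assuming no explicit -falign-* options): -mcpu=power5
   without -Os ends up with align_functions = align_jumps = align_loops
   = 16 and a max skip of 15 bytes, -mcpu=cell gets 8-byte alignment for
   dual issue, and -Os leaves all of the alignments at their defaults. */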
2792d578 1849
71f123ca
FS
1850 /* Arrange to save and restore machine status around nested functions. */
1851 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1852
1853 /* We should always be splitting complex arguments, but we can't break
1854 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1855 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1856 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1857
1858 /* Initialize rs6000_cost with the appropriate target costs. */
1859 if (optimize_size)
1860 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1861 else
1862 switch (rs6000_cpu)
1863 {
1864 case PROCESSOR_RIOS1:
1865 rs6000_cost = &rios1_cost;
1866 break;
1867
1868 case PROCESSOR_RIOS2:
1869 rs6000_cost = &rios2_cost;
1870 break;
1871
1872 case PROCESSOR_RS64A:
1873 rs6000_cost = &rs64a_cost;
1874 break;
1875
1876 case PROCESSOR_MPCCORE:
1877 rs6000_cost = &mpccore_cost;
1878 break;
1879
1880 case PROCESSOR_PPC403:
1881 rs6000_cost = &ppc403_cost;
1882 break;
1883
1884 case PROCESSOR_PPC405:
1885 rs6000_cost = &ppc405_cost;
1886 break;
1887
1888 case PROCESSOR_PPC440:
1889 rs6000_cost = &ppc440_cost;
1890 break;
1891
1892 case PROCESSOR_PPC601:
1893 rs6000_cost = &ppc601_cost;
1894 break;
1895
1896 case PROCESSOR_PPC603:
1897 rs6000_cost = &ppc603_cost;
1898 break;
1899
1900 case PROCESSOR_PPC604:
1901 rs6000_cost = &ppc604_cost;
1902 break;
1903
1904 case PROCESSOR_PPC604e:
1905 rs6000_cost = &ppc604e_cost;
1906 break;
1907
1908 case PROCESSOR_PPC620:
8b897cfa
RS
1909 rs6000_cost = &ppc620_cost;
1910 break;
1911
f0517163
RS
1912 case PROCESSOR_PPC630:
1913 rs6000_cost = &ppc630_cost;
1914 break;
1915
982afe02 1916 case PROCESSOR_CELL:
d296e02e
AP
1917 rs6000_cost = &ppccell_cost;
1918 break;
1919
8b897cfa
RS
1920 case PROCESSOR_PPC750:
1921 case PROCESSOR_PPC7400:
1922 rs6000_cost = &ppc750_cost;
1923 break;
1924
1925 case PROCESSOR_PPC7450:
1926 rs6000_cost = &ppc7450_cost;
1927 break;
1928
1929 case PROCESSOR_PPC8540:
1930 rs6000_cost = &ppc8540_cost;
1931 break;
1932
fa41c305
EW
1933 case PROCESSOR_PPCE300C2:
1934 case PROCESSOR_PPCE300C3:
1935 rs6000_cost = &ppce300c2c3_cost;
1936 break;
1937
edae5fe3
DE
1938 case PROCESSOR_PPCE500MC:
1939 rs6000_cost = &ppce500mc_cost;
1940 break;
1941
8b897cfa
RS
1942 case PROCESSOR_POWER4:
1943 case PROCESSOR_POWER5:
1944 rs6000_cost = &power4_cost;
1945 break;
1946
44cd321e
PS
1947 case PROCESSOR_POWER6:
1948 rs6000_cost = &power6_cost;
1949 break;
1950
8b897cfa 1951 default:
37409796 1952 gcc_unreachable ();
8b897cfa 1953 }
0b11da67
DE
1954
1955 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1956 set_param_value ("simultaneous-prefetches",
1957 rs6000_cost->simultaneous_prefetches);
1958 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1959 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1960 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1961 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1962 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1963 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1964
1965 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1966 can be optimized to ap = __builtin_next_arg (0). */
1967 if (DEFAULT_ABI != ABI_V4)
1968 targetm.expand_builtin_va_start = NULL;
1969
1970 /* Set up single/double float flags.
1971 If TARGET_HARD_FLOAT is set, but neither single or double is set,
1972 then set both flags. */
1973 if (TARGET_HARD_FLOAT && TARGET_FPRS
1974 && rs6000_single_float == 0 && rs6000_double_float == 0)
1975 rs6000_single_float = rs6000_double_float = 1;
1976
1977 /* Reset single and double FP flags if target is E500. */
1978 if (TARGET_E500)
1979 {
1980 rs6000_single_float = rs6000_double_float = 0;
1981 if (TARGET_E500_SINGLE)
1982 rs6000_single_float = 1;
1983 if (TARGET_E500_DOUBLE)
1984 rs6000_single_float = rs6000_double_float = 1;
1985 }
1986
001b9eb6
PH
1987 /* If not explicitly specified via option, decide whether to generate indexed
1988 load/store instructions. */
1989 if (TARGET_AVOID_XFORM == -1)
1990 /* Avoid indexed addressing when targeting Power6 in order to avoid
1991 the DERAT mispredict penalty. */
1992 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
1993
696e45ba 1994 rs6000_init_hard_regno_mode_ok ();
5248c961 1995}
5accd822 1996
7ccf35ed
DN
1997/* Implement targetm.vectorize.builtin_mask_for_load. */
1998static tree
1999rs6000_builtin_mask_for_load (void)
2000{
2001 if (TARGET_ALTIVEC)
2002 return altivec_builtin_mask_for_load;
2003 else
2004 return 0;
2005}
2006
7910ae0c
DN
2007/* Implement targetm.vectorize.builtin_conversion.
2008 Returns a decl of a function that implements conversion of an integer vector
2009 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2010 side of the conversion.
2011 Return NULL_TREE if it is not available. */
f57d17f1
TM
2012static tree
2013rs6000_builtin_conversion (enum tree_code code, tree type)
2014{
2015 if (!TARGET_ALTIVEC)
2016 return NULL_TREE;
982afe02 2017
f57d17f1
TM
2018 switch (code)
2019 {
7910ae0c
DN
2020 case FIX_TRUNC_EXPR:
2021 switch (TYPE_MODE (type))
2022 {
2023 case V4SImode:
2024 return TYPE_UNSIGNED (type)
2025 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2026 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2027 default:
2028 return NULL_TREE;
2029 }
2030
f57d17f1
TM
2031 case FLOAT_EXPR:
2032 switch (TYPE_MODE (type))
2033 {
2034 case V4SImode:
7910ae0c
DN
2035 return TYPE_UNSIGNED (type)
2036 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2037 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2038 default:
2039 return NULL_TREE;
2040 }
7910ae0c 2041
2042 default:
2043 return NULL_TREE;
2044 }
2045}
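/* For example, vectorizing a conversion from unsigned int to float on
   V4SImode data resolves to the vcfux builtin above; any other vector
   mode returns NULL_TREE, and the vectorizer must then handle the
   conversion some other way (or leave the loop unvectorized). */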
2046
89d67cca
DN
2047/* Implement targetm.vectorize.builtin_mul_widen_even. */
2048static tree
2049rs6000_builtin_mul_widen_even (tree type)
2050{
2051 if (!TARGET_ALTIVEC)
2052 return NULL_TREE;
2053
2054 switch (TYPE_MODE (type))
2055 {
2056 case V8HImode:
7910ae0c
DN
2057 return TYPE_UNSIGNED (type)
2058 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2059 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2060
2061 case V16QImode:
7910ae0c
DN
2062 return TYPE_UNSIGNED (type)
2063 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2064 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2065 default:
2066 return NULL_TREE;
2067 }
2068}
2069
2070/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2071static tree
2072rs6000_builtin_mul_widen_odd (tree type)
2073{
2074 if (!TARGET_ALTIVEC)
2075 return NULL_TREE;
2076
2077 switch (TYPE_MODE (type))
2078 {
2079 case V8HImode:
7910ae0c
DN
2080 return TYPE_UNSIGNED (type)
2081 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2082 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2083
2084 case V16QImode:
7910ae0c
DN
2085 return TYPE_UNSIGNED (type)
2086 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2087 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2088 default:
2089 return NULL_TREE;
2090 }
2091}
2092
2093
2094/* Return true iff a data reference of TYPE can reach vector alignment (16)
2095 after applying N iterations. This routine does not determine how many
2096 iterations are required to reach the desired alignment. */
2097
2098static bool
3101faab 2099rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2100{
2101 if (is_packed)
2102 return false;
2103
2104 if (TARGET_32BIT)
2105 {
2106 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2107 return true;
2108
2109 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2110 return true;
2111
2112 return false;
2113 }
2114 else
2115 {
2116 if (TARGET_MACHO)
2117 return false;
2118
2119 /* Assume that all other types are naturally aligned. CHECKME! */
2120 return true;
2121 }
2122}
2123
0fca40f5
IR
2124/* Implement targetm.vectorize.builtin_vec_perm. */
2125tree
2126rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2127{
2128 tree d;
2129
2130 *mask_element_type = unsigned_char_type_node;
2131
2132 switch (TYPE_MODE (type))
2133 {
2134 case V16QImode:
2135 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2136 break;
2137
2138 case V8HImode:
2139 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2140 break;
2141
2142 case V4SImode:
2143 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2144 break;
2145
2146 case V4SFmode:
2147 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2148 break;
2149
2150 default:
2151 return NULL_TREE;
2152 }
2153
2154 gcc_assert (d);
2155 return d;
2156}
2157
5da702b1
AH
2158/* Handle generic options of the form -mfoo=yes/no.
2159 NAME is the option name.
2160 VALUE is the option value.
2161 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2162 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2163static void
5da702b1 2164rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2165{
5da702b1 2166 if (value == 0)
993f19a8 2167 return;
5da702b1
AH
2168 else if (!strcmp (value, "yes"))
2169 *flag = 1;
2170 else if (!strcmp (value, "no"))
2171 *flag = 0;
08b57fb3 2172 else
5da702b1 2173 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2174}
2175
c4501e62
JJ
2176/* Validate and record the size specified with the -mtls-size option. */
2177
2178static void
863d938c 2179rs6000_parse_tls_size_option (void)
c4501e62
JJ
2180{
2181 if (rs6000_tls_size_string == 0)
2182 return;
2183 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2184 rs6000_tls_size = 16;
2185 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2186 rs6000_tls_size = 32;
2187 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2188 rs6000_tls_size = 64;
2189 else
9e637a26 2190 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2191}
2192
5accd822 2193void
a2369ed3 2194optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2195{
2e3f0db6
DJ
2196 if (DEFAULT_ABI == ABI_DARWIN)
2197 /* The Darwin libraries never set errno, so we might as well
2198 avoid calling them when that's the only reason we would. */
2199 flag_errno_math = 0;
59d6560b
DE
2200
2201 /* Double growth factor to counter reduced min jump length. */
2202 set_param_value ("max-grow-copy-bb-insns", 16);
2203
2204 /* Enable section anchors by default.
2205 Skip section anchors for Objective C and Objective C++
2206 until the front ends are fixed. */
2207 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2208 flag_section_anchors = 2;
5accd822 2209}
78f5898b 2210
0bb7b92e
ME
2211static enum fpu_type_t
2212rs6000_parse_fpu_option (const char *option)
2213{
2214 if (!strcmp("none", option)) return FPU_NONE;
2215 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2216 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2217 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2218 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2219 error("unknown value %s for -mfpu", option);
2220 return FPU_NONE;
2221}
2222
78f5898b
AH
2223/* Implement TARGET_HANDLE_OPTION. */
2224
2225static bool
2226rs6000_handle_option (size_t code, const char *arg, int value)
2227{
0bb7b92e
ME
2228 enum fpu_type_t fpu_type = FPU_NONE;
2229
78f5898b
AH
2230 switch (code)
2231 {
2232 case OPT_mno_power:
2233 target_flags &= ~(MASK_POWER | MASK_POWER2
2234 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2235 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2236 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2237 break;
2238 case OPT_mno_powerpc:
2239 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2240 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2241 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2242 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2243 break;
2244 case OPT_mfull_toc:
d2894ab5
DE
2245 target_flags &= ~MASK_MINIMAL_TOC;
2246 TARGET_NO_FP_IN_TOC = 0;
2247 TARGET_NO_SUM_IN_TOC = 0;
2248 target_flags_explicit |= MASK_MINIMAL_TOC;
2249#ifdef TARGET_USES_SYSV4_OPT
2250 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc behave
2251 just the same as -mminimal-toc. */
2252 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2253 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2254#endif
2255 break;
2256
2257#ifdef TARGET_USES_SYSV4_OPT
2258 case OPT_mtoc:
2259 /* Make -mtoc behave like -mminimal-toc. */
2260 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2261 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2262 break;
2263#endif
2264
2265#ifdef TARGET_USES_AIX64_OPT
2266 case OPT_maix64:
2267#else
2268 case OPT_m64:
2269#endif
2c9c9afd
AM
2270 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2271 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2272 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2273 break;
2274
2275#ifdef TARGET_USES_AIX64_OPT
2276 case OPT_maix32:
2277#else
2278 case OPT_m32:
2279#endif
2280 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2281 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2282 break;
2283
2284 case OPT_minsert_sched_nops_:
2285 rs6000_sched_insert_nops_str = arg;
2286 break;
2287
2288 case OPT_mminimal_toc:
2289 if (value == 1)
2290 {
d2894ab5
DE
2291 TARGET_NO_FP_IN_TOC = 0;
2292 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2293 }
2294 break;
2295
2296 case OPT_mpower:
2297 if (value == 1)
c2dba4ab
AH
2298 {
2299 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2300 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2301 }
78f5898b
AH
2302 break;
2303
2304 case OPT_mpower2:
2305 if (value == 1)
c2dba4ab
AH
2306 {
2307 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2308 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2309 }
78f5898b
AH
2310 break;
2311
2312 case OPT_mpowerpc_gpopt:
2313 case OPT_mpowerpc_gfxopt:
2314 if (value == 1)
c2dba4ab
AH
2315 {
2316 target_flags |= MASK_POWERPC;
2317 target_flags_explicit |= MASK_POWERPC;
2318 }
78f5898b
AH
2319 break;
2320
df01da37
DE
2321 case OPT_maix_struct_return:
2322 case OPT_msvr4_struct_return:
2323 rs6000_explicit_options.aix_struct_ret = true;
2324 break;
2325
b5e3caf2
BE
2326 case OPT_mvrsave:
2327 rs6000_explicit_options.vrsave = true;
2328 TARGET_ALTIVEC_VRSAVE = value;
2329 break;
2330
78f5898b 2331 case OPT_mvrsave_:
a2db2771 2332 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2333 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2334 break;
78f5898b 2335
94f4765c
NF
2336 case OPT_misel:
2337 rs6000_explicit_options.isel = true;
2338 rs6000_isel = value;
2339 break;
2340
78f5898b
AH
2341 case OPT_misel_:
2342 rs6000_explicit_options.isel = true;
2343 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2344 break;
2345
94f4765c
NF
2346 case OPT_mspe:
2347 rs6000_explicit_options.spe = true;
2348 rs6000_spe = value;
2349 break;
2350
78f5898b
AH
2351 case OPT_mspe_:
2352 rs6000_explicit_options.spe = true;
2353 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2354 break;
2355
2356 case OPT_mdebug_:
2357 rs6000_debug_name = arg;
2358 break;
2359
2360#ifdef TARGET_USES_SYSV4_OPT
2361 case OPT_mcall_:
2362 rs6000_abi_name = arg;
2363 break;
2364
2365 case OPT_msdata_:
2366 rs6000_sdata_name = arg;
2367 break;
2368
2369 case OPT_mtls_size_:
2370 rs6000_tls_size_string = arg;
2371 break;
2372
2373 case OPT_mrelocatable:
2374 if (value == 1)
c2dba4ab 2375 {
e0bf274f
AM
2376 target_flags |= MASK_MINIMAL_TOC;
2377 target_flags_explicit |= MASK_MINIMAL_TOC;
2378 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2379 }
78f5898b
AH
2380 break;
2381
2382 case OPT_mrelocatable_lib:
2383 if (value == 1)
c2dba4ab 2384 {
e0bf274f
AM
2385 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2386 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2387 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2388 }
78f5898b 2389 else
c2dba4ab
AH
2390 {
2391 target_flags &= ~MASK_RELOCATABLE;
2392 target_flags_explicit |= MASK_RELOCATABLE;
2393 }
78f5898b
AH
2394 break;
2395#endif
2396
2397 case OPT_mabi_:
78f5898b
AH
2398 if (!strcmp (arg, "altivec"))
2399 {
a2db2771 2400 rs6000_explicit_options.altivec_abi = true;
78f5898b 2401 rs6000_altivec_abi = 1;
a2db2771
JJ
2402
2403 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2404 rs6000_spe_abi = 0;
2405 }
2406 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2407 {
a2db2771 2408 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2409 rs6000_altivec_abi = 0;
2410 }
78f5898b
AH
2411 else if (! strcmp (arg, "spe"))
2412 {
a2db2771 2413 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2414 rs6000_spe_abi = 1;
2415 rs6000_altivec_abi = 0;
2416 if (!TARGET_SPE_ABI)
2417 error ("not configured for ABI: '%s'", arg);
2418 }
2419 else if (! strcmp (arg, "no-spe"))
d3603e8c 2420 {
a2db2771 2421 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2422 rs6000_spe_abi = 0;
2423 }
78f5898b
AH
2424
2425 /* These are here for testing during development only, do not
2426 document in the manual please. */
2427 else if (! strcmp (arg, "d64"))
2428 {
2429 rs6000_darwin64_abi = 1;
2430 warning (0, "Using darwin64 ABI");
2431 }
2432 else if (! strcmp (arg, "d32"))
2433 {
2434 rs6000_darwin64_abi = 0;
2435 warning (0, "Using old darwin ABI");
2436 }
2437
602ea4d3
JJ
2438 else if (! strcmp (arg, "ibmlongdouble"))
2439 {
d3603e8c 2440 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2441 rs6000_ieeequad = 0;
2442 warning (0, "Using IBM extended precision long double");
2443 }
2444 else if (! strcmp (arg, "ieeelongdouble"))
2445 {
d3603e8c 2446 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2447 rs6000_ieeequad = 1;
2448 warning (0, "Using IEEE extended precision long double");
2449 }
2450
78f5898b
AH
2451 else
2452 {
2453 error ("unknown ABI specified: '%s'", arg);
2454 return false;
2455 }
2456 break;
2457
2458 case OPT_mcpu_:
2459 rs6000_select[1].string = arg;
2460 break;
2461
2462 case OPT_mtune_:
2463 rs6000_select[2].string = arg;
2464 break;
2465
2466 case OPT_mtraceback_:
2467 rs6000_traceback_name = arg;
2468 break;
2469
2470 case OPT_mfloat_gprs_:
2471 rs6000_explicit_options.float_gprs = true;
2472 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2473 rs6000_float_gprs = 1;
2474 else if (! strcmp (arg, "double"))
2475 rs6000_float_gprs = 2;
2476 else if (! strcmp (arg, "no"))
2477 rs6000_float_gprs = 0;
2478 else
2479 {
2480 error ("invalid option for -mfloat-gprs: '%s'", arg);
2481 return false;
2482 }
2483 break;
2484
2485 case OPT_mlong_double_:
2486 rs6000_explicit_options.long_double = true;
2487 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2488 if (value != 64 && value != 128)
2489 {
2490 error ("Unknown switch -mlong-double-%s", arg);
2491 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2492 return false;
2493 }
2494 else
2495 rs6000_long_double_type_size = value;
2496 break;
2497
2498 case OPT_msched_costly_dep_:
2499 rs6000_sched_costly_dep_str = arg;
2500 break;
2501
2502 case OPT_malign_:
2503 rs6000_explicit_options.alignment = true;
2504 if (! strcmp (arg, "power"))
2505 {
2506 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2507 some C library functions, so warn about it. The flag may be
2508 useful for performance studies from time to time though, so
2509 don't disable it entirely. */
2510 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2511 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2512 " it is incompatible with the installed C and C++ libraries");
2513 rs6000_alignment_flags = MASK_ALIGN_POWER;
2514 }
2515 else if (! strcmp (arg, "natural"))
2516 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2517 else
2518 {
2519 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2520 return false;
2521 }
2522 break;
696e45ba
ME
2523
2524 case OPT_msingle_float:
2525 if (!TARGET_SINGLE_FPU)
2526 warning (0, "-msingle-float option equivalent to -mhard-float");
2527 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2528 rs6000_double_float = 0;
2529 target_flags &= ~MASK_SOFT_FLOAT;
2530 target_flags_explicit |= MASK_SOFT_FLOAT;
2531 break;
2532
2533 case OPT_mdouble_float:
2534 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2535 rs6000_single_float = 1;
2536 target_flags &= ~MASK_SOFT_FLOAT;
2537 target_flags_explicit |= MASK_SOFT_FLOAT;
2538 break;
2539
2540 case OPT_msimple_fpu:
2541 if (!TARGET_SINGLE_FPU)
2542 warning (0, "-msimple-fpu option ignored");
2543 break;
2544
2545 case OPT_mhard_float:
2546 /* -mhard_float implies -msingle-float and -mdouble-float. */
2547 rs6000_single_float = rs6000_double_float = 1;
2548 break;
2549
2550 case OPT_msoft_float:
2551 /* -msoft_float implies -mnosingle-float and -mnodouble-float. */
2552 rs6000_single_float = rs6000_double_float = 0;
2553 break;
0bb7b92e
ME
2554
2555 case OPT_mfpu_:
2556 fpu_type = rs6000_parse_fpu_option(arg);
2557 if (fpu_type != FPU_NONE)
2558 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2559 {
2560 target_flags &= ~MASK_SOFT_FLOAT;
2561 target_flags_explicit |= MASK_SOFT_FLOAT;
2562 rs6000_xilinx_fpu = 1;
2563 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2564 rs6000_single_float = 1;
2565 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2566 rs6000_single_float = rs6000_double_float = 1;
2567 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2568 rs6000_simple_fpu = 1;
2569 }
2570 else
2571 {
2572 /* -mfpu=none is equivalent to -msoft-float */
2573 target_flags |= MASK_SOFT_FLOAT;
2574 target_flags_explicit |= MASK_SOFT_FLOAT;
2575 rs6000_single_float = rs6000_double_float = 0;
2576 }
2577 break;
78f5898b
AH
2578 }
2579 return true;
2580}
3cfa4909
MM
2581\f
2582/* Do anything needed at the start of the asm file. */
2583
1bc7c5b6 2584static void
863d938c 2585rs6000_file_start (void)
3cfa4909 2586{
c4d38ccb 2587 size_t i;
3cfa4909 2588 char buffer[80];
d330fd93 2589 const char *start = buffer;
3cfa4909 2590 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2591 const char *default_cpu = TARGET_CPU_DEFAULT;
2592 FILE *file = asm_out_file;
2593
2594 default_file_start ();
2595
2596#ifdef TARGET_BI_ARCH
2597 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2598 default_cpu = 0;
2599#endif
3cfa4909
MM
2600
2601 if (flag_verbose_asm)
2602 {
2603 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2604 rs6000_select[0].string = default_cpu;
2605
b6a1cbae 2606 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2607 {
2608 ptr = &rs6000_select[i];
2609 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2610 {
2611 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2612 start = "";
2613 }
2614 }
2615
9c6b4ed9 2616 if (PPC405_ERRATUM77)
b0bfee6e 2617 {
9c6b4ed9 2618 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2619 start = "";
2620 }
b0bfee6e 2621
b91da81f 2622#ifdef USING_ELFOS_H
3cfa4909
MM
2623 switch (rs6000_sdata)
2624 {
2625 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2626 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2627 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2628 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2629 }
2630
2631 if (rs6000_sdata && g_switch_value)
2632 {
307b599c
MK
2633 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2634 g_switch_value);
3cfa4909
MM
2635 start = "";
2636 }
2637#endif
2638
2639 if (*start == '\0')
949ea356 2640 putc ('\n', file);
3cfa4909 2641 }
b723e82f 2642
e51917ae
JM
2643#ifdef HAVE_AS_GNU_ATTRIBUTE
2644 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2645 {
2646 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
2647 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2648 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2649 : 2));
aaa42494
DJ
2650 fprintf (file, "\t.gnu_attribute 8, %d\n",
2651 (TARGET_ALTIVEC_ABI ? 2
2652 : TARGET_SPE_ABI ? 3
2653 : 1));
f9fd1e77
NF
2654 fprintf (file, "\t.gnu_attribute 12, %d\n",
2655 aix_struct_return ? 2 : 1);
2656
aaa42494 2657 }
e51917ae
JM
2658#endif
2659
b723e82f
JJ
2660 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2661 {
d6b5193b
RS
2662 switch_to_section (toc_section);
2663 switch_to_section (text_section);
b723e82f 2664 }
3cfa4909 2665}
c4e18b1c 2666
5248c961 2667\f
a0ab749a 2668/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2669
2670int
863d938c 2671direct_return (void)
9878760c 2672{
4697a36c
MM
2673 if (reload_completed)
2674 {
2675 rs6000_stack_t *info = rs6000_stack_info ();
2676
2677 if (info->first_gp_reg_save == 32
2678 && info->first_fp_reg_save == 64
00b960c7 2679 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2680 && ! info->lr_save_p
2681 && ! info->cr_save_p
00b960c7 2682 && info->vrsave_mask == 0
c81fc13e 2683 && ! info->push_p)
4697a36c
MM
2684 return 1;
2685 }
2686
2687 return 0;
9878760c
RK
2688}
2689
4e74d8ec
MM
2690/* Return the number of instructions it takes to form a constant in an
2691 integer register. */
2692
48d72335 2693int
a2369ed3 2694num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2695{
2696 /* signed constant loadable with {cal|addi} */
547b216d 2697 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2698 return 1;
2699
4e74d8ec 2700 /* constant loadable with {cau|addis} */
547b216d
DE
2701 else if ((value & 0xffff) == 0
2702 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2703 return 1;
2704
5f59ecb7 2705#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2706 else if (TARGET_POWERPC64)
4e74d8ec 2707 {
a65c591c
DE
2708 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2709 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2710
a65c591c 2711 if (high == 0 || high == -1)
4e74d8ec
MM
2712 return 2;
2713
a65c591c 2714 high >>= 1;
4e74d8ec 2715
a65c591c 2716 if (low == 0)
4e74d8ec 2717 return num_insns_constant_wide (high) + 1;
2718 else
2719 return (num_insns_constant_wide (high)
e396202a 2720 + num_insns_constant_wide (low) + 1);
2721 }
2722#endif
2723
2724 else
2725 return 2;
2726}
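/* Illustrative counts from the routine above (example values only):
     0x00007fff -> 1 (single li/addi)
     0x12340000 -> 1 (single lis/addis)
     0x12345678 -> 2 (lis + ori)
     0x1234567800000000 -> 3 on a 64-bit target
			    (build the high word, then shift it up) */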
2727
2728int
a2369ed3 2729num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2730{
37409796 2731 HOST_WIDE_INT low, high;
bb8df8a6 2732
37409796 2733 switch (GET_CODE (op))
0d30d435 2734 {
37409796 2735 case CONST_INT:
0d30d435 2736#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2737 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2738 && mask64_operand (op, mode))
c4ad648e 2739 return 2;
0d30d435
DE
2740 else
2741#endif
2742 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2743
37409796 2744 case CONST_DOUBLE:
e41b2a33 2745 if (mode == SFmode || mode == SDmode)
37409796
NS
2746 {
2747 long l;
2748 REAL_VALUE_TYPE rv;
bb8df8a6 2749
37409796 2750 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2751 if (DECIMAL_FLOAT_MODE_P (mode))
2752 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2753 else
2754 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2755 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2756 }
a260abc9 2757
37409796
NS
2758 if (mode == VOIDmode || mode == DImode)
2759 {
2760 high = CONST_DOUBLE_HIGH (op);
2761 low = CONST_DOUBLE_LOW (op);
2762 }
2763 else
2764 {
2765 long l[2];
2766 REAL_VALUE_TYPE rv;
bb8df8a6 2767
37409796 2768 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2769 if (DECIMAL_FLOAT_MODE_P (mode))
2770 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2771 else
2772 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2773 high = l[WORDS_BIG_ENDIAN == 0];
2774 low = l[WORDS_BIG_ENDIAN != 0];
2775 }
47ad8c61 2776
37409796
NS
2777 if (TARGET_32BIT)
2778 return (num_insns_constant_wide (low)
2779 + num_insns_constant_wide (high));
2780 else
2781 {
2782 if ((high == 0 && low >= 0)
2783 || (high == -1 && low < 0))
2784 return num_insns_constant_wide (low);
bb8df8a6 2785
1990cd79 2786 else if (mask64_operand (op, mode))
37409796 2787 return 2;
bb8df8a6 2788
37409796
NS
2789 else if (low == 0)
2790 return num_insns_constant_wide (high) + 1;
bb8df8a6 2791
37409796
NS
2792 else
2793 return (num_insns_constant_wide (high)
2794 + num_insns_constant_wide (low) + 1);
2795 }
bb8df8a6 2796
37409796
NS
2797 default:
2798 gcc_unreachable ();
4e74d8ec 2799 }
4e74d8ec
MM
2800}
2801
0972012c
RS
2802/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2803 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2804 corresponding element of the vector, but for V4SFmode and V2SFmode,
2805 the corresponding "float" is interpreted as an SImode integer. */
2806
847535b6 2807HOST_WIDE_INT
0972012c
RS
2808const_vector_elt_as_int (rtx op, unsigned int elt)
2809{
2810 rtx tmp = CONST_VECTOR_ELT (op, elt);
2811 if (GET_MODE (op) == V4SFmode
2812 || GET_MODE (op) == V2SFmode)
2813 tmp = gen_lowpart (SImode, tmp);
2814 return INTVAL (tmp);
2815}
452a7d36 2816
77ccdfed 2817/* Return true if OP can be synthesized with a particular vspltisb, vspltish
2818 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2819 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2820 all items are set to the same value and contain COPIES replicas of the
2821 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2822 operand and the others are set to the value of the operand's msb. */
2823
2824static bool
2825vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2826{
66180ff3
PB
2827 enum machine_mode mode = GET_MODE (op);
2828 enum machine_mode inner = GET_MODE_INNER (mode);
2829
2830 unsigned i;
2831 unsigned nunits = GET_MODE_NUNITS (mode);
2832 unsigned bitsize = GET_MODE_BITSIZE (inner);
2833 unsigned mask = GET_MODE_MASK (inner);
2834
0972012c 2835 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2836 HOST_WIDE_INT splat_val = val;
2837 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2838
2839 /* Construct the value to be splatted, if possible. If not, return 0. */
2840 for (i = 2; i <= copies; i *= 2)
452a7d36 2841 {
66180ff3
PB
2842 HOST_WIDE_INT small_val;
2843 bitsize /= 2;
2844 small_val = splat_val >> bitsize;
2845 mask >>= bitsize;
2846 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2847 return false;
2848 splat_val = small_val;
2849 }
c4ad648e 2850
66180ff3
PB
2851 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2852 if (EASY_VECTOR_15 (splat_val))
2853 ;
2854
2855 /* Also check if we can splat, and then add the result to itself. Do so if
2857 the value is positive, or if the splat instruction is using OP's mode;
2857 for splat_val < 0, the splat and the add should use the same mode. */
2858 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2859 && (splat_val >= 0 || (step == 1 && copies == 1)))
2860 ;
2861
2862 else
2863 return false;
2864
2865 /* Check if VAL is present in every STEP-th element, and the
2866 other elements are filled with its most significant bit. */
2867 for (i = 0; i < nunits - 1; ++i)
2868 {
2869 HOST_WIDE_INT desired_val;
2870 if (((i + 1) & (step - 1)) == 0)
2871 desired_val = val;
2872 else
2873 desired_val = msb_val;
2874
0972012c 2875 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2876 return false;
452a7d36 2877 }
2878
2879 return true;
2880}
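/* Worked examples for the test above (illustrative only):

   { 1,1,...,1 } as V16QImode: the vspltisw (STEP 4) and vspltish
   (STEP 2) attempts fail because the in-between elements would have to
   equal the msb value 0, but vspltisb 1 (STEP 1, COPIES 1) succeeds.

   { 0x00010001, 0x00010001, 0x00010001, 0x00010001 } as V4SImode: with
   STEP 1, COPIES 2 the 32-bit element folds down to the 16-bit splat
   value 1, so the constant can be generated as vspltish 1 and then
   reinterpreted as V4SImode. */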
2881
69ef87e2 2882
77ccdfed 2883/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2884 with a vspltisb, vspltish or vspltisw. */
2885
2886bool
2887easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2888{
66180ff3 2889 unsigned step, copies;
d744e06e 2890
66180ff3
PB
2891 if (mode == VOIDmode)
2892 mode = GET_MODE (op);
2893 else if (mode != GET_MODE (op))
2894 return false;
d744e06e 2895
66180ff3
PB
2896 /* Start with a vspltisw. */
2897 step = GET_MODE_NUNITS (mode) / 4;
2898 copies = 1;
2899
2900 if (vspltis_constant (op, step, copies))
2901 return true;
2902
2903 /* Then try with a vspltish. */
2904 if (step == 1)
2905 copies <<= 1;
2906 else
2907 step >>= 1;
2908
2909 if (vspltis_constant (op, step, copies))
2910 return true;
2911
2912 /* And finally a vspltisb. */
2913 if (step == 1)
2914 copies <<= 1;
2915 else
2916 step >>= 1;
2917
2918 if (vspltis_constant (op, step, copies))
2919 return true;
2920
2921 return false;
d744e06e
AH
2922}
2923
66180ff3
PB
2924/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2925 result is OP. Abort if it is not possible. */
d744e06e 2926
f676971a 2927rtx
66180ff3 2928gen_easy_altivec_constant (rtx op)
452a7d36 2929{
66180ff3
PB
2930 enum machine_mode mode = GET_MODE (op);
2931 int nunits = GET_MODE_NUNITS (mode);
2932 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2933 unsigned step = nunits / 4;
2934 unsigned copies = 1;
2935
2936 /* Start with a vspltisw. */
2937 if (vspltis_constant (op, step, copies))
2938 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2939
2940 /* Then try with a vspltish. */
2941 if (step == 1)
2942 copies <<= 1;
2943 else
2944 step >>= 1;
2945
2946 if (vspltis_constant (op, step, copies))
2947 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2948
2949 /* And finally a vspltisb. */
2950 if (step == 1)
2951 copies <<= 1;
2952 else
2953 step >>= 1;
2954
2955 if (vspltis_constant (op, step, copies))
2956 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2957
2958 gcc_unreachable ();
d744e06e
AH
2959}
2960
2961const char *
a2369ed3 2962output_vec_const_move (rtx *operands)
d744e06e
AH
2963{
2964 int cst, cst2;
2965 enum machine_mode mode;
2966 rtx dest, vec;
2967
2968 dest = operands[0];
2969 vec = operands[1];
d744e06e 2970 mode = GET_MODE (dest);
69ef87e2 2971
d744e06e
AH
2972 if (TARGET_ALTIVEC)
2973 {
66180ff3 2974 rtx splat_vec;
d744e06e
AH
2975 if (zero_constant (vec, mode))
2976 return "vxor %0,%0,%0";
37409796 2977
66180ff3
PB
2978 splat_vec = gen_easy_altivec_constant (vec);
2979 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2980 operands[1] = XEXP (splat_vec, 0);
2981 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2982 return "#";
bb8df8a6 2983
66180ff3 2984 switch (GET_MODE (splat_vec))
98ef3137 2985 {
37409796 2986 case V4SImode:
66180ff3 2987 return "vspltisw %0,%1";
c4ad648e 2988
37409796 2989 case V8HImode:
66180ff3 2990 return "vspltish %0,%1";
c4ad648e 2991
37409796 2992 case V16QImode:
66180ff3 2993 return "vspltisb %0,%1";
bb8df8a6 2994
37409796
NS
2995 default:
2996 gcc_unreachable ();
98ef3137 2997 }
69ef87e2
AH
2998 }
2999
37409796 3000 gcc_assert (TARGET_SPE);
bb8df8a6 3001
37409796
NS
3002 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3003 pattern of V1DI, V4HI, and V2SF.
3004
3005 FIXME: We should probably return # and add post reload
3006 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
3007 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3008 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3009 operands[1] = CONST_VECTOR_ELT (vec, 0);
3010 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
3011 if (cst == cst2)
3012 return "li %0,%1\n\tevmergelo %0,%0,%0";
3013 else
3014 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
3015}
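/* For example (SPE path above, example values): the V2SImode constant
   { 5, 5 } is emitted as

	li %0,5
	evmergelo %0,%0,%0

   while { 5, 7 } gets a third instruction, li %0,7, which rewrites only
   the low word after the merge. */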
3016
3017/* Initialize the PAIRED SIMD vector TARGET to VALS. */
3018
3019void
3020paired_expand_vector_init (rtx target, rtx vals)
3021{
3022 enum machine_mode mode = GET_MODE (target);
3023 int n_elts = GET_MODE_NUNITS (mode);
3024 int n_var = 0;
0a2aaacc 3025 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3026 int i;
3027
3028 for (i = 0; i < n_elts; ++i)
3029 {
3030 x = XVECEXP (vals, 0, i);
3031 if (!CONSTANT_P (x))
3032 ++n_var;
3033 }
3034 if (n_var == 0)
3035 {
3036 /* Load from constant pool. */
3037 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3038 return;
3039 }
3040
3041 if (n_var == 2)
3042 {
3043 /* The vector is initialized only with non-constants. */
0a2aaacc 3044 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3045 XVECEXP (vals, 0, 1));
3046
0a2aaacc 3047 emit_move_insn (target, new_rtx);
f5027409
RE
3048 return;
3049 }
3050
3051 /* One field is non-constant and the other one is a constant. Load the
3052 constant from the constant pool and use ps_merge instruction to
3053 construct the whole vector. */
3054 op1 = XVECEXP (vals, 0, 0);
3055 op2 = XVECEXP (vals, 0, 1);
3056
3057 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3058
3059 tmp = gen_reg_rtx (GET_MODE (constant_op));
3060 emit_move_insn (tmp, constant_op);
3061
3062 if (CONSTANT_P (op1))
0a2aaacc 3063 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3064 else
0a2aaacc 3065 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3066
0a2aaacc 3067 emit_move_insn (target, new_rtx);
f5027409
RE
3068}
3069
e2e95f45
RE
3070void
3071paired_expand_vector_move (rtx operands[])
3072{
3073 rtx op0 = operands[0], op1 = operands[1];
3074
3075 emit_move_insn (op0, op1);
3076}
3077
3078/* Emit vector compare for code RCODE. DEST is destination, OP1 and
3079 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3080 operands for the relational operation RCODE. This is a recursive
3081 function. */
3082
3083static void
3084paired_emit_vector_compare (enum rtx_code rcode,
3085 rtx dest, rtx op0, rtx op1,
3086 rtx cc_op0, rtx cc_op1)
3087{
3088 rtx tmp = gen_reg_rtx (V2SFmode);
3089 rtx tmp1, max, min, equal_zero;
3090
3091 gcc_assert (TARGET_PAIRED_FLOAT);
3092 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3093
3094 switch (rcode)
3095 {
3096 case LT:
3097 case LTU:
3098 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3099 return;
3100 case GE:
3101 case GEU:
3102 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3103 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3104 return;
3105 case LE:
3106 case LEU:
3107 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3108 return;
3109 case GT:
3110 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3111 return;
3112 case EQ:
3113 tmp1 = gen_reg_rtx (V2SFmode);
3114 max = gen_reg_rtx (V2SFmode);
3115 min = gen_reg_rtx (V2SFmode);
3116 equal_zero = gen_reg_rtx (V2SFmode);
3117
3118 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3119 emit_insn (gen_selv2sf4
3120 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3121 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3122 emit_insn (gen_selv2sf4
3123 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3124 emit_insn (gen_subv2sf3 (tmp1, min, max));
3125 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3126 return;
3127 case NE:
3128 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3129 return;
3130 case UNLE:
3131 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3132 return;
3133 case UNLT:
3134 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3135 return;
3136 case UNGE:
3137 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3138 return;
3139 case UNGT:
3140 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3141 return;
3142 default:
3143 gcc_unreachable ();
3144 }
3145
3146 return;
3147}
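/* Illustrative sketch only (not compiled): a scalar model of the
   ps_sel-based EQ sequence above.  Here sel (x, p, q) stands for the
   per-lane "x >= 0 ? p : q" selection performed by gen_selv2sf4;
   NaN handling is ignored.  */
#if 0
static float
sel (float x, float p, float q)
{
  return x >= 0.0f ? p : q;
}

static float
paired_eq_model (float a, float b, float op0, float op1)
{
  float max = sel (a - b, a, b);   /* max (a, b) */
  float min = sel (b - a, a, b);   /* min (a, b) */
  /* min - max is zero iff a == b and negative otherwise,
     so the final select picks op0 exactly when a == b.  */
  return sel (min - max, op0, op1);
}
#endif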
3148
3149/* Emit vector conditional expression.
3150 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3151 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3152
3153int
3154paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3155 rtx cond, rtx cc_op0, rtx cc_op1)
3156{
3157 enum rtx_code rcode = GET_CODE (cond);
3158
3159 if (!TARGET_PAIRED_FLOAT)
3160 return 0;
3161
3162 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3163
3164 return 1;
3165}
3166
7a4eca66
DE
3167/* Initialize vector TARGET to VALS. */
3168
3169void
3170rs6000_expand_vector_init (rtx target, rtx vals)
3171{
3172 enum machine_mode mode = GET_MODE (target);
3173 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3174 int n_elts = GET_MODE_NUNITS (mode);
3175 int n_var = 0, one_var = -1;
3176 bool all_same = true, all_const_zero = true;
3177 rtx x, mem;
3178 int i;
3179
3180 for (i = 0; i < n_elts; ++i)
3181 {
3182 x = XVECEXP (vals, 0, i);
3183 if (!CONSTANT_P (x))
3184 ++n_var, one_var = i;
3185 else if (x != CONST0_RTX (inner_mode))
3186 all_const_zero = false;
3187
3188 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3189 all_same = false;
3190 }
3191
3192 if (n_var == 0)
3193 {
501fb355 3194 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3195 if (mode != V4SFmode && all_const_zero)
3196 {
3197 /* Zero register. */
3198 emit_insn (gen_rtx_SET (VOIDmode, target,
3199 gen_rtx_XOR (mode, target, target)));
3200 return;
3201 }
501fb355 3202 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3203 {
3204 /* Splat immediate. */
501fb355 3205 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3206 return;
3207 }
3208 else if (all_same)
3209 ; /* Splat vector element. */
3210 else
3211 {
3212 /* Load from constant pool. */
501fb355 3213 emit_move_insn (target, const_vec);
7a4eca66
DE
3214 return;
3215 }
3216 }
3217
3218 /* Store value to stack temp. Load vector element. Splat. */
3219 if (all_same)
3220 {
3221 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3222 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3223 XVECEXP (vals, 0, 0));
3224 x = gen_rtx_UNSPEC (VOIDmode,
3225 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3226 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3227 gen_rtvec (2,
3228 gen_rtx_SET (VOIDmode,
3229 target, mem),
3230 x)));
3231 x = gen_rtx_VEC_SELECT (inner_mode, target,
3232 gen_rtx_PARALLEL (VOIDmode,
3233 gen_rtvec (1, const0_rtx)));
3234 emit_insn (gen_rtx_SET (VOIDmode, target,
3235 gen_rtx_VEC_DUPLICATE (mode, x)));
3236 return;
3237 }
3238
3239 /* One field is non-constant. Load constant then overwrite
3240 varying field. */
3241 if (n_var == 1)
3242 {
3243 rtx copy = copy_rtx (vals);
3244
57b51d4d 3245 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3246 varying element. */
3247 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3248 rs6000_expand_vector_init (target, copy);
3249
3250 /* Insert variable. */
3251 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3252 return;
3253 }
3254
3255 /* Construct the vector in memory one field at a time
3256 and load the whole vector. */
3257 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3258 for (i = 0; i < n_elts; i++)
3259 emit_move_insn (adjust_address_nv (mem, inner_mode,
3260 i * GET_MODE_SIZE (inner_mode)),
3261 XVECEXP (vals, 0, i));
3262 emit_move_insn (target, mem);
3263}
3264
3265/* Set field ELT of TARGET to VAL. */
3266
3267void
3268rs6000_expand_vector_set (rtx target, rtx val, int elt)
3269{
3270 enum machine_mode mode = GET_MODE (target);
3271 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3272 rtx reg = gen_reg_rtx (mode);
3273 rtx mask, mem, x;
3274 int width = GET_MODE_SIZE (inner_mode);
3275 int i;
3276
3277 /* Load single variable value. */
3278 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3279 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3280 x = gen_rtx_UNSPEC (VOIDmode,
3281 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3282 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3283 gen_rtvec (2,
3284 gen_rtx_SET (VOIDmode,
3285 reg, mem),
3286 x)));
3287
3288 /* Linear sequence. */
3289 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3290 for (i = 0; i < 16; ++i)
3291 XVECEXP (mask, 0, i) = GEN_INT (i);
3292
3293 /* Set permute mask to insert element into target. */
3294 for (i = 0; i < width; ++i)
3295 XVECEXP (mask, 0, elt*width + i)
3296 = GEN_INT (i + 0x10);
3297 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3298 x = gen_rtx_UNSPEC (mode,
3299 gen_rtvec (3, target, reg,
3300 force_reg (V16QImode, x)),
3301 UNSPEC_VPERM);
3302 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3303}
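/* Illustrative sketch only (not compiled): the vperm mask built above is
   the identity byte permutation 0..15 with the bytes of element ELT
   redirected to the first bytes of the second source vector (indices
   0x10 and up).  For example, inserting element 2 of a V4SI
   (WIDTH == 4):  */
#if 0
static void
vperm_insert_mask_example (void)
{
  unsigned char mask[16];
  int elt = 2, width = 4, i;

  for (i = 0; i < 16; ++i)
    mask[i] = i;                      /* identity: keep target bytes */
  for (i = 0; i < width; ++i)
    mask[elt * width + i] = i + 0x10; /* take byte i of the new value */

  /* mask is now (hex) 00 01 02 03 04 05 06 07 10 11 12 13 0c 0d 0e 0f:
     bytes 8..11 of the result come from the inserted element.  */
}
#endif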
3304
3305/* Extract field ELT from VEC into TARGET. */
3306
3307void
3308rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3309{
3310 enum machine_mode mode = GET_MODE (vec);
3311 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3312 rtx mem, x;
3313
3314 /* Allocate mode-sized buffer. */
3315 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3316
3317 /* Add offset to field within buffer matching vector element. */
3318 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3319
3320 /* Store single field into mode-sized buffer. */
3321 x = gen_rtx_UNSPEC (VOIDmode,
3322 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3323 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3324 gen_rtvec (2,
3325 gen_rtx_SET (VOIDmode,
3326 mem, vec),
3327 x)));
3328 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3329}
3330
0ba1b2ff
AM
3331/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3332 implement ANDing by the mask IN. */
3333void
a2369ed3 3334build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3335{
3336#if HOST_BITS_PER_WIDE_INT >= 64
3337 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3338 int shift;
3339
37409796 3340 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3341
3342 c = INTVAL (in);
3343 if (c & 1)
3344 {
3345 /* Assume c initially something like 0x00fff000000fffff. The idea
3346 is to rotate the word so that the middle ^^^^^^ group of zeros
3347 is at the MS end and can be cleared with an rldicl mask. We then
3348 rotate back and clear off the MS ^^ group of zeros with a
3349 second rldicl. */
3350 c = ~c; /* c == 0xff000ffffff00000 */
3351 lsb = c & -c; /* lsb == 0x0000000000100000 */
3352 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3353 c = ~c; /* c == 0x00fff000000fffff */
3354 c &= -lsb; /* c == 0x00fff00000000000 */
3355 lsb = c & -c; /* lsb == 0x0000100000000000 */
3356 c = ~c; /* c == 0xff000fffffffffff */
3357 c &= -lsb; /* c == 0xff00000000000000 */
3358 shift = 0;
3359 while ((lsb >>= 1) != 0)
3360 shift++; /* shift == 44 on exit from loop */
3361 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3362 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3363 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3364 }
3365 else
0ba1b2ff
AM
3366 {
3367 /* Assume c initially something like 0xff000f0000000000. The idea
3368 is to rotate the word so that the ^^^ middle group of zeros
3369 is at the LS end and can be cleared with an rldicr mask. We then
3370 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3371 a second rldicr. */
3372 lsb = c & -c; /* lsb == 0x0000010000000000 */
3373 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3374 c = ~c; /* c == 0x00fff0ffffffffff */
3375 c &= -lsb; /* c == 0x00fff00000000000 */
3376 lsb = c & -c; /* lsb == 0x0000100000000000 */
3377 c = ~c; /* c == 0xff000fffffffffff */
3378 c &= -lsb; /* c == 0xff00000000000000 */
3379 shift = 0;
3380 while ((lsb >>= 1) != 0)
3381 shift++; /* shift == 44 on exit from loop */
3382 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3383 m1 >>= shift; /* m1 == 0x0000000000000fff */
3384 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3385 }
3386
3387 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3388 masks will be all 1's. We are guaranteed more than one transition. */
3389 out[0] = GEN_INT (64 - shift);
3390 out[1] = GEN_INT (m1);
3391 out[2] = GEN_INT (shift);
3392 out[3] = GEN_INT (m2);
3393#else
045572c7
GK
3394 (void)in;
3395 (void)out;
37409796 3396 gcc_unreachable ();
0ba1b2ff 3397#endif
a260abc9
DE
3398}
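/* Illustrative sketch only (not compiled): replays the odd-case
   walk-through from the comments above on the example constant, in
   plain C with unsigned long long standing in for HOST_WIDE_INT, and
   checks the documented intermediate values.  */
#if 0
#include <assert.h>

static void
mask64_2_example (void)
{
  unsigned long long c = 0x00fff000000fffffULL, lsb, m1, m2;
  int shift = 0;

  c = ~c;                       /* 0xff000ffffff00000 */
  lsb = c & -c;                 /* 0x0000000000100000 */
  m1 = -lsb;                    /* 0xfffffffffff00000 */
  c = ~c;                       /* 0x00fff000000fffff */
  c &= -lsb;                    /* 0x00fff00000000000 */
  lsb = c & -c;                 /* 0x0000100000000000 */
  c = ~c;                       /* 0xff000fffffffffff */
  c &= -lsb;                    /* 0xff00000000000000 */
  while ((lsb >>= 1) != 0)
    shift++;                    /* 44 */
  m1 <<= 64 - shift;
  m1 = ~m1;                     /* 0x000000ffffffffff */
  m2 = ~c;                      /* 0x00ffffffffffffff */

  assert (shift == 44);
  assert (m1 == 0x000000ffffffffffULL);
  assert (m2 == 0x00ffffffffffffffULL);
}
#endif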
3399
54b695e7 3400/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3401
3402bool
54b695e7
AH
3403invalid_e500_subreg (rtx op, enum machine_mode mode)
3404{
61c76239
JM
3405 if (TARGET_E500_DOUBLE)
3406 {
17caeff2 3407 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3408 subreg:TI and reg:TF. Decimal float modes are like integer
3409 modes (only low part of each register used) for this
3410 purpose. */
61c76239 3411 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3412 && (mode == SImode || mode == DImode || mode == TImode
3413 || mode == DDmode || mode == TDmode)
61c76239 3414 && REG_P (SUBREG_REG (op))
17caeff2 3415 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3416 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3417 return true;
3418
17caeff2
JM
3419 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3420 reg:TI. */
61c76239 3421 if (GET_CODE (op) == SUBREG
4f011e1e 3422 && (mode == DFmode || mode == TFmode)
61c76239 3423 && REG_P (SUBREG_REG (op))
17caeff2 3424 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3425 || GET_MODE (SUBREG_REG (op)) == TImode
3426 || GET_MODE (SUBREG_REG (op)) == DDmode
3427 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3428 return true;
3429 }
54b695e7 3430
61c76239
JM
3431 if (TARGET_SPE
3432 && GET_CODE (op) == SUBREG
3433 && mode == SImode
54b695e7 3434 && REG_P (SUBREG_REG (op))
14502dad 3435 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3436 return true;
3437
3438 return false;
3439}
3440
58182de3 3441/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3442 field is an FP double while the FP fields remain word aligned. */
3443
19d66194 3444unsigned int
fa5b0972
AM
3445rs6000_special_round_type_align (tree type, unsigned int computed,
3446 unsigned int specified)
95727fb8 3447{
fa5b0972 3448 unsigned int align = MAX (computed, specified);
95727fb8 3449 tree field = TYPE_FIELDS (type);
95727fb8 3450
bb8df8a6 3451 /* Skip all non-field decls. */
85962ac8 3452 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3453 field = TREE_CHAIN (field);
3454
fa5b0972
AM
3455 if (field != NULL && field != type)
3456 {
3457 type = TREE_TYPE (field);
3458 while (TREE_CODE (type) == ARRAY_TYPE)
3459 type = TREE_TYPE (type);
3460
3461 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3462 align = MAX (align, 64);
3463 }
95727fb8 3464
fa5b0972 3465 return align;
95727fb8
AP
3466}
3467
58182de3
GK
3468/* Darwin increases record alignment to the natural alignment of
3469 the first field. */
3470
3471unsigned int
3472darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3473 unsigned int specified)
3474{
3475 unsigned int align = MAX (computed, specified);
3476
3477 if (TYPE_PACKED (type))
3478 return align;
3479
3480 /* Find the first field, looking down into aggregates. */
3481 do {
3482 tree field = TYPE_FIELDS (type);
3483 /* Skip all non-field decls. */
3484 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3485 field = TREE_CHAIN (field);
3486 if (! field)
3487 break;
3488 type = TREE_TYPE (field);
3489 while (TREE_CODE (type) == ARRAY_TYPE)
3490 type = TREE_TYPE (type);
3491 } while (AGGREGATE_TYPE_P (type));
3492
3493 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3494 align = MAX (align, TYPE_ALIGN (type));
3495
3496 return align;
3497}
3498
a4f6c312 3499/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3500
3501int
f676971a 3502small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3503 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3504{
38c1f2d7 3505#if TARGET_ELF
5f59ecb7 3506 rtx sym_ref;
7509c759 3507
d9407988 3508 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3509 return 0;
a54d04b7 3510
f607bc57 3511 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3512 return 0;
3513
2aa42e6e
NF
3514 /* Vector and float memory instructions have a limited offset on the
3515 SPE, so using a vector or float variable directly as an operand is
3516 not useful. */
3517 if (TARGET_SPE
3518 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3519 return 0;
3520
88228c4b
MM
3521 if (GET_CODE (op) == SYMBOL_REF)
3522 sym_ref = op;
3523
3524 else if (GET_CODE (op) != CONST
3525 || GET_CODE (XEXP (op, 0)) != PLUS
3526 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3527 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3528 return 0;
3529
88228c4b 3530 else
dbf55e53
MM
3531 {
3532 rtx sum = XEXP (op, 0);
3533 HOST_WIDE_INT summand;
3534
3535 /* We have to be careful here, because it is the referenced address
c4ad648e 3536 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3537 summand = INTVAL (XEXP (sum, 1));
307b599c 3538 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3539 return 0;
dbf55e53
MM
3540
3541 sym_ref = XEXP (sum, 0);
3542 }
88228c4b 3543
20bfcd69 3544 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3545#else
3546 return 0;
3547#endif
7509c759 3548}
46c07df8 3549
3a1f863f 3550/* Return true if either operand is a general purpose register. */
46c07df8 3551
3a1f863f
DE
3552bool
3553gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3554{
3a1f863f
DE
3555 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3556 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3557}
3558
9ebbca7d 3559\f
4d588c14
RH
3560/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3561
4d588c14 3562static bool
a2369ed3 3563constant_pool_expr_p (rtx op)
9ebbca7d 3564{
2e4316da
RS
3565 rtx base, offset;
3566
3567 split_const (op, &base, &offset);
3568 return (GET_CODE (base) == SYMBOL_REF
3569 && CONSTANT_POOL_ADDRESS_P (base)
3570 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
9ebbca7d
GK
3571}
3572
48d72335 3573bool
a2369ed3 3574toc_relative_expr_p (rtx op)
9ebbca7d 3575{
2e4316da
RS
3576 rtx base, offset;
3577
3578 if (GET_CODE (op) != CONST)
3579 return false;
3580
3581 split_const (op, &base, &offset);
3582 return (GET_CODE (base) == UNSPEC
3583 && XINT (base, 1) == UNSPEC_TOCREL);
4d588c14
RH
3584}
3585
4d588c14 3586bool
a2369ed3 3587legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3588{
3589 return (TARGET_TOC
3590 && GET_CODE (x) == PLUS
3591 && GET_CODE (XEXP (x, 0)) == REG
3592 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 3593 && toc_relative_expr_p (XEXP (x, 1)));
4d588c14
RH
3594}
3595
d04b6e6e
EB
3596static bool
3597legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3598{
3599 return (DEFAULT_ABI == ABI_V4
3600 && !flag_pic && !TARGET_TOC
3601 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3602 && small_data_operand (x, mode));
3603}
3604
60cdabab
DE
3605/* SPE offset addressing is limited to 5 bits' worth of doublewords. */
3606#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
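/* Illustrative note (not compiled): the macro accepts exactly the byte
   offsets that are multiples of 8 in the range 0..248, i.e. a 5-bit
   count of doublewords.  */
#if 0
#include <assert.h>

static void
spe_offset_examples (void)
{
  assert (SPE_CONST_OFFSET_OK (0));
  assert (SPE_CONST_OFFSET_OK (8));
  assert (SPE_CONST_OFFSET_OK (248));
  assert (!SPE_CONST_OFFSET_OK (4));    /* not doubleword aligned */
  assert (!SPE_CONST_OFFSET_OK (256));  /* out of range */
  assert (!SPE_CONST_OFFSET_OK (-8));   /* negative offsets rejected */
}
#endif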
3607
76d2b81d
DJ
3608bool
3609rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3610{
3611 unsigned HOST_WIDE_INT offset, extra;
3612
3613 if (GET_CODE (x) != PLUS)
3614 return false;
3615 if (GET_CODE (XEXP (x, 0)) != REG)
3616 return false;
3617 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3618 return false;
60cdabab
DE
3619 if (legitimate_constant_pool_address_p (x))
3620 return true;
4d588c14
RH
3621 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3622 return false;
3623
3624 offset = INTVAL (XEXP (x, 1));
3625 extra = 0;
3626 switch (mode)
3627 {
3628 case V16QImode:
3629 case V8HImode:
3630 case V4SFmode:
3631 case V4SImode:
7a4eca66 3632 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3633 constant offset zero should not occur due to canonicalization. */
3634 return false;
4d588c14
RH
3635
3636 case V4HImode:
3637 case V2SImode:
3638 case V1DImode:
3639 case V2SFmode:
d42a3bae 3640 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3641 constant offset zero should not occur due to canonicalization. */
d42a3bae 3642 if (TARGET_PAIRED_FLOAT)
1a23970d 3643 return false;
4d588c14
RH
3644 /* SPE vector modes. */
3645 return SPE_CONST_OFFSET_OK (offset);
3646
3647 case DFmode:
4d4cbc0e
AH
3648 if (TARGET_E500_DOUBLE)
3649 return SPE_CONST_OFFSET_OK (offset);
3650
4f011e1e 3651 case DDmode:
4d588c14 3652 case DImode:
54b695e7
AH
3653 /* On e500v2, we may have:
3654
3655 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3656
3657 Which gets addressed with evldd instructions. */
3658 if (TARGET_E500_DOUBLE)
3659 return SPE_CONST_OFFSET_OK (offset);
3660
7393f7f8 3661 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3662 extra = 4;
3663 else if (offset & 3)
3664 return false;
3665 break;
3666
3667 case TFmode:
17caeff2
JM
3668 if (TARGET_E500_DOUBLE)
3669 return (SPE_CONST_OFFSET_OK (offset)
3670 && SPE_CONST_OFFSET_OK (offset + 8));
3671
4f011e1e 3672 case TDmode:
4d588c14 3673 case TImode:
7393f7f8 3674 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3675 extra = 12;
3676 else if (offset & 3)
3677 return false;
3678 else
3679 extra = 8;
3680 break;
3681
3682 default:
3683 break;
3684 }
3685
b1917422
AM
3686 offset += 0x8000;
3687 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3688}
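/* Illustrative sketch only (not compiled): the final range test above
   accepts a D-form displacement when both OFFSET and OFFSET + EXTRA
   fit the signed 16-bit field, i.e. OFFSET is in
   [-0x8000, 0x7fff - EXTRA].  A stand-alone model of that check:  */
#if 0
static int
d_form_offset_ok (long long off, unsigned long long extra)
{
  unsigned long long offset = (unsigned long long) off + 0x8000;
  return offset < 0x10000 && offset + extra < 0x10000;
}
/* d_form_offset_ok (-0x8000, 0) and d_form_offset_ok (0x7fff, 0) hold;
   d_form_offset_ok (0x8000, 0) and d_form_offset_ok (0x7ffc, 8) do not,
   the latter because the last word of the access would overflow the
   16-bit displacement.  */
#endif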
3689
6fb5fa3c 3690bool
a2369ed3 3691legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3692{
3693 rtx op0, op1;
3694
3695 if (GET_CODE (x) != PLUS)
3696 return false;
850e8d3d 3697
4d588c14
RH
3698 op0 = XEXP (x, 0);
3699 op1 = XEXP (x, 1);
3700
bf00cc0f 3701 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3702 replaced with proper base and index regs. */
3703 if (!strict
3704 && reload_in_progress
3705 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3706 && REG_P (op1))
3707 return true;
3708
3709 return (REG_P (op0) && REG_P (op1)
3710 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3711 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3712 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3713 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3714}
3715
001b9eb6
PH
3716bool
3717avoiding_indexed_address_p (enum machine_mode mode)
3718{
3719 /* Avoid indexed addressing for modes that have non-indexed
3720 load/store instruction forms. */
3721 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
3722}
3723
48d72335 3724inline bool
a2369ed3 3725legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3726{
3727 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3728}
3729
48d72335 3730bool
4c81e946
FJ
3731macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3732{
c4ad648e 3733 if (!TARGET_MACHO || !flag_pic
9390387d 3734 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3735 return false;
3736 x = XEXP (x, 0);
4c81e946
FJ
3737
3738 if (GET_CODE (x) != LO_SUM)
3739 return false;
3740 if (GET_CODE (XEXP (x, 0)) != REG)
3741 return false;
3742 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3743 return false;
3744 x = XEXP (x, 1);
3745
3746 return CONSTANT_P (x);
3747}
3748
4d588c14 3749static bool
a2369ed3 3750legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3751{
3752 if (GET_CODE (x) != LO_SUM)
3753 return false;
3754 if (GET_CODE (XEXP (x, 0)) != REG)
3755 return false;
3756 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3757 return false;
54b695e7 3758 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3759 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3760 || mode == DDmode || mode == TDmode
17caeff2 3761 || mode == DImode))
f82f556d 3762 return false;
4d588c14
RH
3763 x = XEXP (x, 1);
3764
8622e235 3765 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3766 {
a29077da 3767 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3768 return false;
3769 if (TARGET_TOC)
3770 return false;
3771 if (GET_MODE_NUNITS (mode) != 1)
3772 return false;
5e5f01b9 3773 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3774 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 3775 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 3776 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3777 return false;
3778
3779 return CONSTANT_P (x);
3780 }
3781
3782 return false;
3783}
3784
3785
9ebbca7d
GK
3786/* Try machine-dependent ways of modifying an illegitimate address
3787 to be legitimate. If we find one, return the new, valid address.
3788 This is used from only one place: `memory_address' in explow.c.
3789
a4f6c312
SS
3790 OLDX is the address as it was before break_out_memory_refs was
3791 called. In some cases it is useful to look at this to decide what
3792 needs to be done.
9ebbca7d 3793
a4f6c312 3794 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3795
a4f6c312
SS
3796 It is always safe for this function to do nothing. It exists to
3797 recognize opportunities to optimize the output.
9ebbca7d
GK
3798
3799 On RS/6000, first check for the sum of a register with a constant
3800 integer that is out of range. If so, generate code to add the
3801 constant with the low-order 16 bits masked to the register and force
3802 this result into another register (this can be done with `cau').
3803 Then generate an address of REG+(CONST&0xffff), allowing for the
3804 possibility of bit 16 being a one.
3805
3806 Then check for the sum of a register and something not constant, try to
3807 load the other things into a register and return the sum. */
4d588c14 3808
9ebbca7d 3809rtx
a2369ed3
DJ
3810rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3811 enum machine_mode mode)
0ac081f6 3812{
c4501e62
JJ
3813 if (GET_CODE (x) == SYMBOL_REF)
3814 {
3815 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3816 if (model != 0)
3817 return rs6000_legitimize_tls_address (x, model);
3818 }
3819
f676971a 3820 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3821 && GET_CODE (XEXP (x, 0)) == REG
3822 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb 3823 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
7da13f1d
NF
3824 && !((TARGET_POWERPC64
3825 && (mode == DImode || mode == TImode)
3826 && (INTVAL (XEXP (x, 1)) & 3) != 0)
3827 || SPE_VECTOR_MODE (mode)
efc05e3c 3828 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3829 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3830 || mode == DImode || mode == DDmode
3831 || mode == TDmode))))
f676971a 3832 {
9ebbca7d
GK
3833 HOST_WIDE_INT high_int, low_int;
3834 rtx sum;
a65c591c
DE
3835 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3836 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3837 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3838 GEN_INT (high_int)), 0);
3839 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3840 }
f676971a 3841 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3842 && GET_CODE (XEXP (x, 0)) == REG
3843 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3844 && GET_MODE_NUNITS (mode) == 1
696e45ba 3845 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 3846 || TARGET_POWERPC64
efc05e3c 3847 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3848 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3849 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 3850 && !avoiding_indexed_address_p (mode)
efc05e3c
PB
3851 && mode != TImode
3852 && mode != TFmode
3853 && mode != TDmode)
9ebbca7d
GK
3854 {
3855 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3856 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3857 }
0ac081f6
AH
3858 else if (ALTIVEC_VECTOR_MODE (mode))
3859 {
3860 rtx reg;
3861
3862 /* Make sure both operands are registers. */
3863 if (GET_CODE (x) == PLUS)
9f85ed45 3864 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3865 force_reg (Pmode, XEXP (x, 1)));
3866
3867 reg = force_reg (Pmode, x);
3868 return reg;
3869 }
4d4cbc0e 3870 else if (SPE_VECTOR_MODE (mode)
17caeff2 3871 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3872 || mode == DDmode || mode == TDmode
54b695e7 3873 || mode == DImode)))
a3170dc6 3874 {
54b695e7
AH
3875 if (mode == DImode)
3876 return NULL_RTX;
a3170dc6
AH
3877 /* We accept [reg + reg] and [reg + OFFSET]. */
3878
3879 if (GET_CODE (x) == PLUS)
61dd226f
NF
3880 {
3881 rtx op1 = XEXP (x, 0);
3882 rtx op2 = XEXP (x, 1);
3883 rtx y;
3884
3885 op1 = force_reg (Pmode, op1);
3886
3887 if (GET_CODE (op2) != REG
3888 && (GET_CODE (op2) != CONST_INT
3889 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3890 || (GET_MODE_SIZE (mode) > 8
3891 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3892 op2 = force_reg (Pmode, op2);
3893
3894 /* We can't always do [reg + reg] for these, because [reg +
3895 reg + offset] is not a legitimate addressing mode. */
3896 y = gen_rtx_PLUS (Pmode, op1, op2);
3897
4f011e1e 3898 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3899 return force_reg (Pmode, y);
3900 else
3901 return y;
3902 }
a3170dc6
AH
3903
3904 return force_reg (Pmode, x);
3905 }
f1384257
AM
3906 else if (TARGET_ELF
3907 && TARGET_32BIT
3908 && TARGET_NO_TOC
3909 && ! flag_pic
9ebbca7d 3910 && GET_CODE (x) != CONST_INT
f676971a 3911 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3912 && CONSTANT_P (x)
6ac7bf2c
GK
3913 && GET_MODE_NUNITS (mode) == 1
3914 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 3915 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3916 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3917 {
3918 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3919 emit_insn (gen_elf_high (reg, x));
3920 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3921 }
ee890fe2
SS
3922 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3923 && ! flag_pic
ab82a49f
AP
3924#if TARGET_MACHO
3925 && ! MACHO_DYNAMIC_NO_PIC_P
3926#endif
ee890fe2 3927 && GET_CODE (x) != CONST_INT
f676971a 3928 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3929 && CONSTANT_P (x)
506a7bc8 3930 && GET_MODE_NUNITS (mode) == 1
696e45ba 3931 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3932 || (mode != DFmode && mode != DDmode))
f676971a 3933 && mode != DImode
ee890fe2
SS
3934 && mode != TImode)
3935 {
3936 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3937 emit_insn (gen_macho_high (reg, x));
3938 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3939 }
f676971a 3940 else if (TARGET_TOC
0cdc04e8 3941 && GET_CODE (x) == SYMBOL_REF
4d588c14 3942 && constant_pool_expr_p (x)
a9098fd0 3943 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3944 {
3945 return create_TOC_reference (x);
3946 }
3947 else
3948 return NULL_RTX;
3949}
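/* Illustrative sketch only (not compiled): the PLUS + CONST_INT case
   above splits an out-of-range displacement into a sign-adjusted low
   16-bit part (kept in the address) and a high part that can be added
   separately.  A worked example of that arithmetic:  */
#if 0
#include <assert.h>

static void
split_offset_example (void)
{
  long long val = 0x1234abcdLL;
  long long low = ((val & 0xffff) ^ 0x8000) - 0x8000;  /* -0x5433 */
  long long high = val - low;                          /* 0x12350000 */

  assert (low >= -0x8000 && low < 0x8000);  /* fits the D field */
  assert ((high & 0xffff) == 0);            /* pure "high" constant */
  assert (high + low == val);               /* nothing lost */
}
#endif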
258bfae2 3950
fdbe66f2 3951/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3952 We need to emit DTP-relative relocations. */
3953
fdbe66f2 3954static void
c973d557
JJ
3955rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3956{
3957 switch (size)
3958 {
3959 case 4:
3960 fputs ("\t.long\t", file);
3961 break;
3962 case 8:
3963 fputs (DOUBLE_INT_ASM_OP, file);
3964 break;
3965 default:
37409796 3966 gcc_unreachable ();
c973d557
JJ
3967 }
3968 output_addr_const (file, x);
3969 fputs ("@dtprel+0x8000", file);
3970}
3971
c4501e62
JJ
3972/* Construct the SYMBOL_REF for the tls_get_addr function. */
3973
3974static GTY(()) rtx rs6000_tls_symbol;
3975static rtx
863d938c 3976rs6000_tls_get_addr (void)
c4501e62
JJ
3977{
3978 if (!rs6000_tls_symbol)
3979 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3980
3981 return rs6000_tls_symbol;
3982}
3983
3984/* Construct the SYMBOL_REF for TLS GOT references. */
3985
3986static GTY(()) rtx rs6000_got_symbol;
3987static rtx
863d938c 3988rs6000_got_sym (void)
c4501e62
JJ
3989{
3990 if (!rs6000_got_symbol)
3991 {
3992 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3993 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3994 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3995 }
c4501e62
JJ
3996
3997 return rs6000_got_symbol;
3998}
3999
4000/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
4001 this (thread-local) address. */
4002
4003static rtx
a2369ed3 4004rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
4005{
4006 rtx dest, insn;
4007
4008 dest = gen_reg_rtx (Pmode);
4009 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4010 {
4011 rtx tlsreg;
4012
4013 if (TARGET_64BIT)
4014 {
4015 tlsreg = gen_rtx_REG (Pmode, 13);
4016 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4017 }
4018 else
4019 {
4020 tlsreg = gen_rtx_REG (Pmode, 2);
4021 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4022 }
4023 emit_insn (insn);
4024 }
4025 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4026 {
4027 rtx tlsreg, tmp;
4028
4029 tmp = gen_reg_rtx (Pmode);
4030 if (TARGET_64BIT)
4031 {
4032 tlsreg = gen_rtx_REG (Pmode, 13);
4033 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4034 }
4035 else
4036 {
4037 tlsreg = gen_rtx_REG (Pmode, 2);
4038 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4039 }
4040 emit_insn (insn);
4041 if (TARGET_64BIT)
4042 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4043 else
4044 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4045 emit_insn (insn);
4046 }
4047 else
4048 {
4049 rtx r3, got, tga, tmp1, tmp2, eqv;
4050
4fed8f8f
AM
4051 /* We currently use relocations like @got@tlsgd for tls, which
4052 means the linker will handle allocation of tls entries, placing
4053 them in the .got section. So use a pointer to the .got section,
4054 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4055 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4056 if (TARGET_64BIT)
972f427b 4057 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
4058 else
4059 {
4060 if (flag_pic == 1)
4061 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4062 else
4063 {
4064 rtx gsym = rs6000_got_sym ();
4065 got = gen_reg_rtx (Pmode);
4066 if (flag_pic == 0)
4067 rs6000_emit_move (got, gsym, Pmode);
4068 else
4069 {
e65a3857 4070 rtx tmp3, mem;
c4501e62
JJ
4071 rtx first, last;
4072
c4501e62
JJ
4073 tmp1 = gen_reg_rtx (Pmode);
4074 tmp2 = gen_reg_rtx (Pmode);
4075 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4076 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4077
e65a3857
DE
4078 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4079 emit_move_insn (tmp1,
1de43f85 4080 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
4081 emit_move_insn (tmp2, mem);
4082 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4083 last = emit_move_insn (got, tmp3);
bd94cb6e 4084 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4085 }
4086 }
4087 }
4088
4089 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4090 {
4091 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4092 tga = rs6000_tls_get_addr ();
4093
4094 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4095 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4096 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4097 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4098 else if (DEFAULT_ABI == ABI_V4)
4099 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4100 else
02135bc1
SB
4101 gcc_unreachable ();
4102
c4501e62 4103 start_sequence ();
c4501e62 4104 insn = emit_call_insn (insn);
becfd6e5 4105 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4106 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4107 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4108 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4109 insn = get_insns ();
4110 end_sequence ();
4111 emit_libcall_block (insn, dest, r3, addr);
4112 }
4113 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4114 {
4115 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4116 tga = rs6000_tls_get_addr ();
4117
4118 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4119 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4120 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4121 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4122 else if (DEFAULT_ABI == ABI_V4)
4123 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4124 else
02135bc1
SB
4125 gcc_unreachable ();
4126
c4501e62 4127 start_sequence ();
c4501e62 4128 insn = emit_call_insn (insn);
becfd6e5 4129 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4130 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4131 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4132 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4133 insn = get_insns ();
4134 end_sequence ();
4135 tmp1 = gen_reg_rtx (Pmode);
4136 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4137 UNSPEC_TLSLD);
4138 emit_libcall_block (insn, tmp1, r3, eqv);
4139 if (rs6000_tls_size == 16)
4140 {
4141 if (TARGET_64BIT)
4142 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4143 else
4144 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4145 }
4146 else if (rs6000_tls_size == 32)
4147 {
4148 tmp2 = gen_reg_rtx (Pmode);
4149 if (TARGET_64BIT)
4150 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4151 else
4152 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4153 emit_insn (insn);
4154 if (TARGET_64BIT)
4155 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4156 else
4157 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4158 }
4159 else
4160 {
4161 tmp2 = gen_reg_rtx (Pmode);
4162 if (TARGET_64BIT)
4163 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4164 else
4165 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4166 emit_insn (insn);
4167 insn = gen_rtx_SET (Pmode, dest,
4168 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4169 }
4170 emit_insn (insn);
4171 }
4172 else
4173 {
a7b376ee 4174 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4175 tmp2 = gen_reg_rtx (Pmode);
4176 if (TARGET_64BIT)
4177 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4178 else
4179 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4180 emit_insn (insn);
4181 if (TARGET_64BIT)
4182 insn = gen_tls_tls_64 (dest, tmp2, addr);
4183 else
4184 insn = gen_tls_tls_32 (dest, tmp2, addr);
4185 emit_insn (insn);
4186 }
4187 }
4188
4189 return dest;
4190}
4191
c4501e62
JJ
4192/* Return 1 if X contains a thread-local symbol. */
4193
4194bool
a2369ed3 4195rs6000_tls_referenced_p (rtx x)
c4501e62 4196{
cd413cab
AP
4197 if (! TARGET_HAVE_TLS)
4198 return false;
4199
c4501e62
JJ
4200 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4201}
4202
4203/* Return 1 if *X is a thread-local symbol. This is the same as
4204 rs6000_tls_symbol_ref except for the type of the unused argument. */
4205
9390387d 4206static int
a2369ed3 4207rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4208{
4209 return RS6000_SYMBOL_REF_TLS_P (*x);
4210}
4211
24ea750e
DJ
4212/* The convention appears to be to define this wherever it is used.
4213 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4214 is now used here. */
4215#ifndef REG_MODE_OK_FOR_BASE_P
4216#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4217#endif
4218
4219/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4220 replace the input X, or the original X if no replacement is called for.
4221 The output parameter *WIN is 1 if the calling macro should goto WIN,
4222 0 if it should not.
4223
4224 For RS/6000, we wish to handle large displacements off a base
4225 register by splitting the addend across an addis and the mem insn.
4226 This cuts the number of extra insns needed from 3 to 1.
4227
4228 On Darwin, we use this to generate code for floating point constants.
4229 A movsf_low is generated so we wind up with 2 instructions rather than 3.
08a6a74b
RS
4230 The Darwin code is inside #if TARGET_MACHO because only then are the
4231 machopic_* functions defined. */
24ea750e 4232rtx
f676971a 4233rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4234 int opnum, int type,
4235 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4236{
f676971a 4237 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4238 if (GET_CODE (x) == PLUS
4239 && GET_CODE (XEXP (x, 0)) == PLUS
4240 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4241 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4242 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4243 {
4244 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4245 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4246 opnum, (enum reload_type)type);
24ea750e
DJ
4247 *win = 1;
4248 return x;
4249 }
3deb2758 4250
24ea750e
DJ
4251#if TARGET_MACHO
4252 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4253 && GET_CODE (x) == LO_SUM
4254 && GET_CODE (XEXP (x, 0)) == PLUS
4255 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4256 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4257 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4258 && machopic_operand_p (XEXP (x, 1)))
24ea750e
DJ
4259 {
4260 /* Result of previous invocation of this function on Darwin
6f317ef3 4261 floating point constant. */
24ea750e 4262 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4263 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4264 opnum, (enum reload_type)type);
24ea750e
DJ
4265 *win = 1;
4266 return x;
4267 }
4268#endif
4937d02d
DE
4269
4270 /* Force ld/std non-word aligned offset into base register by wrapping
4271 in offset 0. */
4272 if (GET_CODE (x) == PLUS
4273 && GET_CODE (XEXP (x, 0)) == REG
4274 && REGNO (XEXP (x, 0)) < 32
4275 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4276 && GET_CODE (XEXP (x, 1)) == CONST_INT
4277 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4278 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4279 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4280 && TARGET_POWERPC64)
4281 {
4282 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4283 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4284 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4285 opnum, (enum reload_type) type);
4286 *win = 1;
4287 return x;
4288 }
4289
24ea750e
DJ
4290 if (GET_CODE (x) == PLUS
4291 && GET_CODE (XEXP (x, 0)) == REG
4292 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4293 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4294 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4295 && !SPE_VECTOR_MODE (mode)
17caeff2 4296 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4297 || mode == DDmode || mode == TDmode
54b695e7 4298 || mode == DImode))
78c875e8 4299 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4300 {
4301 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4302 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4303 HOST_WIDE_INT high
c4ad648e 4304 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4305
4306 /* Check for 32-bit overflow. */
4307 if (high + low != val)
c4ad648e 4308 {
24ea750e
DJ
4309 *win = 0;
4310 return x;
4311 }
4312
4313 /* Reload the high part into a base reg; leave the low part
c4ad648e 4314 in the mem directly. */
24ea750e
DJ
4315
4316 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4317 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4318 GEN_INT (high)),
4319 GEN_INT (low));
24ea750e
DJ
4320
4321 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4322 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4323 opnum, (enum reload_type)type);
24ea750e
DJ
4324 *win = 1;
4325 return x;
4326 }
4937d02d 4327
24ea750e 4328 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4329 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4330 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4331#if TARGET_MACHO
4332 && DEFAULT_ABI == ABI_DARWIN
a29077da 4333 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4334#else
4335 && DEFAULT_ABI == ABI_V4
4336 && !flag_pic
4337#endif
7393f7f8 4338 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4339 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4340 without fprs. */
0d8c1c97 4341 && mode != TFmode
7393f7f8 4342 && mode != TDmode
7b5d92b2 4343 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4344 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4345 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4346 {
8308679f 4347#if TARGET_MACHO
a29077da
GK
4348 if (flag_pic)
4349 {
08a6a74b 4350 rtx offset = machopic_gen_offset (x);
a29077da
GK
4351 x = gen_rtx_LO_SUM (GET_MODE (x),
4352 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4353 gen_rtx_HIGH (Pmode, offset)), offset);
4354 }
4355 else
8308679f 4356#endif
a29077da 4357 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4358 gen_rtx_HIGH (Pmode, x), x);
a29077da 4359
24ea750e 4360 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4361 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4362 opnum, (enum reload_type)type);
24ea750e
DJ
4363 *win = 1;
4364 return x;
4365 }
4937d02d 4366
dec1f3aa
DE
4367 /* Reload an offset address wrapped by an AND that represents the
4368 masking of the lower bits. Strip the outer AND and let reload
4369 convert the offset address into an indirect address. */
4370 if (TARGET_ALTIVEC
4371 && ALTIVEC_VECTOR_MODE (mode)
4372 && GET_CODE (x) == AND
4373 && GET_CODE (XEXP (x, 0)) == PLUS
4374 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4375 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4376 && GET_CODE (XEXP (x, 1)) == CONST_INT
4377 && INTVAL (XEXP (x, 1)) == -16)
4378 {
4379 x = XEXP (x, 0);
4380 *win = 1;
4381 return x;
4382 }
4383
24ea750e 4384 if (TARGET_TOC
0cdc04e8 4385 && GET_CODE (x) == SYMBOL_REF
4d588c14 4386 && constant_pool_expr_p (x)
c1f11548 4387 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4388 {
194c524a 4389 x = create_TOC_reference (x);
24ea750e
DJ
4390 *win = 1;
4391 return x;
4392 }
4393 *win = 0;
4394 return x;
f676971a 4395}
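/* Illustrative sketch only (not compiled): the high/low reload split
   above, together with its "32-bit overflow" rejection.  A constant
   whose high part does not survive the 32-bit sign adjustment cannot
   be rebuilt from a single high/low pair, so *win stays 0:  */
#if 0
#include <assert.h>

static void
reload_split_example (void)
{
  long long val = 0x123456789LL;   /* needs more than 32 bits */
  long long low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  long long high = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

  assert (low == 0x6789 && high == 0x23450000);
  assert (high + low != val);      /* overflow detected, no reload split */
}
#endif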
24ea750e 4396
258bfae2
FS
4397/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4398 that is a valid memory address for an instruction.
4399 The MODE argument is the machine mode for the MEM expression
4400 that wants to use this address.
4401
4402 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4403 refers to a constant pool entry of an address (or the sum of it
4404 plus a constant), a short (16-bit signed) constant plus a register,
4405 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4406 auto-increment. For DFmode, DDmode and DImode with a constant plus
4407 register, we must ensure that both words are addressable or PowerPC64
4408 with offset word aligned.
258bfae2 4409
4d4447b5 4410 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4411 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4412 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4413 during assembly output. */
4414int
a2369ed3 4415rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4416{
850e8d3d
DN
4417 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4418 if (TARGET_ALTIVEC
4419 && ALTIVEC_VECTOR_MODE (mode)
4420 && GET_CODE (x) == AND
4421 && GET_CODE (XEXP (x, 1)) == CONST_INT
4422 && INTVAL (XEXP (x, 1)) == -16)
4423 x = XEXP (x, 0);
4424
c4501e62
JJ
4425 if (RS6000_SYMBOL_REF_TLS_P (x))
4426 return 0;
4d588c14 4427 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4428 return 1;
4429 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4430 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4431 && !SPE_VECTOR_MODE (mode)
429ec7dc 4432 && mode != TFmode
7393f7f8 4433 && mode != TDmode
54b695e7 4434 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4435 && !(TARGET_E500_DOUBLE
4436 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4437 && TARGET_UPDATE
4d588c14 4438 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4439 return 1;
d04b6e6e 4440 if (legitimate_small_data_p (mode, x))
258bfae2 4441 return 1;
4d588c14 4442 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4443 return 1;
4444 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4445 if (! reg_ok_strict
4446 && GET_CODE (x) == PLUS
4447 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4448 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4449 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4450 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4451 return 1;
76d2b81d 4452 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4453 return 1;
4454 if (mode != TImode
76d2b81d 4455 && mode != TFmode
7393f7f8 4456 && mode != TDmode
a3170dc6
AH
4457 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4458 || TARGET_POWERPC64
4f011e1e
JM
4459 || (mode != DFmode && mode != DDmode)
4460 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4461 && (TARGET_POWERPC64 || mode != DImode)
001b9eb6 4462 && !avoiding_indexed_address_p (mode)
4d588c14 4463 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4464 return 1;
6fb5fa3c
DB
4465 if (GET_CODE (x) == PRE_MODIFY
4466 && mode != TImode
4467 && mode != TFmode
4468 && mode != TDmode
696e45ba 4469 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4470 || TARGET_POWERPC64
4d4447b5 4471 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4472 && (TARGET_POWERPC64 || mode != DImode)
4473 && !ALTIVEC_VECTOR_MODE (mode)
4474 && !SPE_VECTOR_MODE (mode)
4475 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4476 && !(TARGET_E500_DOUBLE
4477 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4478 && TARGET_UPDATE
4479 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4480 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
001b9eb6
PH
4481 || (!avoiding_indexed_address_p (mode)
4482 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
6fb5fa3c
DB
4483 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4484 return 1;
4d588c14 4485 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4486 return 1;
4487 return 0;
4488}
4d588c14
RH
4489
4490/* Go to LABEL if ADDR (a legitimate address expression)
4491 has an effect that depends on the machine mode it is used for.
4492
4493 On the RS/6000 this is true of all integral offsets (since AltiVec
4494 modes don't allow them) or is a pre-increment or decrement.
4495
4496 ??? Except that due to conceptual problems in offsettable_address_p
4497 we can't really report the problems of integral offsets. So leave
f676971a 4498 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4499 sub-words of a TFmode operand, which is what we had before. */
4500
4501bool
a2369ed3 4502rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4503{
4504 switch (GET_CODE (addr))
4505 {
4506 case PLUS:
4507 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4508 {
4509 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4510 return val + 12 + 0x8000 >= 0x10000;
4511 }
4512 break;
4513
4514 case LO_SUM:
4515 return true;
4516
619fe064 4517 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4518 case PRE_MODIFY:
4519 return TARGET_UPDATE;
4d588c14
RH
4520
4521 default:
4522 break;
4523 }
4524
4525 return false;
4526}
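/* Illustrative sketch only (not compiled): the PLUS case above flags a
   constant offset as mode-dependent when OFFSET + 12 (the displacement
   of the last word of a 16-byte TFmode access, per the ??? note) no
   longer fits the signed 16-bit D field.  */
#if 0
static int
plus_offset_mode_dependent (long long val)
{
  return (unsigned long long) val + 12 + 0x8000 >= 0x10000;
}
/* plus_offset_mode_dependent (0x7ff0) is 0, while
   plus_offset_mode_dependent (0x7ff8) is 1, because 0x7ff8 + 12
   exceeds 0x7fff.  */
#endif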
d8ecbcdb 4527
944258eb
RS
4528/* Implement FIND_BASE_TERM. */
4529
4530rtx
4531rs6000_find_base_term (rtx op)
4532{
4533 rtx base, offset;
4534
4535 split_const (op, &base, &offset);
4536 if (GET_CODE (base) == UNSPEC)
4537 switch (XINT (base, 1))
4538 {
4539 case UNSPEC_TOCREL:
4540 case UNSPEC_MACHOPIC_OFFSET:
4541 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4542 for aliasing purposes. */
4543 return XVECEXP (base, 0, 0);
4544 }
4545
4546 return op;
4547}
4548
d04b6e6e
EB
4549/* More elaborate version of recog's offsettable_memref_p predicate
4550 that works around the ??? note of rs6000_mode_dependent_address.
4551 In particular it accepts
4552
4553 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4554
4555 in 32-bit mode, that the recog predicate rejects. */
4556
4557bool
4558rs6000_offsettable_memref_p (rtx op)
4559{
4560 if (!MEM_P (op))
4561 return false;
4562
4563 /* First mimic offsettable_memref_p. */
4564 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4565 return true;
4566
4567 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4568 the latter predicate knows nothing about the mode of the memory
4569 reference and, therefore, assumes that it is the largest supported
4570 mode (TFmode). As a consequence, legitimate offsettable memory
4571 references are rejected. rs6000_legitimate_offset_address_p contains
4572 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4573 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4574}
4575
d8ecbcdb
AH
4576/* Return number of consecutive hard regs needed starting at reg REGNO
4577 to hold something of mode MODE.
4578 This is ordinarily the length in words of a value of mode MODE
4579 but can be less for certain modes in special long registers.
4580
4581 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4582 scalar instructions. The upper 32 bits are only available to the
4583 SIMD instructions.
4584
4585 POWER and PowerPC GPRs hold 32 bits worth;
4586 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4587
4588int
4589rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4590{
4591 if (FP_REGNO_P (regno))
4592 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4593
4594 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4595 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4596
4597 if (ALTIVEC_REGNO_P (regno))
4598 return
4599 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4600
8521c414
JM
4601 /* The value returned for SCmode in the E500 double case is 2 for
4602 ABI compatibility; storing an SCmode value in a single register
4603 would require function_arg and rs6000_spe_function_arg to handle
4604 SCmode so as to pass the value correctly in a pair of
4605 registers. */
4f011e1e
JM
4606 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4607 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4608 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4609
d8ecbcdb
AH
4610 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4611}
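/* Illustrative note (not compiled): each case above is the usual
   round-up division, "how many UNIT-sized registers do MODE_SIZE
   bytes occupy".  */
#if 0
static int
regs_needed (int mode_size, int unit_size)
{
  return (mode_size + unit_size - 1) / unit_size;   /* ceiling division */
}
/* regs_needed (16, 4) == 4, regs_needed (10, 4) == 3,
   regs_needed (8, 8) == 1.  */
#endif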
2aa4498c
AH
4612
4613/* Change register usage conditional on target flags. */
4614void
4615rs6000_conditional_register_usage (void)
4616{
4617 int i;
4618
4619 /* Set MQ register fixed (already call_used) if not POWER
4620 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4621 be allocated. */
4622 if (! TARGET_POWER)
4623 fixed_regs[64] = 1;
4624
7c9ac5c0 4625 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4626 if (TARGET_64BIT)
4627 fixed_regs[13] = call_used_regs[13]
4628 = call_really_used_regs[13] = 1;
4629
4630 /* Conditionally disable FPRs. */
4631 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4632 for (i = 32; i < 64; i++)
4633 fixed_regs[i] = call_used_regs[i]
c4ad648e 4634 = call_really_used_regs[i] = 1;
2aa4498c 4635
7c9ac5c0
PH
4636 /* The TOC register is not killed across calls in a way that is
4637 visible to the compiler. */
4638 if (DEFAULT_ABI == ABI_AIX)
4639 call_really_used_regs[2] = 0;
4640
2aa4498c
AH
4641 if (DEFAULT_ABI == ABI_V4
4642 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4643 && flag_pic == 2)
4644 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4645
4646 if (DEFAULT_ABI == ABI_V4
4647 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4648 && flag_pic == 1)
4649 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4650 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4651 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4652
4653 if (DEFAULT_ABI == ABI_DARWIN
4654 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4655 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4656 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4657 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4658
b4db40bf
JJ
4659 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4660 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4661 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4662
2aa4498c
AH
4663 if (TARGET_SPE)
4664 {
4665 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4666 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4667 registers in prologues and epilogues. We no longer use r14
4668 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4669 pool for link-compatibility with older versions of GCC. Once
4670 "old" code has died out, we can return r14 to the allocation
4671 pool. */
4672 fixed_regs[14]
4673 = call_used_regs[14]
4674 = call_really_used_regs[14] = 1;
2aa4498c
AH
4675 }
4676
0db747be 4677 if (!TARGET_ALTIVEC)
2aa4498c
AH
4678 {
4679 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4680 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4681 call_really_used_regs[VRSAVE_REGNO] = 1;
4682 }
4683
0db747be
DE
4684 if (TARGET_ALTIVEC)
4685 global_regs[VSCR_REGNO] = 1;
4686
2aa4498c 4687 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4688 {
4689 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4690 call_used_regs[i] = call_really_used_regs[i] = 1;
4691
4692 /* AIX reserves VR20:31 in non-extended ABI mode. */
4693 if (TARGET_XCOFF)
4694 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4695 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4696 }
2aa4498c 4697}
fb4d4348 4698\f
a4f6c312
SS
4699/* Try to output insns to set TARGET equal to the constant C if it can
4700 be done in less than N insns. Do all computations in MODE.
4701 Returns the place where the output has been placed if it can be
4702 done and the insns have been emitted. If it would take more than N
4703 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4704
4705rtx
f676971a 4706rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4707 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4708{
af8cb5c5 4709 rtx result, insn, set;
2bfcf297
DB
4710 HOST_WIDE_INT c0, c1;
4711
37409796 4712 switch (mode)
2bfcf297 4713 {
37409796
NS
4714 case QImode:
4715 case HImode:
2bfcf297 4716 if (dest == NULL)
c4ad648e 4717 dest = gen_reg_rtx (mode);
2bfcf297
DB
4718 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4719 return dest;
bb8df8a6 4720
37409796 4721 case SImode:
b3a13419 4722 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4723
d448860e 4724 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4725 GEN_INT (INTVAL (source)
4726 & (~ (HOST_WIDE_INT) 0xffff))));
4727 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4728 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4729 GEN_INT (INTVAL (source) & 0xffff))));
4730 result = dest;
37409796
NS
4731 break;
4732
4733 case DImode:
4734 switch (GET_CODE (source))
af8cb5c5 4735 {
37409796 4736 case CONST_INT:
af8cb5c5
DE
4737 c0 = INTVAL (source);
4738 c1 = -(c0 < 0);
37409796 4739 break;
bb8df8a6 4740
37409796 4741 case CONST_DOUBLE:
2bfcf297 4742#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4743 c0 = CONST_DOUBLE_LOW (source);
4744 c1 = -(c0 < 0);
2bfcf297 4745#else
af8cb5c5
DE
4746 c0 = CONST_DOUBLE_LOW (source);
4747 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4748#endif
37409796
NS
4749 break;
4750
4751 default:
4752 gcc_unreachable ();
af8cb5c5 4753 }
af8cb5c5
DE
4754
4755 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4756 break;
4757
4758 default:
4759 gcc_unreachable ();
2bfcf297 4760 }
2bfcf297 4761
af8cb5c5
DE
4762 insn = get_last_insn ();
4763 set = single_set (insn);
4764 if (! CONSTANT_P (SET_SRC (set)))
4765 set_unique_reg_note (insn, REG_EQUAL, source);
4766
4767 return result;
2bfcf297
DB
4768}
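
/* Worked example for the SImode case above (illustrative, not part of
   GCC): for source == 0x12345678 the first SET loads the high part,
   0x12345678 & ~0xffff == 0x12340000 (matched by "lis"), and the second
   SET IORs in the low halfword 0x5678 (matched by "ori"), so the full
   constant is synthesized in two insns.  */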
4769
4770/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4771 fall back to a straightforward decomposition. We do this to avoid
4772 exponential run times encountered when looking for longer sequences
4773 with rs6000_emit_set_const. */
4774static rtx
a2369ed3 4775rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4776{
4777 if (!TARGET_POWERPC64)
4778 {
4779 rtx operand1, operand2;
4780
4781 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4782 DImode);
d448860e 4783 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4784 DImode);
4785 emit_move_insn (operand1, GEN_INT (c1));
4786 emit_move_insn (operand2, GEN_INT (c2));
4787 }
4788 else
4789 {
bc06712d 4790 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4791
bc06712d 4792 ud1 = c1 & 0xffff;
f921c9c9 4793 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4794#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4795 c2 = c1 >> 32;
2bfcf297 4796#endif
bc06712d 4797 ud3 = c2 & 0xffff;
f921c9c9 4798 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4799
f676971a 4800 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4801 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4802 {
bc06712d 4803 if (ud1 & 0x8000)
b78d48dd 4804 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4805 else
4806 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4807 }
2bfcf297 4808
f676971a 4809 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4810 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4811 {
bc06712d 4812 if (ud2 & 0x8000)
f676971a 4813 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4814 - 0x80000000));
252b88f7 4815 else
bc06712d
TR
4816 emit_move_insn (dest, GEN_INT (ud2 << 16));
4817 if (ud1 != 0)
d448860e
JH
4818 emit_move_insn (copy_rtx (dest),
4819 gen_rtx_IOR (DImode, copy_rtx (dest),
4820 GEN_INT (ud1)));
252b88f7 4821 }
f676971a 4822 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4823 || (ud4 == 0 && ! (ud3 & 0x8000)))
4824 {
4825 if (ud3 & 0x8000)
f676971a 4826 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4827 - 0x80000000));
4828 else
4829 emit_move_insn (dest, GEN_INT (ud3 << 16));
4830
4831 if (ud2 != 0)
d448860e
JH
4832 emit_move_insn (copy_rtx (dest),
4833 gen_rtx_IOR (DImode, copy_rtx (dest),
4834 GEN_INT (ud2)));
4835 emit_move_insn (copy_rtx (dest),
4836 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4837 GEN_INT (16)));
bc06712d 4838 if (ud1 != 0)
d448860e
JH
4839 emit_move_insn (copy_rtx (dest),
4840 gen_rtx_IOR (DImode, copy_rtx (dest),
4841 GEN_INT (ud1)));
bc06712d 4842 }
f676971a 4843 else
bc06712d
TR
4844 {
4845 if (ud4 & 0x8000)
f676971a 4846 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4847 - 0x80000000));
4848 else
4849 emit_move_insn (dest, GEN_INT (ud4 << 16));
4850
4851 if (ud3 != 0)
d448860e
JH
4852 emit_move_insn (copy_rtx (dest),
4853 gen_rtx_IOR (DImode, copy_rtx (dest),
4854 GEN_INT (ud3)));
2bfcf297 4855
d448860e
JH
4856 emit_move_insn (copy_rtx (dest),
4857 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4858 GEN_INT (32)));
bc06712d 4859 if (ud2 != 0)
d448860e
JH
4860 emit_move_insn (copy_rtx (dest),
4861 gen_rtx_IOR (DImode, copy_rtx (dest),
4862 GEN_INT (ud2 << 16)));
bc06712d 4863 if (ud1 != 0)
d448860e
JH
4864 emit_move_insn (copy_rtx (dest),
4865 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4866 }
4867 }
2bfcf297
DB
4868 return dest;
4869}
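
/* Illustrative sketch (not part of GCC): the halfword split performed by
   rs6000_emit_set_long_const above, written with plain C types in place
   of HOST_WIDE_INT and assuming the whole constant fits in one 64-bit
   value (as on hosts with HOST_BITS_PER_WIDE_INT >= 64).  For
   c == 0x0000001234567890 this gives ud4 == 0x0000, ud3 == 0x0012,
   ud2 == 0x3456 and ud1 == 0x7890, and the TARGET_POWERPC64 path above
   emits roughly: lis rD,0x12; ori rD,rD,0x3456; sldi rD,rD,16;
   ori rD,rD,0x7890.  */
static void
split_into_halfwords (unsigned long long c, unsigned int ud[4])
{
  ud[0] = c & 0xffff;           /* ud1: bits  0..15 */
  ud[1] = (c >> 16) & 0xffff;   /* ud2: bits 16..31 */
  ud[2] = (c >> 32) & 0xffff;   /* ud3: bits 32..47 */
  ud[3] = (c >> 48) & 0xffff;   /* ud4: bits 48..63 */
}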
4870
76d2b81d 4871/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4872 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4873
4874static void
4875rs6000_eliminate_indexed_memrefs (rtx operands[2])
4876{
4877 if (GET_CODE (operands[0]) == MEM
4878 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4879 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4880 && ! reload_in_progress)
4881 operands[0]
4882 = replace_equiv_address (operands[0],
4883 copy_addr_to_reg (XEXP (operands[0], 0)));
4884
4885 if (GET_CODE (operands[1]) == MEM
4886 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4887 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4888 && ! reload_in_progress)
4889 operands[1]
4890 = replace_equiv_address (operands[1],
4891 copy_addr_to_reg (XEXP (operands[1], 0)));
4892}
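
/* Worked example for the helper above (illustrative): a TFmode store to
   (mem (plus r9 r4)) is rewritten so that the indexed address is first
   copied into a fresh base register, leaving a plain (mem rN) operand,
   since the multi-word move patterns cannot handle reg+reg addresses.  */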
4893
fb4d4348
GK
4894/* Emit a move from SOURCE to DEST in mode MODE. */
4895void
a2369ed3 4896rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4897{
4898 rtx operands[2];
4899 operands[0] = dest;
4900 operands[1] = source;
f676971a 4901
fb4d4348
GK
4902 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4903 if (GET_CODE (operands[1]) == CONST_DOUBLE
4904 && ! FLOAT_MODE_P (mode)
4905 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4906 {
4907 /* FIXME. This should never happen. */
4908 /* Since it seems that it does, do the safe thing and convert
4909 to a CONST_INT. */
2496c7bd 4910 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4911 }
37409796
NS
4912 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4913 || FLOAT_MODE_P (mode)
4914 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4915 || CONST_DOUBLE_LOW (operands[1]) < 0)
4916 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4917 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4918
c9e8cb32
DD
4919 /* Check if GCC is setting up a block move that will end up using FP
4920 registers as temporaries. We must make sure this is acceptable. */
4921 if (GET_CODE (operands[0]) == MEM
4922 && GET_CODE (operands[1]) == MEM
4923 && mode == DImode
41543739
GK
4924 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4925 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4926 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4927 ? 32 : MEM_ALIGN (operands[0])))
4928 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4929 ? 32
41543739
GK
4930 : MEM_ALIGN (operands[1]))))
4931 && ! MEM_VOLATILE_P (operands [0])
4932 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4933 {
41543739
GK
4934 emit_move_insn (adjust_address (operands[0], SImode, 0),
4935 adjust_address (operands[1], SImode, 0));
d448860e
JH
4936 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4937 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4938 return;
4939 }
630d42a0 4940
b3a13419 4941 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4942 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4943 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4944
a3170dc6 4945 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 4946 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 4947 && GET_CODE (operands[0]) == MEM)
fb4d4348 4948 {
ffc14f31
GK
4949 int regnum;
4950
4951 if (reload_in_progress || reload_completed)
4952 regnum = true_regnum (operands[1]);
4953 else if (GET_CODE (operands[1]) == REG)
4954 regnum = REGNO (operands[1]);
4955 else
4956 regnum = -1;
f676971a 4957
fb4d4348
GK
4958 /* If operands[1] is a register, on POWER it may have
4959 double-precision data in it, so truncate it to single
4960 precision. */
4961 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4962 {
4963 rtx newreg;
b3a13419 4964 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4965 : gen_reg_rtx (mode));
fb4d4348
GK
4966 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4967 operands[1] = newreg;
4968 }
4969 }
4970
c4501e62
JJ
4971 /* Recognize the case where operand[1] is a reference to thread-local
4972 data and load its address to a register. */
84f52ebd 4973 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4974 {
84f52ebd
RH
4975 enum tls_model model;
4976 rtx tmp = operands[1];
4977 rtx addend = NULL;
4978
4979 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4980 {
4981 addend = XEXP (XEXP (tmp, 0), 1);
4982 tmp = XEXP (XEXP (tmp, 0), 0);
4983 }
4984
4985 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4986 model = SYMBOL_REF_TLS_MODEL (tmp);
4987 gcc_assert (model != 0);
4988
4989 tmp = rs6000_legitimize_tls_address (tmp, model);
4990 if (addend)
4991 {
4992 tmp = gen_rtx_PLUS (mode, tmp, addend);
4993 tmp = force_operand (tmp, operands[0]);
4994 }
4995 operands[1] = tmp;
c4501e62
JJ
4996 }
4997
8f4e6caf
RH
4998 /* Handle the case where reload calls us with an invalid address. */
4999 if (reload_in_progress && mode == Pmode
69ef87e2 5000 && (! general_operand (operands[1], mode)
8f4e6caf
RH
5001 || ! nonimmediate_operand (operands[0], mode)))
5002 goto emit_set;
5003
a9baceb1
GK
5004 /* 128-bit constant floating-point values on Darwin should really be
5005 loaded as two parts. */
8521c414 5006 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
5007 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5008 {
5009 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5010 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
5011 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5012 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5013 simplify_gen_subreg (imode, operands[1], mode, 0),
5014 imode);
5015 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5016 GET_MODE_SIZE (imode)),
5017 simplify_gen_subreg (imode, operands[1], mode,
5018 GET_MODE_SIZE (imode)),
5019 imode);
a9baceb1
GK
5020 return;
5021 }
5022
e41b2a33
PB
5023 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5024 cfun->machine->sdmode_stack_slot =
5025 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5026
5027 if (reload_in_progress
5028 && mode == SDmode
5029 && MEM_P (operands[0])
5030 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5031 && REG_P (operands[1]))
5032 {
5033 if (FP_REGNO_P (REGNO (operands[1])))
5034 {
5035 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5036 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5037 emit_insn (gen_movsd_store (mem, operands[1]));
5038 }
5039 else if (INT_REGNO_P (REGNO (operands[1])))
5040 {
5041 rtx mem = adjust_address_nv (operands[0], mode, 4);
5042 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5043 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5044 }
5045 else
5046 gcc_unreachable();
5047 return;
5048 }
5049 if (reload_in_progress
5050 && mode == SDmode
5051 && REG_P (operands[0])
5052 && MEM_P (operands[1])
5053 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5054 {
5055 if (FP_REGNO_P (REGNO (operands[0])))
5056 {
5057 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5058 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5059 emit_insn (gen_movsd_load (operands[0], mem));
5060 }
5061 else if (INT_REGNO_P (REGNO (operands[0])))
5062 {
5063 rtx mem = adjust_address_nv (operands[1], mode, 4);
5064 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5065 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5066 }
5067 else
5068 gcc_unreachable();
5069 return;
5070 }
5071
fb4d4348
GK
5072 /* FIXME: In the long term, this switch statement should go away
5073 and be replaced by a sequence of tests based on things like
5074 mode == Pmode. */
5075 switch (mode)
5076 {
5077 case HImode:
5078 case QImode:
5079 if (CONSTANT_P (operands[1])
5080 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5081 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5082 break;
5083
06f4e019 5084 case TFmode:
7393f7f8 5085 case TDmode:
76d2b81d
DJ
5086 rs6000_eliminate_indexed_memrefs (operands);
5087 /* fall through */
5088
fb4d4348 5089 case DFmode:
7393f7f8 5090 case DDmode:
fb4d4348 5091 case SFmode:
e41b2a33 5092 case SDmode:
f676971a 5093 if (CONSTANT_P (operands[1])
fb4d4348 5094 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5095 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5096 break;
f676971a 5097
0ac081f6
AH
5098 case V16QImode:
5099 case V8HImode:
5100 case V4SFmode:
5101 case V4SImode:
a3170dc6
AH
5102 case V4HImode:
5103 case V2SFmode:
5104 case V2SImode:
00a892b8 5105 case V1DImode:
69ef87e2 5106 if (CONSTANT_P (operands[1])
d744e06e 5107 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5108 operands[1] = force_const_mem (mode, operands[1]);
5109 break;
f676971a 5110
fb4d4348 5111 case SImode:
a9098fd0 5112 case DImode:
fb4d4348
GK
5113 /* Use default pattern for address of ELF small data */
5114 if (TARGET_ELF
a9098fd0 5115 && mode == Pmode
f607bc57 5116 && DEFAULT_ABI == ABI_V4
f676971a 5117 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5118 || GET_CODE (operands[1]) == CONST)
5119 && small_data_operand (operands[1], mode))
fb4d4348
GK
5120 {
5121 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5122 return;
5123 }
5124
f607bc57 5125 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5126 && mode == Pmode && mode == SImode
5127 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5128 {
5129 emit_insn (gen_movsi_got (operands[0], operands[1]));
5130 return;
5131 }
5132
ee890fe2 5133 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5134 && TARGET_NO_TOC
5135 && ! flag_pic
a9098fd0 5136 && mode == Pmode
fb4d4348
GK
5137 && CONSTANT_P (operands[1])
5138 && GET_CODE (operands[1]) != HIGH
5139 && GET_CODE (operands[1]) != CONST_INT)
5140 {
b3a13419
ILT
5141 rtx target = (!can_create_pseudo_p ()
5142 ? operands[0]
5143 : gen_reg_rtx (mode));
fb4d4348
GK
5144
5145 /* If this is a function address on -mcall-aixdesc,
5146 convert it to the address of the descriptor. */
5147 if (DEFAULT_ABI == ABI_AIX
5148 && GET_CODE (operands[1]) == SYMBOL_REF
5149 && XSTR (operands[1], 0)[0] == '.')
5150 {
5151 const char *name = XSTR (operands[1], 0);
5152 rtx new_ref;
5153 while (*name == '.')
5154 name++;
5155 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5156 CONSTANT_POOL_ADDRESS_P (new_ref)
5157 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5158 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5159 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5160 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5161 operands[1] = new_ref;
5162 }
7509c759 5163
ee890fe2
SS
5164 if (DEFAULT_ABI == ABI_DARWIN)
5165 {
ab82a49f
AP
5166#if TARGET_MACHO
5167 if (MACHO_DYNAMIC_NO_PIC_P)
5168 {
5169 /* Take care of any required data indirection. */
5170 operands[1] = rs6000_machopic_legitimize_pic_address (
5171 operands[1], mode, operands[0]);
5172 if (operands[0] != operands[1])
5173 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5174 operands[0], operands[1]));
ab82a49f
AP
5175 return;
5176 }
5177#endif
b8a55285
AP
5178 emit_insn (gen_macho_high (target, operands[1]));
5179 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5180 return;
5181 }
5182
fb4d4348
GK
5183 emit_insn (gen_elf_high (target, operands[1]));
5184 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5185 return;
5186 }
5187
a9098fd0
GK
5188 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5189 and we have put it in the TOC, we just need to make a TOC-relative
5190 reference to it. */
5191 if (TARGET_TOC
5192 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5193 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5194 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5195 get_pool_mode (operands[1])))
fb4d4348 5196 {
a9098fd0 5197 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5198 }
a9098fd0
GK
5199 else if (mode == Pmode
5200 && CONSTANT_P (operands[1])
38886f37
AO
5201 && ((GET_CODE (operands[1]) != CONST_INT
5202 && ! easy_fp_constant (operands[1], mode))
5203 || (GET_CODE (operands[1]) == CONST_INT
5204 && num_insns_constant (operands[1], mode) > 2)
5205 || (GET_CODE (operands[0]) == REG
5206 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5207 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5208 && ! legitimate_constant_pool_address_p (operands[1])
5209 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5210 {
5211 /* Emit a USE operation so that the constant isn't deleted if
5212 expensive optimizations are turned on because nobody
5213 references it. This should only be done for operands that
5214 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5215 This should not be done for operands that contain LABEL_REFs.
5216 For now, we just handle the obvious case. */
5217 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5218 emit_use (operands[1]);
fb4d4348 5219
c859cda6 5220#if TARGET_MACHO
ee890fe2 5221 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5222 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5223 {
ee890fe2
SS
5224 operands[1] =
5225 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5226 operands[0]);
5227 if (operands[0] != operands[1])
5228 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5229 return;
5230 }
c859cda6 5231#endif
ee890fe2 5232
fb4d4348
GK
5233 /* If we are to limit the number of things we put in the TOC and
5234 this is a symbol plus a constant we can add in one insn,
5235 just put the symbol in the TOC and add the constant. Don't do
5236 this if reload is in progress. */
5237 if (GET_CODE (operands[1]) == CONST
5238 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5239 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5240 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5241 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5242 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5243 && ! side_effects_p (operands[0]))
5244 {
a4f6c312
SS
5245 rtx sym =
5246 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5247 rtx other = XEXP (XEXP (operands[1], 0), 1);
5248
a9098fd0
GK
5249 sym = force_reg (mode, sym);
5250 if (mode == SImode)
5251 emit_insn (gen_addsi3 (operands[0], sym, other));
5252 else
5253 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5254 return;
5255 }
5256
a9098fd0 5257 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5258
f676971a 5259 if (TARGET_TOC
0cdc04e8 5260 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5261 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5262 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5263 get_pool_constant (XEXP (operands[1], 0)),
5264 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5265 {
ba4828e0 5266 operands[1]
542a8afa 5267 = gen_const_mem (mode,
c4ad648e 5268 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5269 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5270 }
fb4d4348
GK
5271 }
5272 break;
a9098fd0 5273
fb4d4348 5274 case TImode:
76d2b81d
DJ
5275 rs6000_eliminate_indexed_memrefs (operands);
5276
27dc0551
DE
5277 if (TARGET_POWER)
5278 {
5279 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5280 gen_rtvec (2,
5281 gen_rtx_SET (VOIDmode,
5282 operands[0], operands[1]),
5283 gen_rtx_CLOBBER (VOIDmode,
5284 gen_rtx_SCRATCH (SImode)))));
5285 return;
5286 }
fb4d4348
GK
5287 break;
5288
5289 default:
37409796 5290 gcc_unreachable ();
fb4d4348
GK
5291 }
5292
a9098fd0
GK
5293 /* Above, we may have called force_const_mem which may have returned
5294 an invalid address. If we can, fix this up; otherwise, reload will
5295 have to deal with it. */
8f4e6caf
RH
5296 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5297 operands[1] = validize_mem (operands[1]);
a9098fd0 5298
8f4e6caf 5299 emit_set:
fb4d4348
GK
5300 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5301}
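
/* Worked example for the expander above (illustrative): on an ELF target
   with TARGET_NO_TOC set and flag_pic clear, moving the address of a
   global symbol in Pmode goes through the gen_elf_high/gen_elf_low pair,
   i.e. the address is built as a HIGH/LO_SUM combination (typically
   "lis rT,sym@ha" followed by "la rD,sym@l(rT)") instead of being loaded
   from the constant pool.  */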
4697a36c 5302\f
2858f73a
GK
5303/* Nonzero if we can use a floating-point register to pass this arg. */
5304#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5305 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5306 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5307 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5308
5309/* Nonzero if we can use an AltiVec register to pass this arg. */
5310#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5311 (ALTIVEC_VECTOR_MODE (MODE) \
5312 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5313 && TARGET_ALTIVEC_ABI \
83953138 5314 && (NAMED))
2858f73a 5315
c6e8c921
GK
5316/* Return a nonzero value to say to return the function value in
5317 memory, just as large structures are always returned. TYPE will be
5318 the data type of the value, and FNTYPE will be the type of the
5319 function doing the returning, or @code{NULL} for libcalls.
5320
5321 The AIX ABI for the RS/6000 specifies that all structures are
5322 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5323 specifies that structures <= 8 bytes are returned in r3/r4, but a
5324 draft put them in memory, and GCC used to implement the draft
df01da37 5325 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5326 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5327 compatibility can change DRAFT_V4_STRUCT_RET to override the
5328 default, and -m switches get the final word. See
5329 rs6000_override_options for more details.
5330
5331 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5332 long double support is enabled. These values are returned in memory.
5333
5334 int_size_in_bytes returns -1 for variable size objects, which go in
5335 memory always. The cast to unsigned makes -1 > 8. */
5336
5337static bool
586de218 5338rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5339{
594a51fe
SS
5340 /* In the darwin64 abi, try to use registers for larger structs
5341 if possible. */
0b5383eb 5342 if (rs6000_darwin64_abi
594a51fe 5343 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5344 && int_size_in_bytes (type) > 0)
5345 {
5346 CUMULATIVE_ARGS valcum;
5347 rtx valret;
5348
5349 valcum.words = 0;
5350 valcum.fregno = FP_ARG_MIN_REG;
5351 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5352 /* Do a trial code generation as if this were going to be passed
5353 as an argument; if any part goes in memory, we return NULL. */
5354 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5355 if (valret)
5356 return false;
5357 /* Otherwise fall through to more conventional ABI rules. */
5358 }
594a51fe 5359
c6e8c921 5360 if (AGGREGATE_TYPE_P (type)
df01da37 5361 && (aix_struct_return
c6e8c921
GK
5362 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5363 return true;
b693336b 5364
bada2eb8
DE
5365 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5366 modes only exist for GCC vector types if -maltivec. */
5367 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5368 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5369 return false;
5370
b693336b
PB
5371 /* Return synthetic vectors in memory. */
5372 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5373 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5374 {
5375 static bool warned_for_return_big_vectors = false;
5376 if (!warned_for_return_big_vectors)
5377 {
d4ee4d25 5378 warning (0, "GCC vector returned by reference: "
b693336b
PB
5379 "non-standard ABI extension with no compatibility guarantee");
5380 warned_for_return_big_vectors = true;
5381 }
5382 return true;
5383 }
5384
602ea4d3 5385 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5386 return true;
ad630bef 5387
c6e8c921
GK
5388 return false;
5389}
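
/* Worked example for the predicate above (illustrative): with the SVR4
   default (aix_struct_return clear) an 8-byte struct is returned in
   r3/r4 while a 12-byte struct is returned in memory; with
   aix_struct_return set, as on AIX and Darwin, any aggregate, even a
   4-byte struct, is returned in memory.  */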
5390
4697a36c
MM
5391/* Initialize a variable CUM of type CUMULATIVE_ARGS
5392 for a call to a function whose data type is FNTYPE.
5393 For a library call, FNTYPE is 0.
5394
5395 For incoming args we set the number of arguments in the prototype large
1c20ae99 5396 so we never return a PARALLEL. */
4697a36c
MM
5397
5398void
f676971a 5399init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5400 rtx libname ATTRIBUTE_UNUSED, int incoming,
5401 int libcall, int n_named_args)
4697a36c
MM
5402{
5403 static CUMULATIVE_ARGS zero_cumulative;
5404
5405 *cum = zero_cumulative;
5406 cum->words = 0;
5407 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5408 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5409 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5410 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5411 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5412 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5413 cum->stdarg = fntype
5414 && (TYPE_ARG_TYPES (fntype) != 0
5415 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5416 != void_type_node));
4697a36c 5417
0f6937fe
AM
5418 cum->nargs_prototype = 0;
5419 if (incoming || cum->prototype)
5420 cum->nargs_prototype = n_named_args;
4697a36c 5421
a5c76ee6 5422 /* Check for a longcall attribute. */
3eb4e360
AM
5423 if ((!fntype && rs6000_default_long_calls)
5424 || (fntype
5425 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5426 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5427 cum->call_cookie |= CALL_LONG;
6a4cee5f 5428
4697a36c
MM
5429 if (TARGET_DEBUG_ARG)
5430 {
5431 fprintf (stderr, "\ninit_cumulative_args:");
5432 if (fntype)
5433 {
5434 tree ret_type = TREE_TYPE (fntype);
5435 fprintf (stderr, " ret code = %s,",
5436 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5437 }
5438
6a4cee5f
MM
5439 if (cum->call_cookie & CALL_LONG)
5440 fprintf (stderr, " longcall,");
5441
4697a36c
MM
5442 fprintf (stderr, " proto = %d, nargs = %d\n",
5443 cum->prototype, cum->nargs_prototype);
5444 }
f676971a 5445
c4ad648e
AM
5446 if (fntype
5447 && !TARGET_ALTIVEC
5448 && TARGET_ALTIVEC_ABI
5449 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5450 {
c85ce869 5451 error ("cannot return value in vector register because"
c4ad648e 5452 " altivec instructions are disabled, use -maltivec"
c85ce869 5453 " to enable them");
c4ad648e 5454 }
4697a36c
MM
5455}
5456\f
fe984136
RH
5457/* Return true if TYPE must be passed on the stack and not in registers. */
5458
5459static bool
586de218 5460rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5461{
5462 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5463 return must_pass_in_stack_var_size (mode, type);
5464 else
5465 return must_pass_in_stack_var_size_or_pad (mode, type);
5466}
5467
c229cba9
DE
5468/* If defined, a C expression which determines whether, and in which
5469 direction, to pad out an argument with extra space. The value
5470 should be of type `enum direction': either `upward' to pad above
5471 the argument, `downward' to pad below, or `none' to inhibit
5472 padding.
5473
5474 For the AIX ABI structs are always stored left shifted in their
5475 argument slot. */
5476
9ebbca7d 5477enum direction
586de218 5478function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5479{
6e985040
AM
5480#ifndef AGGREGATE_PADDING_FIXED
5481#define AGGREGATE_PADDING_FIXED 0
5482#endif
5483#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5484#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5485#endif
5486
5487 if (!AGGREGATE_PADDING_FIXED)
5488 {
5489 /* GCC used to pass structures of the same size as integer types as
5490 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5491 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5492 passed padded downward, except that -mstrict-align further
5493 muddied the water in that multi-component structures of 2 and 4
5494 bytes in size were passed padded upward.
5495
5496 The following arranges for best compatibility with previous
5497 versions of gcc, but removes the -mstrict-align dependency. */
5498 if (BYTES_BIG_ENDIAN)
5499 {
5500 HOST_WIDE_INT size = 0;
5501
5502 if (mode == BLKmode)
5503 {
5504 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5505 size = int_size_in_bytes (type);
5506 }
5507 else
5508 size = GET_MODE_SIZE (mode);
5509
5510 if (size == 1 || size == 2 || size == 4)
5511 return downward;
5512 }
5513 return upward;
5514 }
5515
5516 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5517 {
5518 if (type != 0 && AGGREGATE_TYPE_P (type))
5519 return upward;
5520 }
c229cba9 5521
d3704c46
KH
5522 /* Fall back to the default. */
5523 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5524}
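
/* Worked example for the function above (illustrative, with the default
   AGGREGATE_PADDING_FIXED == 0): on a big-endian target a BLKmode
   aggregate of 1, 2 or 4 bytes is padded downward, while a 3- or 5-byte
   aggregate is padded upward; on a little-endian target everything is
   padded upward.  */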
5525
b6c9286a 5526/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5527 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5528 PARM_BOUNDARY is used for all arguments.
f676971a 5529
84e9ad15
AM
5530 V.4 wants long longs and doubles to be double word aligned. Just
5531 testing the mode size is a boneheaded way to do this as it means
5532 that other types such as complex int are also double word aligned.
5533 However, we're stuck with this because changing the ABI might break
5534 existing library interfaces.
5535
b693336b
PB
5536 Doubleword align SPE vectors.
5537 Quadword align Altivec vectors.
5538 Quadword align large synthetic vector types. */
b6c9286a
MM
5539
5540int
b693336b 5541function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5542{
84e9ad15
AM
5543 if (DEFAULT_ABI == ABI_V4
5544 && (GET_MODE_SIZE (mode) == 8
5545 || (TARGET_HARD_FLOAT
5546 && TARGET_FPRS
7393f7f8 5547 && (mode == TFmode || mode == TDmode))))
4ed78545 5548 return 64;
ad630bef
DE
5549 else if (SPE_VECTOR_MODE (mode)
5550 || (type && TREE_CODE (type) == VECTOR_TYPE
5551 && int_size_in_bytes (type) >= 8
5552 && int_size_in_bytes (type) < 16))
e1f83b4d 5553 return 64;
ad630bef
DE
5554 else if (ALTIVEC_VECTOR_MODE (mode)
5555 || (type && TREE_CODE (type) == VECTOR_TYPE
5556 && int_size_in_bytes (type) >= 16))
0ac081f6 5557 return 128;
0b5383eb
DJ
5558 else if (rs6000_darwin64_abi && mode == BLKmode
5559 && type && TYPE_ALIGN (type) > 64)
5560 return 128;
9ebbca7d 5561 else
b6c9286a 5562 return PARM_BOUNDARY;
b6c9286a 5563}
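
/* Worked examples for the function above (illustrative): under the V.4
   ABI a DImode or DFmode argument (8 bytes) is aligned to 64 bits, an
   AltiVec V4SImode argument to 128 bits, and a plain SImode argument
   falls through to PARM_BOUNDARY (32 bits on a 32-bit target).  */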
c53bdcf5 5564
294bd182
AM
5565/* For a function parm of MODE and TYPE, return the starting word in
5566 the parameter area. NWORDS of the parameter area are already used. */
5567
5568static unsigned int
5569rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5570{
5571 unsigned int align;
5572 unsigned int parm_offset;
5573
5574 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5575 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5576 return nwords + (-(parm_offset + nwords) & align);
5577}
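
/* Worked example for the function above (illustrative): on 32-bit V.4,
   parm_offset is 2 and a doubleword-aligned argument (boundary 64 bits,
   PARM_BOUNDARY 32) has align == 1.  With nwords == 3 already used the
   result is 3 + (-(2 + 3) & 1) == 4, i.e. one padding word is skipped so
   that parm_offset + 4 is even and the argument starts on a doubleword
   boundary.  */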
5578
c53bdcf5
AM
5579/* Compute the size (in words) of a function argument. */
5580
5581static unsigned long
5582rs6000_arg_size (enum machine_mode mode, tree type)
5583{
5584 unsigned long size;
5585
5586 if (mode != BLKmode)
5587 size = GET_MODE_SIZE (mode);
5588 else
5589 size = int_size_in_bytes (type);
5590
5591 if (TARGET_32BIT)
5592 return (size + 3) >> 2;
5593 else
5594 return (size + 7) >> 3;
5595}
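
/* Illustrative sketch (not part of GCC): the same round-up-to-words
   arithmetic as rs6000_arg_size above, written for an explicit word
   size.  A 10-byte BLKmode argument occupies
   arg_size_in_words (10, 4) == 3 words on a 32-bit target and
   arg_size_in_words (10, 8) == 2 doublewords on a 64-bit target.  */
static unsigned long
arg_size_in_words (unsigned long size_in_bytes, unsigned long word_size)
{
  return (size_in_bytes + word_size - 1) / word_size;
}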
b6c9286a 5596\f
0b5383eb 5597/* Use this to flush pending int fields. */
594a51fe
SS
5598
5599static void
0b5383eb
DJ
5600rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5601 HOST_WIDE_INT bitpos)
594a51fe 5602{
0b5383eb
DJ
5603 unsigned int startbit, endbit;
5604 int intregs, intoffset;
5605 enum machine_mode mode;
594a51fe 5606
0b5383eb
DJ
5607 if (cum->intoffset == -1)
5608 return;
594a51fe 5609
0b5383eb
DJ
5610 intoffset = cum->intoffset;
5611 cum->intoffset = -1;
5612
5613 if (intoffset % BITS_PER_WORD != 0)
5614 {
5615 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5616 MODE_INT, 0);
5617 if (mode == BLKmode)
594a51fe 5618 {
0b5383eb
DJ
5619 /* We couldn't find an appropriate mode, which happens,
5620 e.g., in packed structs when there are 3 bytes to load.
5621 Move intoffset back to the beginning of the word in this
5622 case. */
5623 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5624 }
594a51fe 5625 }
0b5383eb
DJ
5626
5627 startbit = intoffset & -BITS_PER_WORD;
5628 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5629 intregs = (endbit - startbit) / BITS_PER_WORD;
5630 cum->words += intregs;
5631}
5632
5633/* The darwin64 ABI calls for us to recurse down through structs,
5634 looking for elements passed in registers. Unfortunately, we have
5635 to track int register count here also because of misalignments
5636 in powerpc alignment mode. */
5637
5638static void
5639rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5640 tree type,
5641 HOST_WIDE_INT startbitpos)
5642{
5643 tree f;
5644
5645 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5646 if (TREE_CODE (f) == FIELD_DECL)
5647 {
5648 HOST_WIDE_INT bitpos = startbitpos;
5649 tree ftype = TREE_TYPE (f);
70fb00df
AP
5650 enum machine_mode mode;
5651 if (ftype == error_mark_node)
5652 continue;
5653 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5654
5655 if (DECL_SIZE (f) != 0
5656 && host_integerp (bit_position (f), 1))
5657 bitpos += int_bit_position (f);
5658
5659 /* ??? FIXME: else assume zero offset. */
5660
5661 if (TREE_CODE (ftype) == RECORD_TYPE)
5662 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5663 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5664 {
5665 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5666 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5667 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5668 }
5669 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5670 {
5671 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5672 cum->vregno++;
5673 cum->words += 2;
5674 }
5675 else if (cum->intoffset == -1)
5676 cum->intoffset = bitpos;
5677 }
594a51fe
SS
5678}
5679
4697a36c
MM
5680/* Update the data in CUM to advance over an argument
5681 of mode MODE and data type TYPE.
b2d04ecf
AM
5682 (TYPE is null for libcalls where that information may not be available.)
5683
5684 Note that for args passed by reference, function_arg will be called
5685 with MODE and TYPE set to that of the pointer to the arg, not the arg
5686 itself. */
4697a36c
MM
5687
5688void
f676971a 5689function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5690 tree type, int named, int depth)
4697a36c 5691{
0b5383eb
DJ
5692 int size;
5693
594a51fe
SS
5694 /* Only tick off an argument if we're not recursing. */
5695 if (depth == 0)
5696 cum->nargs_prototype--;
4697a36c 5697
ad630bef
DE
5698 if (TARGET_ALTIVEC_ABI
5699 && (ALTIVEC_VECTOR_MODE (mode)
5700 || (type && TREE_CODE (type) == VECTOR_TYPE
5701 && int_size_in_bytes (type) == 16)))
0ac081f6 5702 {
4ed78545
AM
5703 bool stack = false;
5704
2858f73a 5705 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5706 {
6d0ef01e
HP
5707 cum->vregno++;
5708 if (!TARGET_ALTIVEC)
c85ce869 5709 error ("cannot pass argument in vector register because"
6d0ef01e 5710 " altivec instructions are disabled, use -maltivec"
c85ce869 5711 " to enable them");
4ed78545
AM
5712
5713 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5714 even if it is going to be passed in a vector register.
4ed78545
AM
5715 Darwin does the same for variable-argument functions. */
5716 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5717 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5718 stack = true;
6d0ef01e 5719 }
4ed78545
AM
5720 else
5721 stack = true;
5722
5723 if (stack)
c4ad648e 5724 {
a594a19c 5725 int align;
f676971a 5726
2858f73a
GK
5727 /* Vector parameters must be 16-byte aligned. This places
5728 them at 2 mod 4 in terms of words in 32-bit mode, since
5729 the parameter save area starts at offset 24 from the
5730 stack. In 64-bit mode, they just have to start on an
5731 even word, since the parameter save area is 16-byte
5732 aligned. Space for GPRs is reserved even if the argument
5733 will be passed in memory. */
5734 if (TARGET_32BIT)
4ed78545 5735 align = (2 - cum->words) & 3;
2858f73a
GK
5736 else
5737 align = cum->words & 1;
c53bdcf5 5738 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5739
a594a19c
GK
5740 if (TARGET_DEBUG_ARG)
5741 {
f676971a 5742 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5743 cum->words, align);
5744 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5745 cum->nargs_prototype, cum->prototype,
2858f73a 5746 GET_MODE_NAME (mode));
a594a19c
GK
5747 }
5748 }
0ac081f6 5749 }
a4b0320c 5750 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5751 && !cum->stdarg
5752 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5753 cum->sysv_gregno++;
594a51fe
SS
5754
5755 else if (rs6000_darwin64_abi
5756 && mode == BLKmode
0b5383eb
DJ
5757 && TREE_CODE (type) == RECORD_TYPE
5758 && (size = int_size_in_bytes (type)) > 0)
5759 {
5760 /* Variable sized types have size == -1 and are
5761 treated as if consisting entirely of ints.
5762 Pad to 16 byte boundary if needed. */
5763 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5764 && (cum->words % 2) != 0)
5765 cum->words++;
5766 /* For varargs, we can just go up by the size of the struct. */
5767 if (!named)
5768 cum->words += (size + 7) / 8;
5769 else
5770 {
5771 /* It is tempting to say int register count just goes up by
5772 sizeof(type)/8, but this is wrong in a case such as
5773 { int; double; int; } [powerpc alignment]. We have to
5774 grovel through the fields for these too. */
5775 cum->intoffset = 0;
5776 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5777 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5778 size * BITS_PER_UNIT);
5779 }
5780 }
f607bc57 5781 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5782 {
a3170dc6 5783 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
5784 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5785 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5786 || (mode == TFmode && !TARGET_IEEEQUAD)
5787 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 5788 {
2d83f070
JJ
5789 /* _Decimal128 must use an even/odd register pair. This assumes
5790 that the register number is odd when fregno is odd. */
5791 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5792 cum->fregno++;
5793
5794 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5795 <= FP_ARG_V4_MAX_REG)
602ea4d3 5796 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5797 else
5798 {
602ea4d3 5799 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5800 if (mode == DFmode || mode == TFmode
5801 || mode == DDmode || mode == TDmode)
c4ad648e 5802 cum->words += cum->words & 1;
c53bdcf5 5803 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5804 }
4697a36c 5805 }
4cc833b7
RH
5806 else
5807 {
b2d04ecf 5808 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5809 int gregno = cum->sysv_gregno;
5810
4ed78545
AM
5811 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5812 (r7,r8) or (r9,r10). As does any other 2 word item such
5813 as complex int due to a historical mistake. */
5814 if (n_words == 2)
5815 gregno += (1 - gregno) & 1;
4cc833b7 5816
4ed78545 5817 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5818 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5819 {
4ed78545
AM
5820 /* Long long and SPE vectors are aligned on the stack.
5821 So are other 2 word items such as complex int due to
5822 a historical mistake. */
4cc833b7
RH
5823 if (n_words == 2)
5824 cum->words += cum->words & 1;
5825 cum->words += n_words;
5826 }
4697a36c 5827
4cc833b7
RH
5828 /* Note: continuing to accumulate gregno past when we've started
5829 spilling to the stack indicates the fact that we've started
5830 spilling to the stack to expand_builtin_saveregs. */
5831 cum->sysv_gregno = gregno + n_words;
5832 }
4697a36c 5833
4cc833b7
RH
5834 if (TARGET_DEBUG_ARG)
5835 {
5836 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5837 cum->words, cum->fregno);
5838 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5839 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5840 fprintf (stderr, "mode = %4s, named = %d\n",
5841 GET_MODE_NAME (mode), named);
5842 }
4697a36c
MM
5843 }
5844 else
4cc833b7 5845 {
b2d04ecf 5846 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5847 int start_words = cum->words;
5848 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5849
294bd182 5850 cum->words = align_words + n_words;
4697a36c 5851
ebb109ad 5852 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 5853 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5854 {
5855 /* _Decimal128 must be passed in an even/odd float register pair.
5856 This assumes that the register number is odd when fregno is
5857 odd. */
5858 if (mode == TDmode && (cum->fregno % 2) == 1)
5859 cum->fregno++;
5860 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5861 }
4cc833b7
RH
5862
5863 if (TARGET_DEBUG_ARG)
5864 {
5865 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5866 cum->words, cum->fregno);
5867 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5868 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5869 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5870 named, align_words - start_words, depth);
4cc833b7
RH
5871 }
5872 }
4697a36c 5873}
a6c9bed4 5874
f82f556d
AH
5875static rtx
5876spe_build_register_parallel (enum machine_mode mode, int gregno)
5877{
17caeff2 5878 rtx r1, r3, r5, r7;
f82f556d 5879
37409796 5880 switch (mode)
f82f556d 5881 {
37409796 5882 case DFmode:
54b695e7
AH
5883 r1 = gen_rtx_REG (DImode, gregno);
5884 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5885 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5886
5887 case DCmode:
17caeff2 5888 case TFmode:
54b695e7
AH
5889 r1 = gen_rtx_REG (DImode, gregno);
5890 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5891 r3 = gen_rtx_REG (DImode, gregno + 2);
5892 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5893 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5894
17caeff2
JM
5895 case TCmode:
5896 r1 = gen_rtx_REG (DImode, gregno);
5897 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5898 r3 = gen_rtx_REG (DImode, gregno + 2);
5899 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5900 r5 = gen_rtx_REG (DImode, gregno + 4);
5901 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5902 r7 = gen_rtx_REG (DImode, gregno + 6);
5903 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5904 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5905
37409796
NS
5906 default:
5907 gcc_unreachable ();
f82f556d 5908 }
f82f556d 5909}
b78d48dd 5910
f82f556d 5911/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5912static rtx
f676971a 5913rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5914 tree type)
a6c9bed4 5915{
f82f556d
AH
5916 int gregno = cum->sysv_gregno;
5917
5918 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5919 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5920 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5921 || mode == DCmode || mode == TCmode))
f82f556d 5922 {
b5870bee
AH
5923 int n_words = rs6000_arg_size (mode, type);
5924
f82f556d 5925 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5926 if (mode == DFmode)
b5870bee 5927 gregno += (1 - gregno) & 1;
f82f556d 5928
b5870bee
AH
5929 /* Multi-reg args are not split between registers and stack. */
5930 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5931 return NULL_RTX;
5932
5933 return spe_build_register_parallel (mode, gregno);
5934 }
a6c9bed4
AH
5935 if (cum->stdarg)
5936 {
c53bdcf5 5937 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5938
5939 /* SPE vectors are put in odd registers. */
5940 if (n_words == 2 && (gregno & 1) == 0)
5941 gregno += 1;
5942
5943 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5944 {
5945 rtx r1, r2;
5946 enum machine_mode m = SImode;
5947
5948 r1 = gen_rtx_REG (m, gregno);
5949 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5950 r2 = gen_rtx_REG (m, gregno + 1);
5951 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5952 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5953 }
5954 else
b78d48dd 5955 return NULL_RTX;
a6c9bed4
AH
5956 }
5957 else
5958 {
f82f556d
AH
5959 if (gregno <= GP_ARG_MAX_REG)
5960 return gen_rtx_REG (mode, gregno);
a6c9bed4 5961 else
b78d48dd 5962 return NULL_RTX;
a6c9bed4
AH
5963 }
5964}
5965
0b5383eb
DJ
5966/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5967 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5968
0b5383eb 5969static void
bb8df8a6 5970rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5971 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5972{
0b5383eb
DJ
5973 enum machine_mode mode;
5974 unsigned int regno;
5975 unsigned int startbit, endbit;
5976 int this_regno, intregs, intoffset;
5977 rtx reg;
594a51fe 5978
0b5383eb
DJ
5979 if (cum->intoffset == -1)
5980 return;
5981
5982 intoffset = cum->intoffset;
5983 cum->intoffset = -1;
5984
5985 /* If this is the trailing part of a word, try to only load that
5986 much into the register. Otherwise load the whole register. Note
5987 that in the latter case we may pick up unwanted bits. It's not a
5988 problem at the moment, but we may wish to revisit this. */
5989
5990 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5991 {
0b5383eb
DJ
5992 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5993 MODE_INT, 0);
5994 if (mode == BLKmode)
5995 {
5996 /* We couldn't find an appropriate mode, which happens,
5997 e.g., in packed structs when there are 3 bytes to load.
5998 Move intoffset back to the beginning of the word in this
5999 case. */
6000 intoffset = intoffset & -BITS_PER_WORD;
6001 mode = word_mode;
6002 }
6003 }
6004 else
6005 mode = word_mode;
6006
6007 startbit = intoffset & -BITS_PER_WORD;
6008 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
6009 intregs = (endbit - startbit) / BITS_PER_WORD;
6010 this_regno = cum->words + intoffset / BITS_PER_WORD;
6011
6012 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6013 cum->use_stack = 1;
bb8df8a6 6014
0b5383eb
DJ
6015 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6016 if (intregs <= 0)
6017 return;
6018
6019 intoffset /= BITS_PER_UNIT;
6020 do
6021 {
6022 regno = GP_ARG_MIN_REG + this_regno;
6023 reg = gen_rtx_REG (mode, regno);
6024 rvec[(*k)++] =
6025 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6026
6027 this_regno += 1;
6028 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6029 mode = word_mode;
6030 intregs -= 1;
6031 }
6032 while (intregs > 0);
6033}
6034
6035/* Recursive workhorse for the following. */
6036
6037static void
586de218 6038rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6039 HOST_WIDE_INT startbitpos, rtx rvec[],
6040 int *k)
6041{
6042 tree f;
6043
6044 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6045 if (TREE_CODE (f) == FIELD_DECL)
6046 {
6047 HOST_WIDE_INT bitpos = startbitpos;
6048 tree ftype = TREE_TYPE (f);
70fb00df
AP
6049 enum machine_mode mode;
6050 if (ftype == error_mark_node)
6051 continue;
6052 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6053
6054 if (DECL_SIZE (f) != 0
6055 && host_integerp (bit_position (f), 1))
6056 bitpos += int_bit_position (f);
6057
6058 /* ??? FIXME: else assume zero offset. */
6059
6060 if (TREE_CODE (ftype) == RECORD_TYPE)
6061 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6062 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6063 {
0b5383eb
DJ
6064#if 0
6065 switch (mode)
594a51fe 6066 {
0b5383eb
DJ
6067 case SCmode: mode = SFmode; break;
6068 case DCmode: mode = DFmode; break;
6069 case TCmode: mode = TFmode; break;
6070 default: break;
594a51fe 6071 }
0b5383eb
DJ
6072#endif
6073 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6074 rvec[(*k)++]
bb8df8a6 6075 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6076 gen_rtx_REG (mode, cum->fregno++),
6077 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6078 if (mode == TFmode || mode == TDmode)
0b5383eb 6079 cum->fregno++;
594a51fe 6080 }
0b5383eb
DJ
6081 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6082 {
6083 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6084 rvec[(*k)++]
bb8df8a6
EC
6085 = gen_rtx_EXPR_LIST (VOIDmode,
6086 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6087 GEN_INT (bitpos / BITS_PER_UNIT));
6088 }
6089 else if (cum->intoffset == -1)
6090 cum->intoffset = bitpos;
6091 }
6092}
594a51fe 6093
0b5383eb
DJ
6094/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6095 the register(s) to be used for each field and subfield of a struct
6096 being passed by value, along with the offset of where the
6097 register's value may be found in the block. FP fields go in FP
6098 register, vector fields go in vector registers, and everything
bb8df8a6 6099 else goes in int registers, packed as in memory.
8ff40a74 6100
0b5383eb
DJ
6101 This code is also used for function return values. RETVAL indicates
6102 whether this is the case.
8ff40a74 6103
a4d05547 6104 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6105 calling convention. */
594a51fe 6106
0b5383eb 6107static rtx
586de218 6108rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6109 int named, bool retval)
6110{
6111 rtx rvec[FIRST_PSEUDO_REGISTER];
6112 int k = 1, kbase = 1;
6113 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6114 /* This is a copy; modifications are not visible to our caller. */
6115 CUMULATIVE_ARGS copy_cum = *orig_cum;
6116 CUMULATIVE_ARGS *cum = &copy_cum;
6117
6118 /* Pad to 16 byte boundary if needed. */
6119 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6120 && (cum->words % 2) != 0)
6121 cum->words++;
6122
6123 cum->intoffset = 0;
6124 cum->use_stack = 0;
6125 cum->named = named;
6126
6127 /* Put entries into rvec[] for individual FP and vector fields, and
6128 for the chunks of memory that go in int regs. Note we start at
6129 element 1; 0 is reserved for an indication of using memory, and
6130 may or may not be filled in below. */
6131 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6132 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6133
6134 /* If any part of the struct went on the stack put all of it there.
6135 This hack is because the generic code for
6136 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6137 parts of the struct are not at the beginning. */
6138 if (cum->use_stack)
6139 {
6140 if (retval)
6141 return NULL_RTX; /* doesn't go in registers at all */
6142 kbase = 0;
6143 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6144 }
6145 if (k > 1 || cum->use_stack)
6146 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6147 else
6148 return NULL_RTX;
6149}
6150
b78d48dd
FJ
6151/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6152
6153static rtx
ec6376ab 6154rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6155{
ec6376ab
AM
6156 int n_units;
6157 int i, k;
6158 rtx rvec[GP_ARG_NUM_REG + 1];
6159
6160 if (align_words >= GP_ARG_NUM_REG)
6161 return NULL_RTX;
6162
6163 n_units = rs6000_arg_size (mode, type);
6164
6165 /* Optimize the simple case where the arg fits in one gpr, except in
6166 the case of BLKmode due to assign_parms assuming that registers are
6167 BITS_PER_WORD wide. */
6168 if (n_units == 0
6169 || (n_units == 1 && mode != BLKmode))
6170 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6171
6172 k = 0;
6173 if (align_words + n_units > GP_ARG_NUM_REG)
6174 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6175 using a magic NULL_RTX component.
79773478
AM
6176 This is not strictly correct. Only some of the arg belongs in
6177 memory, not all of it. However, the normal scheme using
6178 function_arg_partial_nregs can result in unusual subregs, eg.
6179 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6180 store the whole arg to memory is often more efficient than code
6181 to store pieces, and we know that space is available in the right
6182 place for the whole arg. */
ec6376ab
AM
6183 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6184
6185 i = 0;
6186 do
36a454e1 6187 {
ec6376ab
AM
6188 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6189 rtx off = GEN_INT (i++ * 4);
6190 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6191 }
ec6376ab
AM
6192 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6193
6194 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6195}
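
/* Worked example for the function above (illustrative): a DImode argument
   with align_words == 7 (only r10 left) has n_units == 2, so
   align_words + n_units exceeds the number of argument GPRs (r3-r10) and
   the result is a PARALLEL whose first element is the magic NULL_RTX
   entry (part of the value lives in memory) and whose second element
   places the first word in r10 (SImode, offset 0); the second word is
   passed on the stack.  */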
6196
4697a36c
MM
6197/* Determine where to put an argument to a function.
6198 Value is zero to push the argument on the stack,
6199 or a hard register in which to store the argument.
6200
6201 MODE is the argument's machine mode.
6202 TYPE is the data type of the argument (as a tree).
6203 This is null for libcalls where that information may
6204 not be available.
6205 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6206 the preceding args and about the function being called. It is
6207 not modified in this routine.
4697a36c
MM
6208 NAMED is nonzero if this argument is a named parameter
6209 (otherwise it is an extra parameter matching an ellipsis).
6210
6211 On RS/6000 the first eight words of non-FP are normally in registers
6212 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6213 Under V.4, the first 8 FP args are in registers.
6214
6215 If this is floating-point and no prototype is specified, we use
6216 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6217 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6218 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6219 doesn't support PARALLEL anyway.
6220
6221 Note that for args passed by reference, function_arg will be called
6222 with MODE and TYPE set to that of the pointer to the arg, not the arg
6223 itself. */
4697a36c 6224
9390387d 6225rtx
f676971a 6226function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6227 tree type, int named)
4697a36c 6228{
4cc833b7 6229 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6230
a4f6c312
SS
6231 /* Return a marker to indicate whether CR1 needs to set or clear the
6232 bit that V.4 uses to say fp args were passed in registers.
6233 Assume that we don't need the marker for software floating point,
6234 or compiler generated library calls. */
4697a36c
MM
6235 if (mode == VOIDmode)
6236 {
f607bc57 6237 if (abi == ABI_V4
b9599e46 6238 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6239 && (cum->stdarg
6240 || (cum->nargs_prototype < 0
6241 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6242 {
a3170dc6
AH
6243 /* For the SPE, we need to crxor CR6 always. */
6244 if (TARGET_SPE_ABI)
6245 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6246 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6247 return GEN_INT (cum->call_cookie
6248 | ((cum->fregno == FP_ARG_MIN_REG)
6249 ? CALL_V4_SET_FP_ARGS
6250 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6251 }
4697a36c 6252
7509c759 6253 return GEN_INT (cum->call_cookie);
4697a36c
MM
6254 }
6255
0b5383eb
DJ
6256 if (rs6000_darwin64_abi && mode == BLKmode
6257 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6258 {
0b5383eb 6259 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6260 if (rslt != NULL_RTX)
6261 return rslt;
6262 /* Else fall through to usual handling. */
6263 }
6264
2858f73a 6265 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6266 if (TARGET_64BIT && ! cum->prototype)
6267 {
c4ad648e
AM
6268 /* Vector parameters get passed in a vector register
6269 and also in GPRs or memory, in the absence of a prototype. */
6270 int align_words;
6271 rtx slot;
6272 align_words = (cum->words + 1) & ~1;
6273
6274 if (align_words >= GP_ARG_NUM_REG)
6275 {
6276 slot = NULL_RTX;
6277 }
6278 else
6279 {
6280 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6281 }
6282 return gen_rtx_PARALLEL (mode,
6283 gen_rtvec (2,
6284 gen_rtx_EXPR_LIST (VOIDmode,
6285 slot, const0_rtx),
6286 gen_rtx_EXPR_LIST (VOIDmode,
6287 gen_rtx_REG (mode, cum->vregno),
6288 const0_rtx)));
c72d6c26
HP
6289 }
6290 else
6291 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6292 else if (TARGET_ALTIVEC_ABI
6293 && (ALTIVEC_VECTOR_MODE (mode)
6294 || (type && TREE_CODE (type) == VECTOR_TYPE
6295 && int_size_in_bytes (type) == 16)))
0ac081f6 6296 {
2858f73a 6297 if (named || abi == ABI_V4)
a594a19c 6298 return NULL_RTX;
0ac081f6 6299 else
a594a19c
GK
6300 {
6301 /* Vector parameters to varargs functions under AIX or Darwin
6302 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6303 int align, align_words, n_words;
6304 enum machine_mode part_mode;
a594a19c
GK
6305
6306 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6307 2 mod 4 in terms of words in 32-bit mode, since the parameter
6308 save area starts at offset 24 from the stack. In 64-bit mode,
6309 they just have to start on an even word, since the parameter
6310 save area is 16-byte aligned. */
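	  /* Worked example (illustrative only): in 32-bit mode the parameter
	     save area begins at stack offset 24, so word W sits at offset
	     24 + 4*W, and 16-byte alignment needs W == 2 mod 4.  With
	     cum->words == 3, align = (2 - 3) & 3 = 3, so the vector starts
	     at word 6 (offset 48).  In 64-bit mode an even word suffices.  */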
6311 if (TARGET_32BIT)
4ed78545 6312 align = (2 - cum->words) & 3;
2858f73a
GK
6313 else
6314 align = cum->words & 1;
a594a19c
GK
6315 align_words = cum->words + align;
6316
6317 /* Out of registers? Memory, then. */
6318 if (align_words >= GP_ARG_NUM_REG)
6319 return NULL_RTX;
ec6376ab
AM
6320
6321 if (TARGET_32BIT && TARGET_POWERPC64)
6322 return rs6000_mixed_function_arg (mode, type, align_words);
6323
2858f73a
GK
6324 /* The vector value goes in GPRs. Only the part of the
6325 value in GPRs is reported here. */
ec6376ab
AM
6326 part_mode = mode;
6327 n_words = rs6000_arg_size (mode, type);
6328 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6329 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
6330 is either wholly in GPRs or half in GPRs and half not. */
6331 part_mode = DImode;
ec6376ab
AM
6332
6333 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6334 }
0ac081f6 6335 }
f82f556d
AH
6336 else if (TARGET_SPE_ABI && TARGET_SPE
6337 && (SPE_VECTOR_MODE (mode)
18f63bfa 6338 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6339 || mode == DCmode
6340 || mode == TFmode
6341 || mode == TCmode))))
a6c9bed4 6342 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6343
f607bc57 6344 else if (abi == ABI_V4)
4697a36c 6345 {
a3170dc6 6346 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6347 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6348 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
cf8e1455
DE
6349 || (mode == TFmode && !TARGET_IEEEQUAD)
6350 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6351 {
2d83f070
JJ
6352 /* _Decimal128 must use an even/odd register pair. This assumes
6353 that the register number is odd when fregno is odd. */
6354 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6355 cum->fregno++;
6356
6357 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6358 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6359 return gen_rtx_REG (mode, cum->fregno);
6360 else
b78d48dd 6361 return NULL_RTX;
4cc833b7
RH
6362 }
6363 else
6364 {
b2d04ecf 6365 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6366 int gregno = cum->sysv_gregno;
6367
4ed78545
AM
6368 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6369 (r7,r8) or (r9,r10). So is any other 2-word item, such
6370 as complex int, due to a historical mistake. */
6371 if (n_words == 2)
6372 gregno += (1 - gregno) & 1;
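	  /* Illustrative arithmetic (not from the original comment):
	     (1 - gregno) & 1 is 1 exactly when gregno is even, so e.g.
	     gregno == 4 (r4) bumps to r5 and the item occupies (r5,r6),
	     while gregno == 3 or 5 is left alone.  */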
4cc833b7 6373
4ed78545 6374 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6375 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6376 return NULL_RTX;
ec6376ab
AM
6377
6378 if (TARGET_32BIT && TARGET_POWERPC64)
6379 return rs6000_mixed_function_arg (mode, type,
6380 gregno - GP_ARG_MIN_REG);
6381 return gen_rtx_REG (mode, gregno);
4cc833b7 6382 }
4697a36c 6383 }
4cc833b7
RH
6384 else
6385 {
294bd182 6386 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6387
2d83f070
JJ
6388 /* _Decimal128 must be passed in an even/odd float register pair.
6389 This assumes that the register number is odd when fregno is odd. */
6390 if (mode == TDmode && (cum->fregno % 2) == 1)
6391 cum->fregno++;
6392
2858f73a 6393 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6394 {
ec6376ab
AM
6395 rtx rvec[GP_ARG_NUM_REG + 1];
6396 rtx r;
6397 int k;
c53bdcf5
AM
6398 bool needs_psave;
6399 enum machine_mode fmode = mode;
c53bdcf5
AM
6400 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6401
6402 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6403 {
c53bdcf5
AM
6404 /* Currently, we only ever need one reg here because complex
6405 doubles are split. */
7393f7f8
BE
6406 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6407 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6408
7393f7f8
BE
6409 /* Long double or _Decimal128 split over regs and memory. */
6410 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6411 }
c53bdcf5
AM
6412
6413 /* Do we also need to pass this arg in the parameter save
6414 area? */
6415 needs_psave = (type
6416 && (cum->nargs_prototype <= 0
6417 || (DEFAULT_ABI == ABI_AIX
de17c25f 6418 && TARGET_XL_COMPAT
c53bdcf5
AM
6419 && align_words >= GP_ARG_NUM_REG)));
6420
6421 if (!needs_psave && mode == fmode)
ec6376ab 6422 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6423
ec6376ab 6424 k = 0;
c53bdcf5
AM
6425 if (needs_psave)
6426 {
ec6376ab 6427 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6428 This piece must come first, before the fprs. */
c53bdcf5
AM
6429 if (align_words < GP_ARG_NUM_REG)
6430 {
6431 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6432
6433 if (align_words + n_words > GP_ARG_NUM_REG
6434 || (TARGET_32BIT && TARGET_POWERPC64))
6435 {
6436 /* If this is partially on the stack, then we only
6437 include the portion actually in registers here. */
6438 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6439 rtx off;
79773478
AM
6440 int i = 0;
6441 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6442 /* Not all of the arg fits in gprs. Say that it
6443 goes in memory too, using a magic NULL_RTX
6444 component. Also see comment in
6445 rs6000_mixed_function_arg for why the normal
6446 function_arg_partial_nregs scheme doesn't work
6447 in this case. */
6448 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6449 const0_rtx);
ec6376ab
AM
6450 do
6451 {
6452 r = gen_rtx_REG (rmode,
6453 GP_ARG_MIN_REG + align_words);
2e6c9641 6454 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6455 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6456 }
6457 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6458 }
6459 else
6460 {
6461 /* The whole arg fits in gprs. */
6462 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6463 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6464 }
c53bdcf5 6465 }
ec6376ab
AM
6466 else
6467 /* It's entirely in memory. */
6468 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6469 }
6470
ec6376ab
AM
6471 /* Describe where this piece goes in the fprs. */
6472 r = gen_rtx_REG (fmode, cum->fregno);
6473 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6474
6475 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6476 }
6477 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6478 {
ec6376ab
AM
6479 if (TARGET_32BIT && TARGET_POWERPC64)
6480 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6481
4eeca74f
AM
6482 if (mode == BLKmode)
6483 mode = Pmode;
6484
b2d04ecf
AM
6485 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6486 }
4cc833b7
RH
6487 else
6488 return NULL_RTX;
4697a36c 6489 }
4697a36c
MM
6490}
6491\f
ec6376ab 6492/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6493 the number of bytes passed in registers. For args passed entirely in
6494 registers or entirely in memory, zero. When an arg is described by a
6495 PARALLEL, perhaps using more than one register type, this function
6496 returns the number of bytes used by the first element of the PARALLEL. */
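/* Illustrative example (an assumption about typical values, not taken from
   the sources): with the usual eight argument GPRs, a 24-byte aggregate
   that starts at GPR word 7 in 64-bit mode has only one doubleword left in
   registers, so this returns (8 - 7) * 8 = 8 bytes.  */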
4697a36c 6497
78a52f11
RH
6498static int
6499rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6500 tree type, bool named)
4697a36c 6501{
c53bdcf5 6502 int ret = 0;
ec6376ab 6503 int align_words;
c53bdcf5 6504
f607bc57 6505 if (DEFAULT_ABI == ABI_V4)
4697a36c 6506 return 0;
4697a36c 6507
c53bdcf5
AM
6508 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6509 && cum->nargs_prototype >= 0)
6510 return 0;
6511
0b5383eb
DJ
6512 /* In this complicated case we just disable the partial_nregs code. */
6513 if (rs6000_darwin64_abi && mode == BLKmode
6514 && TREE_CODE (type) == RECORD_TYPE
6515 && int_size_in_bytes (type) > 0)
6516 return 0;
6517
294bd182 6518 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6519
79773478
AM
6520 if (USE_FP_FOR_ARG_P (cum, mode, type))
6521 {
fb63c729
AM
6522 /* If we are passing this arg in the fixed parameter save area
6523 (gprs or memory) as well as fprs, then this function should
79773478
AM
6524 return the number of partial bytes passed in the parameter
6525 save area rather than partial bytes passed in fprs. */
6526 if (type
6527 && (cum->nargs_prototype <= 0
6528 || (DEFAULT_ABI == ABI_AIX
6529 && TARGET_XL_COMPAT
6530 && align_words >= GP_ARG_NUM_REG)))
6531 return 0;
6532 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6533 > FP_ARG_MAX_REG + 1)
ac7e839c 6534 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6535 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6536 return 0;
6537 }
6538
ec6376ab
AM
6539 if (align_words < GP_ARG_NUM_REG
6540 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6541 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6542
c53bdcf5 6543 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6544 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6545
c53bdcf5 6546 return ret;
4697a36c
MM
6547}
6548\f
6549/* A C expression that indicates when an argument must be passed by
6550 reference. If nonzero for an argument, a copy of that argument is
6551 made in memory and a pointer to the argument is passed instead of
6552 the argument itself. The pointer is passed in whatever way is
6553 appropriate for passing a pointer to that type.
6554
b2d04ecf
AM
6555 Under V.4, aggregates and long double are passed by reference.
6556
6557 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6558 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6559
6560 As an extension to all ABIs, variable sized types are passed by
6561 reference. */
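/* Concrete cases, as an illustrative gloss on the rules above: under V.4 a
   'struct { int x[4]; }' argument goes by reference, as does 'long double'
   when it is IEEE 128-bit (TFmode with TARGET_IEEEQUAD); any type whose
   size is not known at compile time is passed by reference on all ABIs.  */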
4697a36c 6562
8cd5a4e0 6563static bool
f676971a 6564rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6565 enum machine_mode mode, const_tree type,
bada2eb8 6566 bool named ATTRIBUTE_UNUSED)
4697a36c 6567{
602ea4d3 6568 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6569 {
6570 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6571 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6572 return 1;
6573 }
6574
6575 if (!type)
6576 return 0;
4697a36c 6577
bada2eb8
DE
6578 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6579 {
6580 if (TARGET_DEBUG_ARG)
6581 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6582 return 1;
6583 }
6584
6585 if (int_size_in_bytes (type) < 0)
6586 {
6587 if (TARGET_DEBUG_ARG)
6588 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6589 return 1;
6590 }
6591
6592 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6593 modes only exist for GCC vector types if -maltivec. */
6594 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6595 {
6596 if (TARGET_DEBUG_ARG)
6597 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6598 return 1;
6599 }
b693336b
PB
6600
6601 /* Pass synthetic vectors in memory. */
bada2eb8 6602 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6603 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6604 {
6605 static bool warned_for_pass_big_vectors = false;
6606 if (TARGET_DEBUG_ARG)
6607 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6608 if (!warned_for_pass_big_vectors)
6609 {
d4ee4d25 6610 warning (0, "GCC vector passed by reference: "
b693336b
PB
6611 "non-standard ABI extension with no compatibility guarantee");
6612 warned_for_pass_big_vectors = true;
6613 }
6614 return 1;
6615 }
6616
b2d04ecf 6617 return 0;
4697a36c 6618}
5985c7a6
FJ
6619
6620static void
2d9db8eb 6621rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6622{
6623 int i;
6624 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6625
6626 if (nregs == 0)
6627 return;
6628
c4ad648e 6629 for (i = 0; i < nregs; i++)
5985c7a6 6630 {
9390387d 6631 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6632 if (reload_completed)
c4ad648e
AM
6633 {
6634 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6635 tem = NULL_RTX;
6636 else
6637 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6638 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6639 }
5985c7a6
FJ
6640 else
6641 tem = replace_equiv_address (tem, XEXP (tem, 0));
6642
37409796 6643 gcc_assert (tem);
5985c7a6
FJ
6644
6645 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6646 }
6647}
4697a36c
MM
6648\f
6649/* Perform any actions needed for a function that is receiving a
f676971a 6650 variable number of arguments.
4697a36c
MM
6651
6652 CUM is as above.
6653
6654 MODE and TYPE are the mode and type of the current parameter.
6655
6656 PRETEND_SIZE is a variable that should be set to the amount of stack
6657 that must be pushed by the prolog to pretend that our caller pushed
6658 it.
6659
6660 Normally, this macro will push all remaining incoming registers on the
6661 stack and set PRETEND_SIZE to the length of the registers pushed. */
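/* Rough picture (an informal sketch, not taken verbatim from the ABI text):
   for 'int f (int a, ...)' under V.4 the prologue spills the still-unnamed
   argument registers r4..r10, and f1..f8 when hard float is in use (guarded
   by the CR bit that says fp args were passed), into the register save area
   so that va_arg can later walk them.  */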
6662
c6e8c921 6663static void
f676971a 6664setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6665 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6666 int no_rtl)
4697a36c 6667{
4cc833b7
RH
6668 CUMULATIVE_ARGS next_cum;
6669 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6670 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6671 int first_reg_offset;
6672 alias_set_type set;
4697a36c 6673
f31bf321 6674 /* Skip the last named argument. */
d34c5b80 6675 next_cum = *cum;
594a51fe 6676 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6677
f607bc57 6678 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6679 {
5b667039
JJ
6680 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6681
60e2d0ca 6682 if (! no_rtl)
5b667039
JJ
6683 {
6684 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6685 HOST_WIDE_INT offset = 0;
6686
6687 /* Try to optimize the size of the varargs save area.
6688 The ABI requires that ap.reg_save_area is doubleword
6689 aligned, but we don't need to allocate space for all
6690 the bytes, only for those to which we will actually save
6691 anything. */
6692 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6693 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6694 if (TARGET_HARD_FLOAT && TARGET_FPRS
6695 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6696 && cfun->va_list_fpr_size)
6697 {
6698 if (gpr_reg_num)
6699 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6700 * UNITS_PER_FP_WORD;
6701 if (cfun->va_list_fpr_size
6702 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6703 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6704 else
6705 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6706 * UNITS_PER_FP_WORD;
6707 }
6708 if (gpr_reg_num)
6709 {
6710 offset = -((first_reg_offset * reg_size) & ~7);
6711 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6712 {
6713 gpr_reg_num = cfun->va_list_gpr_size;
6714 if (reg_size == 4 && (first_reg_offset & 1))
6715 gpr_reg_num++;
6716 }
6717 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6718 }
6719 else if (fpr_size)
6720 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6721 * UNITS_PER_FP_WORD
6722 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6723
5b667039
JJ
6724 if (gpr_size + fpr_size)
6725 {
6726 rtx reg_save_area
6727 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6728 gcc_assert (GET_CODE (reg_save_area) == MEM);
6729 reg_save_area = XEXP (reg_save_area, 0);
6730 if (GET_CODE (reg_save_area) == PLUS)
6731 {
6732 gcc_assert (XEXP (reg_save_area, 0)
6733 == virtual_stack_vars_rtx);
6734 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6735 offset += INTVAL (XEXP (reg_save_area, 1));
6736 }
6737 else
6738 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6739 }
6740
6741 cfun->machine->varargs_save_offset = offset;
6742 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6743 }
4697a36c 6744 }
60e2d0ca 6745 else
4697a36c 6746 {
d34c5b80 6747 first_reg_offset = next_cum.words;
4cc833b7 6748 save_area = virtual_incoming_args_rtx;
4697a36c 6749
fe984136 6750 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6751 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6752 }
4697a36c 6753
dfafc897 6754 set = get_varargs_alias_set ();
9d30f3c1
JJ
6755 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6756 && cfun->va_list_gpr_size)
4cc833b7 6757 {
9d30f3c1
JJ
6758 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6759
6760 if (va_list_gpr_counter_field)
6761 {
6762 /* V4 va_list_gpr_size counts the number of registers needed. */
6763 if (nregs > cfun->va_list_gpr_size)
6764 nregs = cfun->va_list_gpr_size;
6765 }
6766 else
6767 {
6768 /* char * va_list instead counts the number of bytes needed. */
6769 if (nregs > cfun->va_list_gpr_size / reg_size)
6770 nregs = cfun->va_list_gpr_size / reg_size;
6771 }
6772
dfafc897 6773 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6774 plus_constant (save_area,
13e2e16e
DE
6775 first_reg_offset * reg_size));
6776 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6777 set_mem_alias_set (mem, set);
8ac61af7 6778 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6779
f676971a 6780 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6781 nregs);
4697a36c
MM
6782 }
6783
4697a36c 6784 /* Save FP registers if needed. */
f607bc57 6785 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6786 && TARGET_HARD_FLOAT && TARGET_FPRS
6787 && ! no_rtl
9d30f3c1
JJ
6788 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6789 && cfun->va_list_fpr_size)
4697a36c 6790 {
9d30f3c1 6791 int fregno = next_cum.fregno, nregs;
9ebbca7d 6792 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6793 rtx lab = gen_label_rtx ();
5b667039
JJ
6794 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6795 * UNITS_PER_FP_WORD);
4697a36c 6796
c4ad648e
AM
6797 emit_jump_insn
6798 (gen_rtx_SET (VOIDmode,
6799 pc_rtx,
6800 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6801 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6802 const0_rtx),
39403d82 6803 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6804 pc_rtx)));
6805
9d30f3c1
JJ
6806 for (nregs = 0;
6807 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6808 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6809 {
696e45ba
ME
6810 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6811 ? DFmode : SFmode,
6812 plus_constant (save_area, off));
6813 MEM_NOTRAP_P (mem) = 1;
6814 set_mem_alias_set (mem, set);
6815 set_mem_align (mem, GET_MODE_ALIGNMENT (
6816 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6817 ? DFmode : SFmode));
6818 emit_move_insn (mem, gen_rtx_REG (
6819 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6820 ? DFmode : SFmode, fregno));
4697a36c 6821 }
4cc833b7
RH
6822
6823 emit_label (lab);
4697a36c 6824 }
4697a36c 6825}
4697a36c 6826
dfafc897 6827/* Create the va_list data type. */
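/* The V.4 record built below corresponds, roughly, to the familiar SVR4
   layout (shown only as an illustrative sketch; the authoritative layout is
   whatever layout_type produces from the FIELD_DECLs):

     typedef struct __va_list_tag {
       unsigned char gpr;            // index of next saved GPR arg, 0..8
       unsigned char fpr;            // index of next saved FPR arg, 0..8
       unsigned short reserved;      // padding, named to quiet -Wpadded
       void *overflow_arg_area;      // arguments that spilled to the stack
       void *reg_save_area;          // block holding the saved GPRs/FPRs
     } __va_list_tag[1];

   Every other ABI handled here just uses 'char *'.  */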
2c4974b7 6828
c35d187f
RH
6829static tree
6830rs6000_build_builtin_va_list (void)
dfafc897 6831{
64c2816f 6832 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6833
9ebbca7d
GK
6834 /* For AIX, prefer 'char *' because that's what the system
6835 header files like. */
f607bc57 6836 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6837 return build_pointer_type (char_type_node);
dfafc897 6838
f1e639b1 6839 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6840 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6841
f676971a 6842 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6843 unsigned_char_type_node);
f676971a 6844 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6845 unsigned_char_type_node);
64c2816f
DT
6846 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6847 every user file. */
6848 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6849 short_unsigned_type_node);
dfafc897
FS
6850 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6851 ptr_type_node);
6852 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6853 ptr_type_node);
6854
9d30f3c1
JJ
6855 va_list_gpr_counter_field = f_gpr;
6856 va_list_fpr_counter_field = f_fpr;
6857
dfafc897
FS
6858 DECL_FIELD_CONTEXT (f_gpr) = record;
6859 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6860 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6861 DECL_FIELD_CONTEXT (f_ovf) = record;
6862 DECL_FIELD_CONTEXT (f_sav) = record;
6863
bab45a51
FS
6864 TREE_CHAIN (record) = type_decl;
6865 TYPE_NAME (record) = type_decl;
dfafc897
FS
6866 TYPE_FIELDS (record) = f_gpr;
6867 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6868 TREE_CHAIN (f_fpr) = f_res;
6869 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6870 TREE_CHAIN (f_ovf) = f_sav;
6871
6872 layout_type (record);
6873
6874 /* The correct type is an array type of one element. */
6875 return build_array_type (record, build_index_type (size_zero_node));
6876}
6877
6878/* Implement va_start. */
6879
d7bd8aeb 6880static void
a2369ed3 6881rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6882{
dfafc897 6883 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6884 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6885 tree gpr, fpr, ovf, sav, t;
2c4974b7 6886
dfafc897 6887 /* Only SVR4 needs something special. */
f607bc57 6888 if (DEFAULT_ABI != ABI_V4)
dfafc897 6889 {
e5faf155 6890 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6891 return;
6892 }
6893
973a648b 6894 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6895 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6896 f_res = TREE_CHAIN (f_fpr);
6897 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6898 f_sav = TREE_CHAIN (f_ovf);
6899
872a65b5 6900 valist = build_va_arg_indirect_ref (valist);
47a25a46 6901 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6902 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6903 f_fpr, NULL_TREE);
6904 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6905 f_ovf, NULL_TREE);
6906 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6907 f_sav, NULL_TREE);
dfafc897
FS
6908
6909 /* Count number of gp and fp argument registers used. */
38173d38
JH
6910 words = crtl->args.info.words;
6911 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6912 GP_ARG_NUM_REG);
38173d38 6913 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6914 FP_ARG_NUM_REG);
dfafc897
FS
6915
6916 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6917 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6918 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6919 words, n_gpr, n_fpr);
dfafc897 6920
9d30f3c1
JJ
6921 if (cfun->va_list_gpr_size)
6922 {
726a989a 6923 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6924 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6925 TREE_SIDE_EFFECTS (t) = 1;
6926 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6927 }
58c8adc1 6928
9d30f3c1
JJ
6929 if (cfun->va_list_fpr_size)
6930 {
726a989a 6931 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6932 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6933 TREE_SIDE_EFFECTS (t) = 1;
6934 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6935 }
dfafc897
FS
6936
6937 /* Find the overflow area. */
6938 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6939 if (words != 0)
5be014d5
AP
6940 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6941 size_int (words * UNITS_PER_WORD));
726a989a 6942 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6943 TREE_SIDE_EFFECTS (t) = 1;
6944 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6945
9d30f3c1
JJ
6946 /* If there were no va_arg invocations, don't set up the register
6947 save area. */
6948 if (!cfun->va_list_gpr_size
6949 && !cfun->va_list_fpr_size
6950 && n_gpr < GP_ARG_NUM_REG
6951 && n_fpr < FP_ARG_V4_MAX_REG)
6952 return;
6953
dfafc897
FS
6954 /* Find the register save area. */
6955 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6956 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6957 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6958 size_int (cfun->machine->varargs_save_offset));
726a989a 6959 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6960 TREE_SIDE_EFFECTS (t) = 1;
6961 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6962}
6963
6964/* Implement va_arg. */
6965
23a60a04 6966tree
726a989a
RB
6967rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6968 gimple_seq *post_p)
cd3ce9b4 6969{
cd3ce9b4
JM
6970 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6971 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6972 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6973 tree lab_false, lab_over, addr;
6974 int align;
6975 tree ptrtype = build_pointer_type (type);
7393f7f8 6976 int regalign = 0;
726a989a 6977 gimple stmt;
cd3ce9b4 6978
08b0dc1b
RH
6979 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6980 {
6981 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6982 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6983 }
6984
cd3ce9b4
JM
6985 if (DEFAULT_ABI != ABI_V4)
6986 {
08b0dc1b 6987 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6988 {
6989 tree elem_type = TREE_TYPE (type);
6990 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6991 int elem_size = GET_MODE_SIZE (elem_mode);
6992
6993 if (elem_size < UNITS_PER_WORD)
6994 {
23a60a04 6995 tree real_part, imag_part;
726a989a 6996 gimple_seq post = NULL;
cd3ce9b4 6997
23a60a04
JM
6998 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6999 &post);
7000 /* Copy the value into a temporary, lest the formal temporary
7001 be reused out from under us. */
7002 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 7003 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 7004
23a60a04
JM
7005 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7006 post_p);
cd3ce9b4 7007
47a25a46 7008 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
7009 }
7010 }
7011
23a60a04 7012 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
7013 }
7014
7015 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7016 f_fpr = TREE_CHAIN (f_gpr);
7017 f_res = TREE_CHAIN (f_fpr);
7018 f_ovf = TREE_CHAIN (f_res);
7019 f_sav = TREE_CHAIN (f_ovf);
7020
872a65b5 7021 valist = build_va_arg_indirect_ref (valist);
47a25a46 7022 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
7023 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7024 f_fpr, NULL_TREE);
7025 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7026 f_ovf, NULL_TREE);
7027 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7028 f_sav, NULL_TREE);
cd3ce9b4
JM
7029
7030 size = int_size_in_bytes (type);
7031 rsize = (size + 3) / 4;
7032 align = 1;
7033
08b0dc1b 7034 if (TARGET_HARD_FLOAT && TARGET_FPRS
696e45ba
ME
7035 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7036 || (TARGET_DOUBLE_FLOAT
7037 && (TYPE_MODE (type) == DFmode
7038 || TYPE_MODE (type) == TFmode
7039 || TYPE_MODE (type) == SDmode
7040 || TYPE_MODE (type) == DDmode
7041 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7042 {
7043 /* FP args go in FP registers, if present. */
cd3ce9b4 7044 reg = fpr;
602ea4d3 7045 n_reg = (size + 7) / 8;
696e45ba
ME
7046 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7047 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7048 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7049 align = 8;
7050 }
7051 else
7052 {
7053 /* Otherwise into GP registers. */
cd3ce9b4
JM
7054 reg = gpr;
7055 n_reg = rsize;
7056 sav_ofs = 0;
7057 sav_scale = 4;
7058 if (n_reg == 2)
7059 align = 8;
7060 }
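  /* Worked example (illustrative numbers only): for a 'double' with hard
     float, n_reg = 1, sav_ofs = 8*4 = 32 (the eight GPR words that precede
     the FPR block) and sav_scale = 8, so the value is fetched from
     reg_save_area + 32 + fpr*8 and fpr is then bumped by one.  For a
     'long long' in GPRs, n_reg = 2, sav_ofs = 0 and sav_scale = 4.  */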
7061
7062 /* Pull the value out of the saved registers.... */
7063
7064 lab_over = NULL;
7065 addr = create_tmp_var (ptr_type_node, "addr");
7066 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7067
7068 /* AltiVec vectors never go in registers when -mabi=altivec. */
7069 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7070 align = 16;
7071 else
7072 {
7073 lab_false = create_artificial_label ();
7074 lab_over = create_artificial_label ();
7075
7076 /* Long long and SPE vectors are aligned in the registers.
7077 So is any other 2-gpr item, such as complex int, due to a
7078 historical mistake. */
7079 u = reg;
602ea4d3 7080 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7081 {
7393f7f8 7082 regalign = 1;
726a989a 7083 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7084 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7085 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7086 unshare_expr (reg), u);
cd3ce9b4 7087 }
7393f7f8
BE
7088 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7089 reg number is 0 for f1, so we want to make it odd. */
7090 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7091 {
726a989a 7092 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7093 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7094 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7095 }
cd3ce9b4 7096
95674810 7097 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7098 t = build2 (GE_EXPR, boolean_type_node, u, t);
7099 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7100 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7101 gimplify_and_add (t, pre_p);
7102
7103 t = sav;
7104 if (sav_ofs)
5be014d5 7105 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7106
726a989a 7107 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7108 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7109 u = fold_convert (sizetype, u);
7110 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7111 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7112
e41b2a33
PB
7113 /* _Decimal32 varargs are located in the second word of the 64-bit
7114 FP register for 32-bit binaries. */
4f011e1e
JM
7115 if (!TARGET_POWERPC64
7116 && TARGET_HARD_FLOAT && TARGET_FPRS
7117 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7118 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7119
726a989a 7120 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7121
726a989a 7122 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7123
726a989a
RB
7124 stmt = gimple_build_label (lab_false);
7125 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7126
7393f7f8 7127 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7128 {
7129 /* Ensure that we don't find any more args in regs.
7393f7f8 7130 Alignment has taken care of the special cases. */
726a989a 7131 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7132 }
7133 }
7134
7135 /* ... otherwise out of the overflow area. */
7136
7137 /* Care for on-stack alignment if needed. */
7138 t = ovf;
7139 if (align != 1)
7140 {
5be014d5
AP
7141 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7142 t = fold_convert (sizetype, t);
4a90aeeb 7143 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7144 size_int (-align));
7145 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7146 }
7147 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7148
726a989a 7149 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7150
5be014d5 7151 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7152 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7153
7154 if (lab_over)
7155 {
726a989a
RB
7156 stmt = gimple_build_label (lab_over);
7157 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7158 }
7159
0cfbc62b
JM
7160 if (STRICT_ALIGNMENT
7161 && (TYPE_ALIGN (type)
7162 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7163 {
7164 /* The value (of type complex double, for example) may not be
7165 aligned in memory in the saved registers, so copy via a
7166 temporary. (This is the same code as used for SPARC.) */
7167 tree tmp = create_tmp_var (type, "va_arg_tmp");
7168 tree dest_addr = build_fold_addr_expr (tmp);
7169
5039610b
SL
7170 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7171 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7172
7173 gimplify_and_add (copy, pre_p);
7174 addr = dest_addr;
7175 }
7176
08b0dc1b 7177 addr = fold_convert (ptrtype, addr);
872a65b5 7178 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7179}
7180
0ac081f6
AH
7181/* Builtins. */
7182
58646b77
PB
7183static void
7184def_builtin (int mask, const char *name, tree type, int code)
7185{
96038623 7186 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7187 {
7188 if (rs6000_builtin_decls[code])
7189 abort ();
7190
7191 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7192 add_builtin_function (name, type, code, BUILT_IN_MD,
7193 NULL, NULL_TREE);
58646b77
PB
7194 }
7195}
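/* For reference (a hedged sketch of how the tables below are consumed, not
   an exact quote of the init code): each entry is eventually handed to
   def_builtin, along the lines of

     def_builtin (d->mask, d->name, fntype, d->code);

   so a builtin is registered only when its mask bits appear in target_flags
   (or unconditionally when paired float is enabled).  'fntype' here stands
   for whatever FUNCTION_TYPE the init routine picks for that entry.  */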
0ac081f6 7196
24408032
AH
7197/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
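/* For instance, the vmaddfp entry below backs the user-level intrinsic

     vector float r = __builtin_altivec_vmaddfp (a, b, c);   // r = a*b + c

   (shown as an illustrative use, assuming -maltivec); the CODE_FOR_nothing
   rows are the overloaded vec_* names that get resolved to a specific
   builtin later.  */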
7198
2212663f 7199static const struct builtin_description bdesc_3arg[] =
24408032
AH
7200{
7201 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7202 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7203 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7207 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7210 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7211 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7212 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7213 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7214 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7215 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7216 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7217 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7218 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7219 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7220 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7221 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7222 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7223 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7224
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7240
7241 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7242 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7243 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7244 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7245 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7246 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7247 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7248 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7249 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7250};
2212663f 7251
95385cbb
AH
7252/* DST operations: void foo (void *, const int, const char). */
7253
7254static const struct builtin_description bdesc_dst[] =
7255{
7256 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7257 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7258 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7259 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7260
7261 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7262 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7263 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7264 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7265};
7266
2212663f 7267/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7268
a3170dc6 7269static struct builtin_description bdesc_2arg[] =
0ac081f6 7270{
f18c054f
DB
7271 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7272 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7273 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7274 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7275 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7277 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7278 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7279 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7280 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7281 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7282 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7283 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7285 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7286 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7290 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7291 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7304 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7305 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7306 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7307 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7308 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7309 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7310 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7311 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7312 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7313 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7314 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7315 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7316 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7317 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7318 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7319 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7320 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7321 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7322 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7323 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7324 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7325 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7326 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7327 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7328 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7329 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7330 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7331 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7332 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7335 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7336 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7337 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7338 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7339 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7340 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7341 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7342 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7343 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7345 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7347 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7348 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7349 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7350 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7351 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7352 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7353 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7354 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7355 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7356 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7357 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7358 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7359 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7360 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7361 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7362 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7363 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7364 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7365 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7366 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7367 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7368 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7369 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7370 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7371 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7372 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7373 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7374 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7375 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7377 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7378 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7379 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7380 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7381 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7382
58646b77
PB
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7482 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7483 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7484 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7485 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7486 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7487 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7501 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7502 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7503 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7504 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7505 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7506 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7507 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7508 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7509 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7510
96038623
DE
7511 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7512 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7513 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7514 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7515 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7516 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7517 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7518 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7519 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7520 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7521
a3170dc6
AH
7522 /* Place-holder. Leave as first SPE builtin. */
7523 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7524 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7525 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7526 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7527 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7528 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7529 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7530 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7531 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7532 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7533 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7534 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7535 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7536 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7537 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7538 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7539 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7540 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7541 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7542 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7543 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7544 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7545 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7546 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7547 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7548 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7549 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7550 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7551 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7552 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7553 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7554 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7555 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7556 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7557 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7558 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7559 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7560 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7561 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7562 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7563 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7564 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7565 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7566 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7567 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7568 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7569 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7570 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7571 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7572 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7573 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7574 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7575 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7576 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7577 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7578 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7579 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7580 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7581 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7582 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7583 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7584 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7585 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7586 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7587 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7588 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7589 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7590 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7591 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7592 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7593 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7594 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7595 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7596 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7597 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7598 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7599 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7600 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7601 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7602 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7603 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7604 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7605 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7606 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7607 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7608 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7609 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7610 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7611 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7612 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7613 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7614 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7615 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7616 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7617 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7618 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7619 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7620 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7621 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7622 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7623 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7624 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7625 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7626 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7627 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7628 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7629 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7630 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7631 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7632
7633 /* SPE binary operations expecting a 5-bit unsigned literal. */
7634 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7635
7636 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7637 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7638 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7639 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7640 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7641 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7642 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7643 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7644 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7645 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7646 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7647 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7648 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7649 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7650 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7651 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7652 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7653 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7654 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7655 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7656 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7657 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7658 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7659 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7660 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7661 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7662
7663 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7664 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7665};
7666
7667/* AltiVec predicates. */
7668
7669struct builtin_description_predicates
7670{
7671 const unsigned int mask;
7672 const enum insn_code icode;
7673 const char *opcode;
7674 const char *const name;
7675 const enum rs6000_builtins code;
7676};
7677
7678static const struct builtin_description_predicates bdesc_altivec_preds[] =
7679{
7680 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7681 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7682 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7683 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7684 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7685 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7686 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7687 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7688 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7689 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7690 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7691 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7692 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7693
81f40b79
ILT
7694 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7695 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7696 { MASK_ALTIVEC, CODE_FOR_nothing, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7697};
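/* A minimal user-level sketch of how these predicate entries are reached,
   assuming the standard <altivec.h> intrinsics and -maltivec; the overload
   resolution itself happens in the front end, not in this table:

       #include <altivec.h>

       int all_equal (vector unsigned int a, vector unsigned int b)
       {
         return vec_all_eq (a, b);
       }

   vec_all_eq resolves to one of the __builtin_altivec_vcmp*_p entries
   above; the "*vcmpequw." style opcode string names the dot-form compare
   whose CR6 result altivec_expand_predicate_builtin (below) turns into
   the scalar 0/1 return value.  */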
24408032 7698
a3170dc6
AH
7699/* SPE predicates. */
7700static struct builtin_description bdesc_spe_predicates[] =
7701{
7702 /* Place-holder. Leave as first. */
7703 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7704 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7705 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7706 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7707 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7708 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7709 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7710 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7711 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7712 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7713 /* Place-holder. Leave as last. */
7714 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7715};
7716
7717/* SPE evsel predicates. */
7718static struct builtin_description bdesc_spe_evsel[] =
7719{
7720 /* Place-holder. Leave as first. */
7721 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7722 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7723 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7724 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7725 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7726 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7727 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7728 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7729 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7730 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7731 /* Place-holder. Leave as last. */
7732 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7733};
7734
96038623
DE
7735/* PAIRED predicates. */
7736static const struct builtin_description bdesc_paired_preds[] =
7737{
7738 /* Place-holder. Leave as first. */
7739 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7740 /* Place-holder. Leave as last. */
7741 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7742};
7743
b6d08ca1 7744/* ABS* operations. */
100c4561
AH
7745
7746static const struct builtin_description bdesc_abs[] =
7747{
7748 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7749 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7750 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7751 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7752 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7753 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7754 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7755};
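/* A hedged sketch of user code that reaches the bdesc_abs entries above,
   assuming the usual <altivec.h> spellings (vec_abs for the plain forms,
   vec_abss for the saturating ones):

       #include <altivec.h>

       vector signed int wrap_abs (vector signed int v)
       {
         return vec_abs (v);
       }

   The expansion itself is done by altivec_expand_abs_builtin below, which
   also supplies the two scratch registers the abs patterns want.  */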
7756
617e0e1d
DB
7757/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7758 foo (VECa). */
24408032 7759
a3170dc6 7760static struct builtin_description bdesc_1arg[] =
2212663f 7761{
617e0e1d
DB
7762 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7763 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7764 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7765 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7766 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7767 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7768 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7769 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7770 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7771 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7772 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7773 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7774 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7775 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7776 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7777 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7778 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7779
58646b77
PB
7780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7799
a3170dc6
AH
7800 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7801 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7802 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7803 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7804 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7805 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7806 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7807 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7808 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7809 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7810 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7811 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7812 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7813 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7814 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7815 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7816 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7817 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7818 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7819 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7820 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7821 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7822 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7823 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7824 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7825 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7826 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7827 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7828 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7829 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7830
7831 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7832 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7833
7834 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7835 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7836 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7837 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7838 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7839};
7840
7841static rtx
5039610b 7842rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7843{
7844 rtx pat;
5039610b 7845 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7846 rtx op0 = expand_normal (arg0);
2212663f
DB
7847 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7848 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7849
0559cc77
DE
7850 if (icode == CODE_FOR_nothing)
7851 /* Builtin not supported on this processor. */
7852 return 0;
7853
20e26713
AH
7854 /* If we got invalid arguments, bail out before generating bad rtl. */
7855 if (arg0 == error_mark_node)
9a171fcd 7856 return const0_rtx;
20e26713 7857
0559cc77
DE
7858 if (icode == CODE_FOR_altivec_vspltisb
7859 || icode == CODE_FOR_altivec_vspltish
7860 || icode == CODE_FOR_altivec_vspltisw
7861 || icode == CODE_FOR_spe_evsplatfi
7862 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7863 {
7864 /* Only allow 5-bit *signed* literals. */
b44140e7 7865 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7866 || INTVAL (op0) > 15
7867 || INTVAL (op0) < -16)
b44140e7
AH
7868 {
7869 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7870 return const0_rtx;
b44140e7 7871 }
b44140e7
AH
7872 }
7873
c62f2db5 7874 if (target == 0
2212663f
DB
7875 || GET_MODE (target) != tmode
7876 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7877 target = gen_reg_rtx (tmode);
7878
7879 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7880 op0 = copy_to_mode_reg (mode0, op0);
7881
7882 pat = GEN_FCN (icode) (target, op0);
7883 if (! pat)
7884 return 0;
7885 emit_insn (pat);
0ac081f6 7886
2212663f
DB
7887 return target;
7888}
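/* A hedged usage sketch for the 5-bit signed literal check above, assuming
   the <altivec.h> vec_splat_s* intrinsics, which map onto the vspltis*
   icodes named in the test:

       #include <altivec.h>

       vector signed char splat_minus16 (void)
       {
         return vec_splat_s8 (-16);
       }

   The literal must lie in -16 .. 15; anything outside that range (or a
   non-constant argument) hits the "argument 1 must be a 5-bit signed
   literal" error instead of generating bad rtl.  */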
ae4b4a02 7889
100c4561 7890static rtx
5039610b 7891altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7892{
7893 rtx pat, scratch1, scratch2;
5039610b 7894 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7895 rtx op0 = expand_normal (arg0);
100c4561
AH
7896 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7897 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7898
7899 /* If we have invalid arguments, bail out before generating bad rtl. */
7900 if (arg0 == error_mark_node)
9a171fcd 7901 return const0_rtx;
100c4561
AH
7902
7903 if (target == 0
7904 || GET_MODE (target) != tmode
7905 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7906 target = gen_reg_rtx (tmode);
7907
7908 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7909 op0 = copy_to_mode_reg (mode0, op0);
7910
7911 scratch1 = gen_reg_rtx (mode0);
7912 scratch2 = gen_reg_rtx (mode0);
7913
7914 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7915 if (! pat)
7916 return 0;
7917 emit_insn (pat);
7918
7919 return target;
7920}
7921
0ac081f6 7922static rtx
5039610b 7923rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7924{
7925 rtx pat;
5039610b
SL
7926 tree arg0 = CALL_EXPR_ARG (exp, 0);
7927 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7928 rtx op0 = expand_normal (arg0);
7929 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7930 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7931 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7932 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7933
0559cc77
DE
7934 if (icode == CODE_FOR_nothing)
7935 /* Builtin not supported on this processor. */
7936 return 0;
7937
20e26713
AH
7938 /* If we got invalid arguments, bail out before generating bad rtl. */
7939 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7940 return const0_rtx;
20e26713 7941
0559cc77
DE
7942 if (icode == CODE_FOR_altivec_vcfux
7943 || icode == CODE_FOR_altivec_vcfsx
7944 || icode == CODE_FOR_altivec_vctsxs
7945 || icode == CODE_FOR_altivec_vctuxs
7946 || icode == CODE_FOR_altivec_vspltb
7947 || icode == CODE_FOR_altivec_vsplth
7948 || icode == CODE_FOR_altivec_vspltw
7949 || icode == CODE_FOR_spe_evaddiw
7950 || icode == CODE_FOR_spe_evldd
7951 || icode == CODE_FOR_spe_evldh
7952 || icode == CODE_FOR_spe_evldw
7953 || icode == CODE_FOR_spe_evlhhesplat
7954 || icode == CODE_FOR_spe_evlhhossplat
7955 || icode == CODE_FOR_spe_evlhhousplat
7956 || icode == CODE_FOR_spe_evlwhe
7957 || icode == CODE_FOR_spe_evlwhos
7958 || icode == CODE_FOR_spe_evlwhou
7959 || icode == CODE_FOR_spe_evlwhsplat
7960 || icode == CODE_FOR_spe_evlwwsplat
7961 || icode == CODE_FOR_spe_evrlwi
7962 || icode == CODE_FOR_spe_evslwi
7963 || icode == CODE_FOR_spe_evsrwis
f5119d10 7964 || icode == CODE_FOR_spe_evsubifw
0559cc77 7965 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7966 {
7967 /* Only allow 5-bit unsigned literals. */
8bb418a3 7968 STRIP_NOPS (arg1);
b44140e7
AH
7969 if (TREE_CODE (arg1) != INTEGER_CST
7970 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7971 {
7972 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7973 return const0_rtx;
b44140e7 7974 }
b44140e7
AH
7975 }
7976
c62f2db5 7977 if (target == 0
0ac081f6
AH
7978 || GET_MODE (target) != tmode
7979 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7980 target = gen_reg_rtx (tmode);
7981
7982 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7983 op0 = copy_to_mode_reg (mode0, op0);
7984 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7985 op1 = copy_to_mode_reg (mode1, op1);
7986
7987 pat = GEN_FCN (icode) (target, op0, op1);
7988 if (! pat)
7989 return 0;
7990 emit_insn (pat);
7991
7992 return target;
7993}
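/* A hedged usage sketch for the 5-bit unsigned literal check above,
   assuming the <altivec.h> vec_ctf intrinsic, which maps onto the
   CODE_FOR_altivec_vcfux / vcfsx icodes listed in the test:

       #include <altivec.h>

       vector float fixed_to_float (vector unsigned int v)
       {
         return vec_ctf (v, 2);
       }

   The second operand is the conversion scale factor and must be a constant
   in 0 .. 31, otherwise the "argument 2 must be a 5-bit unsigned literal"
   error above fires.  */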
6525c0e7 7994
ae4b4a02 7995static rtx
f676971a 7996altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7997 tree exp, rtx target)
ae4b4a02
AH
7998{
7999 rtx pat, scratch;
5039610b
SL
8000 tree cr6_form = CALL_EXPR_ARG (exp, 0);
8001 tree arg0 = CALL_EXPR_ARG (exp, 1);
8002 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8003 rtx op0 = expand_normal (arg0);
8004 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
8005 enum machine_mode tmode = SImode;
8006 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8007 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8008 int cr6_form_int;
8009
8010 if (TREE_CODE (cr6_form) != INTEGER_CST)
8011 {
8012 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 8013 return const0_rtx;
ae4b4a02
AH
8014 }
8015 else
8016 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8017
37409796 8018 gcc_assert (mode0 == mode1);
ae4b4a02
AH
8019
8020 /* If we have invalid arguments, bail out before generating bad rtl. */
8021 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 8022 return const0_rtx;
ae4b4a02
AH
8023
8024 if (target == 0
8025 || GET_MODE (target) != tmode
8026 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8027 target = gen_reg_rtx (tmode);
8028
8029 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8030 op0 = copy_to_mode_reg (mode0, op0);
8031 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8032 op1 = copy_to_mode_reg (mode1, op1);
8033
8034 scratch = gen_reg_rtx (mode0);
8035
8036 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8037 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
8038 if (! pat)
8039 return 0;
8040 emit_insn (pat);
8041
8042 /* The vec_any* and vec_all* predicates use the same opcodes for two
8043 different operations, but the bits in CR6 will be different
8044 depending on what information we want. So we have to play tricks
8045 with CR6 to get the right bits out.
8046
8047 If you think this is disgusting, look at the specs for the
8048 AltiVec predicates. */
8049
c4ad648e
AM
8050 switch (cr6_form_int)
8051 {
8052 case 0:
8053 emit_insn (gen_cr6_test_for_zero (target));
8054 break;
8055 case 1:
8056 emit_insn (gen_cr6_test_for_zero_reverse (target));
8057 break;
8058 case 2:
8059 emit_insn (gen_cr6_test_for_lt (target));
8060 break;
8061 case 3:
8062 emit_insn (gen_cr6_test_for_lt_reverse (target));
8063 break;
8064 default:
8065 error ("argument 1 of __builtin_altivec_predicate is out of range");
8066 break;
8067 }
ae4b4a02
AH
8068
8069 return target;
8070}
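/* Roughly how the CR6 forms above are driven, assuming the conventional
   <altivec.h> encoding of the first argument (0 = EQ, 1 = EQ reverse,
   2 = LT, 3 = LT reverse -- the same order as the switch above):

       vec_all_eq (a, b)   ~   __builtin_altivec_vcmpequw_p (2, a, b)
       vec_any_eq (a, b)   ~   __builtin_altivec_vcmpequw_p (1, a, b)

   The dot-form compare sets the CR6 "LT" bit when every element compares
   true and the CR6 "EQ" bit when no element does, so the "all" predicates
   test LT while the "any" predicates test the inverse of EQ.  */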
8071
96038623
DE
8072static rtx
8073paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8074{
8075 rtx pat, addr;
8076 tree arg0 = CALL_EXPR_ARG (exp, 0);
8077 tree arg1 = CALL_EXPR_ARG (exp, 1);
8078 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8079 enum machine_mode mode0 = Pmode;
8080 enum machine_mode mode1 = Pmode;
8081 rtx op0 = expand_normal (arg0);
8082 rtx op1 = expand_normal (arg1);
8083
8084 if (icode == CODE_FOR_nothing)
8085 /* Builtin not supported on this processor. */
8086 return 0;
8087
8088 /* If we got invalid arguments, bail out before generating bad rtl. */
8089 if (arg0 == error_mark_node || arg1 == error_mark_node)
8090 return const0_rtx;
8091
8092 if (target == 0
8093 || GET_MODE (target) != tmode
8094 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8095 target = gen_reg_rtx (tmode);
8096
8097 op1 = copy_to_mode_reg (mode1, op1);
8098
8099 if (op0 == const0_rtx)
8100 {
8101 addr = gen_rtx_MEM (tmode, op1);
8102 }
8103 else
8104 {
8105 op0 = copy_to_mode_reg (mode0, op0);
8106 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8107 }
8108
8109 pat = GEN_FCN (icode) (target, addr);
8110
8111 if (! pat)
8112 return 0;
8113 emit_insn (pat);
8114
8115 return target;
8116}
8117
b4a62fa0 8118static rtx
0b61703c 8119altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
b4a62fa0
SB
8120{
8121 rtx pat, addr;
5039610b
SL
8122 tree arg0 = CALL_EXPR_ARG (exp, 0);
8123 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
8124 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8125 enum machine_mode mode0 = Pmode;
8126 enum machine_mode mode1 = Pmode;
84217346
MD
8127 rtx op0 = expand_normal (arg0);
8128 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
8129
8130 if (icode == CODE_FOR_nothing)
8131 /* Builtin not supported on this processor. */
8132 return 0;
8133
8134 /* If we got invalid arguments, bail out before generating bad rtl. */
8135 if (arg0 == error_mark_node || arg1 == error_mark_node)
8136 return const0_rtx;
8137
8138 if (target == 0
8139 || GET_MODE (target) != tmode
8140 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8141 target = gen_reg_rtx (tmode);
8142
f676971a 8143 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
8144
8145 if (op0 == const0_rtx)
8146 {
0b61703c 8147 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
b4a62fa0
SB
8148 }
8149 else
8150 {
8151 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8152 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
b4a62fa0
SB
8153 }
8154
8155 pat = GEN_FCN (icode) (target, addr);
8156
8157 if (! pat)
8158 return 0;
8159 emit_insn (pat);
8160
8161 return target;
8162}
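/* A hedged sketch of the addressing convention used by the lv expanders
   above: arg0 is a byte offset and arg1 the base pointer, as in the
   standard <altivec.h> load

       #include <altivec.h>

       vector float load_second (const float *p)
       {
         return vec_ld (16, p);
       }

   A literal zero offset collapses to a plain (mem base); otherwise the
   offset is forced into a register and the address becomes
   (plus offset base).  The blk argument merely switches the MEM to
   BLKmode, presumably so the alias machinery does not assume a full
   naturally-aligned vector access for builtins that touch less than a
   whole vector.  */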
8163
61bea3b0 8164static rtx
5039610b 8165spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8166{
5039610b
SL
8167 tree arg0 = CALL_EXPR_ARG (exp, 0);
8168 tree arg1 = CALL_EXPR_ARG (exp, 1);
8169 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8170 rtx op0 = expand_normal (arg0);
8171 rtx op1 = expand_normal (arg1);
8172 rtx op2 = expand_normal (arg2);
61bea3b0
AH
8173 rtx pat;
8174 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8175 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8176 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8177
8178 /* Invalid arguments. Bail out before generating bad rtl. */
8179 if (arg0 == error_mark_node
8180 || arg1 == error_mark_node
8181 || arg2 == error_mark_node)
8182 return const0_rtx;
8183
8184 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8185 op0 = copy_to_mode_reg (mode2, op0);
8186 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8187 op1 = copy_to_mode_reg (mode0, op1);
8188 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8189 op2 = copy_to_mode_reg (mode1, op2);
8190
8191 pat = GEN_FCN (icode) (op1, op2, op0);
8192 if (pat)
8193 emit_insn (pat);
8194 return NULL_RTX;
8195}
8196
96038623
DE
8197static rtx
8198paired_expand_stv_builtin (enum insn_code icode, tree exp)
8199{
8200 tree arg0 = CALL_EXPR_ARG (exp, 0);
8201 tree arg1 = CALL_EXPR_ARG (exp, 1);
8202 tree arg2 = CALL_EXPR_ARG (exp, 2);
8203 rtx op0 = expand_normal (arg0);
8204 rtx op1 = expand_normal (arg1);
8205 rtx op2 = expand_normal (arg2);
8206 rtx pat, addr;
8207 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8208 enum machine_mode mode1 = Pmode;
8209 enum machine_mode mode2 = Pmode;
8210
8211 /* Invalid arguments. Bail out before generating bad rtl. */
8212 if (arg0 == error_mark_node
8213 || arg1 == error_mark_node
8214 || arg2 == error_mark_node)
8215 return const0_rtx;
8216
8217 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8218 op0 = copy_to_mode_reg (tmode, op0);
8219
8220 op2 = copy_to_mode_reg (mode2, op2);
8221
8222 if (op1 == const0_rtx)
8223 {
8224 addr = gen_rtx_MEM (tmode, op2);
8225 }
8226 else
8227 {
8228 op1 = copy_to_mode_reg (mode1, op1);
8229 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8230 }
8231
8232 pat = GEN_FCN (icode) (addr, op0);
8233 if (pat)
8234 emit_insn (pat);
8235 return NULL_RTX;
8236}
8237
6525c0e7 8238static rtx
5039610b 8239altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8240{
5039610b
SL
8241 tree arg0 = CALL_EXPR_ARG (exp, 0);
8242 tree arg1 = CALL_EXPR_ARG (exp, 1);
8243 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8244 rtx op0 = expand_normal (arg0);
8245 rtx op1 = expand_normal (arg1);
8246 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8247 rtx pat, addr;
8248 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8249 enum machine_mode mode1 = Pmode;
8250 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8251
8252 /* Invalid arguments. Bail out before generating bad rtl. */
8253 if (arg0 == error_mark_node
8254 || arg1 == error_mark_node
8255 || arg2 == error_mark_node)
9a171fcd 8256 return const0_rtx;
6525c0e7 8257
b4a62fa0
SB
8258 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8259 op0 = copy_to_mode_reg (tmode, op0);
8260
f676971a 8261 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8262
8263 if (op1 == const0_rtx)
8264 {
8265 addr = gen_rtx_MEM (tmode, op2);
8266 }
8267 else
8268 {
8269 op1 = copy_to_mode_reg (mode1, op1);
8270 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8271 }
6525c0e7 8272
b4a62fa0 8273 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8274 if (pat)
8275 emit_insn (pat);
8276 return NULL_RTX;
8277}
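/* The store expanders mirror the loads: arg0 is the vector value, arg1 a
   byte offset and arg2 the base pointer.  A hedged user-level sketch,
   assuming <altivec.h>:

       #include <altivec.h>

       void store_at_base (vector float v, float *p)
       {
         vec_st (v, 0, p);
       }

   With a literal zero offset the address is just (mem op2); a non-zero
   offset is forced into a register and the address becomes
   (plus op1 op2).  */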
8278
2212663f 8279static rtx
5039610b 8280rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8281{
8282 rtx pat;
5039610b
SL
8283 tree arg0 = CALL_EXPR_ARG (exp, 0);
8284 tree arg1 = CALL_EXPR_ARG (exp, 1);
8285 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8286 rtx op0 = expand_normal (arg0);
8287 rtx op1 = expand_normal (arg1);
8288 rtx op2 = expand_normal (arg2);
2212663f
DB
8289 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8290 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8291 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8292 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8293
774b5662
DE
8294 if (icode == CODE_FOR_nothing)
8295 /* Builtin not supported on this processor. */
8296 return 0;
8297
20e26713
AH
8298 /* If we got invalid arguments, bail out before generating bad rtl. */
8299 if (arg0 == error_mark_node
8300 || arg1 == error_mark_node
8301 || arg2 == error_mark_node)
9a171fcd 8302 return const0_rtx;
20e26713 8303
aba5fb01
NS
8304 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8305 || icode == CODE_FOR_altivec_vsldoi_v4si
8306 || icode == CODE_FOR_altivec_vsldoi_v8hi
8307 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8308 {
8309 /* Only allow 4-bit unsigned literals. */
8bb418a3 8310 STRIP_NOPS (arg2);
b44140e7
AH
8311 if (TREE_CODE (arg2) != INTEGER_CST
8312 || TREE_INT_CST_LOW (arg2) & ~0xf)
8313 {
8314 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8315 return const0_rtx;
b44140e7 8316 }
b44140e7
AH
8317 }
8318
c62f2db5 8319 if (target == 0
2212663f
DB
8320 || GET_MODE (target) != tmode
8321 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8322 target = gen_reg_rtx (tmode);
8323
8324 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8325 op0 = copy_to_mode_reg (mode0, op0);
8326 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8327 op1 = copy_to_mode_reg (mode1, op1);
8328 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8329 op2 = copy_to_mode_reg (mode2, op2);
8330
49e39588
RE
8331 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8332 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8333 else
8334 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8335 if (! pat)
8336 return 0;
8337 emit_insn (pat);
8338
8339 return target;
8340}
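/* A hedged usage sketch for the 4-bit literal check above, assuming the
   <altivec.h> vec_sld intrinsic, which maps onto the vsldoi icodes named
   in the test:

       #include <altivec.h>

       vector signed int shift_in (vector signed int a, vector signed int b)
       {
         return vec_sld (a, b, 4);
       }

   The third operand is the byte shift count and must be a constant in
   0 .. 15, matching the "argument 3 must be a 4-bit unsigned literal"
   diagnostic above.  */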
92898235 8341
3a9b8c7e 8342/* Expand the lvx builtins. */
0ac081f6 8343static rtx
a2369ed3 8344altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8345{
5039610b 8346 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8347 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8348 tree arg0;
8349 enum machine_mode tmode, mode0;
7c3abc73 8350 rtx pat, op0;
3a9b8c7e 8351 enum insn_code icode;
92898235 8352
0ac081f6
AH
8353 switch (fcode)
8354 {
f18c054f 8355 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8356 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8357 break;
f18c054f 8358 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8359 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8360 break;
8361 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8362 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8363 break;
8364 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8365 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8366 break;
8367 default:
8368 *expandedp = false;
8369 return NULL_RTX;
8370 }
0ac081f6 8371
3a9b8c7e 8372 *expandedp = true;
f18c054f 8373
5039610b 8374 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8375 op0 = expand_normal (arg0);
3a9b8c7e
AH
8376 tmode = insn_data[icode].operand[0].mode;
8377 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8378
3a9b8c7e
AH
8379 if (target == 0
8380 || GET_MODE (target) != tmode
8381 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8382 target = gen_reg_rtx (tmode);
24408032 8383
3a9b8c7e
AH
8384 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8385 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8386
3a9b8c7e
AH
8387 pat = GEN_FCN (icode) (target, op0);
8388 if (! pat)
8389 return 0;
8390 emit_insn (pat);
8391 return target;
8392}
f18c054f 8393
3a9b8c7e
AH
8394/* Expand the stvx builtins. */
8395static rtx
f676971a 8396altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8397 bool *expandedp)
3a9b8c7e 8398{
5039610b 8399 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8400 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8401 tree arg0, arg1;
8402 enum machine_mode mode0, mode1;
7c3abc73 8403 rtx pat, op0, op1;
3a9b8c7e 8404 enum insn_code icode;
f18c054f 8405
3a9b8c7e
AH
8406 switch (fcode)
8407 {
8408 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8409 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8410 break;
8411 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8412 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8413 break;
8414 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8415 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8416 break;
8417 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8418 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8419 break;
8420 default:
8421 *expandedp = false;
8422 return NULL_RTX;
8423 }
24408032 8424
5039610b
SL
8425 arg0 = CALL_EXPR_ARG (exp, 0);
8426 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8427 op0 = expand_normal (arg0);
8428 op1 = expand_normal (arg1);
3a9b8c7e
AH
8429 mode0 = insn_data[icode].operand[0].mode;
8430 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8431
3a9b8c7e
AH
8432 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8433 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8434 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8435 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8436
3a9b8c7e
AH
8437 pat = GEN_FCN (icode) (op0, op1);
8438 if (pat)
8439 emit_insn (pat);
f18c054f 8440
3a9b8c7e
AH
8441 *expandedp = true;
8442 return NULL_RTX;
8443}
f18c054f 8444
3a9b8c7e
AH
8445/* Expand the dst builtins. */
8446static rtx
f676971a 8447altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8448 bool *expandedp)
3a9b8c7e 8449{
5039610b 8450 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8451 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8452 tree arg0, arg1, arg2;
8453 enum machine_mode mode0, mode1, mode2;
7c3abc73 8454 rtx pat, op0, op1, op2;
586de218 8455 const struct builtin_description *d;
a3170dc6 8456 size_t i;
f18c054f 8457
3a9b8c7e 8458 *expandedp = false;
f18c054f 8459
3a9b8c7e 8460 /* Handle DST variants. */
586de218 8461 d = bdesc_dst;
3a9b8c7e
AH
8462 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8463 if (d->code == fcode)
8464 {
5039610b
SL
8465 arg0 = CALL_EXPR_ARG (exp, 0);
8466 arg1 = CALL_EXPR_ARG (exp, 1);
8467 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8468 op0 = expand_normal (arg0);
8469 op1 = expand_normal (arg1);
8470 op2 = expand_normal (arg2);
3a9b8c7e
AH
8471 mode0 = insn_data[d->icode].operand[0].mode;
8472 mode1 = insn_data[d->icode].operand[1].mode;
8473 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8474
3a9b8c7e
AH
8475 /* Invalid arguments, bail out before generating bad rtl. */
8476 if (arg0 == error_mark_node
8477 || arg1 == error_mark_node
8478 || arg2 == error_mark_node)
8479 return const0_rtx;
f18c054f 8480
86e7df90 8481 *expandedp = true;
8bb418a3 8482 STRIP_NOPS (arg2);
3a9b8c7e
AH
8483 if (TREE_CODE (arg2) != INTEGER_CST
8484 || TREE_INT_CST_LOW (arg2) & ~0x3)
8485 {
9e637a26 8486 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8487 return const0_rtx;
8488 }
f18c054f 8489
3a9b8c7e 8490 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8491 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8492 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8493 op1 = copy_to_mode_reg (mode1, op1);
24408032 8494
3a9b8c7e
AH
8495 pat = GEN_FCN (d->icode) (op0, op1, op2);
8496 if (pat != 0)
8497 emit_insn (pat);
f18c054f 8498
3a9b8c7e
AH
8499 return NULL_RTX;
8500 }
f18c054f 8501
3a9b8c7e
AH
8502 return NULL_RTX;
8503}
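/* A hedged usage sketch for the data-stream touch builtins handled above,
   assuming the <altivec.h> vec_dst family:

       #include <altivec.h>

       void start_prefetch (const int *p, int control)
       {
         vec_dst (p, control, 0);
       }

   The last operand selects one of the four data-stream tags and must be a
   constant in 0 .. 3, matching the "2-bit unsigned literal" diagnostic
   above; the control word itself may be a run-time value.  */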
24408032 8504
7a4eca66
DE
8505/* Expand vec_init builtin. */
8506static rtx
5039610b 8507altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8508{
8509 enum machine_mode tmode = TYPE_MODE (type);
8510 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8511 int i, n_elt = GET_MODE_NUNITS (tmode);
8512 rtvec v = rtvec_alloc (n_elt);
8513
8514 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8515 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8516
5039610b 8517 for (i = 0; i < n_elt; ++i)
7a4eca66 8518 {
5039610b 8519 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8520 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8521 }
8522
7a4eca66
DE
8523 if (!target || !register_operand (target, tmode))
8524 target = gen_reg_rtx (tmode);
8525
8526 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8527 return target;
8528}
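/* Hedged note on the vec_init expansion above: the builtin carries one
   call argument per vector element (see the gcc_assert), each argument is
   lowered to the vector's inner mode, and the whole set is handed to
   rs6000_expand_vector_init as a single PARALLEL.  For a V4SI result that
   is, schematically,

       (parallel [x0 x1 x2 x3])

   with four SImode operands; how that PARALLEL is actually materialised
   is decided inside rs6000_expand_vector_init, not here.  */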
8529
8530/* Return the integer constant in ARG. Constrain it to be in the range
8531 of the subparts of VEC_TYPE; issue an error if not. */
8532
8533static int
8534get_element_number (tree vec_type, tree arg)
8535{
8536 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8537
8538 if (!host_integerp (arg, 1)
8539 || (elt = tree_low_cst (arg, 1), elt > max))
8540 {
8541 error ("selector must be an integer constant in the range 0..%wi", max);
8542 return 0;
8543 }
8544
8545 return elt;
8546}
8547
8548/* Expand vec_set builtin. */
8549static rtx
5039610b 8550altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8551{
8552 enum machine_mode tmode, mode1;
8553 tree arg0, arg1, arg2;
8554 int elt;
8555 rtx op0, op1;
8556
5039610b
SL
8557 arg0 = CALL_EXPR_ARG (exp, 0);
8558 arg1 = CALL_EXPR_ARG (exp, 1);
8559 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8560
8561 tmode = TYPE_MODE (TREE_TYPE (arg0));
8562 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8563 gcc_assert (VECTOR_MODE_P (tmode));
8564
bbbbb16a
ILT
8565 op0 = expand_expr (arg0, NULL_RTX, tmode, EXPAND_NORMAL);
8566 op1 = expand_expr (arg1, NULL_RTX, mode1, EXPAND_NORMAL);
7a4eca66
DE
8567 elt = get_element_number (TREE_TYPE (arg0), arg2);
8568
8569 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8570 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8571
8572 op0 = force_reg (tmode, op0);
8573 op1 = force_reg (mode1, op1);
8574
8575 rs6000_expand_vector_set (op0, op1, elt);
8576
8577 return op0;
8578}
8579
8580/* Expand vec_ext builtin. */
8581static rtx
5039610b 8582altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8583{
8584 enum machine_mode tmode, mode0;
8585 tree arg0, arg1;
8586 int elt;
8587 rtx op0;
8588
5039610b
SL
8589 arg0 = CALL_EXPR_ARG (exp, 0);
8590 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8591
84217346 8592 op0 = expand_normal (arg0);
7a4eca66
DE
8593 elt = get_element_number (TREE_TYPE (arg0), arg1);
8594
8595 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8596 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8597 gcc_assert (VECTOR_MODE_P (mode0));
8598
8599 op0 = force_reg (mode0, op0);
8600
8601 if (optimize || !target || !register_operand (target, tmode))
8602 target = gen_reg_rtx (tmode);
8603
8604 rs6000_expand_vector_extract (target, op0, elt);
8605
8606 return target;
8607}
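/* A minimal usage sketch of the two expanders above; the user-level
   spellings __builtin_vec_set_v4si and __builtin_vec_ext_v4si used here
   are assumptions (the actual declarations live elsewhere in this file):

       v = __builtin_vec_set_v4si (v, x, 2);    write element 2
       x = __builtin_vec_ext_v4si (v, 2);       read element 2

   In both cases the element number goes through get_element_number, so it
   must be a literal in the range 0..3 for a V4SI operand.  */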
8608
3a9b8c7e
AH
8609/* Expand the builtin in EXP and store the result in TARGET. Store
8610 true in *EXPANDEDP if we found a builtin to expand. */
8611static rtx
a2369ed3 8612altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8613{
586de218
KG
8614 const struct builtin_description *d;
8615 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8616 size_t i;
8617 enum insn_code icode;
5039610b 8618 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8619 tree arg0;
8620 rtx op0, pat;
8621 enum machine_mode tmode, mode0;
3a9b8c7e 8622 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8623
58646b77
PB
8624 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8625 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8626 {
8627 *expandedp = true;
ea40ba9c 8628 error ("unresolved overload for AltiVec builtin %qF", fndecl);
58646b77
PB
8629 return const0_rtx;
8630 }
8631
3a9b8c7e
AH
8632 target = altivec_expand_ld_builtin (exp, target, expandedp);
8633 if (*expandedp)
8634 return target;
0ac081f6 8635
3a9b8c7e
AH
8636 target = altivec_expand_st_builtin (exp, target, expandedp);
8637 if (*expandedp)
8638 return target;
8639
8640 target = altivec_expand_dst_builtin (exp, target, expandedp);
8641 if (*expandedp)
8642 return target;
8643
8644 *expandedp = true;
95385cbb 8645
3a9b8c7e
AH
8646 switch (fcode)
8647 {
6525c0e7 8648 case ALTIVEC_BUILTIN_STVX:
5039610b 8649 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8650 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8651 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8652 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8653 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8654 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8655 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8656 case ALTIVEC_BUILTIN_STVXL:
5039610b 8657 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8658
0b61703c
AP
8659 case ALTIVEC_BUILTIN_STVLX:
8660 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8661 case ALTIVEC_BUILTIN_STVLXL:
8662 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8663 case ALTIVEC_BUILTIN_STVRX:
8664 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8665 case ALTIVEC_BUILTIN_STVRXL:
8666 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8667
95385cbb
AH
8668 case ALTIVEC_BUILTIN_MFVSCR:
8669 icode = CODE_FOR_altivec_mfvscr;
8670 tmode = insn_data[icode].operand[0].mode;
8671
8672 if (target == 0
8673 || GET_MODE (target) != tmode
8674 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8675 target = gen_reg_rtx (tmode);
f676971a 8676
95385cbb 8677 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8678 if (! pat)
8679 return 0;
8680 emit_insn (pat);
95385cbb
AH
8681 return target;
8682
8683 case ALTIVEC_BUILTIN_MTVSCR:
8684 icode = CODE_FOR_altivec_mtvscr;
5039610b 8685 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8686 op0 = expand_normal (arg0);
95385cbb
AH
8687 mode0 = insn_data[icode].operand[0].mode;
8688
 8689 /* If we got invalid arguments, bail out before generating bad rtl. */
8690 if (arg0 == error_mark_node)
9a171fcd 8691 return const0_rtx;
95385cbb
AH
8692
8693 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8694 op0 = copy_to_mode_reg (mode0, op0);
8695
8696 pat = GEN_FCN (icode) (op0);
8697 if (pat)
8698 emit_insn (pat);
8699 return NULL_RTX;
3a9b8c7e 8700
95385cbb
AH
8701 case ALTIVEC_BUILTIN_DSSALL:
8702 emit_insn (gen_altivec_dssall ());
8703 return NULL_RTX;
8704
8705 case ALTIVEC_BUILTIN_DSS:
8706 icode = CODE_FOR_altivec_dss;
5039610b 8707 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8708 STRIP_NOPS (arg0);
84217346 8709 op0 = expand_normal (arg0);
95385cbb
AH
8710 mode0 = insn_data[icode].operand[0].mode;
8711
 8712 /* If we got invalid arguments, bail out before generating bad rtl. */
8713 if (arg0 == error_mark_node)
9a171fcd 8714 return const0_rtx;
95385cbb 8715
b44140e7
AH
8716 if (TREE_CODE (arg0) != INTEGER_CST
8717 || TREE_INT_CST_LOW (arg0) & ~0x3)
8718 {
8719 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8720 return const0_rtx;
b44140e7
AH
8721 }
8722
95385cbb
AH
8723 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8724 op0 = copy_to_mode_reg (mode0, op0);
8725
8726 emit_insn (gen_altivec_dss (op0));
0ac081f6 8727 return NULL_RTX;
7a4eca66
DE
8728
8729 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8730 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8731 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8732 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8733 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8734
8735 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8736 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8737 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8738 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8739 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8740
8741 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8742 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8743 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8744 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8745 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8746
8747 default:
 8748 /* Fall through to the table-driven expanders below. */
 8749 break;
0ac081f6 8750 }
24408032 8751
100c4561 8752 /* Expand abs* operations. */
586de218 8753 d = bdesc_abs;
ca7558fc 8754 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8755 if (d->code == fcode)
5039610b 8756 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8757
ae4b4a02 8758 /* Expand the AltiVec predicates. */
586de218 8759 dp = bdesc_altivec_preds;
ca7558fc 8760 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8761 if (dp->code == fcode)
c4ad648e 8762 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8763 exp, target);
ae4b4a02 8764
6525c0e7
AH
 8765 /* LV* are funky; they are initialized differently (see altivec_init_builtins), so handle them explicitly here. */
8766 switch (fcode)
8767 {
8768 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8769 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 8770 exp, target, false);
6525c0e7 8771 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8772 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 8773 exp, target, false);
6525c0e7 8774 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8775 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 8776 exp, target, false);
6525c0e7 8777 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8778 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 8779 exp, target, false);
6525c0e7 8780 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8781 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 8782 exp, target, false);
6525c0e7 8783 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8784 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 8785 exp, target, false);
6525c0e7 8786 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8787 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
8788 exp, target, false);
8789 case ALTIVEC_BUILTIN_LVLX:
8790 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8791 exp, target, true);
8792 case ALTIVEC_BUILTIN_LVLXL:
8793 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8794 exp, target, true);
8795 case ALTIVEC_BUILTIN_LVRX:
8796 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8797 exp, target, true);
8798 case ALTIVEC_BUILTIN_LVRXL:
8799 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8800 exp, target, true);
6525c0e7
AH
8801 default:
 8802 /* Fall through and let the caller know we did not expand this builtin. */
 8803 break;
8804 }
95385cbb 8805
92898235 8806 *expandedp = false;
0ac081f6
AH
8807 return NULL_RTX;
8808}
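/* A usage sketch for the data-stream cases handled above, using the names
   registered later in altivec_init_builtins:

       __builtin_altivec_dssall ();   stop all data streams
       __builtin_altivec_dss (2);     stop stream 2; the argument must be a
                                      2-bit unsigned literal, otherwise the
                                      expander reports "argument to dss must
                                      be a 2-bit unsigned literal".  */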
8809
96038623
DE
8810/* Expand the builtin in EXP and store the result in TARGET. Store
8811 true in *EXPANDEDP if we found a builtin to expand. */
8812static rtx
8813paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8814{
8815 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8816 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8817 const struct builtin_description *d;
96038623
DE
8818 size_t i;
8819
8820 *expandedp = true;
8821
8822 switch (fcode)
8823 {
8824 case PAIRED_BUILTIN_STX:
8825 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8826 case PAIRED_BUILTIN_LX:
8827 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8828 default:
 8829 /* Fall through to the predicate table below. */
 8830 break;
8831 }
8832
8833 /* Expand the paired predicates. */
23a651fc 8834 d = bdesc_paired_preds;
96038623
DE
8835 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8836 if (d->code == fcode)
8837 return paired_expand_predicate_builtin (d->icode, exp, target);
8838
8839 *expandedp = false;
8840 return NULL_RTX;
8841}
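/* A usage sketch for the two special cases above, following the signatures
   given to def_builtin in paired_init_builtins (__builtin_paired_lx takes
   a long offset and a const float pointer; __builtin_paired_stx also takes
   the two-float vector to store):

       v = __builtin_paired_lx (0, p);      load a pair of floats
       __builtin_paired_stx (v, 0, q);      store the pair back

   where `v' has the paired V2SF type and `p'/`q' point at float data.  */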
8842
a3170dc6
AH
8843/* Binops that need to be initialized manually, but can be expanded
8844 automagically by rs6000_expand_binop_builtin. */
8845static struct builtin_description bdesc_2arg_spe[] =
8846{
8847 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8848 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8849 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8850 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8851 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8852 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8853 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8854 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8855 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8856 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8857 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8858 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8859 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8860 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8861 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8862 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8863 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8864 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8865 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8866 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8867 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8868 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8869};
8870
8871/* Expand the builtin in EXP and store the result in TARGET. Store
8872 true in *EXPANDEDP if we found a builtin to expand.
8873
8874 This expands the SPE builtins that are not simple unary and binary
8875 operations. */
8876static rtx
a2369ed3 8877spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8878{
5039610b 8879 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8880 tree arg1, arg0;
8881 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8882 enum insn_code icode;
8883 enum machine_mode tmode, mode0;
8884 rtx pat, op0;
8885 struct builtin_description *d;
8886 size_t i;
8887
8888 *expandedp = true;
8889
8890 /* Syntax check for a 5-bit unsigned immediate. */
8891 switch (fcode)
8892 {
8893 case SPE_BUILTIN_EVSTDD:
8894 case SPE_BUILTIN_EVSTDH:
8895 case SPE_BUILTIN_EVSTDW:
8896 case SPE_BUILTIN_EVSTWHE:
8897 case SPE_BUILTIN_EVSTWHO:
8898 case SPE_BUILTIN_EVSTWWE:
8899 case SPE_BUILTIN_EVSTWWO:
5039610b 8900 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8901 if (TREE_CODE (arg1) != INTEGER_CST
8902 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8903 {
8904 error ("argument 2 must be a 5-bit unsigned literal");
8905 return const0_rtx;
8906 }
8907 break;
8908 default:
8909 break;
8910 }
8911
00332c9f
AH
8912 /* The evsplat*i instructions are not quite generic. */
8913 switch (fcode)
8914 {
8915 case SPE_BUILTIN_EVSPLATFI:
8916 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8917 exp, target);
00332c9f
AH
8918 case SPE_BUILTIN_EVSPLATI:
8919 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8920 exp, target);
00332c9f
AH
8921 default:
8922 break;
8923 }
8924
a3170dc6
AH
8925 d = (struct builtin_description *) bdesc_2arg_spe;
8926 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8927 if (d->code == fcode)
5039610b 8928 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8929
8930 d = (struct builtin_description *) bdesc_spe_predicates;
8931 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8932 if (d->code == fcode)
5039610b 8933 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8934
8935 d = (struct builtin_description *) bdesc_spe_evsel;
8936 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8937 if (d->code == fcode)
5039610b 8938 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8939
8940 switch (fcode)
8941 {
8942 case SPE_BUILTIN_EVSTDDX:
5039610b 8943 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8944 case SPE_BUILTIN_EVSTDHX:
5039610b 8945 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8946 case SPE_BUILTIN_EVSTDWX:
5039610b 8947 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8948 case SPE_BUILTIN_EVSTWHEX:
5039610b 8949 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8950 case SPE_BUILTIN_EVSTWHOX:
5039610b 8951 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8952 case SPE_BUILTIN_EVSTWWEX:
5039610b 8953 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8954 case SPE_BUILTIN_EVSTWWOX:
5039610b 8955 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8956 case SPE_BUILTIN_EVSTDD:
5039610b 8957 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8958 case SPE_BUILTIN_EVSTDH:
5039610b 8959 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8960 case SPE_BUILTIN_EVSTDW:
5039610b 8961 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8962 case SPE_BUILTIN_EVSTWHE:
5039610b 8963 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8964 case SPE_BUILTIN_EVSTWHO:
5039610b 8965 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8966 case SPE_BUILTIN_EVSTWWE:
5039610b 8967 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8968 case SPE_BUILTIN_EVSTWWO:
5039610b 8969 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8970 case SPE_BUILTIN_MFSPEFSCR:
8971 icode = CODE_FOR_spe_mfspefscr;
8972 tmode = insn_data[icode].operand[0].mode;
8973
8974 if (target == 0
8975 || GET_MODE (target) != tmode
8976 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8977 target = gen_reg_rtx (tmode);
f676971a 8978
a3170dc6
AH
8979 pat = GEN_FCN (icode) (target);
8980 if (! pat)
8981 return 0;
8982 emit_insn (pat);
8983 return target;
8984 case SPE_BUILTIN_MTSPEFSCR:
8985 icode = CODE_FOR_spe_mtspefscr;
5039610b 8986 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8987 op0 = expand_normal (arg0);
a3170dc6
AH
8988 mode0 = insn_data[icode].operand[0].mode;
8989
8990 if (arg0 == error_mark_node)
8991 return const0_rtx;
8992
8993 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8994 op0 = copy_to_mode_reg (mode0, op0);
8995
8996 pat = GEN_FCN (icode) (op0);
8997 if (pat)
8998 emit_insn (pat);
8999 return NULL_RTX;
9000 default:
9001 break;
9002 }
9003
9004 *expandedp = false;
9005 return NULL_RTX;
9006}
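/* A usage sketch for the 5-bit-immediate check at the top of this
   function, following the signature given to __builtin_spe_evstdd in
   spe_init_builtins (vector, pointer, constant offset):

       __ev64_opaque__ v;
       __builtin_spe_evstdd (v, p, 8);     accepted: 8 fits in 5 bits
       __builtin_spe_evstdd (v, p, 64);    rejected: "argument 2 must be a
                                           5-bit unsigned literal"

   where `p' points at an __ev64_opaque__ object.  */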
9007
96038623
DE
9008static rtx
9009paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9010{
9011 rtx pat, scratch, tmp;
9012 tree form = CALL_EXPR_ARG (exp, 0);
9013 tree arg0 = CALL_EXPR_ARG (exp, 1);
9014 tree arg1 = CALL_EXPR_ARG (exp, 2);
9015 rtx op0 = expand_normal (arg0);
9016 rtx op1 = expand_normal (arg1);
9017 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9018 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9019 int form_int;
9020 enum rtx_code code;
9021
9022 if (TREE_CODE (form) != INTEGER_CST)
9023 {
9024 error ("argument 1 of __builtin_paired_predicate must be a constant");
9025 return const0_rtx;
9026 }
9027 else
9028 form_int = TREE_INT_CST_LOW (form);
9029
9030 gcc_assert (mode0 == mode1);
9031
9032 if (arg0 == error_mark_node || arg1 == error_mark_node)
9033 return const0_rtx;
9034
9035 if (target == 0
9036 || GET_MODE (target) != SImode
9037 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9038 target = gen_reg_rtx (SImode);
9039 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9040 op0 = copy_to_mode_reg (mode0, op0);
9041 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9042 op1 = copy_to_mode_reg (mode1, op1);
9043
9044 scratch = gen_reg_rtx (CCFPmode);
9045
9046 pat = GEN_FCN (icode) (scratch, op0, op1);
9047 if (!pat)
9048 return const0_rtx;
9049
9050 emit_insn (pat);
9051
9052 switch (form_int)
9053 {
9054 /* LT bit. */
9055 case 0:
9056 code = LT;
9057 break;
9058 /* GT bit. */
9059 case 1:
9060 code = GT;
9061 break;
9062 /* EQ bit. */
9063 case 2:
9064 code = EQ;
9065 break;
9066 /* UN bit. */
9067 case 3:
9068 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9069 return target;
9070 default:
9071 error ("argument 1 of __builtin_paired_predicate is out of range");
9072 return const0_rtx;
9073 }
9074
9075 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9076 emit_move_insn (target, tmp);
9077 return target;
9078}
9079
a3170dc6 9080static rtx
5039610b 9081spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9082{
9083 rtx pat, scratch, tmp;
5039610b
SL
9084 tree form = CALL_EXPR_ARG (exp, 0);
9085 tree arg0 = CALL_EXPR_ARG (exp, 1);
9086 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9087 rtx op0 = expand_normal (arg0);
9088 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9089 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9090 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9091 int form_int;
9092 enum rtx_code code;
9093
9094 if (TREE_CODE (form) != INTEGER_CST)
9095 {
9096 error ("argument 1 of __builtin_spe_predicate must be a constant");
9097 return const0_rtx;
9098 }
9099 else
9100 form_int = TREE_INT_CST_LOW (form);
9101
37409796 9102 gcc_assert (mode0 == mode1);
a3170dc6
AH
9103
9104 if (arg0 == error_mark_node || arg1 == error_mark_node)
9105 return const0_rtx;
9106
9107 if (target == 0
9108 || GET_MODE (target) != SImode
9109 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9110 target = gen_reg_rtx (SImode);
9111
9112 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9113 op0 = copy_to_mode_reg (mode0, op0);
9114 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9115 op1 = copy_to_mode_reg (mode1, op1);
9116
9117 scratch = gen_reg_rtx (CCmode);
9118
9119 pat = GEN_FCN (icode) (scratch, op0, op1);
9120 if (! pat)
9121 return const0_rtx;
9122 emit_insn (pat);
9123
9124 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9125 _lower_. We use one compare, but look in different bits of the
9126 CR for each variant.
9127
9128 There are 2 elements in each SPE simd type (upper/lower). The CR
9129 bits are set as follows:
9130
 9131 BIT 0 | BIT 1 | BIT 2 | BIT 3
9132 U | L | (U | L) | (U & L)
9133
9134 So, for an "all" relationship, BIT 3 would be set.
9135 For an "any" relationship, BIT 2 would be set. Etc.
9136
9137 Following traditional nomenclature, these bits map to:
9138
 9139 BIT 0 | BIT 1 | BIT 2 | BIT 3
9140 LT | GT | EQ | OV
9141
 9142 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
9143 */
9144
9145 switch (form_int)
9146 {
9147 /* All variant. OV bit. */
9148 case 0:
9149 /* We need to get to the OV bit, which is the ORDERED bit. We
9150 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9151 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9152 So let's just use another pattern. */
9153 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9154 return target;
9155 /* Any variant. EQ bit. */
9156 case 1:
9157 code = EQ;
9158 break;
9159 /* Upper variant. LT bit. */
9160 case 2:
9161 code = LT;
9162 break;
9163 /* Lower variant. GT bit. */
9164 case 3:
9165 code = GT;
9166 break;
9167 default:
9168 error ("argument 1 of __builtin_spe_predicate is out of range");
9169 return const0_rtx;
9170 }
9171
9172 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9173 emit_move_insn (target, tmp);
9174
9175 return target;
9176}
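/* Putting the CR-bit comment together with the switch above: argument 1 of
   an SPE predicate builtin selects which bit of the single compare result
   is tested.

       form 0   "all"     OV bit   (U & L)
       form 1   "any"     EQ bit   (U | L)
       form 2   "upper"   LT bit   (U)
       form 3   "lower"   GT bit   (L)                                    */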
9177
9178/* The evsel builtins look like this:
9179
9180 e = __builtin_spe_evsel_OP (a, b, c, d);
9181
9182 and work like this:
9183
9184 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9185 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9186*/
9187
9188static rtx
5039610b 9189spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9190{
9191 rtx pat, scratch;
5039610b
SL
9192 tree arg0 = CALL_EXPR_ARG (exp, 0);
9193 tree arg1 = CALL_EXPR_ARG (exp, 1);
9194 tree arg2 = CALL_EXPR_ARG (exp, 2);
9195 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9196 rtx op0 = expand_normal (arg0);
9197 rtx op1 = expand_normal (arg1);
9198 rtx op2 = expand_normal (arg2);
9199 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9200 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9201 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9202
37409796 9203 gcc_assert (mode0 == mode1);
a3170dc6
AH
9204
9205 if (arg0 == error_mark_node || arg1 == error_mark_node
9206 || arg2 == error_mark_node || arg3 == error_mark_node)
9207 return const0_rtx;
9208
9209 if (target == 0
9210 || GET_MODE (target) != mode0
9211 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9212 target = gen_reg_rtx (mode0);
9213
9214 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9215 op0 = copy_to_mode_reg (mode0, op0);
9216 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9217 op1 = copy_to_mode_reg (mode0, op1);
9218 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9219 op2 = copy_to_mode_reg (mode0, op2);
9220 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9221 op3 = copy_to_mode_reg (mode0, op3);
9222
9223 /* Generate the compare. */
9224 scratch = gen_reg_rtx (CCmode);
9225 pat = GEN_FCN (icode) (scratch, op0, op1);
9226 if (! pat)
9227 return const0_rtx;
9228 emit_insn (pat);
9229
9230 if (mode0 == V2SImode)
9231 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9232 else
9233 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9234
9235 return target;
9236}
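/* A worked example of the evsel semantics described above, assuming a
   signed greater-than compare on V2SI operands:

       a = {  5, -1 }   b = { 3, 0 }   c = { 10, 20 }   d = { 30, 40 }

       upper:  5 > 3  is true   ->  e[upper] = c[upper] = 10
       lower: -1 > 0  is false  ->  e[lower] = d[lower] = 40

   so e == { 10, 40 }: the compare writes the CR field once and
   gen_spe_evsel then picks each lane from either op2 or op3.  */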
9237
0ac081f6
AH
9238/* Expand an expression EXP that calls a built-in function,
9239 with result going to TARGET if that's convenient
9240 (and in mode MODE if that's convenient).
9241 SUBTARGET may be used as the target for computing one of EXP's operands.
9242 IGNORE is nonzero if the value is to be ignored. */
9243
9244static rtx
a2369ed3 9245rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9246 enum machine_mode mode ATTRIBUTE_UNUSED,
9247 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9248{
5039610b 9249 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9250 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9251 const struct builtin_description *d;
92898235
AH
9252 size_t i;
9253 rtx ret;
9254 bool success;
f676971a 9255
9c78b944
DE
9256 if (fcode == RS6000_BUILTIN_RECIP)
9257 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9258
9259 if (fcode == RS6000_BUILTIN_RECIPF)
9260 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9261
9262 if (fcode == RS6000_BUILTIN_RSQRTF)
9263 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9264
7ccf35ed
DN
9265 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9266 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9267 {
9268 int icode = (int) CODE_FOR_altivec_lvsr;
9269 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9270 enum machine_mode mode = insn_data[icode].operand[1].mode;
9271 tree arg;
9272 rtx op, addr, pat;
9273
37409796 9274 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9275
5039610b 9276 arg = CALL_EXPR_ARG (exp, 0);
37409796 9277 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9278 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9279 addr = memory_address (mode, op);
9280 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9281 op = addr;
9282 else
9283 {
 9284 /* For the load case we need to negate the address. */
9285 op = gen_reg_rtx (GET_MODE (addr));
9286 emit_insn (gen_rtx_SET (VOIDmode, op,
9287 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9288 }
7ccf35ed
DN
9289 op = gen_rtx_MEM (mode, op);
9290
9291 if (target == 0
9292 || GET_MODE (target) != tmode
9293 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9294 target = gen_reg_rtx (tmode);
9295
9296 /*pat = gen_altivec_lvsr (target, op);*/
9297 pat = GEN_FCN (icode) (target, op);
9298 if (!pat)
9299 return 0;
9300 emit_insn (pat);
9301
9302 return target;
9303 }
5039610b
SL
9304
9305 /* FIXME: There's got to be a nicer way to handle this case than
9306 constructing a new CALL_EXPR. */
f57d17f1 9307 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9308 || fcode == ALTIVEC_BUILTIN_VCFSX
9309 || fcode == ALTIVEC_BUILTIN_VCTUXS
9310 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9311 {
5039610b
SL
9312 if (call_expr_nargs (exp) == 1)
9313 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9314 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9315 }
7ccf35ed 9316
0ac081f6 9317 if (TARGET_ALTIVEC)
92898235
AH
9318 {
9319 ret = altivec_expand_builtin (exp, target, &success);
9320
a3170dc6
AH
9321 if (success)
9322 return ret;
9323 }
9324 if (TARGET_SPE)
9325 {
9326 ret = spe_expand_builtin (exp, target, &success);
9327
92898235
AH
9328 if (success)
9329 return ret;
9330 }
96038623
DE
9331 if (TARGET_PAIRED_FLOAT)
9332 {
9333 ret = paired_expand_builtin (exp, target, &success);
9334
9335 if (success)
9336 return ret;
9337 }
92898235 9338
96038623 9339 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9340
37409796
NS
9341 /* Handle simple unary operations. */
9342 d = (struct builtin_description *) bdesc_1arg;
9343 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9344 if (d->code == fcode)
5039610b 9345 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9346
37409796
NS
9347 /* Handle simple binary operations. */
9348 d = (struct builtin_description *) bdesc_2arg;
9349 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9350 if (d->code == fcode)
5039610b 9351 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9352
37409796 9353 /* Handle simple ternary operations. */
586de218 9354 d = bdesc_3arg;
37409796
NS
9355 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9356 if (d->code == fcode)
5039610b 9357 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9358
37409796 9359 gcc_unreachable ();
0ac081f6
AH
9360}
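/* A usage sketch for the three special cases at the top of this function,
   using the names and types registered in rs6000_init_builtins:

       double q  = __builtin_recipdiv (x, y);      reciprocal divide, double
       float  qf = __builtin_recipdivf (xf, yf);   reciprocal divide, float
       float  r  = __builtin_rsqrtf (xf);          reciprocal square root

   These map directly onto the recipdf3, recipsf3 and rsqrtsf2 patterns via
   rs6000_expand_binop_builtin and rs6000_expand_unop_builtin.  */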
9361
9362static void
863d938c 9363rs6000_init_builtins (void)
0ac081f6 9364{
5afaa917
NS
9365 tree tdecl;
9366
4a5eab38
PB
9367 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9368 V2SF_type_node = build_vector_type (float_type_node, 2);
9369 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9370 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9371 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9372 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9373 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9374
9375 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9376 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9377 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9378
7c62e993
PB
9379 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9380 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9381 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
b6fc2cdb 9382 opaque_V4SI_type_node = build_opaque_vector_type (intSI_type_node, 4);
3fdaa45a 9383
8bb418a3
ZL
9384 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9385 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9386 'vector unsigned short'. */
9387
8dd16ecc
NS
9388 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9389 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9390 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9391 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9392
58646b77
PB
9393 long_integer_type_internal_node = long_integer_type_node;
9394 long_unsigned_type_internal_node = long_unsigned_type_node;
9395 intQI_type_internal_node = intQI_type_node;
9396 uintQI_type_internal_node = unsigned_intQI_type_node;
9397 intHI_type_internal_node = intHI_type_node;
9398 uintHI_type_internal_node = unsigned_intHI_type_node;
9399 intSI_type_internal_node = intSI_type_node;
9400 uintSI_type_internal_node = unsigned_intSI_type_node;
9401 float_type_internal_node = float_type_node;
9402 void_type_internal_node = void_type_node;
9403
5afaa917
NS
9404 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool char"),
9405 bool_char_type_node);
9406 TYPE_NAME (bool_char_type_node) = tdecl;
9407 (*lang_hooks.decls.pushdecl) (tdecl);
9408 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool short"),
9409 bool_short_type_node);
9410 TYPE_NAME (bool_short_type_node) = tdecl;
9411 (*lang_hooks.decls.pushdecl) (tdecl);
9412 tdecl = build_decl (TYPE_DECL, get_identifier ("__bool int"),
9413 bool_int_type_node);
9414 TYPE_NAME (bool_int_type_node) = tdecl;
9415 (*lang_hooks.decls.pushdecl) (tdecl);
9416 tdecl = build_decl (TYPE_DECL, get_identifier ("__pixel"),
9417 pixel_type_node);
9418 TYPE_NAME (pixel_type_node) = tdecl;
9419 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9420
4a5eab38
PB
9421 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9422 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9423 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9424 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3 9425
5afaa917
NS
9426 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned char"),
9427 unsigned_V16QI_type_node);
9428 TYPE_NAME (unsigned_V16QI_type_node) = tdecl;
9429 (*lang_hooks.decls.pushdecl) (tdecl);
9430 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed char"),
9431 V16QI_type_node);
9432 TYPE_NAME (V16QI_type_node) = tdecl;
9433 (*lang_hooks.decls.pushdecl) (tdecl);
9434 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool char"),
9435 bool_V16QI_type_node);
9436 TYPE_NAME ( bool_V16QI_type_node) = tdecl;
9437 (*lang_hooks.decls.pushdecl) (tdecl);
9438
9439 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned short"),
9440 unsigned_V8HI_type_node);
9441 TYPE_NAME (unsigned_V8HI_type_node) = tdecl;
9442 (*lang_hooks.decls.pushdecl) (tdecl);
9443 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed short"),
9444 V8HI_type_node);
9445 TYPE_NAME (V8HI_type_node) = tdecl;
9446 (*lang_hooks.decls.pushdecl) (tdecl);
9447 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool short"),
9448 bool_V8HI_type_node);
9449 TYPE_NAME (bool_V8HI_type_node) = tdecl;
9450 (*lang_hooks.decls.pushdecl) (tdecl);
9451
9452 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector unsigned int"),
9453 unsigned_V4SI_type_node);
9454 TYPE_NAME (unsigned_V4SI_type_node) = tdecl;
9455 (*lang_hooks.decls.pushdecl) (tdecl);
9456 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector signed int"),
9457 V4SI_type_node);
9458 TYPE_NAME (V4SI_type_node) = tdecl;
9459 (*lang_hooks.decls.pushdecl) (tdecl);
9460 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __bool int"),
9461 bool_V4SI_type_node);
9462 TYPE_NAME (bool_V4SI_type_node) = tdecl;
9463 (*lang_hooks.decls.pushdecl) (tdecl);
9464
9465 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector float"),
9466 V4SF_type_node);
9467 TYPE_NAME (V4SF_type_node) = tdecl;
9468 (*lang_hooks.decls.pushdecl) (tdecl);
9469 tdecl = build_decl (TYPE_DECL, get_identifier ("__vector __pixel"),
9470 pixel_V8HI_type_node);
9471 TYPE_NAME (pixel_V8HI_type_node) = tdecl;
9472 (*lang_hooks.decls.pushdecl) (tdecl);
8bb418a3 9473
96038623
DE
9474 if (TARGET_PAIRED_FLOAT)
9475 paired_init_builtins ();
a3170dc6 9476 if (TARGET_SPE)
3fdaa45a 9477 spe_init_builtins ();
0ac081f6
AH
9478 if (TARGET_ALTIVEC)
9479 altivec_init_builtins ();
96038623 9480 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9481 rs6000_common_init_builtins ();
9c78b944
DE
9482 if (TARGET_PPC_GFXOPT)
9483 {
9484 tree ftype = build_function_type_list (float_type_node,
9485 float_type_node,
9486 float_type_node,
9487 NULL_TREE);
9488 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9489 RS6000_BUILTIN_RECIPF);
9490
9491 ftype = build_function_type_list (float_type_node,
9492 float_type_node,
9493 NULL_TREE);
9494 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9495 RS6000_BUILTIN_RSQRTF);
9496 }
9497 if (TARGET_POPCNTB)
9498 {
9499 tree ftype = build_function_type_list (double_type_node,
9500 double_type_node,
9501 double_type_node,
9502 NULL_TREE);
9503 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9504 RS6000_BUILTIN_RECIP);
9505
9506 }
69ca3549
DE
9507
9508#if TARGET_XCOFF
9509 /* AIX libm provides clog as __clog. */
9510 if (built_in_decls [BUILT_IN_CLOG])
9511 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9512#endif
fb220235
FXC
9513
9514#ifdef SUBTARGET_INIT_BUILTINS
9515 SUBTARGET_INIT_BUILTINS;
9516#endif
0ac081f6
AH
9517}
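/* The pushdecl calls above are what make the AltiVec type keywords visible
   to user code; a minimal sketch (assuming -maltivec) of declarations that
   resolve to the nodes built here:

       __vector unsigned char perm;    unsigned_V16QI_type_node
       __vector __bool int    mask;    bool_V4SI_type_node
       __vector __pixel       px;      pixel_V8HI_type_node              */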
9518
a3170dc6
AH
9519/* Search through a set of builtins and enable the mask bits.
9520 DESC is an array of builtins.
b6d08ca1 9521 SIZE is the total number of builtins.
a3170dc6
AH
9522 START is the builtin enum at which to start.
9523 END is the builtin enum at which to end. */
0ac081f6 9524static void
a2369ed3 9525enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9526 enum rs6000_builtins start,
a2369ed3 9527 enum rs6000_builtins end)
a3170dc6
AH
9528{
9529 int i;
9530
9531 for (i = 0; i < size; ++i)
9532 if (desc[i].code == start)
9533 break;
9534
9535 if (i == size)
9536 return;
9537
9538 for (; i < size; ++i)
9539 {
9540 /* Flip all the bits on. */
9541 desc[i].mask = target_flags;
9542 if (desc[i].code == end)
9543 break;
9544 }
9545}
9546
9547static void
863d938c 9548spe_init_builtins (void)
0ac081f6 9549{
a3170dc6
AH
9550 tree endlink = void_list_node;
9551 tree puint_type_node = build_pointer_type (unsigned_type_node);
9552 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9553 struct builtin_description *d;
0ac081f6
AH
9554 size_t i;
9555
a3170dc6
AH
9556 tree v2si_ftype_4_v2si
9557 = build_function_type
3fdaa45a
AH
9558 (opaque_V2SI_type_node,
9559 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9560 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9561 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9562 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9563 endlink)))));
9564
9565 tree v2sf_ftype_4_v2sf
9566 = build_function_type
3fdaa45a
AH
9567 (opaque_V2SF_type_node,
9568 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9569 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9570 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9571 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9572 endlink)))));
9573
9574 tree int_ftype_int_v2si_v2si
9575 = build_function_type
9576 (integer_type_node,
9577 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9578 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9579 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9580 endlink))));
9581
9582 tree int_ftype_int_v2sf_v2sf
9583 = build_function_type
9584 (integer_type_node,
9585 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9586 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9587 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9588 endlink))));
9589
9590 tree void_ftype_v2si_puint_int
9591 = build_function_type (void_type_node,
3fdaa45a 9592 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9593 tree_cons (NULL_TREE, puint_type_node,
9594 tree_cons (NULL_TREE,
9595 integer_type_node,
9596 endlink))));
9597
9598 tree void_ftype_v2si_puint_char
9599 = build_function_type (void_type_node,
3fdaa45a 9600 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9601 tree_cons (NULL_TREE, puint_type_node,
9602 tree_cons (NULL_TREE,
9603 char_type_node,
9604 endlink))));
9605
9606 tree void_ftype_v2si_pv2si_int
9607 = build_function_type (void_type_node,
3fdaa45a 9608 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9609 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9610 tree_cons (NULL_TREE,
9611 integer_type_node,
9612 endlink))));
9613
9614 tree void_ftype_v2si_pv2si_char
9615 = build_function_type (void_type_node,
3fdaa45a 9616 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9617 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9618 tree_cons (NULL_TREE,
9619 char_type_node,
9620 endlink))));
9621
9622 tree void_ftype_int
9623 = build_function_type (void_type_node,
9624 tree_cons (NULL_TREE, integer_type_node, endlink));
9625
9626 tree int_ftype_void
36e8d515 9627 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9628
9629 tree v2si_ftype_pv2si_int
3fdaa45a 9630 = build_function_type (opaque_V2SI_type_node,
6035d635 9631 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9632 tree_cons (NULL_TREE, integer_type_node,
9633 endlink)));
9634
9635 tree v2si_ftype_puint_int
3fdaa45a 9636 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9637 tree_cons (NULL_TREE, puint_type_node,
9638 tree_cons (NULL_TREE, integer_type_node,
9639 endlink)));
9640
9641 tree v2si_ftype_pushort_int
3fdaa45a 9642 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9643 tree_cons (NULL_TREE, pushort_type_node,
9644 tree_cons (NULL_TREE, integer_type_node,
9645 endlink)));
9646
00332c9f
AH
9647 tree v2si_ftype_signed_char
9648 = build_function_type (opaque_V2SI_type_node,
9649 tree_cons (NULL_TREE, signed_char_type_node,
9650 endlink));
9651
a3170dc6
AH
9652 /* The initialization of the simple binary and unary builtins is
9653 done in rs6000_common_init_builtins, but we have to enable the
9654 mask bits here manually because we have run out of `target_flags'
9655 bits. We really need to redesign this mask business. */
9656
9657 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9658 ARRAY_SIZE (bdesc_2arg),
9659 SPE_BUILTIN_EVADDW,
9660 SPE_BUILTIN_EVXOR);
9661 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9662 ARRAY_SIZE (bdesc_1arg),
9663 SPE_BUILTIN_EVABS,
9664 SPE_BUILTIN_EVSUBFUSIAAW);
9665 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9666 ARRAY_SIZE (bdesc_spe_predicates),
9667 SPE_BUILTIN_EVCMPEQ,
9668 SPE_BUILTIN_EVFSTSTLT);
9669 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9670 ARRAY_SIZE (bdesc_spe_evsel),
9671 SPE_BUILTIN_EVSEL_CMPGTS,
9672 SPE_BUILTIN_EVSEL_FSTSTEQ);
9673
36252949
AH
9674 (*lang_hooks.decls.pushdecl)
9675 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9676 opaque_V2SI_type_node));
9677
a3170dc6 9678 /* Initialize irregular SPE builtins. */
f676971a 9679
a3170dc6
AH
9680 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9681 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9682 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9683 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9684 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9685 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9686 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9687 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9688 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9689 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9690 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9691 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9692 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9693 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9694 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9695 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9696 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9697 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9698
9699 /* Loads. */
9700 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9701 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9702 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9703 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9704 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9705 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9706 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9707 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9708 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9709 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9710 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9711 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9712 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9713 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9714 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9715 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9716 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9717 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9718 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9719 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9720 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9721 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9722
9723 /* Predicates. */
9724 d = (struct builtin_description *) bdesc_spe_predicates;
9725 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9726 {
9727 tree type;
9728
9729 switch (insn_data[d->icode].operand[1].mode)
9730 {
9731 case V2SImode:
9732 type = int_ftype_int_v2si_v2si;
9733 break;
9734 case V2SFmode:
9735 type = int_ftype_int_v2sf_v2sf;
9736 break;
9737 default:
37409796 9738 gcc_unreachable ();
a3170dc6
AH
9739 }
9740
9741 def_builtin (d->mask, d->name, type, d->code);
9742 }
9743
9744 /* Evsel predicates. */
9745 d = (struct builtin_description *) bdesc_spe_evsel;
9746 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9747 {
9748 tree type;
9749
9750 switch (insn_data[d->icode].operand[1].mode)
9751 {
9752 case V2SImode:
9753 type = v2si_ftype_4_v2si;
9754 break;
9755 case V2SFmode:
9756 type = v2sf_ftype_4_v2sf;
9757 break;
9758 default:
37409796 9759 gcc_unreachable ();
a3170dc6
AH
9760 }
9761
9762 def_builtin (d->mask, d->name, type, d->code);
9763 }
9764}
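/* A usage sketch for the irregular SPE builtins registered above, using
   the __ev64_opaque__ type pushed just before them:

       __ev64_opaque__ v, *p;
       v = __builtin_spe_evldd (p, 16);           load with constant offset
       __builtin_spe_evstdd (v, p, 16);           store with constant offset
       __builtin_spe_mtspefscr (0);               write the SPEFSCR
       int fscr = __builtin_spe_mfspefscr ();     read the SPEFSCR         */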
9765
96038623
DE
9766static void
9767paired_init_builtins (void)
9768{
23a651fc 9769 const struct builtin_description *d;
96038623
DE
9770 size_t i;
9771 tree endlink = void_list_node;
9772
9773 tree int_ftype_int_v2sf_v2sf
9774 = build_function_type
9775 (integer_type_node,
9776 tree_cons (NULL_TREE, integer_type_node,
9777 tree_cons (NULL_TREE, V2SF_type_node,
9778 tree_cons (NULL_TREE, V2SF_type_node,
9779 endlink))));
9780 tree pcfloat_type_node =
9781 build_pointer_type (build_qualified_type
9782 (float_type_node, TYPE_QUAL_CONST));
9783
9784 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9785 long_integer_type_node,
9786 pcfloat_type_node,
9787 NULL_TREE);
9788 tree void_ftype_v2sf_long_pcfloat =
9789 build_function_type_list (void_type_node,
9790 V2SF_type_node,
9791 long_integer_type_node,
9792 pcfloat_type_node,
9793 NULL_TREE);
9794
9795
9796 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9797 PAIRED_BUILTIN_LX);
9798
9799
9800 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9801 PAIRED_BUILTIN_STX);
9802
9803 /* Predicates. */
23a651fc 9804 d = bdesc_paired_preds;
96038623
DE
9805 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9806 {
9807 tree type;
9808
9809 switch (insn_data[d->icode].operand[1].mode)
9810 {
9811 case V2SFmode:
9812 type = int_ftype_int_v2sf_v2sf;
9813 break;
9814 default:
9815 gcc_unreachable ();
9816 }
9817
9818 def_builtin (d->mask, d->name, type, d->code);
9819 }
9820}
9821
a3170dc6 9822static void
863d938c 9823altivec_init_builtins (void)
a3170dc6 9824{
586de218
KG
9825 const struct builtin_description *d;
9826 const struct builtin_description_predicates *dp;
a3170dc6 9827 size_t i;
7a4eca66
DE
9828 tree ftype;
9829
a3170dc6
AH
9830 tree pfloat_type_node = build_pointer_type (float_type_node);
9831 tree pint_type_node = build_pointer_type (integer_type_node);
9832 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9833 tree pchar_type_node = build_pointer_type (char_type_node);
9834
9835 tree pvoid_type_node = build_pointer_type (void_type_node);
9836
0dbc3651
ZW
9837 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9838 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9839 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9840 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9841
9842 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9843
58646b77
PB
9844 tree int_ftype_opaque
9845 = build_function_type_list (integer_type_node,
9846 opaque_V4SI_type_node, NULL_TREE);
266b4890
AP
9847 tree opaque_ftype_opaque
9848 = build_function_type (integer_type_node,
9849 NULL_TREE);
58646b77
PB
9850 tree opaque_ftype_opaque_int
9851 = build_function_type_list (opaque_V4SI_type_node,
9852 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9853 tree opaque_ftype_opaque_opaque_int
9854 = build_function_type_list (opaque_V4SI_type_node,
9855 opaque_V4SI_type_node, opaque_V4SI_type_node,
9856 integer_type_node, NULL_TREE);
9857 tree int_ftype_int_opaque_opaque
9858 = build_function_type_list (integer_type_node,
9859 integer_type_node, opaque_V4SI_type_node,
9860 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9861 tree int_ftype_int_v4si_v4si
9862 = build_function_type_list (integer_type_node,
9863 integer_type_node, V4SI_type_node,
9864 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9865 tree v4sf_ftype_pcfloat
9866 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9867 tree void_ftype_pfloat_v4sf
b4de2f7d 9868 = build_function_type_list (void_type_node,
a3170dc6 9869 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9870 tree v4si_ftype_pcint
9871 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9872 tree void_ftype_pint_v4si
b4de2f7d
AH
9873 = build_function_type_list (void_type_node,
9874 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9875 tree v8hi_ftype_pcshort
9876 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9877 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9878 = build_function_type_list (void_type_node,
9879 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9880 tree v16qi_ftype_pcchar
9881 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9882 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9883 = build_function_type_list (void_type_node,
9884 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9885 tree void_ftype_v4si
b4de2f7d 9886 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9887 tree v8hi_ftype_void
9888 = build_function_type (V8HI_type_node, void_list_node);
9889 tree void_ftype_void
9890 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9891 tree void_ftype_int
9892 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9893
58646b77
PB
9894 tree opaque_ftype_long_pcvoid
9895 = build_function_type_list (opaque_V4SI_type_node,
9896 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9897 tree v16qi_ftype_long_pcvoid
a3170dc6 9898 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9899 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9900 tree v8hi_ftype_long_pcvoid
a3170dc6 9901 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9902 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9903 tree v4si_ftype_long_pcvoid
a3170dc6 9904 = build_function_type_list (V4SI_type_node,
b4a62fa0 9905 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9906
58646b77
PB
9907 tree void_ftype_opaque_long_pvoid
9908 = build_function_type_list (void_type_node,
9909 opaque_V4SI_type_node, long_integer_type_node,
9910 pvoid_type_node, NULL_TREE);
b4a62fa0 9911 tree void_ftype_v4si_long_pvoid
b4de2f7d 9912 = build_function_type_list (void_type_node,
b4a62fa0 9913 V4SI_type_node, long_integer_type_node,
b4de2f7d 9914 pvoid_type_node, NULL_TREE);
b4a62fa0 9915 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9916 = build_function_type_list (void_type_node,
b4a62fa0 9917 V16QI_type_node, long_integer_type_node,
b4de2f7d 9918 pvoid_type_node, NULL_TREE);
b4a62fa0 9919 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9920 = build_function_type_list (void_type_node,
b4a62fa0 9921 V8HI_type_node, long_integer_type_node,
b4de2f7d 9922 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9923 tree int_ftype_int_v8hi_v8hi
9924 = build_function_type_list (integer_type_node,
9925 integer_type_node, V8HI_type_node,
9926 V8HI_type_node, NULL_TREE);
9927 tree int_ftype_int_v16qi_v16qi
9928 = build_function_type_list (integer_type_node,
9929 integer_type_node, V16QI_type_node,
9930 V16QI_type_node, NULL_TREE);
9931 tree int_ftype_int_v4sf_v4sf
9932 = build_function_type_list (integer_type_node,
9933 integer_type_node, V4SF_type_node,
9934 V4SF_type_node, NULL_TREE);
9935 tree v4si_ftype_v4si
9936 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9937 tree v8hi_ftype_v8hi
9938 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9939 tree v16qi_ftype_v16qi
9940 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9941 tree v4sf_ftype_v4sf
9942 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9943 tree void_ftype_pcvoid_int_int
a3170dc6 9944 = build_function_type_list (void_type_node,
0dbc3651 9945 pcvoid_type_node, integer_type_node,
8bb418a3 9946 integer_type_node, NULL_TREE);
8bb418a3 9947
0dbc3651
ZW
9948 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9949 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9950 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9951 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9952 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9953 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9954 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9955 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9956 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9957 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9958 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9959 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9961 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9963 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9965 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9967 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9969 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9971 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9973 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9974 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9975 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9976 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9977 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9978 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9979 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9980 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9981 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9982 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9983 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9984 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9985 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9986 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9987 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9988 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9989 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9990 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9991 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9992 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9993 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9994
0b61703c
AP
9995 if (rs6000_cpu == PROCESSOR_CELL)
9996 {
9997 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
9998 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
9999 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
10000 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
10001
10002 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
10003 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
10004 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
10005 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
10006
10007 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
10008 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10009 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10010 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10011
10012 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10013 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10014 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10015 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10016 }
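 /* The lvlx/lvrx and stvlx/stvrx families above are the Cell PPU's
 unaligned "load/store vector left/right" operations, which is why they
 are only registered under the PROCESSOR_CELL guard.  */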
58646b77 10017 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
266b4890
AP
10018 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10019 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
58646b77
PB
10020
10021 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10022 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
266b4890
AP
10023 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10024 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
58646b77
PB
10025 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10026 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10027 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10028 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10029 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10030 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10031 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10032 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 10033
a3170dc6 10034 /* Add the DST variants. */
586de218 10035 d = bdesc_dst;
a3170dc6 10036 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 10037 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
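 /* bdesc_dst holds the AltiVec data stream touch builtins (dst, dstt,
 dstst, dststt); each takes an address, a control word and a stream
 selector, hence the shared (const void *, int, int) signature.  */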
a3170dc6
AH
10038
10039 /* Initialize the predicates. */
586de218 10040 dp = bdesc_altivec_preds;
a3170dc6
AH
10041 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10042 {
10043 enum machine_mode mode1;
10044 tree type;
58646b77
PB
10045 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10046 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10047
58646b77
PB
10048 if (is_overloaded)
10049 mode1 = VOIDmode;
10050 else
10051 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
10052
10053 switch (mode1)
10054 {
58646b77
PB
10055 case VOIDmode:
10056 type = int_ftype_int_opaque_opaque;
10057 break;
a3170dc6
AH
10058 case V4SImode:
10059 type = int_ftype_int_v4si_v4si;
10060 break;
10061 case V8HImode:
10062 type = int_ftype_int_v8hi_v8hi;
10063 break;
10064 case V16QImode:
10065 type = int_ftype_int_v16qi_v16qi;
10066 break;
10067 case V4SFmode:
10068 type = int_ftype_int_v4sf_v4sf;
10069 break;
10070 default:
37409796 10071 gcc_unreachable ();
a3170dc6 10072 }
f676971a 10073
a3170dc6
AH
10074 def_builtin (dp->mask, dp->name, type, dp->code);
10075 }
10076
10077 /* Initialize the abs* operators. */
586de218 10078 d = bdesc_abs;
a3170dc6
AH
10079 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10080 {
10081 enum machine_mode mode0;
10082 tree type;
10083
10084 mode0 = insn_data[d->icode].operand[0].mode;
10085
10086 switch (mode0)
10087 {
10088 case V4SImode:
10089 type = v4si_ftype_v4si;
10090 break;
10091 case V8HImode:
10092 type = v8hi_ftype_v8hi;
10093 break;
10094 case V16QImode:
10095 type = v16qi_ftype_v16qi;
10096 break;
10097 case V4SFmode:
10098 type = v4sf_ftype_v4sf;
10099 break;
10100 default:
37409796 10101 gcc_unreachable ();
a3170dc6 10102 }
f676971a 10103
a3170dc6
AH
10104 def_builtin (d->mask, d->name, type, d->code);
10105 }
7ccf35ed 10106
13c62176
DN
10107 if (TARGET_ALTIVEC)
10108 {
10109 tree decl;
10110
10111 /* Initialize target builtin that implements
10112 targetm.vectorize.builtin_mask_for_load. */
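 /* The vectorizer uses that hook to obtain a permutation mask for
 realigning misaligned vector loads.  */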
10113
c79efc4d
RÁE
10114 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10115 v16qi_ftype_long_pcvoid,
10116 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
10117 BUILT_IN_MD, NULL, NULL_TREE);
10118 TREE_READONLY (decl) = 1;
13c62176
DN
10119 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10120 altivec_builtin_mask_for_load = decl;
13c62176 10121 }
7a4eca66
DE
10122
10123 /* Access to the vec_init patterns. */
10124 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10125 integer_type_node, integer_type_node,
10126 integer_type_node, NULL_TREE);
10127 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10128 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10129
10130 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10131 short_integer_type_node,
10132 short_integer_type_node,
10133 short_integer_type_node,
10134 short_integer_type_node,
10135 short_integer_type_node,
10136 short_integer_type_node,
10137 short_integer_type_node, NULL_TREE);
10138 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10139 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10140
10141 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10142 char_type_node, char_type_node,
10143 char_type_node, char_type_node,
10144 char_type_node, char_type_node,
10145 char_type_node, char_type_node,
10146 char_type_node, char_type_node,
10147 char_type_node, char_type_node,
10148 char_type_node, char_type_node,
10149 char_type_node, NULL_TREE);
10150 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10151 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10152
10153 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10154 float_type_node, float_type_node,
10155 float_type_node, NULL_TREE);
10156 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10157 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
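 /* For example, the V4SI variant above takes four ints, so a call such as
 __builtin_vec_init_v4si (1, 2, 3, 4) (illustrative) yields the vector
 {1, 2, 3, 4}.  */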
10158
10159 /* Access to the vec_set patterns. */
10160 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10161 intSI_type_node,
10162 integer_type_node, NULL_TREE);
10163 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10164 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10165
10166 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10167 intHI_type_node,
10168 integer_type_node, NULL_TREE);
10169 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10170 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10171
 10172 ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
10173 intQI_type_node,
10174 integer_type_node, NULL_TREE);
10175 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10176 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10177
10178 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10179 float_type_node,
10180 integer_type_node, NULL_TREE);
10181 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10182 ALTIVEC_BUILTIN_VEC_SET_V4SF);
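 /* Each vec_set builtin takes the vector, the new scalar value and the
 element number, e.g. __builtin_vec_set_v4si (v, 42, 0) (illustrative)
 returns v with element 0 replaced by 42.  */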
10183
10184 /* Access to the vec_extract patterns. */
10185 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10186 integer_type_node, NULL_TREE);
10187 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10188 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10189
10190 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10191 integer_type_node, NULL_TREE);
10192 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10193 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10194
10195 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10196 integer_type_node, NULL_TREE);
10197 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10198 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10199
10200 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10201 integer_type_node, NULL_TREE);
10202 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10203 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
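 /* The vec_extract builtins are the converse: they take a vector and an
 element number and return that element as a scalar, e.g.
 __builtin_vec_ext_v4sf (v, 2) (illustrative) yields a float.  */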
a3170dc6
AH
10204}
10205
10206static void
863d938c 10207rs6000_common_init_builtins (void)
a3170dc6 10208{
586de218 10209 const struct builtin_description *d;
a3170dc6
AH
10210 size_t i;
10211
96038623
DE
10212 tree v2sf_ftype_v2sf_v2sf_v2sf
10213 = build_function_type_list (V2SF_type_node,
10214 V2SF_type_node, V2SF_type_node,
10215 V2SF_type_node, NULL_TREE);
10216
a3170dc6
AH
10217 tree v4sf_ftype_v4sf_v4sf_v16qi
10218 = build_function_type_list (V4SF_type_node,
10219 V4SF_type_node, V4SF_type_node,
10220 V16QI_type_node, NULL_TREE);
10221 tree v4si_ftype_v4si_v4si_v16qi
10222 = build_function_type_list (V4SI_type_node,
10223 V4SI_type_node, V4SI_type_node,
10224 V16QI_type_node, NULL_TREE);
10225 tree v8hi_ftype_v8hi_v8hi_v16qi
10226 = build_function_type_list (V8HI_type_node,
10227 V8HI_type_node, V8HI_type_node,
10228 V16QI_type_node, NULL_TREE);
10229 tree v16qi_ftype_v16qi_v16qi_v16qi
10230 = build_function_type_list (V16QI_type_node,
10231 V16QI_type_node, V16QI_type_node,
10232 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10233 tree v4si_ftype_int
10234 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10235 tree v8hi_ftype_int
10236 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10237 tree v16qi_ftype_int
10238 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10239 tree v8hi_ftype_v16qi
10240 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10241 tree v4sf_ftype_v4sf
10242 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10243
10244 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10245 = build_function_type_list (opaque_V2SI_type_node,
10246 opaque_V2SI_type_node,
10247 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10248
96038623 10249 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10250 = build_function_type_list (opaque_V2SF_type_node,
10251 opaque_V2SF_type_node,
10252 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10253
96038623
DE
10254 tree v2sf_ftype_v2sf_v2sf
10255 = build_function_type_list (V2SF_type_node,
10256 V2SF_type_node,
10257 V2SF_type_node, NULL_TREE);
10258
10259
a3170dc6 10260 tree v2si_ftype_int_int
2abe3e28 10261 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10262 integer_type_node, integer_type_node,
10263 NULL_TREE);
10264
58646b77
PB
10265 tree opaque_ftype_opaque
10266 = build_function_type_list (opaque_V4SI_type_node,
10267 opaque_V4SI_type_node, NULL_TREE);
10268
a3170dc6 10269 tree v2si_ftype_v2si
2abe3e28
AH
10270 = build_function_type_list (opaque_V2SI_type_node,
10271 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10272
96038623 10273 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10274 = build_function_type_list (opaque_V2SF_type_node,
10275 opaque_V2SF_type_node, NULL_TREE);
f676971a 10276
96038623
DE
10277 tree v2sf_ftype_v2sf
10278 = build_function_type_list (V2SF_type_node,
10279 V2SF_type_node, NULL_TREE);
10280
a3170dc6 10281 tree v2sf_ftype_v2si
2abe3e28
AH
10282 = build_function_type_list (opaque_V2SF_type_node,
10283 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10284
10285 tree v2si_ftype_v2sf
2abe3e28
AH
10286 = build_function_type_list (opaque_V2SI_type_node,
10287 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10288
10289 tree v2si_ftype_v2si_char
2abe3e28
AH
10290 = build_function_type_list (opaque_V2SI_type_node,
10291 opaque_V2SI_type_node,
10292 char_type_node, NULL_TREE);
a3170dc6
AH
10293
10294 tree v2si_ftype_int_char
2abe3e28 10295 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10296 integer_type_node, char_type_node, NULL_TREE);
10297
10298 tree v2si_ftype_char
2abe3e28
AH
10299 = build_function_type_list (opaque_V2SI_type_node,
10300 char_type_node, NULL_TREE);
a3170dc6
AH
10301
10302 tree int_ftype_int_int
10303 = build_function_type_list (integer_type_node,
10304 integer_type_node, integer_type_node,
10305 NULL_TREE);
95385cbb 10306
58646b77
PB
10307 tree opaque_ftype_opaque_opaque
10308 = build_function_type_list (opaque_V4SI_type_node,
10309 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10310 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10311 = build_function_type_list (V4SI_type_node,
10312 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10313 tree v4sf_ftype_v4si_int
b4de2f7d 10314 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10315 V4SI_type_node, integer_type_node, NULL_TREE);
10316 tree v4si_ftype_v4sf_int
b4de2f7d 10317 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10318 V4SF_type_node, integer_type_node, NULL_TREE);
10319 tree v4si_ftype_v4si_int
b4de2f7d 10320 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10321 V4SI_type_node, integer_type_node, NULL_TREE);
10322 tree v8hi_ftype_v8hi_int
b4de2f7d 10323 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10324 V8HI_type_node, integer_type_node, NULL_TREE);
10325 tree v16qi_ftype_v16qi_int
b4de2f7d 10326 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10327 V16QI_type_node, integer_type_node, NULL_TREE);
10328 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10329 = build_function_type_list (V16QI_type_node,
10330 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10331 integer_type_node, NULL_TREE);
10332 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10333 = build_function_type_list (V8HI_type_node,
10334 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10335 integer_type_node, NULL_TREE);
10336 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10337 = build_function_type_list (V4SI_type_node,
10338 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10339 integer_type_node, NULL_TREE);
10340 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10341 = build_function_type_list (V4SF_type_node,
10342 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10343 integer_type_node, NULL_TREE);
0ac081f6 10344 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10345 = build_function_type_list (V4SF_type_node,
10346 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10347 tree opaque_ftype_opaque_opaque_opaque
10348 = build_function_type_list (opaque_V4SI_type_node,
10349 opaque_V4SI_type_node, opaque_V4SI_type_node,
10350 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10351 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10352 = build_function_type_list (V4SF_type_node,
10353 V4SF_type_node, V4SF_type_node,
10354 V4SI_type_node, NULL_TREE);
2212663f 10355 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10356 = build_function_type_list (V4SF_type_node,
10357 V4SF_type_node, V4SF_type_node,
10358 V4SF_type_node, NULL_TREE);
f676971a 10359 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10360 = build_function_type_list (V4SI_type_node,
10361 V4SI_type_node, V4SI_type_node,
10362 V4SI_type_node, NULL_TREE);
0ac081f6 10363 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10364 = build_function_type_list (V8HI_type_node,
10365 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10366 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10367 = build_function_type_list (V8HI_type_node,
10368 V8HI_type_node, V8HI_type_node,
10369 V8HI_type_node, NULL_TREE);
c4ad648e 10370 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10371 = build_function_type_list (V4SI_type_node,
10372 V8HI_type_node, V8HI_type_node,
10373 V4SI_type_node, NULL_TREE);
c4ad648e 10374 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10375 = build_function_type_list (V4SI_type_node,
10376 V16QI_type_node, V16QI_type_node,
10377 V4SI_type_node, NULL_TREE);
0ac081f6 10378 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10379 = build_function_type_list (V16QI_type_node,
10380 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10381 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10382 = build_function_type_list (V4SI_type_node,
10383 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10384 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10385 = build_function_type_list (V8HI_type_node,
10386 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10387 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10388 = build_function_type_list (V4SI_type_node,
10389 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10390 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10391 = build_function_type_list (V8HI_type_node,
10392 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10393 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10394 = build_function_type_list (V16QI_type_node,
10395 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10396 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10397 = build_function_type_list (V4SI_type_node,
10398 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10399 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10400 = build_function_type_list (V4SI_type_node,
10401 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10402 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10403 = build_function_type_list (V4SI_type_node,
10404 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10405 tree v4si_ftype_v8hi
10406 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10407 tree int_ftype_v4si_v4si
10408 = build_function_type_list (integer_type_node,
10409 V4SI_type_node, V4SI_type_node, NULL_TREE);
10410 tree int_ftype_v4sf_v4sf
10411 = build_function_type_list (integer_type_node,
10412 V4SF_type_node, V4SF_type_node, NULL_TREE);
10413 tree int_ftype_v16qi_v16qi
10414 = build_function_type_list (integer_type_node,
10415 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10416 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10417 = build_function_type_list (integer_type_node,
10418 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10419
6f317ef3 10420 /* Add the simple ternary operators. */
586de218 10421 d = bdesc_3arg;
ca7558fc 10422 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10423 {
2212663f
DB
10424 enum machine_mode mode0, mode1, mode2, mode3;
10425 tree type;
58646b77
PB
10426 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10427 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10428
58646b77
PB
10429 if (is_overloaded)
10430 {
10431 mode0 = VOIDmode;
10432 mode1 = VOIDmode;
10433 mode2 = VOIDmode;
10434 mode3 = VOIDmode;
10435 }
10436 else
10437 {
10438 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10439 continue;
f676971a 10440
58646b77
PB
10441 mode0 = insn_data[d->icode].operand[0].mode;
10442 mode1 = insn_data[d->icode].operand[1].mode;
10443 mode2 = insn_data[d->icode].operand[2].mode;
10444 mode3 = insn_data[d->icode].operand[3].mode;
10445 }
bb8df8a6 10446
2212663f
DB
10447 /* When all four are of the same mode. */
10448 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10449 {
10450 switch (mode0)
10451 {
58646b77
PB
10452 case VOIDmode:
10453 type = opaque_ftype_opaque_opaque_opaque;
10454 break;
617e0e1d
DB
10455 case V4SImode:
10456 type = v4si_ftype_v4si_v4si_v4si;
10457 break;
2212663f
DB
10458 case V4SFmode:
10459 type = v4sf_ftype_v4sf_v4sf_v4sf;
10460 break;
10461 case V8HImode:
10462 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10463 break;
2212663f
DB
10464 case V16QImode:
10465 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10466 break;
96038623
DE
10467 case V2SFmode:
10468 type = v2sf_ftype_v2sf_v2sf_v2sf;
10469 break;
2212663f 10470 default:
37409796 10471 gcc_unreachable ();
2212663f
DB
10472 }
10473 }
10474 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10475 {
2212663f
DB
10476 switch (mode0)
10477 {
10478 case V4SImode:
10479 type = v4si_ftype_v4si_v4si_v16qi;
10480 break;
10481 case V4SFmode:
10482 type = v4sf_ftype_v4sf_v4sf_v16qi;
10483 break;
10484 case V8HImode:
10485 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10486 break;
2212663f
DB
10487 case V16QImode:
10488 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10489 break;
2212663f 10490 default:
37409796 10491 gcc_unreachable ();
2212663f
DB
10492 }
10493 }
f676971a 10494 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10495 && mode3 == V4SImode)
24408032 10496 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10497 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10498 && mode3 == V4SImode)
24408032 10499 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10500 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10501 && mode3 == V4SImode)
24408032
AH
10502 type = v4sf_ftype_v4sf_v4sf_v4si;
10503
a7b376ee 10504 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10505 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10506 && mode3 == QImode)
b9e4e5d1 10507 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10508
a7b376ee 10509 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10510 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10511 && mode3 == QImode)
b9e4e5d1 10512 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10513
a7b376ee 10514 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10515 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10516 && mode3 == QImode)
b9e4e5d1 10517 type = v4si_ftype_v4si_v4si_int;
24408032 10518
a7b376ee 10519 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10520 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10521 && mode3 == QImode)
b9e4e5d1 10522 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10523
2212663f 10524 else
37409796 10525 gcc_unreachable ();
2212663f
DB
10526
10527 def_builtin (d->mask, d->name, type, d->code);
10528 }
10529
0ac081f6 10530 /* Add the simple binary operators. */
00b960c7 10531 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10532 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10533 {
10534 enum machine_mode mode0, mode1, mode2;
10535 tree type;
58646b77
PB
10536 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10537 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10538
58646b77
PB
10539 if (is_overloaded)
10540 {
10541 mode0 = VOIDmode;
10542 mode1 = VOIDmode;
10543 mode2 = VOIDmode;
10544 }
10545 else
bb8df8a6 10546 {
58646b77
PB
10547 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10548 continue;
f676971a 10549
58646b77
PB
10550 mode0 = insn_data[d->icode].operand[0].mode;
10551 mode1 = insn_data[d->icode].operand[1].mode;
10552 mode2 = insn_data[d->icode].operand[2].mode;
10553 }
0ac081f6
AH
10554
10555 /* When all three operands are of the same mode. */
10556 if (mode0 == mode1 && mode1 == mode2)
10557 {
10558 switch (mode0)
10559 {
58646b77
PB
10560 case VOIDmode:
10561 type = opaque_ftype_opaque_opaque;
10562 break;
0ac081f6
AH
10563 case V4SFmode:
10564 type = v4sf_ftype_v4sf_v4sf;
10565 break;
10566 case V4SImode:
10567 type = v4si_ftype_v4si_v4si;
10568 break;
10569 case V16QImode:
10570 type = v16qi_ftype_v16qi_v16qi;
10571 break;
10572 case V8HImode:
10573 type = v8hi_ftype_v8hi_v8hi;
10574 break;
a3170dc6
AH
10575 case V2SImode:
10576 type = v2si_ftype_v2si_v2si;
10577 break;
96038623
DE
10578 case V2SFmode:
10579 if (TARGET_PAIRED_FLOAT)
10580 type = v2sf_ftype_v2sf_v2sf;
10581 else
10582 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10583 break;
10584 case SImode:
10585 type = int_ftype_int_int;
10586 break;
0ac081f6 10587 default:
37409796 10588 gcc_unreachable ();
0ac081f6
AH
10589 }
10590 }
10591
10592 /* A few other combos we really don't want to do manually. */
10593
10594 /* vint, vfloat, vfloat. */
10595 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10596 type = v4si_ftype_v4sf_v4sf;
10597
10598 /* vshort, vchar, vchar. */
10599 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10600 type = v8hi_ftype_v16qi_v16qi;
10601
10602 /* vint, vshort, vshort. */
10603 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10604 type = v4si_ftype_v8hi_v8hi;
10605
10606 /* vshort, vint, vint. */
10607 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10608 type = v8hi_ftype_v4si_v4si;
10609
10610 /* vchar, vshort, vshort. */
10611 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10612 type = v16qi_ftype_v8hi_v8hi;
10613
10614 /* vint, vchar, vint. */
10615 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10616 type = v4si_ftype_v16qi_v4si;
10617
fa066a23
AH
10618 /* vint, vchar, vchar. */
10619 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10620 type = v4si_ftype_v16qi_v16qi;
10621
0ac081f6
AH
10622 /* vint, vshort, vint. */
10623 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10624 type = v4si_ftype_v8hi_v4si;
f676971a 10625
a7b376ee 10626 /* vint, vint, 5-bit literal. */
2212663f 10627 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10628 type = v4si_ftype_v4si_int;
f676971a 10629
a7b376ee 10630 /* vshort, vshort, 5-bit literal. */
2212663f 10631 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10632 type = v8hi_ftype_v8hi_int;
f676971a 10633
a7b376ee 10634 /* vchar, vchar, 5-bit literal. */
2212663f 10635 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10636 type = v16qi_ftype_v16qi_int;
0ac081f6 10637
a7b376ee 10638 /* vfloat, vint, 5-bit literal. */
617e0e1d 10639 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10640 type = v4sf_ftype_v4si_int;
f676971a 10641
a7b376ee 10642 /* vint, vfloat, 5-bit literal. */
617e0e1d 10643 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10644 type = v4si_ftype_v4sf_int;
617e0e1d 10645
a3170dc6
AH
10646 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10647 type = v2si_ftype_int_int;
10648
10649 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10650 type = v2si_ftype_v2si_char;
10651
10652 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10653 type = v2si_ftype_int_char;
10654
37409796 10655 else
0ac081f6 10656 {
37409796
NS
10657 /* int, x, x. */
10658 gcc_assert (mode0 == SImode);
0ac081f6
AH
10659 switch (mode1)
10660 {
10661 case V4SImode:
10662 type = int_ftype_v4si_v4si;
10663 break;
10664 case V4SFmode:
10665 type = int_ftype_v4sf_v4sf;
10666 break;
10667 case V16QImode:
10668 type = int_ftype_v16qi_v16qi;
10669 break;
10670 case V8HImode:
10671 type = int_ftype_v8hi_v8hi;
10672 break;
10673 default:
37409796 10674 gcc_unreachable ();
0ac081f6
AH
10675 }
10676 }
10677
2212663f
DB
10678 def_builtin (d->mask, d->name, type, d->code);
10679 }
24408032 10680
2212663f
DB
10681 /* Add the simple unary operators. */
10682 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10683 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10684 {
10685 enum machine_mode mode0, mode1;
10686 tree type;
58646b77
PB
10687 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10688 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10689
10690 if (is_overloaded)
10691 {
10692 mode0 = VOIDmode;
10693 mode1 = VOIDmode;
10694 }
10695 else
10696 {
10697 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10698 continue;
bb8df8a6 10699
58646b77
PB
10700 mode0 = insn_data[d->icode].operand[0].mode;
10701 mode1 = insn_data[d->icode].operand[1].mode;
10702 }
2212663f
DB
10703
10704 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10705 type = v4si_ftype_int;
2212663f 10706 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10707 type = v8hi_ftype_int;
2212663f 10708 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10709 type = v16qi_ftype_int;
58646b77
PB
10710 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10711 type = opaque_ftype_opaque;
617e0e1d
DB
10712 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10713 type = v4sf_ftype_v4sf;
20e26713
AH
10714 else if (mode0 == V8HImode && mode1 == V16QImode)
10715 type = v8hi_ftype_v16qi;
10716 else if (mode0 == V4SImode && mode1 == V8HImode)
10717 type = v4si_ftype_v8hi;
a3170dc6
AH
10718 else if (mode0 == V2SImode && mode1 == V2SImode)
10719 type = v2si_ftype_v2si;
10720 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10721 {
10722 if (TARGET_PAIRED_FLOAT)
10723 type = v2sf_ftype_v2sf;
10724 else
10725 type = v2sf_ftype_v2sf_spe;
10726 }
a3170dc6
AH
10727 else if (mode0 == V2SFmode && mode1 == V2SImode)
10728 type = v2sf_ftype_v2si;
10729 else if (mode0 == V2SImode && mode1 == V2SFmode)
10730 type = v2si_ftype_v2sf;
10731 else if (mode0 == V2SImode && mode1 == QImode)
10732 type = v2si_ftype_char;
2212663f 10733 else
37409796 10734 gcc_unreachable ();
2212663f 10735
0ac081f6
AH
10736 def_builtin (d->mask, d->name, type, d->code);
10737 }
10738}
10739
c15c90bb
ZW
10740static void
10741rs6000_init_libfuncs (void)
10742{
602ea4d3
JJ
10743 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10744 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10745 {
602ea4d3
JJ
10746 /* AIX library routines for float->int conversion. */
10747 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10748 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10749 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10750 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10751 }
c15c90bb 10752
602ea4d3 10753 if (!TARGET_IEEEQUAD)
98c41d98 10754 /* AIX/Darwin/64-bit Linux quad floating point routines. */
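 /* The __gcc_q* entry points below are libgcc's IBM long double
 (double-double) support routines; the _xlq* names used for
 TARGET_XL_COMPAT match the XL compiler's runtime instead.  */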
602ea4d3
JJ
10755 if (!TARGET_XL_COMPAT)
10756 {
10757 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10758 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10759 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10760 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10761
17caeff2 10762 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10763 {
10764 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10765 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10766 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10767 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10768 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10769 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10770 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10771
10772 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10773 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10774 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10775 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10776 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10777 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10778 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10779 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10780 }
b26941b4
JM
10781
10782 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10783 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10784 }
10785 else
10786 {
10787 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10788 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10789 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10790 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10791 }
c9034561 10792 else
c15c90bb 10793 {
c9034561 10794 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10795
10796 set_optab_libfunc (add_optab, TFmode, "_q_add");
10797 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10798 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10799 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10800 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10801 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10802 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10803
c9034561
ZW
10804 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10805 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10806 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10807 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10808 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10809 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10810
85363ca0
ZW
10811 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10812 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10813 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10814 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10815 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10816 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10817 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10818 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10819 }
10820}
fba73eb1
DE
10821
10822\f
10823/* Expand a block clear operation, and return 1 if successful. Return 0
10824 if we should let the compiler generate normal code.
10825
10826 operands[0] is the destination
10827 operands[1] is the length
57e84f18 10828 operands[3] is the alignment */
fba73eb1
DE
10829
10830int
10831expand_block_clear (rtx operands[])
10832{
10833 rtx orig_dest = operands[0];
10834 rtx bytes_rtx = operands[1];
57e84f18 10835 rtx align_rtx = operands[3];
5514620a
GK
10836 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10837 HOST_WIDE_INT align;
10838 HOST_WIDE_INT bytes;
fba73eb1
DE
10839 int offset;
10840 int clear_bytes;
5514620a 10841 int clear_step;
fba73eb1
DE
10842
 10843 /* If this is not a fixed size clear, just call memset. */
10844 if (! constp)
10845 return 0;
10846
37409796
NS
10847 /* This must be a fixed size alignment */
10848 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10849 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10850
10851 /* Anything to clear? */
10852 bytes = INTVAL (bytes_rtx);
10853 if (bytes <= 0)
10854 return 1;
10855
5514620a
GK
10856 /* Use the builtin memset after a point, to avoid huge code bloat.
10857 When optimize_size, avoid any significant code bloat; calling
10858 memset is about 4 instructions, so allow for one instruction to
10859 load zero and three to do clearing. */
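 /* For example, in the AltiVec case clear_step is 16, so the checks below
 allow at most 48 bytes to be cleared inline when optimizing for size
 and 128 bytes otherwise; anything larger falls back to memset.  */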
10860 if (TARGET_ALTIVEC && align >= 128)
10861 clear_step = 16;
10862 else if (TARGET_POWERPC64 && align >= 32)
10863 clear_step = 8;
21d818ff
NF
10864 else if (TARGET_SPE && align >= 64)
10865 clear_step = 8;
5514620a
GK
10866 else
10867 clear_step = 4;
fba73eb1 10868
5514620a
GK
10869 if (optimize_size && bytes > 3 * clear_step)
10870 return 0;
10871 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10872 return 0;
10873
10874 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10875 {
fba73eb1
DE
10876 enum machine_mode mode = BLKmode;
10877 rtx dest;
f676971a 10878
5514620a
GK
10879 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10880 {
10881 clear_bytes = 16;
10882 mode = V4SImode;
10883 }
21d818ff
NF
10884 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10885 {
10886 clear_bytes = 8;
10887 mode = V2SImode;
10888 }
5514620a 10889 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10890 /* 64-bit loads and stores require word-aligned
10891 displacements. */
10892 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10893 {
10894 clear_bytes = 8;
10895 mode = DImode;
fba73eb1 10896 }
5514620a 10897 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
 10898 { /* clear 4 bytes */
10899 clear_bytes = 4;
10900 mode = SImode;
fba73eb1 10901 }
ec53fc93 10902 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
 10903 { /* clear 2 bytes */
10904 clear_bytes = 2;
10905 mode = HImode;
fba73eb1
DE
10906 }
 10907 else /* clear 1 byte at a time */
10908 {
10909 clear_bytes = 1;
10910 mode = QImode;
fba73eb1 10911 }
f676971a 10912
fba73eb1 10913 dest = adjust_address (orig_dest, mode, offset);
f676971a 10914
5514620a 10915 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10916 }
10917
10918 return 1;
10919}
10920
35aff10b 10921\f
7e69e155
MM
10922/* Expand a block move operation, and return 1 if successful. Return 0
10923 if we should let the compiler generate normal code.
10924
10925 operands[0] is the destination
10926 operands[1] is the source
10927 operands[2] is the length
10928 operands[3] is the alignment */
10929
3933e0e1
MM
10930#define MAX_MOVE_REG 4
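/* Stores for the copied pieces are queued in expand_block_move and emitted
 in batches of up to MAX_MOVE_REG, after the corresponding loads have been
 issued.  */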
10931
7e69e155 10932int
a2369ed3 10933expand_block_move (rtx operands[])
7e69e155 10934{
b6c9286a
MM
10935 rtx orig_dest = operands[0];
10936 rtx orig_src = operands[1];
7e69e155 10937 rtx bytes_rtx = operands[2];
7e69e155 10938 rtx align_rtx = operands[3];
3933e0e1 10939 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10940 int align;
3933e0e1
MM
10941 int bytes;
10942 int offset;
7e69e155 10943 int move_bytes;
cabfd258
GK
10944 rtx stores[MAX_MOVE_REG];
10945 int num_reg = 0;
7e69e155 10946
3933e0e1 10947 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10948 if (! constp)
3933e0e1
MM
10949 return 0;
10950
37409796
NS
10951 /* This must be a fixed size alignment */
10952 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10953 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10954
7e69e155 10955 /* Anything to move? */
3933e0e1
MM
10956 bytes = INTVAL (bytes_rtx);
10957 if (bytes <= 0)
7e69e155
MM
10958 return 1;
10959
ea9982a8 10960 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10961 reg_parm_stack_space. */
ea9982a8 10962 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10963 return 0;
10964
cabfd258 10965 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10966 {
cabfd258 10967 union {
70128ad9 10968 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10969 rtx (*mov) (rtx, rtx);
cabfd258
GK
10970 } gen_func;
10971 enum machine_mode mode = BLKmode;
10972 rtx src, dest;
f676971a 10973
5514620a
GK
10974 /* Altivec first, since it will be faster than a string move
10975 when it applies, and usually not significantly larger. */
10976 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10977 {
10978 move_bytes = 16;
10979 mode = V4SImode;
10980 gen_func.mov = gen_movv4si;
10981 }
21d818ff
NF
10982 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10983 {
10984 move_bytes = 8;
10985 mode = V2SImode;
10986 gen_func.mov = gen_movv2si;
10987 }
5514620a 10988 else if (TARGET_STRING
cabfd258
GK
10989 && bytes > 24 /* move up to 32 bytes at a time */
10990 && ! fixed_regs[5]
10991 && ! fixed_regs[6]
10992 && ! fixed_regs[7]
10993 && ! fixed_regs[8]
10994 && ! fixed_regs[9]
10995 && ! fixed_regs[10]
10996 && ! fixed_regs[11]
10997 && ! fixed_regs[12])
7e69e155 10998 {
cabfd258 10999 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 11000 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
11001 }
11002 else if (TARGET_STRING
11003 && bytes > 16 /* move up to 24 bytes at a time */
11004 && ! fixed_regs[5]
11005 && ! fixed_regs[6]
11006 && ! fixed_regs[7]
11007 && ! fixed_regs[8]
11008 && ! fixed_regs[9]
11009 && ! fixed_regs[10])
11010 {
11011 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 11012 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
11013 }
11014 else if (TARGET_STRING
11015 && bytes > 8 /* move up to 16 bytes at a time */
11016 && ! fixed_regs[5]
11017 && ! fixed_regs[6]
11018 && ! fixed_regs[7]
11019 && ! fixed_regs[8])
11020 {
11021 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 11022 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
11023 }
11024 else if (bytes >= 8 && TARGET_POWERPC64
11025 /* 64-bit loads and stores require word-aligned
11026 displacements. */
fba73eb1 11027 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
11028 {
11029 move_bytes = 8;
11030 mode = DImode;
11031 gen_func.mov = gen_movdi;
11032 }
11033 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11034 { /* move up to 8 bytes at a time */
11035 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 11036 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 11037 }
cd7d9ca4 11038 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
11039 { /* move 4 bytes */
11040 move_bytes = 4;
11041 mode = SImode;
11042 gen_func.mov = gen_movsi;
11043 }
ec53fc93 11044 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
11045 { /* move 2 bytes */
11046 move_bytes = 2;
11047 mode = HImode;
11048 gen_func.mov = gen_movhi;
11049 }
11050 else if (TARGET_STRING && bytes > 1)
11051 { /* move up to 4 bytes at a time */
11052 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11053 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
11054 }
11055 else /* move 1 byte at a time */
11056 {
11057 move_bytes = 1;
11058 mode = QImode;
11059 gen_func.mov = gen_movqi;
11060 }
f676971a 11061
cabfd258
GK
11062 src = adjust_address (orig_src, mode, offset);
11063 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11064
11065 if (mode != BLKmode)
cabfd258
GK
11066 {
11067 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11068
cabfd258
GK
11069 emit_insn ((*gen_func.mov) (tmp_reg, src));
11070 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11071 }
3933e0e1 11072
cabfd258
GK
11073 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11074 {
11075 int i;
11076 for (i = 0; i < num_reg; i++)
11077 emit_insn (stores[i]);
11078 num_reg = 0;
11079 }
35aff10b 11080
cabfd258 11081 if (mode == BLKmode)
7e69e155 11082 {
70128ad9 11083 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11084 patterns require zero offset. */
11085 if (!REG_P (XEXP (src, 0)))
b6c9286a 11086 {
cabfd258
GK
11087 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11088 src = replace_equiv_address (src, src_reg);
b6c9286a 11089 }
cabfd258 11090 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11091
cabfd258 11092 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11093 {
cabfd258
GK
11094 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11095 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11096 }
cabfd258 11097 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11098
70128ad9 11099 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11100 GEN_INT (move_bytes & 31),
11101 align_rtx));
7e69e155 11102 }
7e69e155
MM
11103 }
11104
11105 return 1;
11106}
11107
d62294f5 11108\f
9caa3eb2
DE
11109/* Return a string to perform a load_multiple operation.
11110 operands[0] is the vector.
11111 operands[1] is the source address.
11112 operands[2] is the first destination register. */
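/* When the source address does not use any of the destination registers,
 the fallback "{lsi|lswi} %2,%1,%N0" template at the end of the function
 is returned.  */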
11113
11114const char *
a2369ed3 11115rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11116{
11117 /* We have to handle the case where the pseudo used to contain the address
11118 is assigned to one of the output registers. */
11119 int i, j;
11120 int words = XVECLEN (operands[0], 0);
11121 rtx xop[10];
11122
11123 if (XVECLEN (operands[0], 0) == 1)
11124 return "{l|lwz} %2,0(%1)";
11125
11126 for (i = 0; i < words; i++)
11127 if (refers_to_regno_p (REGNO (operands[2]) + i,
11128 REGNO (operands[2]) + i + 1, operands[1], 0))
11129 {
11130 if (i == words-1)
11131 {
11132 xop[0] = GEN_INT (4 * (words-1));
11133 xop[1] = operands[1];
11134 xop[2] = operands[2];
11135 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11136 return "";
11137 }
11138 else if (i == 0)
11139 {
11140 xop[0] = GEN_INT (4 * (words-1));
11141 xop[1] = operands[1];
11142 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11143 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11144 return "";
11145 }
11146 else
11147 {
11148 for (j = 0; j < words; j++)
11149 if (j != i)
11150 {
11151 xop[0] = GEN_INT (j * 4);
11152 xop[1] = operands[1];
11153 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11154 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11155 }
11156 xop[0] = GEN_INT (i * 4);
11157 xop[1] = operands[1];
11158 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11159 return "";
11160 }
11161 }
11162
11163 return "{lsi|lswi} %2,%1,%N0";
11164}
11165
9878760c 11166\f
a4f6c312
SS
11167/* A validation routine: say whether CODE, a condition code, and MODE
11168 match. The other alternatives either don't make sense or should
11169 never be generated. */
39a10a29 11170
48d72335 11171void
a2369ed3 11172validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11173{
37409796
NS
11174 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11175 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11176 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11177
11178 /* These don't make sense. */
37409796
NS
11179 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11180 || mode != CCUNSmode);
39a10a29 11181
37409796
NS
11182 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11183 || mode == CCUNSmode);
39a10a29 11184
37409796
NS
11185 gcc_assert (mode == CCFPmode
11186 || (code != ORDERED && code != UNORDERED
11187 && code != UNEQ && code != LTGT
11188 && code != UNGT && code != UNLT
11189 && code != UNGE && code != UNLE));
f676971a
EC
11190
11191 /* These should never be generated except for
bc9ec0e0 11192 flag_finite_math_only. */
37409796
NS
11193 gcc_assert (mode != CCFPmode
11194 || flag_finite_math_only
11195 || (code != LE && code != GE
11196 && code != UNEQ && code != LTGT
11197 && code != UNGT && code != UNLT));
39a10a29
GK
11198
11199 /* These are invalid; the information is not there. */
37409796 11200 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11201}
11202
9878760c
RK
11203\f
 11204/* Return 1 if ANDOP is a mask that has no bits set that are not in the
 11205 mask required to convert the result of a rotate insn into a shift
b1765bde 11206 left insn of SHIFTOP bits. Both are known to be SImode CONST_INTs. */
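/* Worked example: with SHIFTOP == 4 the shift mask is 0xfffffff0, so
 ANDOP == 0x0ff0 passes the test below while ANDOP == 0x0fff does not,
 because bits 0-3 lie outside the mask.  */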
9878760c
RK
11207
11208int
a2369ed3 11209includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11210{
e2c953b6
DE
11211 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11212
11213 shift_mask <<= INTVAL (shiftop);
9878760c 11214
b1765bde 11215 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11216}
11217
11218/* Similar, but for right shift. */
11219
11220int
a2369ed3 11221includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11222{
a7653a2c 11223 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11224
11225 shift_mask >>= INTVAL (shiftop);
11226
b1765bde 11227 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11228}
11229
c5059423
AM
11230/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11231 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11232 significant 0's, then one or more 1's, then zero or more 0's. */
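/* Worked example: for SHIFTOP == 4, ANDOP == 0x0ff0 is accepted (exactly
 four low 0's, one block of 1's, then 0's), while 0x0ff1 fails (a 1 below
 the shift point) and 0xf0f0 fails (the 1's are not contiguous).  */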
e2c953b6
DE
11233
11234int
a2369ed3 11235includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11236{
c5059423
AM
11237 if (GET_CODE (andop) == CONST_INT)
11238 {
02071907 11239 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11240
c5059423 11241 c = INTVAL (andop);
02071907 11242 if (c == 0 || c == ~0)
c5059423 11243 return 0;
e2c953b6 11244
02071907 11245 shift_mask = ~0;
c5059423
AM
11246 shift_mask <<= INTVAL (shiftop);
11247
b6d08ca1 11248 /* Find the least significant one bit. */
c5059423
AM
11249 lsb = c & -c;
11250
11251 /* It must coincide with the LSB of the shift mask. */
11252 if (-lsb != shift_mask)
11253 return 0;
e2c953b6 11254
c5059423
AM
11255 /* Invert to look for the next transition (if any). */
11256 c = ~c;
11257
11258 /* Remove the low group of ones (originally low group of zeros). */
11259 c &= -lsb;
11260
11261 /* Again find the lsb, and check we have all 1's above. */
11262 lsb = c & -c;
11263 return c == -lsb;
11264 }
11265 else if (GET_CODE (andop) == CONST_DOUBLE
11266 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11267 {
02071907
AM
11268 HOST_WIDE_INT low, high, lsb;
11269 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11270
11271 low = CONST_DOUBLE_LOW (andop);
11272 if (HOST_BITS_PER_WIDE_INT < 64)
11273 high = CONST_DOUBLE_HIGH (andop);
11274
11275 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11276 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11277 return 0;
11278
11279 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11280 {
02071907 11281 shift_mask_high = ~0;
c5059423
AM
11282 if (INTVAL (shiftop) > 32)
11283 shift_mask_high <<= INTVAL (shiftop) - 32;
11284
11285 lsb = high & -high;
11286
11287 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11288 return 0;
11289
11290 high = ~high;
11291 high &= -lsb;
11292
11293 lsb = high & -high;
11294 return high == -lsb;
11295 }
11296
02071907 11297 shift_mask_low = ~0;
c5059423
AM
11298 shift_mask_low <<= INTVAL (shiftop);
11299
11300 lsb = low & -low;
11301
11302 if (-lsb != shift_mask_low)
11303 return 0;
11304
11305 if (HOST_BITS_PER_WIDE_INT < 64)
11306 high = ~high;
11307 low = ~low;
11308 low &= -lsb;
11309
11310 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11311 {
11312 lsb = high & -high;
11313 return high == -lsb;
11314 }
11315
11316 lsb = low & -low;
11317 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11318 }
11319 else
11320 return 0;
11321}
e2c953b6 11322
c5059423
AM
11323/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11324 to perform a left shift. It must have SHIFTOP or more least
c1207243 11325 significant 0's, with the remainder of the word 1's. */
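/* Worked example: for SHIFTOP == 4, ANDOP == ~0xf (all 1's except the four
 low bits) is accepted, while ANDOP == 0xff00 is rejected because the 1's
 do not extend through the top of the word.  */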
e2c953b6 11326
c5059423 11327int
a2369ed3 11328includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11329{
e2c953b6 11330 if (GET_CODE (andop) == CONST_INT)
c5059423 11331 {
02071907 11332 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11333
02071907 11334 shift_mask = ~0;
c5059423
AM
11335 shift_mask <<= INTVAL (shiftop);
11336 c = INTVAL (andop);
11337
c1207243 11338 /* Find the least significant one bit. */
c5059423
AM
11339 lsb = c & -c;
11340
11341 /* It must be covered by the shift mask.
a4f6c312 11342 This test also rejects c == 0. */
c5059423
AM
11343 if ((lsb & shift_mask) == 0)
11344 return 0;
11345
11346 /* Check we have all 1's above the transition, and reject all 1's. */
11347 return c == -lsb && lsb != 1;
11348 }
11349 else if (GET_CODE (andop) == CONST_DOUBLE
11350 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11351 {
02071907 11352 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11353
11354 low = CONST_DOUBLE_LOW (andop);
11355
11356 if (HOST_BITS_PER_WIDE_INT < 64)
11357 {
02071907 11358 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11359
11360 high = CONST_DOUBLE_HIGH (andop);
11361
11362 if (low == 0)
11363 {
02071907 11364 shift_mask_high = ~0;
c5059423
AM
11365 if (INTVAL (shiftop) > 32)
11366 shift_mask_high <<= INTVAL (shiftop) - 32;
11367
11368 lsb = high & -high;
11369
11370 if ((lsb & shift_mask_high) == 0)
11371 return 0;
11372
11373 return high == -lsb;
11374 }
11375 if (high != ~0)
11376 return 0;
11377 }
11378
02071907 11379 shift_mask_low = ~0;
c5059423
AM
11380 shift_mask_low <<= INTVAL (shiftop);
11381
11382 lsb = low & -low;
11383
11384 if ((lsb & shift_mask_low) == 0)
11385 return 0;
11386
11387 return low == -lsb && lsb != 1;
11388 }
e2c953b6 11389 else
c5059423 11390 return 0;
9878760c 11391}
35068b43 11392
11ac38b2
DE
11393/* Return 1 if the operands will generate valid arguments for an rlwimi
11394instruction doing an insert with right shift in 64-bit mode. The mask may
11395not start on the first bit or stop on the last bit because the wrap-around
11396effects of the instruction do not correspond to the semantics of the RTL insn. */
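/* Illustrative values: SIZEOP == 8, STARTOP == 40, SHIFTOP == 4 satisfy
 every check below; STARTOP == 32 or SIZEOP + STARTOP == 64 would be
 rejected, since the inserted field would touch the first or last bit.  */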
11397
11398int
11399insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11400{
429ec7dc
DE
11401 if (INTVAL (startop) > 32
11402 && INTVAL (startop) < 64
11403 && INTVAL (sizeop) > 1
11404 && INTVAL (sizeop) + INTVAL (startop) < 64
11405 && INTVAL (shiftop) > 0
11406 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11407 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11408 return 1;
11409
11410 return 0;
11411}
11412
35068b43 11413/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11414 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11415
11416int
a2369ed3 11417registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11418{
11419 /* We might have been passed a SUBREG. */
f676971a 11420 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11421 return 0;
f676971a 11422
90f81f99
AP
11423 /* We might have been passed non floating point registers. */
11424 if (!FP_REGNO_P (REGNO (reg1))
11425 || !FP_REGNO_P (REGNO (reg2)))
11426 return 0;
35068b43
RK
11427
11428 return (REGNO (reg1) == REGNO (reg2) - 1);
11429}
11430
a4f6c312
SS
11431/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11432 addr1 and addr2 must be in consecutive memory locations
11433 (addr2 == addr1 + 8). */
35068b43
RK
11434
11435int
90f81f99 11436mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11437{
90f81f99 11438 rtx addr1, addr2;
bb8df8a6
EC
11439 unsigned int reg1, reg2;
11440 int offset1, offset2;
35068b43 11441
90f81f99
AP
11442 /* The mems cannot be volatile. */
11443 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11444 return 0;
f676971a 11445
90f81f99
AP
11446 addr1 = XEXP (mem1, 0);
11447 addr2 = XEXP (mem2, 0);
11448
35068b43
RK
11449 /* Extract an offset (if used) from the first addr. */
11450 if (GET_CODE (addr1) == PLUS)
11451 {
11452 /* If not a REG, return zero. */
11453 if (GET_CODE (XEXP (addr1, 0)) != REG)
11454 return 0;
11455 else
11456 {
c4ad648e 11457 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11458 /* The offset must be constant! */
11459 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11460 return 0;
11461 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11462 }
11463 }
11464 else if (GET_CODE (addr1) != REG)
11465 return 0;
11466 else
11467 {
11468 reg1 = REGNO (addr1);
11469 /* This was a simple (mem (reg)) expression. Offset is 0. */
11470 offset1 = 0;
11471 }
11472
bb8df8a6
EC
11473 /* And now for the second addr. */
11474 if (GET_CODE (addr2) == PLUS)
11475 {
11476 /* If not a REG, return zero. */
11477 if (GET_CODE (XEXP (addr2, 0)) != REG)
11478 return 0;
11479 else
11480 {
11481 reg2 = REGNO (XEXP (addr2, 0));
11482 /* The offset must be constant. */
11483 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11484 return 0;
11485 offset2 = INTVAL (XEXP (addr2, 1));
11486 }
11487 }
11488 else if (GET_CODE (addr2) != REG)
35068b43 11489 return 0;
bb8df8a6
EC
11490 else
11491 {
11492 reg2 = REGNO (addr2);
11493 /* This was a simple (mem (reg)) expression. Offset is 0. */
11494 offset2 = 0;
11495 }
35068b43 11496
bb8df8a6
EC
11497 /* Both of these must have the same base register. */
11498 if (reg1 != reg2)
35068b43
RK
11499 return 0;
11500
11501 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11502 if (offset2 != offset1 + 8)
35068b43
RK
11503 return 0;
11504
11505 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11506 instructions. */
11507 return 1;
11508}
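/* For example, MEMs addressed as 16(r9) and 24(r9) share a base register and
   differ by exactly 8, so they qualify; 16(r9) with 28(r9), or 16(r9) with
   24(r10), do not.  */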
9878760c 11509\f
e41b2a33
PB
11510
11511rtx
11512rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11513{
11514 static bool eliminated = false;
11515 if (mode != SDmode)
11516 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11517 else
11518 {
11519 rtx mem = cfun->machine->sdmode_stack_slot;
11520 gcc_assert (mem != NULL_RTX);
11521
11522 if (!eliminated)
11523 {
11524 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11525 cfun->machine->sdmode_stack_slot = mem;
11526 eliminated = true;
11527 }
11528 return mem;
11529 }
11530}
11531
11532static tree
11533rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11534{
11535 /* Don't walk into types. */
11536 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11537 {
11538 *walk_subtrees = 0;
11539 return NULL_TREE;
11540 }
11541
11542 switch (TREE_CODE (*tp))
11543 {
11544 case VAR_DECL:
11545 case PARM_DECL:
11546 case FIELD_DECL:
11547 case RESULT_DECL:
48f5b722 11548 case SSA_NAME:
e41b2a33 11549 case REAL_CST:
fdf4f148 11550 case INDIRECT_REF:
a0f39282
JJ
11551 case ALIGN_INDIRECT_REF:
11552 case MISALIGNED_INDIRECT_REF:
fdf4f148 11553 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11554 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11555 return *tp;
11556 break;
11557 default:
11558 break;
11559 }
11560
11561 return NULL_TREE;
11562}
11563
11564
11565/* Allocate a 64-bit stack slot to be used for copying SDmode
11566 values through if this function has any SDmode references. */
11567
11568static void
11569rs6000_alloc_sdmode_stack_slot (void)
11570{
11571 tree t;
11572 basic_block bb;
726a989a 11573 gimple_stmt_iterator gsi;
e41b2a33
PB
11574
11575 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11576
11577 FOR_EACH_BB (bb)
726a989a 11578 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11579 {
726a989a 11580 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11581 if (ret)
11582 {
11583 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11584 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11585 SDmode, 0);
11586 return;
11587 }
11588 }
11589
11590 /* Check for any SDmode parameters of the function. */
11591 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11592 {
11593 if (TREE_TYPE (t) == error_mark_node)
11594 continue;
11595
11596 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11597 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11598 {
11599 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11600 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11601 SDmode, 0);
11602 return;
11603 }
11604 }
11605}
11606
11607static void
11608rs6000_instantiate_decls (void)
11609{
11610 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11611 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11612}
11613
9878760c 11614/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11615 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11616 NO_REGS is returned. */
11617
11618enum reg_class
0a2aaacc 11619rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11620 enum machine_mode mode ATTRIBUTE_UNUSED,
11621 rtx in)
9878760c 11622{
5accd822 11623 int regno;
9878760c 11624
ab82a49f
AP
11625 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11626#if TARGET_MACHO
c4ad648e 11627 && MACHOPIC_INDIRECT
ab82a49f 11628#endif
c4ad648e 11629 ))
46fad5b7
DJ
11630 {
11631 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11632 other than BASE_REGS for TARGET_ELF. So indicate that a
11633 register from BASE_REGS is needed as an intermediate
11634 register.
f676971a 11635
46fad5b7
DJ
11636 On Darwin, pic addresses require a load from memory, which
11637 needs a base register. */
0a2aaacc 11638 if (rclass != BASE_REGS
c4ad648e
AM
11639 && (GET_CODE (in) == SYMBOL_REF
11640 || GET_CODE (in) == HIGH
11641 || GET_CODE (in) == LABEL_REF
11642 || GET_CODE (in) == CONST))
11643 return BASE_REGS;
46fad5b7 11644 }
e7b7998a 11645
5accd822
DE
11646 if (GET_CODE (in) == REG)
11647 {
11648 regno = REGNO (in);
11649 if (regno >= FIRST_PSEUDO_REGISTER)
11650 {
11651 regno = true_regnum (in);
11652 if (regno >= FIRST_PSEUDO_REGISTER)
11653 regno = -1;
11654 }
11655 }
11656 else if (GET_CODE (in) == SUBREG)
11657 {
11658 regno = true_regnum (in);
11659 if (regno >= FIRST_PSEUDO_REGISTER)
11660 regno = -1;
11661 }
11662 else
11663 regno = -1;
11664
9878760c
RK
11665 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11666 into anything. */
0a2aaacc 11667 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11668 || (regno >= 0 && INT_REGNO_P (regno)))
11669 return NO_REGS;
11670
11671 /* Constants, memory, and FP registers can go into FP registers. */
11672 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11673 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11674 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11675
0ac081f6
AH
11676 /* Memory, and AltiVec registers can go into AltiVec registers. */
11677 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11678 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11679 return NO_REGS;
11680
9878760c 11681 /* We can copy among the CR registers. */
0a2aaacc 11682 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11683 && regno >= 0 && CR_REGNO_P (regno))
11684 return NO_REGS;
11685
11686 /* Otherwise, we need GENERAL_REGS. */
11687 return GENERAL_REGS;
11688}
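/* For example, on TARGET_ELF a SYMBOL_REF destined for FLOAT_REGS needs a
   BASE_REGS register as an intermediate, while a copy whose source is already
   a general register needs no scratch and yields NO_REGS.  */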
11689\f
11690/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11691 know this is a valid comparison.
9878760c
RK
11692
11693 SCC_P is 1 if this is for an scc. That means that %D will have been
11694 used instead of %C, so the bits will be in different places.
11695
b4ac57ab 11696 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11697
11698int
a2369ed3 11699ccr_bit (rtx op, int scc_p)
9878760c
RK
11700{
11701 enum rtx_code code = GET_CODE (op);
11702 enum machine_mode cc_mode;
11703 int cc_regnum;
11704 int base_bit;
9ebbca7d 11705 rtx reg;
9878760c 11706
ec8e098d 11707 if (!COMPARISON_P (op))
9878760c
RK
11708 return -1;
11709
9ebbca7d
GK
11710 reg = XEXP (op, 0);
11711
37409796 11712 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11713
11714 cc_mode = GET_MODE (reg);
11715 cc_regnum = REGNO (reg);
11716 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11717
39a10a29 11718 validate_condition_mode (code, cc_mode);
c5defebb 11719
b7053a3f
GK
11720 /* When generating a sCOND operation, only positive conditions are
11721 allowed. */
37409796
NS
11722 gcc_assert (!scc_p
11723 || code == EQ || code == GT || code == LT || code == UNORDERED
11724 || code == GTU || code == LTU);
f676971a 11725
9878760c
RK
11726 switch (code)
11727 {
11728 case NE:
11729 return scc_p ? base_bit + 3 : base_bit + 2;
11730 case EQ:
11731 return base_bit + 2;
1c882ea4 11732 case GT: case GTU: case UNLE:
9878760c 11733 return base_bit + 1;
1c882ea4 11734 case LT: case LTU: case UNGE:
9878760c 11735 return base_bit;
1c882ea4
GK
11736 case ORDERED: case UNORDERED:
11737 return base_bit + 3;
9878760c
RK
11738
11739 case GE: case GEU:
39a10a29 11740 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11741 unordered position. So test that bit. For integer, this is ! LT
11742 unless this is an scc insn. */
39a10a29 11743 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11744
11745 case LE: case LEU:
39a10a29 11746 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11747
9878760c 11748 default:
37409796 11749 gcc_unreachable ();
9878760c
RK
11750 }
11751}
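/* For example, an NE test of CR1 has base_bit == 4, so ccr_bit returns 6 for
   a branch and 7 (the unordered position) when SCC_P is set.  */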
1ff7789b 11752\f
8d30c4ee 11753/* Return the GOT register. */
1ff7789b 11754
9390387d 11755rtx
a2369ed3 11756rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11757{
a4f6c312
SS
11758 /* The second flow pass currently (June 1999) can't update
11759 regs_ever_live without disturbing other parts of the compiler, so
11760 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11761 if (!can_create_pseudo_p ()
11762 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11763 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11764
e3b5732b 11765 crtl->uses_pic_offset_table = 1;
3cb999d8 11766
1ff7789b
MM
11767 return pic_offset_table_rtx;
11768}
a7df97e6 11769\f
e2500fed
GK
11770/* Function to init struct machine_function.
11771 This will be called, via a pointer variable,
11772 from push_function_context. */
a7df97e6 11773
e2500fed 11774static struct machine_function *
863d938c 11775rs6000_init_machine_status (void)
a7df97e6 11776{
5ead67f6 11777 return GGC_CNEW (machine_function);
a7df97e6 11778}
9878760c 11779\f
0ba1b2ff
AM
11780/* These macros test for integers and extract the low-order bits. */
11781#define INT_P(X) \
11782((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11783 && GET_MODE (X) == VOIDmode)
11784
11785#define INT_LOWPART(X) \
11786 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11787
11788int
a2369ed3 11789extract_MB (rtx op)
0ba1b2ff
AM
11790{
11791 int i;
11792 unsigned long val = INT_LOWPART (op);
11793
11794 /* If the high bit is zero, the value is the first 1 bit we find
11795 from the left. */
11796 if ((val & 0x80000000) == 0)
11797 {
37409796 11798 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11799
11800 i = 1;
11801 while (((val <<= 1) & 0x80000000) == 0)
11802 ++i;
11803 return i;
11804 }
11805
11806 /* If the high bit is set and the low bit is not, or the mask is all
11807 1's, the value is zero. */
11808 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11809 return 0;
11810
11811 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11812 from the right. */
11813 i = 31;
11814 while (((val >>= 1) & 1) != 0)
11815 --i;
11816
11817 return i;
11818}
11819
11820int
a2369ed3 11821extract_ME (rtx op)
0ba1b2ff
AM
11822{
11823 int i;
11824 unsigned long val = INT_LOWPART (op);
11825
11826 /* If the low bit is zero, the value is the first 1 bit we find from
11827 the right. */
11828 if ((val & 1) == 0)
11829 {
37409796 11830 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11831
11832 i = 30;
11833 while (((val >>= 1) & 1) == 0)
11834 --i;
11835
11836 return i;
11837 }
11838
11839 /* If the low bit is set and the high bit is not, or the mask is all
11840 1's, the value is 31. */
11841 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11842 return 31;
11843
11844 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11845 from the left. */
11846 i = 0;
11847 while (((val <<= 1) & 0x80000000) != 0)
11848 ++i;
11849
11850 return i;
11851}
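/* Worked example: the non-wrapping mask 0x0ffffff0 covers bits 4..27 in IBM
   bit numbering, so extract_MB returns 4 and extract_ME returns 27; for the
   mask 0x0000ffff they return 16 and 31 respectively.  */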
11852
c4501e62
JJ
11853/* Locate some local-dynamic symbol still in use by this function
11854 so that we can print its name in some tls_ld pattern. */
11855
11856static const char *
863d938c 11857rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11858{
11859 rtx insn;
11860
11861 if (cfun->machine->some_ld_name)
11862 return cfun->machine->some_ld_name;
11863
11864 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11865 if (INSN_P (insn)
11866 && for_each_rtx (&PATTERN (insn),
11867 rs6000_get_some_local_dynamic_name_1, 0))
11868 return cfun->machine->some_ld_name;
11869
37409796 11870 gcc_unreachable ();
c4501e62
JJ
11871}
11872
11873/* Helper function for rs6000_get_some_local_dynamic_name. */
11874
11875static int
a2369ed3 11876rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11877{
11878 rtx x = *px;
11879
11880 if (GET_CODE (x) == SYMBOL_REF)
11881 {
11882 const char *str = XSTR (x, 0);
11883 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11884 {
11885 cfun->machine->some_ld_name = str;
11886 return 1;
11887 }
11888 }
11889
11890 return 0;
11891}
11892
85b776df
AM
11893/* Write out a function code label. */
11894
11895void
11896rs6000_output_function_entry (FILE *file, const char *fname)
11897{
11898 if (fname[0] != '.')
11899 {
11900 switch (DEFAULT_ABI)
11901 {
11902 default:
37409796 11903 gcc_unreachable ();
85b776df
AM
11904
11905 case ABI_AIX:
11906 if (DOT_SYMBOLS)
11907 putc ('.', file);
11908 else
11909 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11910 break;
11911
11912 case ABI_V4:
11913 case ABI_DARWIN:
11914 break;
11915 }
11916 }
11917 if (TARGET_AIX)
11918 RS6000_OUTPUT_BASENAME (file, fname);
11919 else
11920 assemble_name (file, fname);
11921}
11922
9878760c
RK
11923/* Print an operand. Recognize special options, documented below. */
11924
38c1f2d7 11925#if TARGET_ELF
d9407988 11926#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11927#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11928#else
11929#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11930#define SMALL_DATA_REG 0
ba5e43aa
MM
11931#endif
11932
9878760c 11933void
a2369ed3 11934print_operand (FILE *file, rtx x, int code)
9878760c
RK
11935{
11936 int i;
a260abc9 11937 HOST_WIDE_INT val;
0ba1b2ff 11938 unsigned HOST_WIDE_INT uval;
9878760c
RK
11939
11940 switch (code)
11941 {
a8b3aeda 11942 case '.':
a85d226b
RK
11943 /* Write out an instruction after the call which may be replaced
11944 with glue code by the loader. This depends on the AIX version. */
11945 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11946 return;
11947
81eace42
GK
11948 /* %a is output_address. */
11949
9854d9ed
RK
11950 case 'A':
11951 /* If X is a constant integer whose low-order 5 bits are zero,
11952 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11953 in the AIX assembler where "sri" with a zero shift count
20e26713 11954 writes a trash instruction. */
9854d9ed 11955 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11956 putc ('l', file);
9854d9ed 11957 else
76229ac8 11958 putc ('r', file);
9854d9ed
RK
11959 return;
11960
11961 case 'b':
e2c953b6
DE
11962 /* If constant, low-order 16 bits of constant, unsigned.
11963 Otherwise, write normally. */
11964 if (INT_P (x))
11965 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11966 else
11967 print_operand (file, x, 0);
cad12a8d
RK
11968 return;
11969
a260abc9
DE
11970 case 'B':
11971 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11972 for 64-bit mask direction. */
9390387d 11973 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11974 return;
a260abc9 11975
81eace42
GK
11976 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11977 output_operand. */
11978
423c1189
AH
11979 case 'c':
11980 /* X is a CR register. Print the number of the GT bit of the CR. */
11981 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11982 output_operand_lossage ("invalid %%c value");
11983 else
11984 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11985 return;
11986
11987 case 'D':
cef6b86c 11988 /* Like 'J' but get to the GT bit only. */
37409796 11989 gcc_assert (GET_CODE (x) == REG);
423c1189 11990
cef6b86c
EB
11991 /* Bit 1 is GT bit. */
11992 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11993
cef6b86c
EB
11994 /* Add one for shift count in rlinm for scc. */
11995 fprintf (file, "%d", i + 1);
423c1189
AH
11996 return;
11997
9854d9ed 11998 case 'E':
39a10a29 11999 /* X is a CR register.  Print the number of the EQ bit of the CR.  */
9854d9ed
RK
12000 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12001 output_operand_lossage ("invalid %%E value");
78fbdbf7 12002 else
39a10a29 12003 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 12004 return;
9854d9ed
RK
12005
12006 case 'f':
12007 /* X is a CR register. Print the shift count needed to move it
12008 to the high-order four bits. */
12009 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12010 output_operand_lossage ("invalid %%f value");
12011 else
9ebbca7d 12012 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12013 return;
12014
12015 case 'F':
12016 /* Similar, but print the count for the rotate in the opposite
12017 direction. */
12018 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12019 output_operand_lossage ("invalid %%F value");
12020 else
9ebbca7d 12021 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
12022 return;
12023
12024 case 'G':
12025 /* X is a constant integer. If it is negative, print "m",
43aa4e05 12026 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
12027 if (GET_CODE (x) != CONST_INT)
12028 output_operand_lossage ("invalid %%G value");
12029 else if (INTVAL (x) >= 0)
76229ac8 12030 putc ('z', file);
9854d9ed 12031 else
76229ac8 12032 putc ('m', file);
9854d9ed 12033 return;
e2c953b6 12034
9878760c 12035 case 'h':
a4f6c312
SS
12036 /* If constant, output low-order five bits. Otherwise, write
12037 normally. */
9878760c 12038 if (INT_P (x))
5f59ecb7 12039 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12040 else
12041 print_operand (file, x, 0);
12042 return;
12043
64305719 12044 case 'H':
a4f6c312
SS
12045 /* If constant, output low-order six bits. Otherwise, write
12046 normally. */
64305719 12047 if (INT_P (x))
5f59ecb7 12048 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12049 else
12050 print_operand (file, x, 0);
12051 return;
12052
9854d9ed
RK
12053 case 'I':
12054 /* Print `i' if this is a constant, else nothing. */
9878760c 12055 if (INT_P (x))
76229ac8 12056 putc ('i', file);
9878760c
RK
12057 return;
12058
9854d9ed
RK
12059 case 'j':
12060 /* Write the bit number in CCR for jump. */
12061 i = ccr_bit (x, 0);
12062 if (i == -1)
12063 output_operand_lossage ("invalid %%j code");
9878760c 12064 else
9854d9ed 12065 fprintf (file, "%d", i);
9878760c
RK
12066 return;
12067
9854d9ed
RK
12068 case 'J':
12069 /* Similar, but add one for shift count in rlinm for scc and pass
12070 scc flag to `ccr_bit'. */
12071 i = ccr_bit (x, 1);
12072 if (i == -1)
12073 output_operand_lossage ("invalid %%J code");
12074 else
a0466a68
RK
12075 /* If we want bit 31, write a shift count of zero, not 32. */
12076 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12077 return;
12078
9854d9ed
RK
12079 case 'k':
12080 /* X must be a constant. Write the 1's complement of the
12081 constant. */
9878760c 12082 if (! INT_P (x))
9854d9ed 12083 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12084 else
12085 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12086 return;
12087
81eace42 12088 case 'K':
9ebbca7d
GK
12089 /* X must be a symbolic constant on ELF. Write an
12090 expression suitable for an 'addi' that adds in the low 16
12091 bits of the MEM. */
12092 if (GET_CODE (x) != CONST)
12093 {
12094 print_operand_address (file, x);
12095 fputs ("@l", file);
12096 }
12097 else
12098 {
12099 if (GET_CODE (XEXP (x, 0)) != PLUS
12100 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12101 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12102 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12103 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12104 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12105 fputs ("@l", file);
ed8d2920
MM
12106 /* For GNU as, there must be a non-alphanumeric character
12107 between 'l' and the number. The '-' is added by
12108 print_operand() already. */
12109 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12110 fputs ("+", file);
9ebbca7d
GK
12111 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12112 }
81eace42
GK
12113 return;
12114
12115 /* %l is output_asm_label. */
9ebbca7d 12116
9854d9ed
RK
12117 case 'L':
12118 /* Write second word of DImode or DFmode reference. Works on register
12119 or non-indexed memory only. */
12120 if (GET_CODE (x) == REG)
fb5c67a7 12121 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12122 else if (GET_CODE (x) == MEM)
12123 {
12124 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12125 we have already done it, we can just use an offset of word. */
9854d9ed
RK
12126 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12127 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12128 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12129 UNITS_PER_WORD));
6fb5fa3c
DB
12130 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12131 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12132 UNITS_PER_WORD));
9854d9ed 12133 else
d7624dc0
RK
12134 output_address (XEXP (adjust_address_nv (x, SImode,
12135 UNITS_PER_WORD),
12136 0));
ed8908e7 12137
ba5e43aa 12138 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12139 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12140 reg_names[SMALL_DATA_REG]);
9854d9ed 12141 }
9878760c 12142 return;
f676971a 12143
9878760c
RK
12144 case 'm':
12145 /* MB value for a mask operand. */
b1765bde 12146 if (! mask_operand (x, SImode))
9878760c
RK
12147 output_operand_lossage ("invalid %%m value");
12148
0ba1b2ff 12149 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12150 return;
12151
12152 case 'M':
12153 /* ME value for a mask operand. */
b1765bde 12154 if (! mask_operand (x, SImode))
a260abc9 12155 output_operand_lossage ("invalid %%M value");
9878760c 12156
0ba1b2ff 12157 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12158 return;
12159
81eace42
GK
12160 /* %n outputs the negative of its operand. */
12161
9878760c
RK
12162 case 'N':
12163 /* Write the number of elements in the vector times 4. */
12164 if (GET_CODE (x) != PARALLEL)
12165 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12166 else
12167 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12168 return;
12169
12170 case 'O':
12171 /* Similar, but subtract 1 first. */
12172 if (GET_CODE (x) != PARALLEL)
1427100a 12173 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12174 else
12175 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12176 return;
12177
9854d9ed
RK
12178 case 'p':
12179 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12180 if (! INT_P (x)
2bfcf297 12181 || INT_LOWPART (x) < 0
9854d9ed
RK
12182 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12183 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12184 else
12185 fprintf (file, "%d", i);
9854d9ed
RK
12186 return;
12187
9878760c
RK
12188 case 'P':
12189 /* The operand must be an indirect memory reference. The result
8bb418a3 12190 is the register name. */
9878760c
RK
12191 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12192 || REGNO (XEXP (x, 0)) >= 32)
12193 output_operand_lossage ("invalid %%P value");
e2c953b6 12194 else
fb5c67a7 12195 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12196 return;
12197
dfbdccdb
GK
12198 case 'q':
12199 /* This outputs the logical code corresponding to a boolean
12200 expression. The expression may have one or both operands
39a10a29 12201 negated (if one, only the first one). For condition register
c4ad648e
AM
12202 logical operations, it will also treat the negated
12203 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12204 {
63bc1d05 12205 const char *const *t = 0;
dfbdccdb
GK
12206 const char *s;
12207 enum rtx_code code = GET_CODE (x);
12208 static const char * const tbl[3][3] = {
12209 { "and", "andc", "nor" },
12210 { "or", "orc", "nand" },
12211 { "xor", "eqv", "xor" } };
12212
12213 if (code == AND)
12214 t = tbl[0];
12215 else if (code == IOR)
12216 t = tbl[1];
12217 else if (code == XOR)
12218 t = tbl[2];
12219 else
12220 output_operand_lossage ("invalid %%q value");
12221
12222 if (GET_CODE (XEXP (x, 0)) != NOT)
12223 s = t[0];
12224 else
12225 {
12226 if (GET_CODE (XEXP (x, 1)) == NOT)
12227 s = t[2];
12228 else
12229 s = t[1];
12230 }
f676971a 12231
dfbdccdb
GK
12232 fputs (s, file);
12233 }
12234 return;
12235
2c4a9cff
DE
12236 case 'Q':
12237 if (TARGET_MFCRF)
3b6ce0af 12238 fputc (',', file);
5efb1046 12239 /* FALLTHRU */
2c4a9cff
DE
12240 else
12241 return;
12242
9854d9ed
RK
12243 case 'R':
12244 /* X is a CR register. Print the mask for `mtcrf'. */
12245 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12246 output_operand_lossage ("invalid %%R value");
12247 else
9ebbca7d 12248 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12249 return;
9854d9ed
RK
12250
12251 case 's':
12252 /* Low 5 bits of 32 - value */
12253 if (! INT_P (x))
12254 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12255 else
12256 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12257 return;
9854d9ed 12258
a260abc9 12259 case 'S':
0ba1b2ff 12260 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12261 CONST_INT 32-bit mask is considered sign-extended so any
12262 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12263 if (! mask64_operand (x, DImode))
a260abc9
DE
12264 output_operand_lossage ("invalid %%S value");
12265
0ba1b2ff 12266 uval = INT_LOWPART (x);
a260abc9 12267
0ba1b2ff 12268 if (uval & 1) /* Clear Left */
a260abc9 12269 {
f099d360
GK
12270#if HOST_BITS_PER_WIDE_INT > 64
12271 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12272#endif
0ba1b2ff 12273 i = 64;
a260abc9 12274 }
0ba1b2ff 12275 else /* Clear Right */
a260abc9 12276 {
0ba1b2ff 12277 uval = ~uval;
f099d360
GK
12278#if HOST_BITS_PER_WIDE_INT > 64
12279 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12280#endif
0ba1b2ff 12281 i = 63;
a260abc9 12282 }
0ba1b2ff
AM
12283 while (uval != 0)
12284 --i, uval >>= 1;
37409796 12285 gcc_assert (i >= 0);
0ba1b2ff
AM
12286 fprintf (file, "%d", i);
12287 return;
a260abc9 12288
a3170dc6
AH
12289 case 't':
12290 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12291 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12292
12293 /* Bit 3 is OV bit. */
12294 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12295
12296 /* If we want bit 31, write a shift count of zero, not 32. */
12297 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12298 return;
12299
cccf3bdc
DE
12300 case 'T':
12301 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12302 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12303 && REGNO (x) != CTR_REGNO))
cccf3bdc 12304 output_operand_lossage ("invalid %%T value");
1de43f85 12305 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12306 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12307 else
12308 fputs ("ctr", file);
12309 return;
12310
9854d9ed 12311 case 'u':
802a0058 12312 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12313 if (! INT_P (x))
12314 output_operand_lossage ("invalid %%u value");
e2c953b6 12315 else
f676971a 12316 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12317 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12318 return;
12319
802a0058
MM
12320 case 'v':
12321 /* High-order 16 bits of constant for use in signed operand. */
12322 if (! INT_P (x))
12323 output_operand_lossage ("invalid %%v value");
e2c953b6 12324 else
134c32f6
DE
12325 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12326 (INT_LOWPART (x) >> 16) & 0xffff);
12327 return;
802a0058 12328
9854d9ed
RK
12329 case 'U':
12330 /* Print `u' if this has an auto-increment or auto-decrement. */
12331 if (GET_CODE (x) == MEM
12332 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12333 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12334 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12335 putc ('u', file);
9854d9ed 12336 return;
9878760c 12337
e0cd0770
JC
12338 case 'V':
12339 /* Print the trap code for this operand. */
12340 switch (GET_CODE (x))
12341 {
12342 case EQ:
12343 fputs ("eq", file); /* 4 */
12344 break;
12345 case NE:
12346 fputs ("ne", file); /* 24 */
12347 break;
12348 case LT:
12349 fputs ("lt", file); /* 16 */
12350 break;
12351 case LE:
12352 fputs ("le", file); /* 20 */
12353 break;
12354 case GT:
12355 fputs ("gt", file); /* 8 */
12356 break;
12357 case GE:
12358 fputs ("ge", file); /* 12 */
12359 break;
12360 case LTU:
12361 fputs ("llt", file); /* 2 */
12362 break;
12363 case LEU:
12364 fputs ("lle", file); /* 6 */
12365 break;
12366 case GTU:
12367 fputs ("lgt", file); /* 1 */
12368 break;
12369 case GEU:
12370 fputs ("lge", file); /* 5 */
12371 break;
12372 default:
37409796 12373 gcc_unreachable ();
e0cd0770
JC
12374 }
12375 break;
12376
9854d9ed
RK
12377 case 'w':
12378 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12379 normally. */
12380 if (INT_P (x))
f676971a 12381 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12382 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12383 else
12384 print_operand (file, x, 0);
9878760c
RK
12385 return;
12386
9854d9ed 12387 case 'W':
e2c953b6 12388 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12389 val = (GET_CODE (x) == CONST_INT
12390 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12391
12392 if (val < 0)
12393 i = -1;
9854d9ed 12394 else
e2c953b6
DE
12395 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12396 if ((val <<= 1) < 0)
12397 break;
12398
12399#if HOST_BITS_PER_WIDE_INT == 32
12400 if (GET_CODE (x) == CONST_INT && i >= 0)
12401 i += 32; /* zero-extend high-part was all 0's */
12402 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12403 {
12404 val = CONST_DOUBLE_LOW (x);
12405
37409796
NS
12406 gcc_assert (val);
12407 if (val < 0)
e2c953b6
DE
12408 --i;
12409 else
12410 for ( ; i < 64; i++)
12411 if ((val <<= 1) < 0)
12412 break;
12413 }
12414#endif
12415
12416 fprintf (file, "%d", i + 1);
9854d9ed 12417 return;
9878760c 12418
9854d9ed
RK
12419 case 'X':
12420 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12421 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12422 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12423 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12424 putc ('x', file);
9854d9ed 12425 return;
9878760c 12426
9854d9ed
RK
12427 case 'Y':
12428 /* Like 'L', for third word of TImode */
12429 if (GET_CODE (x) == REG)
fb5c67a7 12430 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12431 else if (GET_CODE (x) == MEM)
9878760c 12432 {
9854d9ed
RK
12433 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12434 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12435 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12436 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12437 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12438 else
d7624dc0 12439 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12440 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12441 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12442 reg_names[SMALL_DATA_REG]);
9878760c
RK
12443 }
12444 return;
f676971a 12445
9878760c 12446 case 'z':
b4ac57ab
RS
12447 /* X is a SYMBOL_REF. Write out the name preceded by a
12448 period and without any trailing data in brackets. Used for function
4d30c363
MM
12449 names. If we are configured for System V (or the embedded ABI) on
12450 the PowerPC, do not emit the period, since those systems do not use
12451 TOCs and the like. */
37409796 12452 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12453
c4ad648e
AM
12454 /* Mark the decl as referenced so that cgraph will output the
12455 function. */
9bf6462a 12456 if (SYMBOL_REF_DECL (x))
c4ad648e 12457 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12458
85b776df 12459 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12460 if (TARGET_MACHO)
12461 {
12462 const char *name = XSTR (x, 0);
a031e781 12463#if TARGET_MACHO
3b48085e 12464 if (MACHOPIC_INDIRECT
11abc112
MM
12465 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12466 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12467#endif
12468 assemble_name (file, name);
12469 }
85b776df 12470 else if (!DOT_SYMBOLS)
9739c90c 12471 assemble_name (file, XSTR (x, 0));
85b776df
AM
12472 else
12473 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12474 return;
12475
9854d9ed
RK
12476 case 'Z':
12477 /* Like 'L', for last word of TImode. */
12478 if (GET_CODE (x) == REG)
fb5c67a7 12479 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12480 else if (GET_CODE (x) == MEM)
12481 {
12482 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12483 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12484 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12485 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12486 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12487 else
d7624dc0 12488 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12489 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12490 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12491 reg_names[SMALL_DATA_REG]);
9854d9ed 12492 }
5c23c401 12493 return;
0ac081f6 12494
a3170dc6 12495 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12496 case 'y':
12497 {
12498 rtx tmp;
12499
37409796 12500 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12501
12502 tmp = XEXP (x, 0);
12503
90d3ff1c 12504 /* Ugly hack because %y is overloaded. */
8ef65e3d 12505 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12506 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12507 || GET_MODE (x) == TFmode
12508 || GET_MODE (x) == TImode))
a3170dc6
AH
12509 {
12510 /* Handle [reg]. */
12511 if (GET_CODE (tmp) == REG)
12512 {
12513 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12514 break;
12515 }
12516 /* Handle [reg+UIMM]. */
12517 else if (GET_CODE (tmp) == PLUS &&
12518 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12519 {
12520 int x;
12521
37409796 12522 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12523
12524 x = INTVAL (XEXP (tmp, 1));
12525 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12526 break;
12527 }
12528
12529 /* Fall through. Must be [reg+reg]. */
12530 }
850e8d3d
DN
12531 if (TARGET_ALTIVEC
12532 && GET_CODE (tmp) == AND
12533 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12534 && INTVAL (XEXP (tmp, 1)) == -16)
12535 tmp = XEXP (tmp, 0);
0ac081f6 12536 if (GET_CODE (tmp) == REG)
c62f2db5 12537 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12538 else
0ac081f6 12539 {
cb8cc791
AP
12540 if (GET_CODE (tmp) != PLUS
12541 || !REG_P (XEXP (tmp, 0))
12542 || !REG_P (XEXP (tmp, 1)))
12543 {
12544 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12545 break;
12546 }
bb8df8a6 12547
0ac081f6
AH
12548 if (REGNO (XEXP (tmp, 0)) == 0)
12549 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12550 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12551 else
12552 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12553 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12554 }
0ac081f6
AH
12555 break;
12556 }
f676971a 12557
9878760c
RK
12558 case 0:
12559 if (GET_CODE (x) == REG)
12560 fprintf (file, "%s", reg_names[REGNO (x)]);
12561 else if (GET_CODE (x) == MEM)
12562 {
12563 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12564 know the width from the mode. */
12565 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12566 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12567 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12568 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12569 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12570 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12571 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12572 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12573 else
a54d04b7 12574 output_address (XEXP (x, 0));
9878760c
RK
12575 }
12576 else
a54d04b7 12577 output_addr_const (file, x);
a85d226b 12578 return;
9878760c 12579
c4501e62
JJ
12580 case '&':
12581 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12582 return;
12583
9878760c
RK
12584 default:
12585 output_operand_lossage ("invalid %%xn code");
12586 }
12587}
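/* As an illustration only: in a hypothetical output template such as
   "rlwinm %0,%1,%h2,0xffffffff", %h2 prints just the low-order five bits of a
   constant operand 2, and %L1 names the register holding the second word of a
   DImode operand 1.  */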
12588\f
12589/* Print the address of an operand. */
12590
12591void
a2369ed3 12592print_operand_address (FILE *file, rtx x)
9878760c
RK
12593{
12594 if (GET_CODE (x) == REG)
4697a36c 12595 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12596 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12597 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12598 {
12599 output_addr_const (file, x);
ba5e43aa 12600 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12601 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12602 reg_names[SMALL_DATA_REG]);
37409796
NS
12603 else
12604 gcc_assert (!TARGET_TOC);
9878760c
RK
12605 }
12606 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12607 {
9024f4b8 12608 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12609 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12610 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12611 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12612 else
4697a36c
MM
12613 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12614 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12615 }
12616 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12617 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12618 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12619#if TARGET_ELF
12620 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12621 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12622 {
12623 output_addr_const (file, XEXP (x, 1));
12624 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12625 }
c859cda6
DJ
12626#endif
12627#if TARGET_MACHO
12628 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12629 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12630 {
12631 fprintf (file, "lo16(");
12632 output_addr_const (file, XEXP (x, 1));
12633 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12634 }
3cb999d8 12635#endif
4d588c14 12636 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12637 {
2e4316da 12638 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
12639 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12640 }
9878760c 12641 else
37409796 12642 gcc_unreachable ();
9878760c
RK
12643}
12644\f
2e4316da
RS
12645/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12646
12647bool
12648rs6000_output_addr_const_extra (FILE *file, rtx x)
12649{
12650 if (GET_CODE (x) == UNSPEC)
12651 switch (XINT (x, 1))
12652 {
12653 case UNSPEC_TOCREL:
12654 x = XVECEXP (x, 0, 0);
12655 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12656 output_addr_const (file, x);
12657 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12658 {
12659 putc ('-', file);
12660 assemble_name (file, toc_label_name);
12661 }
12662 else if (TARGET_ELF)
12663 fputs ("@toc", file);
12664 return true;
08a6a74b
RS
12665
12666#if TARGET_MACHO
12667 case UNSPEC_MACHOPIC_OFFSET:
12668 output_addr_const (file, XVECEXP (x, 0, 0));
12669 putc ('-', file);
12670 machopic_output_function_base_name (file);
12671 return true;
12672#endif
2e4316da
RS
12673 }
12674 return false;
12675}
12676\f
88cad84b 12677/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12678 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12679 is defined. It also needs to handle DI-mode objects on 64-bit
12680 targets. */
12681
12682static bool
a2369ed3 12683rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12684{
f4f4921e 12685#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12686 /* Special handling for SI values. */
84dcde01 12687 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12688 {
301d03af 12689 static int recurse = 0;
f676971a 12690
301d03af
RS
12691 /* For -mrelocatable, we mark all addresses that need to be fixed up
12692 in the .fixup section. */
12693 if (TARGET_RELOCATABLE
d6b5193b
RS
12694 && in_section != toc_section
12695 && in_section != text_section
4325ca90 12696 && !unlikely_text_section_p (in_section)
301d03af
RS
12697 && !recurse
12698 && GET_CODE (x) != CONST_INT
12699 && GET_CODE (x) != CONST_DOUBLE
12700 && CONSTANT_P (x))
12701 {
12702 char buf[256];
12703
12704 recurse = 1;
12705 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12706 fixuplabelno++;
12707 ASM_OUTPUT_LABEL (asm_out_file, buf);
12708 fprintf (asm_out_file, "\t.long\t(");
12709 output_addr_const (asm_out_file, x);
12710 fprintf (asm_out_file, ")@fixup\n");
12711 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12712 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12713 fprintf (asm_out_file, "\t.long\t");
12714 assemble_name (asm_out_file, buf);
12715 fprintf (asm_out_file, "\n\t.previous\n");
12716 recurse = 0;
12717 return true;
12718 }
12719 /* Remove initial .'s to turn a -mcall-aixdesc function
12720 address into the address of the descriptor, not the function
12721 itself. */
12722 else if (GET_CODE (x) == SYMBOL_REF
12723 && XSTR (x, 0)[0] == '.'
12724 && DEFAULT_ABI == ABI_AIX)
12725 {
12726 const char *name = XSTR (x, 0);
12727 while (*name == '.')
12728 name++;
12729
12730 fprintf (asm_out_file, "\t.long\t%s\n", name);
12731 return true;
12732 }
12733 }
f4f4921e 12734#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12735 return default_assemble_integer (x, size, aligned_p);
12736}
93638d7a
AM
12737
12738#ifdef HAVE_GAS_HIDDEN
12739/* Emit an assembler directive to set symbol visibility for DECL to
12740 VISIBILITY_TYPE. */
12741
5add3202 12742static void
a2369ed3 12743rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12744{
93638d7a
AM
12745 /* Functions need to have their entry point symbol visibility set as
12746 well as their descriptor symbol visibility. */
85b776df
AM
12747 if (DEFAULT_ABI == ABI_AIX
12748 && DOT_SYMBOLS
12749 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12750 {
25fdb4dc 12751 static const char * const visibility_types[] = {
c4ad648e 12752 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12753 };
12754
12755 const char *name, *type;
93638d7a
AM
12756
12757 name = ((* targetm.strip_name_encoding)
12758 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12759 type = visibility_types[vis];
93638d7a 12760
25fdb4dc
RH
12761 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12762 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12763 }
25fdb4dc
RH
12764 else
12765 default_assemble_visibility (decl, vis);
93638d7a
AM
12766}
12767#endif
301d03af 12768\f
39a10a29 12769enum rtx_code
a2369ed3 12770rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12771{
12772 /* Reversal of FP compares takes care -- an ordered compare
12773 becomes an unordered compare and vice versa. */
f676971a 12774 if (mode == CCFPmode
bc9ec0e0
GK
12775 && (!flag_finite_math_only
12776 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12777 || code == UNEQ || code == LTGT))
bab6226b 12778 return reverse_condition_maybe_unordered (code);
39a10a29 12779 else
bab6226b 12780 return reverse_condition (code);
39a10a29
GK
12781}
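/* For example, unless -ffinite-math-only is in effect a GE test in CCFPmode
   reverses to UNLT, while the same test in CCmode reverses to plain LT.  */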
12782
39a10a29
GK
12783/* Generate a compare for CODE. Return a brand-new rtx that
12784 represents the result of the compare. */
a4f6c312 12785
39a10a29 12786static rtx
a2369ed3 12787rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12788{
12789 enum machine_mode comp_mode;
12790 rtx compare_result;
12791
12792 if (rs6000_compare_fp_p)
12793 comp_mode = CCFPmode;
12794 else if (code == GTU || code == LTU
c4ad648e 12795 || code == GEU || code == LEU)
39a10a29 12796 comp_mode = CCUNSmode;
60934f9c
NS
12797 else if ((code == EQ || code == NE)
12798 && GET_CODE (rs6000_compare_op0) == SUBREG
12799 && GET_CODE (rs6000_compare_op1) == SUBREG
12800 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12801 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12802 /* These are unsigned values, perhaps there will be a later
12803 ordering compare that can be shared with this one.
12804 Unfortunately we cannot detect the signedness of the operands
12805 for non-subregs. */
12806 comp_mode = CCUNSmode;
39a10a29
GK
12807 else
12808 comp_mode = CCmode;
12809
12810 /* First, the compare. */
12811 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12812
cef6b86c 12813 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12814 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12815 && rs6000_compare_fp_p)
a3170dc6 12816 {
64022b5d 12817 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12818 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12819
12820 if (op_mode == VOIDmode)
12821 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12822
cef6b86c
EB
12823 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12824 This explains the following mess. */
423c1189 12825
a3170dc6
AH
12826 switch (code)
12827 {
423c1189 12828 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12829 switch (op_mode)
12830 {
12831 case SFmode:
1cdc0d8f 12832 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12833 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12834 rs6000_compare_op1)
12835 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12836 rs6000_compare_op1);
12837 break;
12838
12839 case DFmode:
1cdc0d8f 12840 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12841 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12842 rs6000_compare_op1)
12843 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12844 rs6000_compare_op1);
12845 break;
12846
17caeff2 12847 case TFmode:
1cdc0d8f 12848 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12849 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12850 rs6000_compare_op1)
12851 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12852 rs6000_compare_op1);
12853 break;
12854
37409796
NS
12855 default:
12856 gcc_unreachable ();
12857 }
a3170dc6 12858 break;
bb8df8a6 12859
423c1189 12860 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12861 switch (op_mode)
12862 {
12863 case SFmode:
1cdc0d8f 12864 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12865 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12866 rs6000_compare_op1)
12867 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12868 rs6000_compare_op1);
12869 break;
bb8df8a6 12870
37409796 12871 case DFmode:
1cdc0d8f 12872 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12873 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12874 rs6000_compare_op1)
12875 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12876 rs6000_compare_op1);
12877 break;
12878
17caeff2 12879 case TFmode:
1cdc0d8f 12880 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12881 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12882 rs6000_compare_op1)
12883 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12884 rs6000_compare_op1);
12885 break;
12886
37409796
NS
12887 default:
12888 gcc_unreachable ();
12889 }
a3170dc6 12890 break;
bb8df8a6 12891
423c1189 12892 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12893 switch (op_mode)
12894 {
12895 case SFmode:
1cdc0d8f 12896 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12897 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12898 rs6000_compare_op1)
12899 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12900 rs6000_compare_op1);
12901 break;
bb8df8a6 12902
37409796 12903 case DFmode:
1cdc0d8f 12904 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12905 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12906 rs6000_compare_op1)
12907 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12908 rs6000_compare_op1);
12909 break;
12910
17caeff2 12911 case TFmode:
1cdc0d8f 12912 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12913 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12914 rs6000_compare_op1)
12915 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12916 rs6000_compare_op1);
12917 break;
12918
37409796
NS
12919 default:
12920 gcc_unreachable ();
12921 }
a3170dc6 12922 break;
4d4cbc0e 12923 default:
37409796 12924 gcc_unreachable ();
a3170dc6
AH
12925 }
12926
12927 /* Synthesize LE and GE from LT/GT || EQ. */
12928 if (code == LE || code == GE || code == LEU || code == GEU)
12929 {
a3170dc6
AH
12930 emit_insn (cmp);
12931
12932 switch (code)
12933 {
12934 case LE: code = LT; break;
12935 case GE: code = GT; break;
12936 case LEU: code = LT; break;
12937 case GEU: code = GT; break;
37409796 12938 default: gcc_unreachable ();
a3170dc6
AH
12939 }
12940
a3170dc6
AH
12941 compare_result2 = gen_reg_rtx (CCFPmode);
12942
12943 /* Do the EQ. */
37409796
NS
12944 switch (op_mode)
12945 {
12946 case SFmode:
1cdc0d8f 12947 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12948 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12949 rs6000_compare_op1)
12950 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12951 rs6000_compare_op1);
12952 break;
12953
12954 case DFmode:
1cdc0d8f 12955 cmp = (flag_finite_math_only && !flag_trapping_math)
37409796
NS
12956 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12957 rs6000_compare_op1)
12958 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12959 rs6000_compare_op1);
12960 break;
12961
17caeff2 12962 case TFmode:
1cdc0d8f 12963 cmp = (flag_finite_math_only && !flag_trapping_math)
17caeff2
JM
12964 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12965 rs6000_compare_op1)
12966 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12967 rs6000_compare_op1);
12968 break;
12969
37409796
NS
12970 default:
12971 gcc_unreachable ();
12972 }
a3170dc6
AH
12973 emit_insn (cmp);
12974
a3170dc6 12975 /* OR them together. */
64022b5d
AH
12976 or_result = gen_reg_rtx (CCFPmode);
12977 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12978 compare_result2);
a3170dc6
AH
12979 compare_result = or_result;
12980 code = EQ;
12981 }
12982 else
12983 {
a3170dc6 12984 if (code == NE || code == LTGT)
a3170dc6 12985 code = NE;
423c1189
AH
12986 else
12987 code = EQ;
a3170dc6
AH
12988 }
12989
12990 emit_insn (cmp);
12991 }
12992 else
de17c25f
DE
12993 {
12994 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12995 CLOBBERs to match cmptf_internal2 pattern. */
12996 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12997 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12998 && !TARGET_IEEEQUAD
de17c25f
DE
12999 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
13000 emit_insn (gen_rtx_PARALLEL (VOIDmode,
13001 gen_rtvec (9,
13002 gen_rtx_SET (VOIDmode,
13003 compare_result,
13004 gen_rtx_COMPARE (comp_mode,
13005 rs6000_compare_op0,
13006 rs6000_compare_op1)),
13007 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13008 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13009 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13010 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13011 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13012 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13013 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13014 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
13015 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
13016 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
13017 {
13018 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
13019 comp_mode = CCEQmode;
13020 compare_result = gen_reg_rtx (CCEQmode);
13021 if (TARGET_64BIT)
13022 emit_insn (gen_stack_protect_testdi (compare_result,
13023 rs6000_compare_op0, op1));
13024 else
13025 emit_insn (gen_stack_protect_testsi (compare_result,
13026 rs6000_compare_op0, op1));
13027 }
de17c25f
DE
13028 else
13029 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
13030 gen_rtx_COMPARE (comp_mode,
13031 rs6000_compare_op0,
13032 rs6000_compare_op1)));
13033 }
f676971a 13034
ca5adc63 13035 /* Some kinds of FP comparisons need an OR operation;
e7108df9 13036 under flag_finite_math_only we don't bother. */
39a10a29 13037 if (rs6000_compare_fp_p
e7108df9 13038 && !flag_finite_math_only
8ef65e3d 13039 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13040 && (code == LE || code == GE
13041 || code == UNEQ || code == LTGT
13042 || code == UNGT || code == UNLT))
13043 {
13044 enum rtx_code or1, or2;
13045 rtx or1_rtx, or2_rtx, compare2_rtx;
13046 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13047
39a10a29
GK
13048 switch (code)
13049 {
13050 case LE: or1 = LT; or2 = EQ; break;
13051 case GE: or1 = GT; or2 = EQ; break;
13052 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13053 case LTGT: or1 = LT; or2 = GT; break;
13054 case UNGT: or1 = UNORDERED; or2 = GT; break;
13055 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13056 default: gcc_unreachable ();
39a10a29
GK
13057 }
13058 validate_condition_mode (or1, comp_mode);
13059 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13060 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13061 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13062 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13063 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13064 const_true_rtx);
13065 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13066
13067 compare_result = or_result;
13068 code = EQ;
13069 }
13070
13071 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13072
1c563bed 13073 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13074}
13075
13076
13077/* Emit the RTL for an sCOND pattern. */
13078
13079void
a2369ed3 13080rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
13081{
13082 rtx condition_rtx;
13083 enum machine_mode op_mode;
b7053a3f 13084 enum rtx_code cond_code;
39a10a29
GK
13085
13086 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
13087 cond_code = GET_CODE (condition_rtx);
13088
8ef65e3d 13089 if (rs6000_compare_fp_p
423c1189
AH
13090 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13091 {
13092 rtx t;
13093
13094 PUT_MODE (condition_rtx, SImode);
13095 t = XEXP (condition_rtx, 0);
13096
37409796 13097 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13098
13099 if (cond_code == NE)
64022b5d 13100 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13101
64022b5d 13102 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13103 return;
13104 }
13105
b7053a3f
GK
13106 if (cond_code == NE
13107 || cond_code == GE || cond_code == LE
13108 || cond_code == GEU || cond_code == LEU
13109 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13110 {
13111 rtx not_result = gen_reg_rtx (CCEQmode);
13112 rtx not_op, rev_cond_rtx;
13113 enum machine_mode cc_mode;
f676971a 13114
b7053a3f
GK
13115 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13116
1c563bed 13117 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13118 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13119 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13120 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13121 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13122 }
39a10a29
GK
13123
13124 op_mode = GET_MODE (rs6000_compare_op0);
13125 if (op_mode == VOIDmode)
13126 op_mode = GET_MODE (rs6000_compare_op1);
13127
13128 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
13129 {
13130 PUT_MODE (condition_rtx, DImode);
13131 convert_move (result, condition_rtx, 0);
13132 }
13133 else
13134 {
13135 PUT_MODE (condition_rtx, SImode);
13136 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13137 }
13138}
13139
39a10a29
GK
13140/* Emit a branch of kind CODE to location LOC. */
13141
13142void
a2369ed3 13143rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
13144{
13145 rtx condition_rtx, loc_ref;
13146
13147 condition_rtx = rs6000_generate_compare (code);
13148 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
13149 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13150 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13151 loc_ref, pc_rtx)));
13152}
13153
12a4e8c5
GK
13154/* Return the string to output a conditional branch to LABEL, which is
13155 the operand number of the label, or -1 if the branch is really a
f676971a 13156 conditional return.
12a4e8c5
GK
13157
13158 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13159 condition code register and its mode specifies what kind of
13160 comparison we made.
13161
a0ab749a 13162 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13163
13164 INSN is the insn. */
13165
13166char *
a2369ed3 13167output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13168{
13169 static char string[64];
13170 enum rtx_code code = GET_CODE (op);
13171 rtx cc_reg = XEXP (op, 0);
13172 enum machine_mode mode = GET_MODE (cc_reg);
13173 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13174 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13175 int really_reversed = reversed ^ need_longbranch;
13176 char *s = string;
13177 const char *ccode;
13178 const char *pred;
13179 rtx note;
13180
39a10a29
GK
13181 validate_condition_mode (code, mode);
13182
13183 /* Work out which way this really branches. We could use
 13184	 reverse_condition_maybe_unordered here always, but using
 13185	 reverse_condition where possible makes the resulting assembler clearer.  */
12a4e8c5 13186 if (really_reversed)
de40e1df
DJ
13187 {
 13188	 /* Reversal of FP compares needs care -- an ordered compare
13189 becomes an unordered compare and vice versa. */
13190 if (mode == CCFPmode)
13191 code = reverse_condition_maybe_unordered (code);
13192 else
13193 code = reverse_condition (code);
13194 }
12a4e8c5 13195
8ef65e3d 13196 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13197 {
13198 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13199 to the GT bit. */
37409796
NS
13200 switch (code)
13201 {
13202 case EQ:
13203 /* Opposite of GT. */
13204 code = GT;
13205 break;
13206
13207 case NE:
13208 code = UNLE;
13209 break;
13210
13211 default:
13212 gcc_unreachable ();
13213 }
a3170dc6
AH
13214 }
13215
39a10a29 13216 switch (code)
12a4e8c5
GK
13217 {
13218 /* Not all of these are actually distinct opcodes, but
13219 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13220 case NE: case LTGT:
13221 ccode = "ne"; break;
13222 case EQ: case UNEQ:
13223 ccode = "eq"; break;
f676971a 13224 case GE: case GEU:
50a0b056 13225 ccode = "ge"; break;
f676971a 13226 case GT: case GTU: case UNGT:
50a0b056 13227 ccode = "gt"; break;
f676971a 13228 case LE: case LEU:
50a0b056 13229 ccode = "le"; break;
f676971a 13230 case LT: case LTU: case UNLT:
50a0b056 13231 ccode = "lt"; break;
12a4e8c5
GK
13232 case UNORDERED: ccode = "un"; break;
13233 case ORDERED: ccode = "nu"; break;
13234 case UNGE: ccode = "nl"; break;
13235 case UNLE: ccode = "ng"; break;
13236 default:
37409796 13237 gcc_unreachable ();
12a4e8c5 13238 }
f676971a
EC
13239
13240 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13241 The old mnemonics don't have a way to specify this information. */
f4857b9b 13242 pred = "";
12a4e8c5
GK
13243 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13244 if (note != NULL_RTX)
13245 {
13246 /* PROB is the difference from 50%. */
13247 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13248
13249 /* Only hint for highly probable/improbable branches on newer
13250 cpus as static prediction overrides processor dynamic
13251 prediction. For older cpus we may as well always hint, but
13252 assume not taken for branches that are very close to 50% as a
13253 mispredicted taken branch is more expensive than a
f676971a 13254 mispredicted not-taken branch. */
ec507f2d 13255 if (rs6000_always_hint
2c9e13f3
JH
13256 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13257 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13258 {
13259 if (abs (prob) > REG_BR_PROB_BASE / 20
13260 && ((prob > 0) ^ need_longbranch))
c4ad648e 13261 pred = "+";
f4857b9b
AM
13262 else
13263 pred = "-";
13264 }
12a4e8c5 13265 }
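 /* With the new mnemonics the result is a branch along the lines of
    "bne+ 7,.L42" (a likely-taken NE test on CR7) or, when LABEL is
    NULL, a conditional return such as "bnelr+ 7".  */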
12a4e8c5
GK
13266
13267 if (label == NULL)
94a54f47 13268 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13269 else
94a54f47 13270 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13271
37c67319 13272 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13273 Assume they'd only be the first character.... */
37c67319
GK
13274 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13275 *s++ = '%';
94a54f47 13276 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13277
13278 if (label != NULL)
13279 {
13280 /* If the branch distance was too far, we may have to use an
13281 unconditional branch to go the distance. */
13282 if (need_longbranch)
44518ddd 13283 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13284 else
13285 s += sprintf (s, ",%s", label);
13286 }
13287
13288 return string;
13289}
50a0b056 13290
64022b5d 13291/* Return the string to flip the GT bit on a CR. */
423c1189 13292char *
64022b5d 13293output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13294{
13295 static char string[64];
13296 int a, b;
13297
37409796
NS
13298 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13299 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13300
64022b5d
AH
13301 /* GT bit. */
13302 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13303 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
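  /* Each CR field occupies four consecutive CR bits (LT, GT, EQ, SO),
     so the GT bit of field N is CR bit 4*N + 1; e.g. if DST and SRC
     are both CR6, A and B are 25 and we emit "crnot 25,25".  */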
423c1189
AH
13304
13305 sprintf (string, "crnot %d,%d", a, b);
13306 return string;
13307}
13308
21213b4c
DP
13309/* Return insn index for the vector compare instruction for given CODE,
13310 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
13311 not available. */
13312
13313static int
94ff898d 13314get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13315 enum machine_mode dest_mode,
13316 enum machine_mode op_mode)
13317{
13318 if (!TARGET_ALTIVEC)
13319 return INSN_NOT_AVAILABLE;
13320
13321 switch (code)
13322 {
13323 case EQ:
13324 if (dest_mode == V16QImode && op_mode == V16QImode)
13325 return UNSPEC_VCMPEQUB;
13326 if (dest_mode == V8HImode && op_mode == V8HImode)
13327 return UNSPEC_VCMPEQUH;
13328 if (dest_mode == V4SImode && op_mode == V4SImode)
13329 return UNSPEC_VCMPEQUW;
13330 if (dest_mode == V4SImode && op_mode == V4SFmode)
13331 return UNSPEC_VCMPEQFP;
13332 break;
13333 case GE:
13334 if (dest_mode == V4SImode && op_mode == V4SFmode)
 13335	  return UNSPEC_VCMPGEFP;
      /* There is no vcmpge* instruction for integer modes; let integer
	 GE fall back to the GT-or-EQ expansion in
	 rs6000_emit_vector_compare.  */
      break;
 13336    case GT:
13337 if (dest_mode == V16QImode && op_mode == V16QImode)
13338 return UNSPEC_VCMPGTSB;
13339 if (dest_mode == V8HImode && op_mode == V8HImode)
13340 return UNSPEC_VCMPGTSH;
13341 if (dest_mode == V4SImode && op_mode == V4SImode)
13342 return UNSPEC_VCMPGTSW;
13343 if (dest_mode == V4SImode && op_mode == V4SFmode)
13344 return UNSPEC_VCMPGTFP;
13345 break;
13346 case GTU:
13347 if (dest_mode == V16QImode && op_mode == V16QImode)
13348 return UNSPEC_VCMPGTUB;
13349 if (dest_mode == V8HImode && op_mode == V8HImode)
13350 return UNSPEC_VCMPGTUH;
13351 if (dest_mode == V4SImode && op_mode == V4SImode)
13352 return UNSPEC_VCMPGTUW;
13353 break;
13354 default:
13355 break;
13356 }
13357 return INSN_NOT_AVAILABLE;
13358}
13359
13360/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13361 DMODE is expected destination mode. This is a recursive function. */
13362
13363static rtx
13364rs6000_emit_vector_compare (enum rtx_code rcode,
13365 rtx op0, rtx op1,
13366 enum machine_mode dmode)
13367{
13368 int vec_cmp_insn;
13369 rtx mask;
13370 enum machine_mode dest_mode;
13371 enum machine_mode op_mode = GET_MODE (op1);
13372
37409796
NS
13373 gcc_assert (TARGET_ALTIVEC);
13374 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13375
 13376	 /* Floating-point vector compare instructions use a V4SImode destination.
 13377	 Move the result to the appropriate mode later.  */
13378 if (dmode == V4SFmode)
13379 dest_mode = V4SImode;
13380 else
13381 dest_mode = dmode;
13382
13383 mask = gen_reg_rtx (dest_mode);
13384 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13385
13386 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13387 {
13388 bool swap_operands = false;
13389 bool try_again = false;
13390 switch (rcode)
13391 {
13392 case LT:
13393 rcode = GT;
13394 swap_operands = true;
13395 try_again = true;
13396 break;
13397 case LTU:
13398 rcode = GTU;
13399 swap_operands = true;
13400 try_again = true;
13401 break;
13402 case NE:
370df7db
JC
13403 case UNLE:
13404 case UNLT:
13405 case UNGE:
13406 case UNGT:
13407 /* Invert condition and try again.
13408 e.g., A != B becomes ~(A==B). */
21213b4c 13409 {
370df7db 13410 enum rtx_code rev_code;
21213b4c 13411 enum insn_code nor_code;
d1123cde 13412 rtx eq_rtx;
370df7db
JC
13413
13414 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13415 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13416 dest_mode);
94ff898d 13417
166cdb08 13418 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13419 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13420 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13421
13422 if (dmode != dest_mode)
13423 {
13424 rtx temp = gen_reg_rtx (dest_mode);
13425 convert_move (temp, mask, 0);
13426 return temp;
13427 }
13428 return mask;
13429 }
13430 break;
13431 case GE:
13432 case GEU:
13433 case LE:
13434 case LEU:
13435 /* Try GT/GTU/LT/LTU OR EQ */
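	/* For example, a >= b is computed as (a > b) | (a == b).  */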
13436 {
13437 rtx c_rtx, eq_rtx;
13438 enum insn_code ior_code;
13439 enum rtx_code new_code;
13440
37409796
NS
13441 switch (rcode)
13442 {
13443 case GE:
13444 new_code = GT;
13445 break;
13446
13447 case GEU:
13448 new_code = GTU;
13449 break;
13450
13451 case LE:
13452 new_code = LT;
13453 break;
13454
13455 case LEU:
13456 new_code = LTU;
13457 break;
13458
13459 default:
13460 gcc_unreachable ();
13461 }
21213b4c
DP
13462
13463 c_rtx = rs6000_emit_vector_compare (new_code,
13464 op0, op1, dest_mode);
13465 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13466 dest_mode);
13467
166cdb08 13468 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13469 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13470 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13471 if (dmode != dest_mode)
13472 {
13473 rtx temp = gen_reg_rtx (dest_mode);
13474 convert_move (temp, mask, 0);
13475 return temp;
13476 }
13477 return mask;
13478 }
13479 break;
13480 default:
37409796 13481 gcc_unreachable ();
21213b4c
DP
13482 }
13483
13484 if (try_again)
13485 {
13486 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13487 /* You only get two chances. */
13488 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13489 }
13490
13491 if (swap_operands)
13492 {
13493 rtx tmp;
13494 tmp = op0;
13495 op0 = op1;
13496 op1 = tmp;
13497 }
13498 }
13499
915167f5
GK
13500 emit_insn (gen_rtx_SET (VOIDmode, mask,
13501 gen_rtx_UNSPEC (dest_mode,
13502 gen_rtvec (2, op0, op1),
13503 vec_cmp_insn)));
21213b4c
DP
13504 if (dmode != dest_mode)
13505 {
13506 rtx temp = gen_reg_rtx (dest_mode);
13507 convert_move (temp, mask, 0);
13508 return temp;
13509 }
13510 return mask;
13511}
13512
 13513/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
 13514 if no valid insn exists for the given mode.  */
13515
13516static int
13517get_vsel_insn (enum machine_mode mode)
13518{
13519 switch (mode)
13520 {
13521 case V4SImode:
13522 return UNSPEC_VSEL4SI;
13523 break;
13524 case V4SFmode:
13525 return UNSPEC_VSEL4SF;
13526 break;
13527 case V8HImode:
13528 return UNSPEC_VSEL8HI;
13529 break;
13530 case V16QImode:
13531 return UNSPEC_VSEL16QI;
13532 break;
13533 default:
13534 return INSN_NOT_AVAILABLE;
13535 break;
13536 }
13537 return INSN_NOT_AVAILABLE;
13538}
13539
13540/* Emit vector select insn where DEST is destination using
13541 operands OP1, OP2 and MASK. */
13542
13543static void
13544rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13545{
13546 rtx t, temp;
13547 enum machine_mode dest_mode = GET_MODE (dest);
13548 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13549
13550 temp = gen_reg_rtx (dest_mode);
94ff898d 13551
bb8df8a6 13552 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13553 select op2. */
915167f5
GK
13554 t = gen_rtx_SET (VOIDmode, temp,
13555 gen_rtx_UNSPEC (dest_mode,
13556 gen_rtvec (3, op2, op1, mask),
13557 vsel_insn_index));
21213b4c
DP
13558 emit_insn (t);
13559 emit_move_insn (dest, temp);
13560 return;
13561}
13562
94ff898d 13563/* Emit vector conditional expression.
21213b4c
DP
13564 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13565 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13566
13567int
13568rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13569 rtx cond, rtx cc_op0, rtx cc_op1)
13570{
13571 enum machine_mode dest_mode = GET_MODE (dest);
13572 enum rtx_code rcode = GET_CODE (cond);
13573 rtx mask;
13574
13575 if (!TARGET_ALTIVEC)
13576 return 0;
13577
13578 /* Get the vector mask for the given relational operations. */
13579 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13580
13581 rs6000_emit_vector_select (dest, op1, op2, mask);
13582
13583 return 1;
13584}
13585
50a0b056
GK
13586/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13587 operands of the last comparison is nonzero/true, FALSE_COND if it
13588 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13589
50a0b056 13590int
a2369ed3 13591rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13592{
13593 enum rtx_code code = GET_CODE (op);
13594 rtx op0 = rs6000_compare_op0;
13595 rtx op1 = rs6000_compare_op1;
13596 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13597 enum machine_mode compare_mode = GET_MODE (op0);
13598 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13599 rtx temp;
add2402e 13600 bool is_against_zero;
50a0b056 13601
a3c9585f 13602 /* These modes should always match. */
a3170dc6
AH
13603 if (GET_MODE (op1) != compare_mode
13604 /* In the isel case however, we can use a compare immediate, so
13605 op1 may be a small constant. */
13606 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13607 return 0;
178c3eff 13608 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13609 return 0;
178c3eff 13610 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13611 return 0;
13612
50a0b056 13613 /* First, work out if the hardware can do this at all, or
a3c9585f 13614 if it's too slow.... */
50a0b056 13615 if (! rs6000_compare_fp_p)
a3170dc6
AH
13616 {
13617 if (TARGET_ISEL)
13618 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13619 return 0;
13620 }
8ef65e3d 13621 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13622 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13623 return 0;
50a0b056 13624
add2402e 13625 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13626
add2402e
GK
13627 /* A floating-point subtract might overflow, underflow, or produce
13628 an inexact result, thus changing the floating-point flags, so it
13629 can't be generated if we care about that. It's safe if one side
13630 of the construct is zero, since then no subtract will be
13631 generated. */
ebb109ad 13632 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13633 && flag_trapping_math && ! is_against_zero)
13634 return 0;
13635
50a0b056
GK
13636 /* Eliminate half of the comparisons by switching operands, this
13637 makes the remaining code simpler. */
13638 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13639 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13640 {
13641 code = reverse_condition_maybe_unordered (code);
13642 temp = true_cond;
13643 true_cond = false_cond;
13644 false_cond = temp;
13645 }
13646
13647 /* UNEQ and LTGT take four instructions for a comparison with zero,
13648 it'll probably be faster to use a branch here too. */
bc9ec0e0 13649 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13650 return 0;
f676971a 13651
50a0b056
GK
13652 if (GET_CODE (op1) == CONST_DOUBLE)
13653 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13654
b6d08ca1 13655 /* We're going to try to implement comparisons by performing
50a0b056
GK
13656 a subtract, then comparing against zero. Unfortunately,
13657 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13658 know that the operand is finite and the comparison
50a0b056 13659	 would treat EQ differently from UNORDERED, we can't do it.  */
bc9ec0e0 13660 if (HONOR_INFINITIES (compare_mode)
50a0b056 13661 && code != GT && code != UNGE
045572c7 13662 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13663 /* Constructs of the form (a OP b ? a : b) are safe. */
13664 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13665 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13666 && ! rtx_equal_p (op1, true_cond))))
13667 return 0;
add2402e 13668
50a0b056
GK
13669 /* At this point we know we can use fsel. */
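 /* fsel D,A,C,B sets D to C when A >= 0.0 and to B otherwise (a NaN
    in A selects B), so the code below phrases everything as a GE test
    of op0 against zero.  */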
13670
13671 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13672 if (! is_against_zero)
13673 {
13674 temp = gen_reg_rtx (compare_mode);
13675 emit_insn (gen_rtx_SET (VOIDmode, temp,
13676 gen_rtx_MINUS (compare_mode, op0, op1)));
13677 op0 = temp;
13678 op1 = CONST0_RTX (compare_mode);
13679 }
50a0b056
GK
13680
13681 /* If we don't care about NaNs we can reduce some of the comparisons
13682 down to faster ones. */
bc9ec0e0 13683 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13684 switch (code)
13685 {
13686 case GT:
13687 code = LE;
13688 temp = true_cond;
13689 true_cond = false_cond;
13690 false_cond = temp;
13691 break;
13692 case UNGE:
13693 code = GE;
13694 break;
13695 case UNEQ:
13696 code = EQ;
13697 break;
13698 default:
13699 break;
13700 }
13701
13702 /* Now, reduce everything down to a GE. */
13703 switch (code)
13704 {
13705 case GE:
13706 break;
13707
13708 case LE:
3148ad6d
DJ
13709 temp = gen_reg_rtx (compare_mode);
13710 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13711 op0 = temp;
13712 break;
13713
13714 case ORDERED:
3148ad6d
DJ
13715 temp = gen_reg_rtx (compare_mode);
13716 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13717 op0 = temp;
13718 break;
13719
13720 case EQ:
3148ad6d 13721 temp = gen_reg_rtx (compare_mode);
f676971a 13722 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13723 gen_rtx_NEG (compare_mode,
13724 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13725 op0 = temp;
13726 break;
13727
13728 case UNGE:
bc9ec0e0 13729 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13730 temp = gen_reg_rtx (result_mode);
50a0b056 13731 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13732 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13733 gen_rtx_GE (VOIDmode,
13734 op0, op1),
13735 true_cond, false_cond)));
bc9ec0e0
GK
13736 false_cond = true_cond;
13737 true_cond = temp;
50a0b056 13738
3148ad6d
DJ
13739 temp = gen_reg_rtx (compare_mode);
13740 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13741 op0 = temp;
13742 break;
13743
13744 case GT:
bc9ec0e0 13745 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13746 temp = gen_reg_rtx (result_mode);
50a0b056 13747 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13748 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13749 gen_rtx_GE (VOIDmode,
13750 op0, op1),
13751 true_cond, false_cond)));
bc9ec0e0
GK
13752 true_cond = false_cond;
13753 false_cond = temp;
50a0b056 13754
3148ad6d
DJ
13755 temp = gen_reg_rtx (compare_mode);
13756 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13757 op0 = temp;
13758 break;
13759
13760 default:
37409796 13761 gcc_unreachable ();
50a0b056
GK
13762 }
13763
13764 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13765 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13766 gen_rtx_GE (VOIDmode,
13767 op0, op1),
13768 true_cond, false_cond)));
13769 return 1;
13770}
13771
a3170dc6
AH
13772/* Same as above, but for ints (isel). */
13773
13774static int
a2369ed3 13775rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13776{
13777 rtx condition_rtx, cr;
13778
 13779	 /* All isel implementations thus far are 32-bit.  */
13780 if (GET_MODE (rs6000_compare_op0) != SImode)
13781 return 0;
13782
13783 /* We still have to do the compare, because isel doesn't do a
13784 compare, it just looks at the CRx bits set by a previous compare
13785 instruction. */
13786 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13787 cr = XEXP (condition_rtx, 0);
13788
13789 if (GET_MODE (cr) == CCmode)
13790 emit_insn (gen_isel_signed (dest, condition_rtx,
13791 true_cond, false_cond, cr));
13792 else
13793 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13794 true_cond, false_cond, cr));
13795
13796 return 1;
13797}
13798
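/* Output an isel instruction.  isel selects between its two source
   operands based on a single CR bit, so conditions that correspond to
   a cleared bit (GE, GEU, LE, LEU, NE) are handled by reversing the
   condition and swapping the two source operands.  */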
13799const char *
a2369ed3 13800output_isel (rtx *operands)
a3170dc6
AH
13801{
13802 enum rtx_code code;
13803
13804 code = GET_CODE (operands[1]);
13805 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13806 {
13807 PUT_CODE (operands[1], reverse_condition (code));
13808 return "isel %0,%3,%2,%j1";
13809 }
13810 else
13811 return "isel %0,%2,%3,%j1";
13812}
13813
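/* Emit the minimum or maximum of OP0 and OP1 into DEST, where CODE is
   one of SMIN, SMAX, UMIN or UMAX, by emitting a conditional move on a
   GE (or GEU) comparison of the two operands.  */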
50a0b056 13814void
a2369ed3 13815rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13816{
13817 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13818 enum rtx_code c;
50a0b056 13819 rtx target;
5dc8d536
AH
13820
13821 if (code == SMAX || code == SMIN)
13822 c = GE;
13823 else
13824 c = GEU;
13825
50a0b056 13826 if (code == SMAX || code == UMAX)
f676971a 13827 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13828 op0, op1, mode, 0);
13829 else
f676971a 13830 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13831 op1, op0, mode, 0);
37409796 13832 gcc_assert (target);
50a0b056
GK
13833 if (target != dest)
13834 emit_move_insn (dest, target);
13835}
46c07df8 13836
915167f5
GK
13837/* Emit instructions to perform a load-reserved/store-conditional operation.
13838 The operation performed is an atomic
13839 (set M (CODE:MODE M OP))
13840 If not NULL, BEFORE is atomically set to M before the operation, and
13841 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13842 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13843 Either OP or M may be wrapped in a NOT operation. */
13844
13845void
13846rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13847 rtx m, rtx op, rtx before_param, rtx after_param,
13848 bool sync_p)
13849{
13850 enum machine_mode used_mode;
13851 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13852 rtx used_m;
13853 rtvec vec;
13854 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13855 rtx shift = NULL_RTX;
bb8df8a6 13856
915167f5 13857 if (sync_p)
a3cb4b82 13858 emit_insn (gen_lwsync ());
bb8df8a6 13859
915167f5
GK
13860 used_m = m;
13861
13862 /* If this is smaller than SImode, we'll have to use SImode with
13863 adjustments. */
13864 if (mode == QImode || mode == HImode)
13865 {
13866 rtx newop, oldop;
13867
13868 if (MEM_ALIGN (used_m) >= 32)
13869 {
13870 int ishift = 0;
13871 if (BYTES_BIG_ENDIAN)
13872 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13873
915167f5 13874 shift = GEN_INT (ishift);
c75c6d11 13875 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13876 }
13877 else
13878 {
13879 rtx addrSI, aligned_addr;
a9c9d3fa 13880 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13881
c75c6d11
JJ
13882 addrSI = gen_lowpart_common (SImode,
13883 force_reg (Pmode, XEXP (used_m, 0)));
13884 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13885 shift = gen_reg_rtx (SImode);
13886
13887 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13888 GEN_INT (shift_mask)));
13889 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
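	  /* SHIFT now holds the bit position of the subword within the
	     aligned word: the rlwinm computes 8 * (byte offset within
	     the word) and the xor converts that to the big-endian
	     shift count (e.g. a QImode access at byte offset 1 gives
	     8, then 8 ^ 0x18 = 16).  */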
915167f5
GK
13890
13891 aligned_addr = expand_binop (Pmode, and_optab,
13892 XEXP (used_m, 0),
13893 GEN_INT (-4), NULL_RTX,
13894 1, OPTAB_LIB_WIDEN);
13895 used_m = change_address (used_m, SImode, aligned_addr);
13896 set_mem_align (used_m, 32);
915167f5 13897 }
c75c6d11
JJ
13898 /* It's safe to keep the old alias set of USED_M, because
13899 the operation is atomic and only affects the original
13900 USED_M. */
f66f9865 13901 m = used_m;
915167f5
GK
13902
13903 if (GET_CODE (op) == NOT)
13904 {
13905 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13906 oldop = gen_rtx_NOT (SImode, oldop);
13907 }
13908 else
13909 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13910
915167f5
GK
13911 switch (code)
13912 {
13913 case IOR:
13914 case XOR:
13915 newop = expand_binop (SImode, and_optab,
13916 oldop, GEN_INT (imask), NULL_RTX,
13917 1, OPTAB_LIB_WIDEN);
13918 emit_insn (gen_ashlsi3 (newop, newop, shift));
13919 break;
13920
f66f9865
DE
13921 case NOT: /* NAND */
13922 newop = expand_binop (SImode, ior_optab,
13923 oldop, GEN_INT (~imask), NULL_RTX,
13924 1, OPTAB_LIB_WIDEN);
13925 emit_insn (gen_rotlsi3 (newop, newop, shift));
13926 break;
13927
915167f5
GK
13928 case AND:
13929 newop = expand_binop (SImode, ior_optab,
13930 oldop, GEN_INT (~imask), NULL_RTX,
13931 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13932 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13933 break;
13934
13935 case PLUS:
9f0076e5 13936 case MINUS:
915167f5
GK
13937 {
13938 rtx mask;
bb8df8a6 13939
915167f5
GK
13940 newop = expand_binop (SImode, and_optab,
13941 oldop, GEN_INT (imask), NULL_RTX,
13942 1, OPTAB_LIB_WIDEN);
13943 emit_insn (gen_ashlsi3 (newop, newop, shift));
13944
13945 mask = gen_reg_rtx (SImode);
13946 emit_move_insn (mask, GEN_INT (imask));
13947 emit_insn (gen_ashlsi3 (mask, mask, shift));
13948
9f0076e5
DE
13949 if (code == PLUS)
13950 newop = gen_rtx_PLUS (SImode, m, newop);
13951 else
13952 newop = gen_rtx_MINUS (SImode, m, newop);
13953 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13954 newop = gen_rtx_IOR (SImode, newop,
13955 gen_rtx_AND (SImode,
13956 gen_rtx_NOT (SImode, mask),
13957 m));
13958 break;
13959 }
13960
13961 default:
13962 gcc_unreachable ();
13963 }
13964
13965 op = newop;
13966 used_mode = SImode;
13967 before = gen_reg_rtx (used_mode);
13968 after = gen_reg_rtx (used_mode);
13969 }
13970 else
13971 {
13972 used_mode = mode;
13973 before = before_param;
13974 after = after_param;
13975
13976 if (before == NULL_RTX)
13977 before = gen_reg_rtx (used_mode);
13978 if (after == NULL_RTX)
13979 after = gen_reg_rtx (used_mode);
13980 }
bb8df8a6 13981
f66f9865 13982 if ((code == PLUS || code == MINUS)
9f0076e5 13983 && used_mode != mode)
915167f5
GK
13984 the_op = op; /* Computed above. */
13985 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13986 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
f66f9865
DE
13987 else if (code == NOT)
13988 the_op = gen_rtx_fmt_ee (IOR, used_mode,
13989 gen_rtx_NOT (used_mode, m),
13990 gen_rtx_NOT (used_mode, op));
915167f5
GK
13991 else
13992 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13993
13994 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13995 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13996 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13997 gen_rtx_UNSPEC (used_mode,
13998 gen_rtvec (1, the_op),
13999 UNSPEC_SYNC_OP));
915167f5
GK
14000 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
14001
9f0076e5 14002 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
14003 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
14004 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
14005 else
14006 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
14007 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
14008
14009 /* Shift and mask the return values properly. */
14010 if (used_mode != mode && before_param)
14011 {
14012 emit_insn (gen_lshrsi3 (before, before, shift));
14013 convert_move (before_param, before, 1);
14014 }
14015
14016 if (used_mode != mode && after_param)
14017 {
14018 emit_insn (gen_lshrsi3 (after, after, shift));
14019 convert_move (after_param, after, 1);
14020 }
14021
14022 /* The previous sequence will end with a branch that's dependent on
14023 the conditional store, so placing an isync will ensure that no
14024 other instructions (especially, no load or store instructions)
14025 can start before the atomic operation completes. */
14026 if (sync_p)
14027 emit_insn (gen_isync ());
14028}
14029
b52110d4
DE
14030/* A subroutine of the atomic operation splitters. Jump to LABEL if
14031 COND is true. Mark the jump as unlikely to be taken. */
14032
14033static void
14034emit_unlikely_jump (rtx cond, rtx label)
14035{
14036 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14037 rtx x;
14038
14039 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14040 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
bbbbb16a 14041 add_reg_note (x, REG_BR_PROB, very_unlikely);
b52110d4
DE
14042}
14043
14044/* A subroutine of the atomic operation splitters. Emit a load-locked
14045 instruction in MODE. */
14046
14047static void
14048emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14049{
14050 rtx (*fn) (rtx, rtx) = NULL;
14051 if (mode == SImode)
14052 fn = gen_load_locked_si;
14053 else if (mode == DImode)
14054 fn = gen_load_locked_di;
14055 emit_insn (fn (reg, mem));
14056}
14057
14058/* A subroutine of the atomic operation splitters. Emit a store-conditional
14059 instruction in MODE. */
14060
14061static void
14062emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14063{
14064 rtx (*fn) (rtx, rtx, rtx) = NULL;
14065 if (mode == SImode)
14066 fn = gen_store_conditional_si;
14067 else if (mode == DImode)
14068 fn = gen_store_conditional_di;
14069
9f0076e5 14070 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14071 if (PPC405_ERRATUM77)
14072 emit_insn (gen_memory_barrier ());
14073
14074 emit_insn (fn (res, mem, val));
14075}
14076
ea2c620c 14077/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14078 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14079 operand of the binary operator. BEFORE and AFTER are optional locations to
 14080	 return the value of MEM either before or after the operation.  SCRATCH is
14081 a scratch register. */
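/* For SImode the generated code is essentially the classic
   load-reserved/store-conditional loop:

	lwsync
     1:	lwarx	scratch,0,mem
	<op>	scratch,scratch,val
	stwcx.	scratch,0,mem
	bne-	1b
	isync  */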
14082
14083void
14084rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14085 rtx before, rtx after, rtx scratch)
14086{
14087 enum machine_mode mode = GET_MODE (mem);
14088 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14089
a3cb4b82 14090 emit_insn (gen_lwsync ());
9f0076e5
DE
14091
14092 label = gen_label_rtx ();
14093 emit_label (label);
14094 label = gen_rtx_LABEL_REF (VOIDmode, label);
14095
14096 if (before == NULL_RTX)
14097 before = scratch;
14098 emit_load_locked (mode, before, mem);
14099
14100 if (code == NOT)
f66f9865
DE
14101 x = gen_rtx_IOR (mode,
14102 gen_rtx_NOT (mode, before),
14103 gen_rtx_NOT (mode, val));
9f0076e5
DE
14104 else if (code == AND)
14105 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14106 else
14107 x = gen_rtx_fmt_ee (code, mode, before, val);
14108
14109 if (after != NULL_RTX)
14110 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14111 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14112
14113 emit_store_conditional (mode, cond, mem, scratch);
14114
14115 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14116 emit_unlikely_jump (x, label);
14117
14118 emit_insn (gen_isync ());
14119}
14120
b52110d4
DE
14121/* Expand an atomic compare and swap operation. MEM is the memory on which
14122 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14123 value to be stored. SCRATCH is a scratch GPR. */
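/* For SImode this amounts to a sequence along the lines of:

	lwsync
     1:	lwarx	retval,0,mem
	cmpw	retval,oldval
	bne-	2f
	stwcx.	newval,0,mem
	bne-	1b
	isync
     2:	*/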
14124
14125void
14126rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14127 rtx scratch)
14128{
14129 enum machine_mode mode = GET_MODE (mem);
14130 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14131
a3cb4b82 14132 emit_insn (gen_lwsync ());
b52110d4
DE
14133
14134 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14135 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14136 emit_label (XEXP (label1, 0));
14137
14138 emit_load_locked (mode, retval, mem);
14139
14140 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14141 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14142
14143 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14144 emit_unlikely_jump (x, label2);
14145
14146 emit_move_insn (scratch, newval);
14147 emit_store_conditional (mode, cond, mem, scratch);
14148
14149 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14150 emit_unlikely_jump (x, label1);
14151
14152 emit_insn (gen_isync ());
14153 emit_label (XEXP (label2, 0));
14154}
14155
14156/* Expand an atomic test and set operation. MEM is the memory on which
14157 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14158
14159void
14160rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14161{
14162 enum machine_mode mode = GET_MODE (mem);
14163 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14164
b52110d4
DE
14165 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14166 emit_label (XEXP (label, 0));
14167
14168 emit_load_locked (mode, retval, mem);
14169 emit_move_insn (scratch, val);
14170 emit_store_conditional (mode, cond, mem, scratch);
14171
14172 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14173 emit_unlikely_jump (x, label);
14174
14175 emit_insn (gen_isync ());
14176}
14177
9fc75b97
DE
14178void
14179rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14180{
14181 enum machine_mode mode = GET_MODE (mem);
14182 rtx addrSI, align, wdst, shift, mask;
14183 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14184 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14185
14186 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14187 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14188 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14189 shift = gen_reg_rtx (SImode);
14190 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14191 GEN_INT (shift_mask)));
14192 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14193
14194 /* Shift and mask old value into position within word. */
14195 oldval = convert_modes (SImode, mode, oldval, 1);
14196 oldval = expand_binop (SImode, and_optab,
14197 oldval, GEN_INT (imask), NULL_RTX,
14198 1, OPTAB_LIB_WIDEN);
14199 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14200
14201 /* Shift and mask new value into position within word. */
14202 newval = convert_modes (SImode, mode, newval, 1);
14203 newval = expand_binop (SImode, and_optab,
14204 newval, GEN_INT (imask), NULL_RTX,
14205 1, OPTAB_LIB_WIDEN);
14206 emit_insn (gen_ashlsi3 (newval, newval, shift));
14207
14208 /* Mask for insertion. */
14209 mask = gen_reg_rtx (SImode);
14210 emit_move_insn (mask, GEN_INT (imask));
14211 emit_insn (gen_ashlsi3 (mask, mask, shift));
14212
14213 /* Address of aligned word containing subword. */
14214 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14215 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14216 mem = change_address (mem, SImode, align);
14217 set_mem_align (mem, 32);
14218 MEM_VOLATILE_P (mem) = 1;
14219
14220 wdst = gen_reg_rtx (SImode);
14221 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14222 oldval, newval, mem));
14223
2725b75c
JJ
14224 /* Shift the result back. */
14225 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14226
9fc75b97
DE
14227 emit_move_insn (dst, gen_lowpart (mode, wdst));
14228}
14229
14230void
14231rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14232 rtx oldval, rtx newval, rtx mem,
14233 rtx scratch)
14234{
14235 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14236
a3cb4b82 14237 emit_insn (gen_lwsync ());
9fc75b97
DE
14238 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14239 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14240 emit_label (XEXP (label1, 0));
14241
14242 emit_load_locked (SImode, scratch, mem);
14243
14244 /* Mask subword within loaded value for comparison with oldval.
 14245	 Use UNSPEC_AND to avoid clobber.  */
14246 emit_insn (gen_rtx_SET (SImode, dest,
14247 gen_rtx_UNSPEC (SImode,
14248 gen_rtvec (2, scratch, mask),
14249 UNSPEC_AND)));
14250
14251 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14252 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14253
14254 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14255 emit_unlikely_jump (x, label2);
14256
14257 /* Clear subword within loaded value for insertion of new value. */
14258 emit_insn (gen_rtx_SET (SImode, scratch,
14259 gen_rtx_AND (SImode,
14260 gen_rtx_NOT (SImode, mask), scratch)));
14261 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14262 emit_store_conditional (SImode, cond, mem, scratch);
14263
14264 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14265 emit_unlikely_jump (x, label1);
14266
14267 emit_insn (gen_isync ());
14268 emit_label (XEXP (label2, 0));
14269}
14270
14271
b52110d4 14272 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14273 multi-register moves. It will emit at most one instruction for
14274 each register that is accessed; that is, it won't emit li/lis pairs
14275 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14276 register. */
46c07df8 14277
46c07df8 14278void
a9baceb1 14279rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14280{
a9baceb1
GK
14281 /* The register number of the first register being moved. */
14282 int reg;
14283 /* The mode that is to be moved. */
14284 enum machine_mode mode;
14285 /* The mode that the move is being done in, and its size. */
14286 enum machine_mode reg_mode;
14287 int reg_mode_size;
14288 /* The number of registers that will be moved. */
14289 int nregs;
14290
14291 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14292 mode = GET_MODE (dst);
c8b622ff 14293 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14294 if (FP_REGNO_P (reg))
696e45ba
ME
14295 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14296 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14297 else if (ALTIVEC_REGNO_P (reg))
14298 reg_mode = V16QImode;
4f011e1e
JM
14299 else if (TARGET_E500_DOUBLE && mode == TFmode)
14300 reg_mode = DFmode;
a9baceb1
GK
14301 else
14302 reg_mode = word_mode;
14303 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14304
37409796 14305 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14306
a9baceb1
GK
14307 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14308 {
14309 /* Move register range backwards, if we might have destructive
14310 overlap. */
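      /* For example, when a two-register value is moved from r3-r4
	 into r4-r5, r4 must be copied into r5 before r3 is copied
	 into r4.  */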
14311 int i;
14312 for (i = nregs - 1; i >= 0; i--)
f676971a 14313 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14314 simplify_gen_subreg (reg_mode, dst, mode,
14315 i * reg_mode_size),
14316 simplify_gen_subreg (reg_mode, src, mode,
14317 i * reg_mode_size)));
14318 }
46c07df8
HP
14319 else
14320 {
a9baceb1
GK
14321 int i;
14322 int j = -1;
14323 bool used_update = false;
46c07df8 14324
c1e55850 14325 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14326 {
14327 rtx breg;
3a1f863f 14328
a9baceb1
GK
14329 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14330 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14331 {
14332 rtx delta_rtx;
a9baceb1 14333 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14334 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14335 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14336 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14337 emit_insn (TARGET_32BIT
14338 ? gen_addsi3 (breg, breg, delta_rtx)
14339 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14340 src = replace_equiv_address (src, breg);
3a1f863f 14341 }
d04b6e6e 14342 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14343 {
13e2e16e 14344 rtx basereg;
c1e55850
GK
14345 basereg = gen_rtx_REG (Pmode, reg);
14346 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14347 src = replace_equiv_address (src, basereg);
c1e55850 14348 }
3a1f863f 14349
0423421f
AM
14350 breg = XEXP (src, 0);
14351 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14352 breg = XEXP (breg, 0);
14353
14354 /* If the base register we are using to address memory is
14355 also a destination reg, then change that register last. */
14356 if (REG_P (breg)
14357 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14358 && REGNO (breg) < REGNO (dst) + nregs)
14359 j = REGNO (breg) - REGNO (dst);
c4ad648e 14360 }
46c07df8 14361
a9baceb1 14362 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14363 {
14364 rtx breg;
14365
a9baceb1
GK
14366 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14367 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14368 {
14369 rtx delta_rtx;
a9baceb1 14370 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14371 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14372 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14373 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14374
14375 /* We have to update the breg before doing the store.
14376 Use store with update, if available. */
14377
14378 if (TARGET_UPDATE)
14379 {
a9baceb1 14380 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14381 emit_insn (TARGET_32BIT
14382 ? (TARGET_POWERPC64
14383 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14384 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14385 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14386 used_update = true;
3a1f863f
DE
14387 }
14388 else
a9baceb1
GK
14389 emit_insn (TARGET_32BIT
14390 ? gen_addsi3 (breg, breg, delta_rtx)
14391 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14392 dst = replace_equiv_address (dst, breg);
3a1f863f 14393 }
37409796 14394 else
d04b6e6e 14395 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14396 }
14397
46c07df8 14398 for (i = 0; i < nregs; i++)
f676971a 14399 {
3a1f863f
DE
14400 /* Calculate index to next subword. */
14401 ++j;
f676971a 14402 if (j == nregs)
3a1f863f 14403 j = 0;
46c07df8 14404
112cdef5 14405	 /* If the compiler already emitted the move of the first word
a9baceb1 14406	    via store with update, there is no need to do anything.  */
3a1f863f 14407 if (j == 0 && used_update)
a9baceb1 14408 continue;
f676971a 14409
a9baceb1
GK
14410 emit_insn (gen_rtx_SET (VOIDmode,
14411 simplify_gen_subreg (reg_mode, dst, mode,
14412 j * reg_mode_size),
14413 simplify_gen_subreg (reg_mode, src, mode,
14414 j * reg_mode_size)));
3a1f863f 14415 }
46c07df8
HP
14416 }
14417}
14418
12a4e8c5 14419\f
a4f6c312
SS
14420/* This page contains routines that are used to determine what the
14421 function prologue and epilogue code will do and write them out. */
9878760c 14422
a4f6c312
SS
14423/* Return the first fixed-point register that is required to be
14424 saved. 32 if none. */
9878760c
RK
14425
14426int
863d938c 14427first_reg_to_save (void)
9878760c
RK
14428{
14429 int first_reg;
14430
14431 /* Find lowest numbered live register. */
14432 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14433 if (df_regs_ever_live_p (first_reg)
a38d360d 14434 && (! call_used_regs[first_reg]
1db02437 14435 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14436 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14437 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14438 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14439 break;
14440
ee890fe2 14441#if TARGET_MACHO
93638d7a 14442 if (flag_pic
e3b5732b 14443 && crtl->uses_pic_offset_table
93638d7a 14444 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14445 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14446#endif
14447
9878760c
RK
14448 return first_reg;
14449}
14450
14451/* Similar, for FP regs. */
14452
14453int
863d938c 14454first_fp_reg_to_save (void)
9878760c
RK
14455{
14456 int first_reg;
14457
14458 /* Find lowest numbered live register. */
14459 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14460 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14461 break;
14462
14463 return first_reg;
14464}
00b960c7
AH
14465
14466/* Similar, for AltiVec regs. */
14467
14468static int
863d938c 14469first_altivec_reg_to_save (void)
00b960c7
AH
14470{
14471 int i;
14472
14473 /* Stack frame remains as is unless we are in AltiVec ABI. */
14474 if (! TARGET_ALTIVEC_ABI)
14475 return LAST_ALTIVEC_REGNO + 1;
14476
22fa69da 14477 /* On Darwin, the unwind routines are compiled without
982afe02 14478 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14479 altivec registers when necessary. */
e3b5732b 14480 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14481 && ! TARGET_ALTIVEC)
14482 return FIRST_ALTIVEC_REGNO + 20;
14483
00b960c7
AH
14484 /* Find lowest numbered live register. */
14485 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14486 if (df_regs_ever_live_p (i))
00b960c7
AH
14487 break;
14488
14489 return i;
14490}
14491
14492/* Return a 32-bit mask of the AltiVec registers we need to set in
14493 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14494 the 32-bit word is 0. */
14495
14496static unsigned int
863d938c 14497compute_vrsave_mask (void)
00b960c7
AH
14498{
14499 unsigned int i, mask = 0;
14500
22fa69da 14501 /* On Darwin, the unwind routines are compiled without
982afe02 14502 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14503 call-saved altivec registers when necessary. */
e3b5732b 14504 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14505 && ! TARGET_ALTIVEC)
14506 mask |= 0xFFF;
14507
00b960c7
AH
14508 /* First, find out if we use _any_ altivec registers. */
14509 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14510 if (df_regs_ever_live_p (i))
00b960c7
AH
14511 mask |= ALTIVEC_REG_BIT (i);
14512
14513 if (mask == 0)
14514 return mask;
14515
00b960c7
AH
14516 /* Next, remove the argument registers from the set. These must
14517 be in the VRSAVE mask set by the caller, so we don't need to add
14518 them in again. More importantly, the mask we compute here is
14519 used to generate CLOBBERs in the set_vrsave insn, and we do not
14520 wish the argument registers to die. */
38173d38 14521 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14522 mask &= ~ALTIVEC_REG_BIT (i);
14523
14524 /* Similarly, remove the return value from the set. */
14525 {
14526 bool yes = false;
14527 diddle_return_value (is_altivec_return_reg, &yes);
14528 if (yes)
14529 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14530 }
14531
14532 return mask;
14533}
14534
d62294f5 14535/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14536 size of prologues/epilogues by calling our own save/restore-the-world
14537 routines. */
d62294f5
FJ
14538
14539static void
f57fe068
AM
14540compute_save_world_info (rs6000_stack_t *info_ptr)
14541{
14542 info_ptr->world_save_p = 1;
14543 info_ptr->world_save_p
14544 = (WORLD_SAVE_P (info_ptr)
14545 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14546 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14547 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14548 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14549 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14550 && info_ptr->cr_save_p);
f676971a 14551
d62294f5
FJ
14552 /* This will not work in conjunction with sibcalls. Make sure there
14553 are none. (This check is expensive, but seldom executed.) */
f57fe068 14554 if (WORLD_SAVE_P (info_ptr))
f676971a 14555 {
d62294f5
FJ
14556 rtx insn;
14557 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14558 if ( GET_CODE (insn) == CALL_INSN
14559 && SIBLING_CALL_P (insn))
14560 {
14561 info_ptr->world_save_p = 0;
14562 break;
14563 }
d62294f5 14564 }
f676971a 14565
f57fe068 14566 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14567 {
14568 /* Even if we're not touching VRsave, make sure there's room on the
14569 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14570 will attempt to save it. */
d62294f5
FJ
14571 info_ptr->vrsave_size = 4;
14572
298ac1dd
AP
14573 /* If we are going to save the world, we need to save the link register too. */
14574 info_ptr->lr_save_p = 1;
14575
d62294f5
FJ
14576 /* "Save" the VRsave register too if we're saving the world. */
14577 if (info_ptr->vrsave_mask == 0)
c4ad648e 14578 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14579
14580 /* Because the Darwin register save/restore routines only handle
c4ad648e 14581 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14582 check. */
37409796
NS
14583 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14584 && (info_ptr->first_altivec_reg_save
14585 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14586 }
f676971a 14587 return;
d62294f5
FJ
14588}
14589
14590
00b960c7 14591static void
a2369ed3 14592is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14593{
14594 bool *yes = (bool *) xyes;
14595 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14596 *yes = true;
14597}
14598
4697a36c
MM
14599\f
14600/* Calculate the stack information for the current function. This is
14601 complicated by having two separate calling sequences, the AIX calling
14602 sequence and the V.4 calling sequence.
14603
592696dd 14604 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14605 32-bit 64-bit
4697a36c 14606 SP----> +---------------------------------------+
a260abc9 14607 | back chain to caller | 0 0
4697a36c 14608 +---------------------------------------+
a260abc9 14609 | saved CR | 4 8 (8-11)
4697a36c 14610 +---------------------------------------+
a260abc9 14611 | saved LR | 8 16
4697a36c 14612 +---------------------------------------+
a260abc9 14613 | reserved for compilers | 12 24
4697a36c 14614 +---------------------------------------+
a260abc9 14615 | reserved for binders | 16 32
4697a36c 14616 +---------------------------------------+
a260abc9 14617 | saved TOC pointer | 20 40
4697a36c 14618 +---------------------------------------+
a260abc9 14619 | Parameter save area (P) | 24 48
4697a36c 14620 +---------------------------------------+
a260abc9 14621 | Alloca space (A) | 24+P etc.
802a0058 14622 +---------------------------------------+
a7df97e6 14623 | Local variable space (L) | 24+P+A
4697a36c 14624 +---------------------------------------+
a7df97e6 14625 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14626 +---------------------------------------+
00b960c7
AH
14627 | Save area for AltiVec registers (W) | 24+P+A+L+X
14628 +---------------------------------------+
14629 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14630 +---------------------------------------+
14631 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14632 +---------------------------------------+
00b960c7
AH
 14633	 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
 14634	 +---------------------------------------+
 14635	 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14636 +---------------------------------------+
14637 old SP->| back chain to caller's caller |
14638 +---------------------------------------+
14639
5376a30c
KR
14640 The required alignment for AIX configurations is two words (i.e., 8
14641 or 16 bytes).
14642
14643
4697a36c
MM
14644 V.4 stack frames look like:
14645
14646 SP----> +---------------------------------------+
14647 | back chain to caller | 0
14648 +---------------------------------------+
5eb387b8 14649 | caller's saved LR | 4
4697a36c
MM
14650 +---------------------------------------+
14651 | Parameter save area (P) | 8
14652 +---------------------------------------+
a7df97e6 14653 | Alloca space (A) | 8+P
f676971a 14654 +---------------------------------------+
a7df97e6 14655 | Varargs save area (V) | 8+P+A
f676971a 14656 +---------------------------------------+
a7df97e6 14657 | Local variable space (L) | 8+P+A+V
f676971a 14658 +---------------------------------------+
a7df97e6 14659 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14660 +---------------------------------------+
00b960c7
AH
14661 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14662 +---------------------------------------+
14663 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14664 +---------------------------------------+
14665 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14666 +---------------------------------------+
c4ad648e
AM
14667 | SPE: area for 64-bit GP registers |
14668 +---------------------------------------+
14669 | SPE alignment padding |
14670 +---------------------------------------+
00b960c7 14671 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14672 +---------------------------------------+
00b960c7 14673 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14674 +---------------------------------------+
00b960c7 14675 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14676 +---------------------------------------+
14677 old SP->| back chain to caller's caller |
14678 +---------------------------------------+
b6c9286a 14679
5376a30c
KR
14680 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14681 given. (But note below and in sysv4.h that we require only 8 and
14682 may round up the size of our stack frame anyways. The historical
14683 reason is early versions of powerpc-linux which didn't properly
14684 align the stack at program startup. A happy side-effect is that
14685 -mno-eabi libraries can be used with -meabi programs.)
14686
50d440bc 14687 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14688 the stack alignment requirements may differ. If -mno-eabi is not
14689 given, the required stack alignment is 8 bytes; if -mno-eabi is
14690 given, the required alignment is 16 bytes. (But see V.4 comment
14691 above.) */
4697a36c 14692
61b2fbe7
MM
14693#ifndef ABI_STACK_BOUNDARY
14694#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14695#endif
14696
d1d0c603 14697static rs6000_stack_t *
863d938c 14698rs6000_stack_info (void)
4697a36c 14699{
022123e6 14700 static rs6000_stack_t info;
4697a36c 14701 rs6000_stack_t *info_ptr = &info;
327e5343 14702 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14703 int ehrd_size;
64045029 14704 int save_align;
8070c91a 14705 int first_gp;
44688022 14706 HOST_WIDE_INT non_fixed_size;
4697a36c 14707
022123e6 14708 memset (&info, 0, sizeof (info));
4697a36c 14709
c19de7aa
AH
14710 if (TARGET_SPE)
14711 {
14712 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14713 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14714 cfun->machine->insn_chain_scanned_p
14715 = spe_func_has_64bit_regs_p () + 1;
14716 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14717 }
14718
a4f6c312 14719 /* Select which calling sequence. */
178274da 14720 info_ptr->abi = DEFAULT_ABI;
9878760c 14721
a4f6c312 14722 /* Calculate which registers need to be saved & save area size. */
4697a36c 14723 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14724 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14725 even if it currently looks like we won't. Reload may need it to
14726 get at a constant; if so, it will have already created a constant
14727 pool entry for it. */
2bfcf297 14728 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14729 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14730 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14731 && crtl->uses_const_pool
1db02437 14732 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14733 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14734 else
8070c91a
DJ
14735 first_gp = info_ptr->first_gp_reg_save;
14736
14737 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14738
a3170dc6
AH
14739 /* For the SPE, we have an additional upper 32-bits on each GPR.
14740 Ideally we should save the entire 64-bits only when the upper
14741 half is used in SIMD instructions. Since we only record
14742 registers live (not the size they are used in), this proves
14743 difficult because we'd have to traverse the instruction chain at
14744 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 14745	 so we opt to save all the GPRs in 64 bits whenever even one
 14746	 register gets used in 64 bits.  Otherwise, all the registers in
 14747	 the frame get saved in 32 bits.
a3170dc6 14748
c19de7aa 14749	 So, when we save all GPRs (except the SP) in 64 bits, the
a3170dc6 14750	 traditional GP save area will be empty.  */
c19de7aa 14751 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14752 info_ptr->gp_size = 0;
14753
4697a36c
MM
14754 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14755 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14756
00b960c7
AH
14757 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14758 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14759 - info_ptr->first_altivec_reg_save);
14760
592696dd 14761 /* Does this function call anything? */
71f123ca
FS
14762 info_ptr->calls_p = (! current_function_is_leaf
14763 || cfun->machine->ra_needs_full_frame);
b6c9286a 14764
a4f6c312 14765 /* Determine if we need to save the link register. */
022123e6 14766 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14767 && crtl->profile
022123e6 14768 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14769#ifdef TARGET_RELOCATABLE
14770 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14771#endif
14772 || (info_ptr->first_fp_reg_save != 64
14773 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14774 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14775 || info_ptr->calls_p
14776 || rs6000_ra_ever_killed ())
4697a36c
MM
14777 {
14778 info_ptr->lr_save_p = 1;
1de43f85 14779 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14780 }
14781
9ebbca7d 14782 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14783 if (df_regs_ever_live_p (CR2_REGNO)
14784 || df_regs_ever_live_p (CR3_REGNO)
14785 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14786 {
14787 info_ptr->cr_save_p = 1;
178274da 14788 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14789 info_ptr->cr_size = reg_size;
14790 }
14791
83720594
RH
14792 /* If the current function calls __builtin_eh_return, then we need
14793 to allocate stack space for registers that will hold data for
14794 the exception handler. */
e3b5732b 14795 if (crtl->calls_eh_return)
83720594
RH
14796 {
14797 unsigned int i;
14798 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14799 continue;
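      /* The empty loop above just counts the EH return data registers;
         i is now the number of such registers.  */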
a3170dc6
AH
14800
14801 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14802 ehrd_size = i * (TARGET_SPE_ABI
14803 && info_ptr->spe_64bit_regs_used != 0
14804 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14805 }
14806 else
14807 ehrd_size = 0;
14808
592696dd 14809 /* Determine various sizes. */
4697a36c
MM
14810 info_ptr->reg_size = reg_size;
14811 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14812 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14813 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14814 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14815 if (FRAME_GROWS_DOWNWARD)
14816 info_ptr->vars_size
5b667039
JJ
14817 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14818 + info_ptr->parm_size,
7d5175e1 14819 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14820 - (info_ptr->fixed_size + info_ptr->vars_size
14821 + info_ptr->parm_size);
00b960c7 14822
c19de7aa 14823 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14824 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14825 else
14826 info_ptr->spe_gp_size = 0;
14827
4d774ff8
HP
14828 if (TARGET_ALTIVEC_ABI)
14829 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14830 else
4d774ff8
HP
14831 info_ptr->vrsave_mask = 0;
14832
14833 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14834 info_ptr->vrsave_size = 4;
14835 else
14836 info_ptr->vrsave_size = 0;
b6c9286a 14837
d62294f5
FJ
14838 compute_save_world_info (info_ptr);
14839
592696dd 14840 /* Calculate the offsets. */
178274da 14841 switch (DEFAULT_ABI)
4697a36c 14842 {
b6c9286a 14843 case ABI_NONE:
24d304eb 14844 default:
37409796 14845 gcc_unreachable ();
b6c9286a
MM
14846
14847 case ABI_AIX:
ee890fe2 14848 case ABI_DARWIN:
b6c9286a
MM
14849 info_ptr->fp_save_offset = - info_ptr->fp_size;
14850 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14851
14852 if (TARGET_ALTIVEC_ABI)
14853 {
14854 info_ptr->vrsave_save_offset
14855 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14856
982afe02 14857 /* Align stack so vector save area is on a quadword boundary.
9278121c 14858 The padding goes above the vectors. */
00b960c7
AH
14859 if (info_ptr->altivec_size != 0)
14860 info_ptr->altivec_padding_size
9278121c 14861 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14862 else
14863 info_ptr->altivec_padding_size = 0;
14864
14865 info_ptr->altivec_save_offset
14866 = info_ptr->vrsave_save_offset
14867 - info_ptr->altivec_padding_size
14868 - info_ptr->altivec_size;
9278121c
GK
14869 gcc_assert (info_ptr->altivec_size == 0
14870 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14871
14872 /* Adjust for AltiVec case. */
14873 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14874 }
14875 else
14876 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14877 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14878 info_ptr->lr_save_offset = 2*reg_size;
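      /* Frame layout for AIX/Darwin: the FP, GP, VRSAVE, AltiVec and
         EH-data save areas are stacked downward at negative offsets,
         while CR and LR use the reserved frame-header slots at
         +reg_size and +2*reg_size.  */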
24d304eb
RK
14879 break;
14880
14881 case ABI_V4:
b6c9286a
MM
14882 info_ptr->fp_save_offset = - info_ptr->fp_size;
14883 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14884 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14885
c19de7aa 14886 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14887 {
14888 /* Align stack so SPE GPR save area is aligned on a
14889 double-word boundary. */
f78c3290 14890 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14891 info_ptr->spe_padding_size
14892 = 8 - (-info_ptr->cr_save_offset % 8);
14893 else
14894 info_ptr->spe_padding_size = 0;
14895
14896 info_ptr->spe_gp_save_offset
14897 = info_ptr->cr_save_offset
14898 - info_ptr->spe_padding_size
14899 - info_ptr->spe_gp_size;
14900
14901 /* Adjust for SPE case. */
022123e6 14902 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14903 }
a3170dc6 14904 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14905 {
14906 info_ptr->vrsave_save_offset
14907 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14908
14909 /* Align stack so vector save area is on a quadword boundary. */
14910 if (info_ptr->altivec_size != 0)
14911 info_ptr->altivec_padding_size
14912 = 16 - (-info_ptr->vrsave_save_offset % 16);
14913 else
14914 info_ptr->altivec_padding_size = 0;
14915
14916 info_ptr->altivec_save_offset
14917 = info_ptr->vrsave_save_offset
14918 - info_ptr->altivec_padding_size
14919 - info_ptr->altivec_size;
14920
14921 /* Adjust for AltiVec case. */
022123e6 14922 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14923 }
14924 else
022123e6
AM
14925 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14926 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14927 info_ptr->lr_save_offset = reg_size;
14928 break;
4697a36c
MM
14929 }
14930
64045029 14931 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14932 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14933 + info_ptr->gp_size
14934 + info_ptr->altivec_size
14935 + info_ptr->altivec_padding_size
a3170dc6
AH
14936 + info_ptr->spe_gp_size
14937 + info_ptr->spe_padding_size
00b960c7
AH
14938 + ehrd_size
14939 + info_ptr->cr_size
022123e6 14940 + info_ptr->vrsave_size,
64045029 14941 save_align);
00b960c7 14942
44688022 14943 non_fixed_size = (info_ptr->vars_size
ff381587 14944 + info_ptr->parm_size
5b667039 14945 + info_ptr->save_size);
ff381587 14946
44688022
AM
14947 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14948 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14949
14950 /* Determine if we need to allocate any stack frame:
14951
a4f6c312
SS
14952 For AIX we need to push the stack if a frame pointer is needed
14953 (because the stack might be dynamically adjusted), if we are
14954 debugging, if we make calls, or if the sum of fp_save, gp_save,
 14955 and local variables is more than the space needed to save all
14956 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14957 + 18*8 = 288 (GPR13 reserved).
ff381587 14958
a4f6c312
SS
14959 For V.4 we don't have the stack cushion that AIX uses, but assume
14960 that the debugger can handle stackless frames. */
ff381587
MM
14961
14962 if (info_ptr->calls_p)
14963 info_ptr->push_p = 1;
14964
178274da 14965 else if (DEFAULT_ABI == ABI_V4)
44688022 14966 info_ptr->push_p = non_fixed_size != 0;
ff381587 14967
178274da
AM
14968 else if (frame_pointer_needed)
14969 info_ptr->push_p = 1;
14970
14971 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14972 info_ptr->push_p = 1;
14973
ff381587 14974 else
44688022 14975 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14976
a4f6c312 14977 /* Zero offsets if we're not saving those registers. */
8dda1a21 14978 if (info_ptr->fp_size == 0)
4697a36c
MM
14979 info_ptr->fp_save_offset = 0;
14980
8dda1a21 14981 if (info_ptr->gp_size == 0)
4697a36c
MM
14982 info_ptr->gp_save_offset = 0;
14983
00b960c7
AH
14984 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14985 info_ptr->altivec_save_offset = 0;
14986
14987 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14988 info_ptr->vrsave_save_offset = 0;
14989
c19de7aa
AH
14990 if (! TARGET_SPE_ABI
14991 || info_ptr->spe_64bit_regs_used == 0
14992 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14993 info_ptr->spe_gp_save_offset = 0;
14994
c81fc13e 14995 if (! info_ptr->lr_save_p)
4697a36c
MM
14996 info_ptr->lr_save_offset = 0;
14997
c81fc13e 14998 if (! info_ptr->cr_save_p)
4697a36c
MM
14999 info_ptr->cr_save_offset = 0;
15000
15001 return info_ptr;
15002}
15003
c19de7aa
AH
15004/* Return true if the current function uses any GPRs in 64-bit SIMD
15005 mode. */
15006
15007static bool
863d938c 15008spe_func_has_64bit_regs_p (void)
c19de7aa
AH
15009{
15010 rtx insns, insn;
15011
15012 /* Functions that save and restore all the call-saved registers will
15013 need to save/restore the registers in 64-bits. */
e3b5732b
JH
15014 if (crtl->calls_eh_return
15015 || cfun->calls_setjmp
15016 || crtl->has_nonlocal_goto)
c19de7aa
AH
15017 return true;
15018
15019 insns = get_insns ();
15020
15021 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
15022 {
15023 if (INSN_P (insn))
15024 {
15025 rtx i;
15026
b5a5beb9
AH
15027 /* FIXME: This should be implemented with attributes...
15028
15029 (set_attr "spe64" "true")....then,
15030 if (get_spe64(insn)) return true;
15031
15032 It's the only reliable way to do the stuff below. */
15033
c19de7aa 15034 i = PATTERN (insn);
f82f556d
AH
15035 if (GET_CODE (i) == SET)
15036 {
15037 enum machine_mode mode = GET_MODE (SET_SRC (i));
15038
15039 if (SPE_VECTOR_MODE (mode))
15040 return true;
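	      /* Under E500 double, DFmode and TFmode values also live
	         in 64-bit GPRs.  */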
4f011e1e 15041 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15042 return true;
15043 }
c19de7aa
AH
15044 }
15045 }
15046
15047 return false;
15048}
15049
d1d0c603 15050static void
a2369ed3 15051debug_stack_info (rs6000_stack_t *info)
9878760c 15052{
d330fd93 15053 const char *abi_string;
24d304eb 15054
c81fc13e 15055 if (! info)
4697a36c
MM
15056 info = rs6000_stack_info ();
15057
15058 fprintf (stderr, "\nStack information for function %s:\n",
15059 ((current_function_decl && DECL_NAME (current_function_decl))
15060 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15061 : "<unknown>"));
15062
24d304eb
RK
15063 switch (info->abi)
15064 {
b6c9286a
MM
15065 default: abi_string = "Unknown"; break;
15066 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15067 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15068 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15069 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15070 }
15071
15072 fprintf (stderr, "\tABI = %5s\n", abi_string);
15073
00b960c7
AH
15074 if (TARGET_ALTIVEC_ABI)
15075 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15076
a3170dc6
AH
15077 if (TARGET_SPE_ABI)
15078 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15079
4697a36c
MM
15080 if (info->first_gp_reg_save != 32)
15081 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15082
15083 if (info->first_fp_reg_save != 64)
15084 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15085
00b960c7
AH
15086 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15087 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15088 info->first_altivec_reg_save);
15089
4697a36c
MM
15090 if (info->lr_save_p)
15091 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15092
4697a36c
MM
15093 if (info->cr_save_p)
15094 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15095
00b960c7
AH
15096 if (info->vrsave_mask)
15097 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15098
4697a36c
MM
15099 if (info->push_p)
15100 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15101
15102 if (info->calls_p)
15103 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15104
4697a36c
MM
15105 if (info->gp_save_offset)
15106 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15107
15108 if (info->fp_save_offset)
15109 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15110
00b960c7
AH
15111 if (info->altivec_save_offset)
15112 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15113 info->altivec_save_offset);
15114
a3170dc6
AH
15115 if (info->spe_gp_save_offset)
15116 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15117 info->spe_gp_save_offset);
15118
00b960c7
AH
15119 if (info->vrsave_save_offset)
15120 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15121 info->vrsave_save_offset);
15122
4697a36c
MM
15123 if (info->lr_save_offset)
15124 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15125
15126 if (info->cr_save_offset)
15127 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15128
15129 if (info->varargs_save_offset)
15130 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15131
15132 if (info->total_size)
d1d0c603
JJ
15133 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15134 info->total_size);
4697a36c 15135
4697a36c 15136 if (info->vars_size)
d1d0c603
JJ
15137 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15138 info->vars_size);
4697a36c
MM
15139
15140 if (info->parm_size)
15141 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15142
15143 if (info->fixed_size)
15144 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15145
15146 if (info->gp_size)
15147 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15148
a3170dc6
AH
15149 if (info->spe_gp_size)
15150 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15151
4697a36c
MM
15152 if (info->fp_size)
15153 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15154
00b960c7
AH
15155 if (info->altivec_size)
15156 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15157
15158 if (info->vrsave_size)
15159 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15160
15161 if (info->altivec_padding_size)
15162 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15163 info->altivec_padding_size);
15164
a3170dc6
AH
15165 if (info->spe_padding_size)
15166 fprintf (stderr, "\tspe_padding_size = %5d\n",
15167 info->spe_padding_size);
15168
4697a36c
MM
15169 if (info->cr_size)
15170 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15171
15172 if (info->save_size)
15173 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15174
15175 if (info->reg_size != 4)
15176 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15177
15178 fprintf (stderr, "\n");
9878760c 15179}
71f123ca
FS
15180
15181rtx
a2369ed3 15182rs6000_return_addr (int count, rtx frame)
71f123ca 15183{
a4f6c312
SS
15184 /* Currently we don't optimize very well between prolog and body
15185 code and for PIC code the code can be actually quite bad, so
15186 don't try to be too clever here. */
f1384257 15187 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15188 {
15189 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15190
15191 return
15192 gen_rtx_MEM
15193 (Pmode,
15194 memory_address
15195 (Pmode,
15196 plus_constant (copy_to_reg
15197 (gen_rtx_MEM (Pmode,
15198 memory_address (Pmode, frame))),
15199 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15200 }
15201
8c29550d 15202 cfun->machine->ra_need_lr = 1;
1de43f85 15203 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15204}
15205
5e1bf043
DJ
15206/* Say whether a function is a candidate for sibcall handling or not.
15207 We do not allow indirect calls to be optimized into sibling calls.
15208 Also, we can't do it if there are any vector parameters; there's
 15209 nowhere to put the VRsave code for it to work; note that functions with
15210 vector parameters are required to have a prototype, so the argument
15211 type info must be available here. (The tail recursion case can work
15212 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15213static bool
a2369ed3 15214rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15215{
15216 tree type;
4977bab6 15217 if (decl)
5e1bf043
DJ
15218 {
15219 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15220 {
4977bab6 15221 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15222 type; type = TREE_CHAIN (type))
15223 {
c15b529f 15224 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15225 return false;
5e1bf043 15226 }
c4ad648e 15227 }
5e1bf043 15228 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15229 || ((*targetm.binds_local_p) (decl)
15230 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15231 {
4977bab6 15232 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15233
15234 if (!lookup_attribute ("longcall", attr_list)
15235 || lookup_attribute ("shortcall", attr_list))
4977bab6 15236 return true;
2bcc50d0 15237 }
5e1bf043 15238 }
4977bab6 15239 return false;
5e1bf043
DJ
15240}
15241
e7e64a25
AS
 15242/* Return NULL if INSN is valid within a low-overhead loop.
15243 Otherwise return why doloop cannot be applied.
9419649c
DE
15244 PowerPC uses the COUNT register for branch on table instructions. */
15245
e7e64a25 15246static const char *
3101faab 15247rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15248{
15249 if (CALL_P (insn))
e7e64a25 15250 return "Function call in the loop.";
9419649c
DE
15251
15252 if (JUMP_P (insn)
15253 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15254 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15255 return "Computed branch in the loop.";
9419649c 15256
e7e64a25 15257 return NULL;
9419649c
DE
15258}
15259
71f123ca 15260static int
863d938c 15261rs6000_ra_ever_killed (void)
71f123ca
FS
15262{
15263 rtx top;
5e1bf043
DJ
15264 rtx reg;
15265 rtx insn;
71f123ca 15266
3c072c6b 15267 if (cfun->is_thunk)
71f123ca 15268 return 0;
eb0424da 15269
36f7e964
AH
15270 /* regs_ever_live has LR marked as used if any sibcalls are present,
15271 but this should not force saving and restoring in the
15272 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15273 clobbers LR, so that is inappropriate. */
36f7e964 15274
5e1bf043
DJ
15275 /* Also, the prologue can generate a store into LR that
15276 doesn't really count, like this:
36f7e964 15277
5e1bf043
DJ
15278 move LR->R0
15279 bcl to set PIC register
15280 move LR->R31
15281 move R0->LR
36f7e964
AH
15282
15283 When we're called from the epilogue, we need to avoid counting
15284 this as a store. */
f676971a 15285
71f123ca
FS
15286 push_topmost_sequence ();
15287 top = get_insns ();
15288 pop_topmost_sequence ();
1de43f85 15289 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15290
5e1bf043
DJ
15291 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15292 {
15293 if (INSN_P (insn))
15294 {
022123e6
AM
15295 if (CALL_P (insn))
15296 {
15297 if (!SIBLING_CALL_P (insn))
15298 return 1;
15299 }
1de43f85 15300 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15301 return 1;
36f7e964
AH
15302 else if (set_of (reg, insn) != NULL_RTX
15303 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15304 return 1;
15305 }
15306 }
15307 return 0;
71f123ca 15308}
4697a36c 15309\f
9ebbca7d 15310/* Emit instructions needed to load the TOC register.
c7ca610e 15311 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15312 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15313
15314void
a2369ed3 15315rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15316{
6fb5fa3c 15317 rtx dest;
1db02437 15318 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15319
7f970b70 15320 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15321 {
7f970b70 15322 char buf[30];
e65a3857 15323 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15324
15325 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15326 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15327 if (flag_pic == 2)
15328 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15329 else
15330 got = rs6000_got_sym ();
15331 tmp1 = tmp2 = dest;
15332 if (!fromprolog)
15333 {
15334 tmp1 = gen_reg_rtx (Pmode);
15335 tmp2 = gen_reg_rtx (Pmode);
15336 }
6fb5fa3c
DB
15337 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15338 emit_move_insn (tmp1,
1de43f85 15339 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15340 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15341 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15342 }
15343 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15344 {
6fb5fa3c 15345 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15346 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15347 }
15348 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15349 {
15350 char buf[30];
20b71b17
AM
15351 rtx temp0 = (fromprolog
15352 ? gen_rtx_REG (Pmode, 0)
15353 : gen_reg_rtx (Pmode));
20b71b17 15354
20b71b17
AM
15355 if (fromprolog)
15356 {
ccbca5e4 15357 rtx symF, symL;
38c1f2d7 15358
20b71b17
AM
15359 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15360 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15361
20b71b17
AM
15362 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15363 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15364
6fb5fa3c
DB
15365 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15366 emit_move_insn (dest,
1de43f85 15367 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15368 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15369 }
15370 else
20b71b17
AM
15371 {
15372 rtx tocsym;
20b71b17
AM
15373
15374 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15375 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15376 emit_move_insn (dest,
1de43f85 15377 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15378 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15379 }
6fb5fa3c 15380 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15381 }
20b71b17
AM
15382 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15383 {
15384 /* This is for AIX code running in non-PIC ELF32. */
15385 char buf[30];
15386 rtx realsym;
15387 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15388 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15389
6fb5fa3c
DB
15390 emit_insn (gen_elf_high (dest, realsym));
15391 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15392 }
37409796 15393 else
9ebbca7d 15394 {
37409796 15395 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15396
9ebbca7d 15397 if (TARGET_32BIT)
6fb5fa3c 15398 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15399 else
6fb5fa3c 15400 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15401 }
15402}
15403
d1d0c603
JJ
15404/* Emit instructions to restore the link register after determining where
15405 its value has been stored. */
15406
15407void
15408rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15409{
15410 rs6000_stack_t *info = rs6000_stack_info ();
15411 rtx operands[2];
15412
15413 operands[0] = source;
15414 operands[1] = scratch;
15415
15416 if (info->lr_save_p)
15417 {
15418 rtx frame_rtx = stack_pointer_rtx;
15419 HOST_WIDE_INT sp_offset = 0;
15420 rtx tmp;
15421
15422 if (frame_pointer_needed
e3b5732b 15423 || cfun->calls_alloca
d1d0c603
JJ
15424 || info->total_size > 32767)
15425 {
0be76840 15426 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15427 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15428 frame_rtx = operands[1];
15429 }
15430 else if (info->push_p)
15431 sp_offset = info->total_size;
15432
15433 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15434 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15435 emit_move_insn (tmp, operands[0]);
15436 }
15437 else
1de43f85 15438 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15439}
15440
4862826d 15441static GTY(()) alias_set_type set = -1;
f103e34d 15442
4862826d 15443alias_set_type
863d938c 15444get_TOC_alias_set (void)
9ebbca7d 15445{
f103e34d
GK
15446 if (set == -1)
15447 set = new_alias_set ();
15448 return set;
f676971a 15449}
9ebbca7d 15450
c1207243 15451/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15452 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15453 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15454#if TARGET_ELF
3c9eb5f4 15455static int
f676971a 15456uses_TOC (void)
9ebbca7d 15457{
c4501e62 15458 rtx insn;
38c1f2d7 15459
c4501e62
JJ
15460 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15461 if (INSN_P (insn))
15462 {
15463 rtx pat = PATTERN (insn);
15464 int i;
9ebbca7d 15465
f676971a 15466 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15467 for (i = 0; i < XVECLEN (pat, 0); i++)
15468 {
15469 rtx sub = XVECEXP (pat, 0, i);
15470 if (GET_CODE (sub) == USE)
15471 {
15472 sub = XEXP (sub, 0);
15473 if (GET_CODE (sub) == UNSPEC
15474 && XINT (sub, 1) == UNSPEC_TOC)
15475 return 1;
15476 }
15477 }
15478 }
15479 return 0;
9ebbca7d 15480}
c954844a 15481#endif
38c1f2d7 15482
9ebbca7d 15483rtx
f676971a 15484create_TOC_reference (rtx symbol)
9ebbca7d 15485{
b3a13419 15486 if (!can_create_pseudo_p ())
6fb5fa3c 15487 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15488 return gen_rtx_PLUS (Pmode,
a8a05998 15489 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15490 gen_rtx_CONST (Pmode,
2e4316da 15491 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15492}
38c1f2d7 15493
fc4767bb
JJ
15494/* If _Unwind_* has been called from within the same module,
 15495 the TOC register is not guaranteed to be saved to 40(1) on function
15496 entry. Save it there in that case. */
c7ca610e 15497
9ebbca7d 15498void
863d938c 15499rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15500{
15501 rtx mem;
15502 rtx stack_top = gen_reg_rtx (Pmode);
15503 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15504 rtx opcode = gen_reg_rtx (SImode);
15505 rtx tocompare = gen_reg_rtx (SImode);
15506 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15507
8308679f 15508 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15509 emit_move_insn (stack_top, mem);
15510
8308679f
DE
15511 mem = gen_frame_mem (Pmode,
15512 gen_rtx_PLUS (Pmode, stack_top,
15513 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15514 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15515 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15516 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15517 : 0xE8410028, SImode));
9ebbca7d 15518
fc4767bb 15519 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15520 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15521 no_toc_save_needed);
9ebbca7d 15522
8308679f
DE
15523 mem = gen_frame_mem (Pmode,
15524 gen_rtx_PLUS (Pmode, stack_top,
15525 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15526 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15527 emit_label (no_toc_save_needed);
9ebbca7d 15528}
38c1f2d7 15529\f
0be76840
DE
15530/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15531 and the change to the stack pointer. */
ba4828e0 15532
9ebbca7d 15533static void
863d938c 15534rs6000_emit_stack_tie (void)
9ebbca7d 15535{
0be76840
DE
15536 rtx mem = gen_frame_mem (BLKmode,
15537 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15538
9ebbca7d
GK
15539 emit_insn (gen_stack_tie (mem));
15540}
38c1f2d7 15541
9ebbca7d
GK
15542/* Emit the correct code for allocating stack space, as insns.
15543 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15544 If COPY_R11, make sure a copy of the old frame is left in r11,
15545 in preference to r12 if COPY_R12.
9ebbca7d
GK
15546 The generated code may use hard register 0 as a temporary. */
15547
15548static void
f78c3290 15549rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15550{
9ebbca7d
GK
15551 rtx insn;
15552 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15553 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1 15554 rtx todec = gen_int_mode (-size, Pmode);
d4bacef0 15555 rtx par, set, mem;
61168ff1
RS
15556
15557 if (INTVAL (todec) != -size)
15558 {
d4ee4d25 15559 warning (0, "stack frame too large");
61168ff1
RS
15560 emit_insn (gen_trap ());
15561 return;
15562 }
a157febd 15563
e3b5732b 15564 if (crtl->limit_stack)
a157febd
GK
15565 {
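      /* stack_limit_rtx is typically set up via -fstack-limit-register=
         or -fstack-limit-symbol=; trap if the new stack pointer would
         fall below the limit.  */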
15566 if (REG_P (stack_limit_rtx)
f676971a 15567 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15568 && REGNO (stack_limit_rtx) <= 31)
15569 {
5b71a4e7 15570 emit_insn (TARGET_32BIT
9ebbca7d
GK
15571 ? gen_addsi3 (tmp_reg,
15572 stack_limit_rtx,
15573 GEN_INT (size))
15574 : gen_adddi3 (tmp_reg,
15575 stack_limit_rtx,
15576 GEN_INT (size)));
5b71a4e7 15577
9ebbca7d
GK
15578 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15579 const0_rtx));
a157febd
GK
15580 }
15581 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15582 && TARGET_32BIT
f607bc57 15583 && DEFAULT_ABI == ABI_V4)
a157febd 15584 {
9ebbca7d 15585 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15586 gen_rtx_PLUS (Pmode,
15587 stack_limit_rtx,
9ebbca7d 15588 GEN_INT (size)));
5b71a4e7 15589
9ebbca7d
GK
15590 emit_insn (gen_elf_high (tmp_reg, toload));
15591 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15592 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15593 const0_rtx));
a157febd
GK
15594 }
15595 else
d4ee4d25 15596 warning (0, "stack limit expression is not supported");
a157febd
GK
15597 }
15598
d4bacef0 15599 if (copy_r12 || copy_r11)
f78c3290
NF
15600 emit_move_insn (copy_r11
15601 ? gen_rtx_REG (Pmode, 11)
15602 : gen_rtx_REG (Pmode, 12),
15603 stack_reg);
9ebbca7d 15604
d4bacef0 15605 if (size > 32767)
38c1f2d7 15606 {
d4bacef0
BE
15607 /* Need a note here so that try_split doesn't get confused. */
15608 if (get_last_insn () == NULL_RTX)
15609 emit_note (NOTE_INSN_DELETED);
15610 insn = emit_move_insn (tmp_reg, todec);
15611 try_split (PATTERN (insn), insn, 0);
15612 todec = tmp_reg;
9ebbca7d 15613 }
d4bacef0
BE
15614
15615 insn = emit_insn (TARGET_32BIT
15616 ? gen_movsi_update_stack (stack_reg, stack_reg,
15617 todec, stack_reg)
15618 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15619 todec, stack_reg));
15620 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15621 it now and set the alias set/attributes. The above gen_*_update
15622 calls will generate a PARALLEL with the MEM set being the first
15623 operation. */
15624 par = PATTERN (insn);
15625 gcc_assert (GET_CODE (par) == PARALLEL);
15626 set = XVECEXP (par, 0, 0);
15627 gcc_assert (GET_CODE (set) == SET);
15628 mem = SET_DEST (set);
15629 gcc_assert (MEM_P (mem));
15630 MEM_NOTRAP_P (mem) = 1;
15631 set_mem_alias_set (mem, get_frame_alias_set ());
f676971a 15632
9ebbca7d 15633 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a
ILT
15634 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
15635 gen_rtx_SET (VOIDmode, stack_reg,
15636 gen_rtx_PLUS (Pmode, stack_reg,
15637 GEN_INT (-size))));
9ebbca7d
GK
15638}
15639
a4f6c312
SS
15640/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15641 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15642 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15643 deduce these equivalences by itself so it wasn't necessary to hold
15644 its hand so much. */
9ebbca7d
GK
15645
15646static void
f676971a 15647rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15648 rtx reg2, rtx rreg)
9ebbca7d
GK
15649{
15650 rtx real, temp;
15651
e56c4463
JL
15652 /* copy_rtx will not make unique copies of registers, so we need to
15653 ensure we don't have unwanted sharing here. */
15654 if (reg == reg2)
15655 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15656
15657 if (reg == rreg)
15658 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15659
9ebbca7d
GK
15660 real = copy_rtx (PATTERN (insn));
15661
89e7058f
AH
15662 if (reg2 != NULL_RTX)
15663 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15664
15665 real = replace_rtx (real, reg,
9ebbca7d
GK
15666 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15667 STACK_POINTER_REGNUM),
15668 GEN_INT (val)));
f676971a 15669
9ebbca7d
GK
15670 /* We expect that 'real' is either a SET or a PARALLEL containing
15671 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15672 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15673
15674 if (GET_CODE (real) == SET)
15675 {
15676 rtx set = real;
f676971a 15677
9ebbca7d
GK
15678 temp = simplify_rtx (SET_SRC (set));
15679 if (temp)
15680 SET_SRC (set) = temp;
15681 temp = simplify_rtx (SET_DEST (set));
15682 if (temp)
15683 SET_DEST (set) = temp;
15684 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15685 {
9ebbca7d
GK
15686 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15687 if (temp)
15688 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15689 }
38c1f2d7 15690 }
37409796 15691 else
9ebbca7d
GK
15692 {
15693 int i;
37409796
NS
15694
15695 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15696 for (i = 0; i < XVECLEN (real, 0); i++)
15697 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15698 {
15699 rtx set = XVECEXP (real, 0, i);
f676971a 15700
9ebbca7d
GK
15701 temp = simplify_rtx (SET_SRC (set));
15702 if (temp)
15703 SET_SRC (set) = temp;
15704 temp = simplify_rtx (SET_DEST (set));
15705 if (temp)
15706 SET_DEST (set) = temp;
15707 if (GET_CODE (SET_DEST (set)) == MEM)
15708 {
15709 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15710 if (temp)
15711 XEXP (SET_DEST (set), 0) = temp;
15712 }
15713 RTX_FRAME_RELATED_P (set) = 1;
15714 }
15715 }
c19de7aa 15716
9ebbca7d 15717 RTX_FRAME_RELATED_P (insn) = 1;
bbbbb16a 15718 add_reg_note (insn, REG_FRAME_RELATED_EXPR, real);
38c1f2d7
MM
15719}
15720
00b960c7
AH
15721/* Returns an insn that has a vrsave set operation with the
15722 appropriate CLOBBERs. */
15723
15724static rtx
a2369ed3 15725generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15726{
15727 int nclobs, i;
15728 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15729 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15730
a004eb82
AH
15731 clobs[0]
15732 = gen_rtx_SET (VOIDmode,
15733 vrsave,
15734 gen_rtx_UNSPEC_VOLATILE (SImode,
15735 gen_rtvec (2, reg, vrsave),
3aca4bff 15736 UNSPECV_SET_VRSAVE));
00b960c7
AH
15737
15738 nclobs = 1;
15739
9aa86737
AH
15740 /* We need to clobber the registers in the mask so the scheduler
15741 does not move sets to VRSAVE before sets of AltiVec registers.
15742
15743 However, if the function receives nonlocal gotos, reload will set
15744 all call saved registers live. We will end up with:
15745
15746 (set (reg 999) (mem))
15747 (parallel [ (set (reg vrsave) (unspec blah))
15748 (clobber (reg 999))])
15749
15750 The clobber will cause the store into reg 999 to be dead, and
15751 flow will attempt to delete an epilogue insn. In this case, we
15752 need an unspec use/set of the register. */
00b960c7
AH
15753
15754 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15755 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15756 {
15757 if (!epiloguep || call_used_regs [i])
15758 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15759 gen_rtx_REG (V4SImode, i));
15760 else
15761 {
15762 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15763
15764 clobs[nclobs++]
a004eb82
AH
15765 = gen_rtx_SET (VOIDmode,
15766 reg,
15767 gen_rtx_UNSPEC (V4SImode,
15768 gen_rtvec (1, reg), 27));
9aa86737
AH
15769 }
15770 }
00b960c7
AH
15771
15772 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15773
15774 for (i = 0; i < nclobs; ++i)
15775 XVECEXP (insn, 0, i) = clobs[i];
15776
15777 return insn;
15778}
15779
89e7058f
AH
15780/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15781 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15782
15783static void
f676971a 15784emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15785 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15786{
15787 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15788 rtx replacea, replaceb;
15789
15790 int_rtx = GEN_INT (offset);
15791
15792 /* Some cases that need register indexed addressing. */
15793 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15794 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15795 || (TARGET_SPE_ABI
15796 && SPE_VECTOR_MODE (mode)
15797 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15798 {
 15799 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15800 flow path of instructions in the prologue. */
89e7058f
AH
15801 offset_rtx = gen_rtx_REG (Pmode, 11);
15802 emit_move_insn (offset_rtx, int_rtx);
15803
15804 replacea = offset_rtx;
15805 replaceb = int_rtx;
15806 }
15807 else
15808 {
15809 offset_rtx = int_rtx;
15810 replacea = NULL_RTX;
15811 replaceb = NULL_RTX;
15812 }
15813
15814 reg = gen_rtx_REG (mode, regno);
15815 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15816 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15817
15818 insn = emit_move_insn (mem, reg);
15819
15820 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15821}
15822
a3170dc6
AH
15823/* Emit an offset memory reference suitable for a frame store, while
15824 converting to a valid addressing mode. */
15825
15826static rtx
a2369ed3 15827gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15828{
15829 rtx int_rtx, offset_rtx;
15830
15831 int_rtx = GEN_INT (offset);
15832
4d4cbc0e 15833 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15834 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15835 {
15836 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15837 emit_move_insn (offset_rtx, int_rtx);
15838 }
15839 else
15840 offset_rtx = int_rtx;
15841
0be76840 15842 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15843}
15844
6d0a8091
DJ
15845/* Look for user-defined global regs. We should not save and restore these,
15846 and cannot use stmw/lmw if there are any in its range. */
15847
15848static bool
f78c3290 15849no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15850{
15851 int i;
e1ece9f1 15852 for (i = first; i < gpr ? 32 : 64 ; i++)
f78c3290 15853 if (global_regs[i])
6d0a8091
DJ
15854 return false;
15855 return true;
15856}
15857
699c914a
MS
15858#ifndef TARGET_FIX_AND_CONTINUE
15859#define TARGET_FIX_AND_CONTINUE 0
15860#endif
15861
f78c3290
NF
15862/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15863#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15864#define LAST_SAVRES_REGISTER 31
15865#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15866
15867static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15868
15869/* Return the symbol for an out-of-line register save/restore routine.
15870 We are saving/restoring GPRs if GPR is true. */
15871
15872static rtx
15873rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15874{
15875 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15876 rtx sym;
15877 int select = ((savep ? 1 : 0) << 2
15878 | (gpr
15879 /* On the SPE, we never have any FPRs, but we do have
15880 32/64-bit versions of the routines. */
15881 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15882 : 0) << 1
15883 | (exitp ? 1: 0));
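  /* SELECT indexes the 8-entry second dimension of savres_routine_syms:
     bit 2 is save vs. restore, bit 1 the 64-bit SPE GPR variant and
     bit 0 the "exit" (frame-teardown) variant.  */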
15884
15885 /* Don't generate bogus routine names. */
15886 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15887
15888 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15889
15890 if (sym == NULL)
15891 {
15892 char name[30];
15893 const char *action;
15894 const char *regkind;
15895 const char *exit_suffix;
15896
15897 action = savep ? "save" : "rest";
15898
15899 /* SPE has slightly different names for its routines depending on
15900 whether we are saving 32-bit or 64-bit registers. */
15901 if (TARGET_SPE_ABI)
15902 {
15903 /* No floating point saves on the SPE. */
15904 gcc_assert (gpr);
15905
15906 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15907 }
15908 else
15909 regkind = gpr ? "gpr" : "fpr";
15910
15911 exit_suffix = exitp ? "_x" : "";
15912
15913 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
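      /* This yields names such as "_savegpr_29", "_restfpr_14_x" or,
         on the SPE, "_save64gpr_20".  */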
15914
15915 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15916 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15917 }
15918
15919 return sym;
15920}
15921
15922/* Emit a sequence of insns, including a stack tie if needed, for
15923 resetting the stack pointer. If SAVRES is true, then don't reset the
15924 stack pointer, but move the base of the frame into r11 for use by
15925 out-of-line register restore routines. */
15926
15927static void
15928rs6000_emit_stack_reset (rs6000_stack_t *info,
15929 rtx sp_reg_rtx, rtx frame_reg_rtx,
15930 int sp_offset, bool savres)
15931{
15932 /* This blockage is needed so that sched doesn't decide to move
15933 the sp change before the register restores. */
15934 if (frame_reg_rtx != sp_reg_rtx
15935 || (TARGET_SPE_ABI
15936 && info->spe_64bit_regs_used != 0
15937 && info->first_gp_reg_save != 32))
15938 rs6000_emit_stack_tie ();
15939
15940 if (frame_reg_rtx != sp_reg_rtx)
15941 {
f78c3290
NF
15942 if (sp_offset != 0)
15943 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15944 GEN_INT (sp_offset)));
15945 else if (!savres)
15946 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15947 }
15948 else if (sp_offset != 0)
15949 {
15950 /* If we are restoring registers out-of-line, we will be using the
15951 "exit" variants of the restore routines, which will reset the
15952 stack for us. But we do need to point r11 into the right place
15953 for those routines. */
15954 rtx dest_reg = (savres
15955 ? gen_rtx_REG (Pmode, 11)
15956 : sp_reg_rtx);
15957
15958 emit_insn (TARGET_32BIT
15959 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15960 GEN_INT (sp_offset))
15961 : gen_adddi3 (dest_reg, sp_reg_rtx,
15962 GEN_INT (sp_offset)));
15963 }
15964}
15965
15966/* Construct a parallel rtx describing the effect of a call to an
15967 out-of-line register save/restore routine. */
15968
15969static rtx
15970rs6000_make_savres_rtx (rs6000_stack_t *info,
15971 rtx frame_reg_rtx, int save_area_offset,
15972 enum machine_mode reg_mode,
15973 bool savep, bool gpr, bool exitp)
15974{
15975 int i;
15976 int offset, start_reg, end_reg, n_regs;
15977 int reg_size = GET_MODE_SIZE (reg_mode);
15978 rtx sym;
15979 rtvec p;
15980
15981 offset = 0;
15982 start_reg = (gpr
15983 ? info->first_gp_reg_save
15984 : info->first_fp_reg_save);
15985 end_reg = gpr ? 32 : 64;
15986 n_regs = end_reg - start_reg;
15987 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
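  /* The PARALLEL holds an optional RETURN (for the "exit" routines),
     a clobber of LR, a USE of the routine symbol, a USE of r11 (the
     save-area pointer) and one SET per saved or restored register.  */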
15988
15989 /* If we're saving registers, then we should never say we're exiting. */
15990 gcc_assert ((savep && !exitp) || !savep);
15991
15992 if (exitp)
15993 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15994
15995 RTVEC_ELT (p, offset++)
15996 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15997
15998 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15999 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
16000 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
16001
16002 for (i = 0; i < end_reg - start_reg; i++)
16003 {
16004 rtx addr, reg, mem;
16005 reg = gen_rtx_REG (reg_mode, start_reg + i);
16006 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16007 GEN_INT (save_area_offset + reg_size*i));
16008 mem = gen_frame_mem (reg_mode, addr);
16009
16010 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
16011 savep ? mem : reg,
16012 savep ? reg : mem);
16013 }
16014
16015 return gen_rtx_PARALLEL (VOIDmode, p);
16016}
16017
52ff33d0
NF
 16018/* Determine whether the gp REG is really used: it is live and call-saved, or it is the PIC/TOC base register that must be set up. */
16019
16020static bool
16021rs6000_reg_live_or_pic_offset_p (int reg)
16022{
6fb5fa3c 16023 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
16024 && (!call_used_regs[reg]
16025 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16026 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16027 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16028 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16029 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16030}
16031
f78c3290
NF
16032enum {
16033 SAVRES_MULTIPLE = 0x1,
16034 SAVRES_INLINE_FPRS = 0x2,
16035 SAVRES_INLINE_GPRS = 0x4
16036};
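/* rs6000_savres_strategy returns an inclusive-or of the flags above:
   SAVRES_MULTIPLE selects store/load-multiple, while the INLINE flags
   keep the FPR/GPR saves and restores inline instead of calling the
   out-of-line _savegpr_NN / _restgpr_NN style routines.  */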
16037
16038/* Determine the strategy for savings/restoring registers. */
16039
16040static int
16041rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16042 int using_static_chain_p, int sibcall)
16043{
16044 bool using_multiple_p;
16045 bool common;
16046 bool savres_fprs_inline;
16047 bool savres_gprs_inline;
16048 bool noclobber_global_gprs
16049 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16050
16051 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16052 && (!TARGET_SPE_ABI
16053 || info->spe_64bit_regs_used == 0)
16054 && info->first_gp_reg_save < 31
16055 && noclobber_global_gprs);
16056 /* Don't bother to try to save things out-of-line if r11 is occupied
16057 by the static chain. It would require too much fiddling and the
16058 static chain is rarely used anyway. */
16059 common = (using_static_chain_p
16060 || sibcall
16061 || crtl->calls_eh_return
16062 || !info->lr_save_p
16063 || cfun->machine->ra_need_lr
16064 || info->total_size > 32767);
16065 savres_fprs_inline = (common
16066 || info->first_fp_reg_save == 64
16067 || !no_global_regs_above (info->first_fp_reg_save,
16068 /*gpr=*/false)
16069 || FP_SAVE_INLINE (info->first_fp_reg_save));
16070 savres_gprs_inline = (common
16071 /* Saving CR interferes with the exit routines
16072 used on the SPE, so just punt here. */
16073 || (!savep
16074 && TARGET_SPE_ABI
16075 && info->spe_64bit_regs_used != 0
16076 && info->cr_save_p != 0)
16077 || info->first_gp_reg_save == 32
16078 || !noclobber_global_gprs
16079 || GP_SAVE_INLINE (info->first_gp_reg_save));
16080
16081 if (savep)
16082 /* If we are going to use store multiple, then don't even bother
16083 with the out-of-line routines, since the store-multiple instruction
16084 will always be smaller. */
16085 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16086 else
16087 {
16088 /* The situation is more complicated with load multiple. We'd
16089 prefer to use the out-of-line routines for restores, since the
16090 "exit" out-of-line routines can handle the restore of LR and
16091 the frame teardown. But we can only use the out-of-line
16092 routines if we know that we've used store multiple or
16093 out-of-line routines in the prologue, i.e. if we've saved all
16094 the registers from first_gp_reg_save. Otherwise, we risk
16095 loading garbage from the stack. Furthermore, we can only use
16096 the "exit" out-of-line gpr restore if we haven't saved any
16097 fprs. */
16098 bool saved_all = !savres_gprs_inline || using_multiple_p;
16099
16100 if (saved_all && info->first_fp_reg_save != 64)
16101 /* We can't use the exit routine; use load multiple if it's
16102 available. */
16103 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16104 }
16105
16106 return (using_multiple_p
16107 | (savres_fprs_inline << 1)
16108 | (savres_gprs_inline << 2));
16109}
16110
9ebbca7d
GK
16111/* Emit function prologue as insns. */
16112
9878760c 16113void
863d938c 16114rs6000_emit_prologue (void)
9878760c 16115{
4697a36c 16116 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16117 enum machine_mode reg_mode = Pmode;
327e5343 16118 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16119 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16120 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16121 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16122 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16123 rtx insn;
f78c3290 16124 int strategy;
9ebbca7d 16125 int saving_FPRs_inline;
f78c3290 16126 int saving_GPRs_inline;
9ebbca7d 16127 int using_store_multiple;
f78c3290
NF
16128 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16129 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
e1ece9f1 16130 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16131 HOST_WIDE_INT sp_offset = 0;
f676971a 16132
699c914a
MS
16133 if (TARGET_FIX_AND_CONTINUE)
16134 {
16135 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16136 address by modifying the first 5 instructions of the function
699c914a
MS
16137 to branch to the overriding function. This is necessary to
16138 permit function pointers that point to the old function to
16139 actually forward to the new function. */
16140 emit_insn (gen_nop ());
16141 emit_insn (gen_nop ());
de2ab0ca 16142 emit_insn (gen_nop ());
699c914a
MS
16143 emit_insn (gen_nop ());
16144 emit_insn (gen_nop ());
16145 }
16146
16147 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16148 {
16149 reg_mode = V2SImode;
16150 reg_size = 8;
16151 }
a3170dc6 16152
f78c3290
NF
16153 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16154 /*static_chain_p=*/using_static_chain_p,
16155 /*sibcall=*/0);
16156 using_store_multiple = strategy & SAVRES_MULTIPLE;
16157 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16158 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16159
16160 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16161 if (! WORLD_SAVE_P (info)
16162 && info->push_p
acd0b319 16163 && (DEFAULT_ABI == ABI_V4
e3b5732b 16164 || crtl->calls_eh_return))
9ebbca7d 16165 {
f78c3290
NF
16166 bool need_r11 = (TARGET_SPE
16167 ? (!saving_GPRs_inline
16168 && info->spe_64bit_regs_used == 0)
16169 : (!saving_FPRs_inline || !saving_GPRs_inline));
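      /* The out-of-line save routines address the save area through r11,
         so keep a copy of the pre-decrement stack pointer there when
         they will be used.  */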
9ebbca7d
GK
16170 if (info->total_size < 32767)
16171 sp_offset = info->total_size;
16172 else
f78c3290
NF
16173 frame_reg_rtx = (need_r11
16174 ? gen_rtx_REG (Pmode, 11)
16175 : frame_ptr_rtx);
f676971a 16176 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16177 (frame_reg_rtx != sp_reg_rtx
16178 && (info->cr_save_p
16179 || info->lr_save_p
16180 || info->first_fp_reg_save < 64
16181 || info->first_gp_reg_save < 32
f78c3290
NF
16182 )),
16183 need_r11);
9ebbca7d
GK
16184 if (frame_reg_rtx != sp_reg_rtx)
16185 rs6000_emit_stack_tie ();
16186 }
16187
d62294f5 16188 /* Handle world saves specially here. */
f57fe068 16189 if (WORLD_SAVE_P (info))
d62294f5
FJ
16190 {
16191 int i, j, sz;
16192 rtx treg;
16193 rtvec p;
22fa69da 16194 rtx reg0;
d62294f5
FJ
16195
16196 /* save_world expects lr in r0. */
22fa69da 16197 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16198 if (info->lr_save_p)
c4ad648e 16199 {
22fa69da 16200 insn = emit_move_insn (reg0,
1de43f85 16201 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16202 RTX_FRAME_RELATED_P (insn) = 1;
16203 }
d62294f5
FJ
16204
16205 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16206 assumptions about the offsets of various bits of the stack
992d08b1 16207 frame. */
37409796
NS
16208 gcc_assert (info->gp_save_offset == -220
16209 && info->fp_save_offset == -144
16210 && info->lr_save_offset == 8
16211 && info->cr_save_offset == 4
16212 && info->push_p
16213 && info->lr_save_p
e3b5732b 16214 && (!crtl->calls_eh_return
37409796
NS
16215 || info->ehrd_offset == -432)
16216 && info->vrsave_save_offset == -224
22fa69da 16217 && info->altivec_save_offset == -416);
d62294f5
FJ
16218
16219 treg = gen_rtx_REG (SImode, 11);
16220 emit_move_insn (treg, GEN_INT (-info->total_size));
16221
16222 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16223 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16224
16225 /* Preserve CR2 for save_world prologues */
22fa69da 16226 sz = 5;
d62294f5
FJ
16227 sz += 32 - info->first_gp_reg_save;
16228 sz += 64 - info->first_fp_reg_save;
16229 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16230 p = rtvec_alloc (sz);
16231 j = 0;
16232 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16233 gen_rtx_REG (SImode,
1de43f85 16234 LR_REGNO));
d62294f5 16235 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16236 gen_rtx_SYMBOL_REF (Pmode,
16237 "*save_world"));
d62294f5 16238 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16239 properly. */
16240 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16241 {
696e45ba
ME
16242 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16243 ? DFmode : SFmode),
16244 info->first_fp_reg_save + i);
c4ad648e
AM
16245 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16246 GEN_INT (info->fp_save_offset
16247 + sp_offset + 8 * i));
696e45ba
ME
16248 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16249 ? DFmode : SFmode), addr);
c4ad648e
AM
16250
16251 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16252 }
d62294f5 16253 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16254 {
16255 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16256 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16257 GEN_INT (info->altivec_save_offset
16258 + sp_offset + 16 * i));
0be76840 16259 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16260
16261 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16262 }
d62294f5 16263 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16264 {
16265 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16266 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16267 GEN_INT (info->gp_save_offset
16268 + sp_offset + reg_size * i));
0be76840 16269 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16270
16271 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16272 }
16273
16274 {
16275 /* CR register traditionally saved as CR2. */
16276 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16277 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16278 GEN_INT (info->cr_save_offset
16279 + sp_offset));
0be76840 16280 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16281
16282 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16283 }
22fa69da
GK
16284 /* Explain about use of R0. */
16285 if (info->lr_save_p)
16286 {
16287 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16288 GEN_INT (info->lr_save_offset
16289 + sp_offset));
16290 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16291
22fa69da
GK
16292 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16293 }
16294 /* Explain what happens to the stack pointer. */
16295 {
16296 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16297 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16298 }
d62294f5
FJ
16299
16300 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16301 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16302 treg, GEN_INT (-info->total_size));
16303 sp_offset = info->total_size;
d62294f5
FJ
16304 }
16305
9ebbca7d 16306 /* If we use the link register, get it into r0. */
f57fe068 16307 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16308 {
52ff33d0
NF
16309 rtx addr, reg, mem;
16310
f8a57be8 16311 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16312 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16313 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16314
16315 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16316 GEN_INT (info->lr_save_offset + sp_offset));
16317 reg = gen_rtx_REG (Pmode, 0);
16318 mem = gen_rtx_MEM (Pmode, addr);
16319 /* This should not be of rs6000_sr_alias_set, because of
16320 __builtin_return_address. */
16321
16322 insn = emit_move_insn (mem, reg);
16323 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16324 NULL_RTX, NULL_RTX);
f8a57be8 16325 }
9ebbca7d
GK
16326
16327 /* If we need to save CR, put it into r12. */
f57fe068 16328 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16329 {
f8a57be8 16330 rtx set;
f676971a 16331
9ebbca7d 16332 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16333 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16334 RTX_FRAME_RELATED_P (insn) = 1;
16335 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16336 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16337 But that's OK. All we have to do is specify that _one_ condition
16338 code register is saved in this stack slot. The thrower's epilogue
16339 will then restore all the call-saved registers.
16340 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16341 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16342 gen_rtx_REG (SImode, CR2_REGNO));
bbbbb16a 16343 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16344 }
16345
a4f6c312
SS
16346 /* Do any required saving of fpr's. If only one or two to save, do
16347 it ourselves. Otherwise, call function. */
f57fe068 16348 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16349 {
16350 int i;
16351 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16352 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16353 && ! call_used_regs[info->first_fp_reg_save+i]))
696e45ba
ME
16354 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16355 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16356 ? DFmode : SFmode,
89e7058f
AH
16357 info->first_fp_reg_save + i,
16358 info->fp_save_offset + sp_offset + 8 * i,
16359 info->total_size);
9ebbca7d 16360 }
f57fe068 16361 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16362 {
16363 rtx par;
16364
16365 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16366 info->fp_save_offset + sp_offset,
16367 DFmode,
16368 /*savep=*/true, /*gpr=*/false,
16369 /*exitp=*/false);
16370 insn = emit_insn (par);
16371 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16372 NULL_RTX, NULL_RTX);
16373 }
16374
16375 /* Save GPRs. This is done as a PARALLEL if we are using
16376 the store-multiple instructions. */
16377 if (!WORLD_SAVE_P (info)
16378 && TARGET_SPE_ABI
16379 && info->spe_64bit_regs_used != 0
16380 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16381 {
16382 int i;
f78c3290
NF
16383 rtx spe_save_area_ptr;
16384
16385 /* Determine whether we can address all of the registers that need
16386 to be saved with an offset from the stack pointer that fits in
16387 the small const field for SPE memory instructions. */
16388 int spe_regs_addressable_via_sp
16389 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16390 + (32 - info->first_gp_reg_save - 1) * reg_size)
16391 && saving_GPRs_inline);
16392 int spe_offset;
16393
16394 if (spe_regs_addressable_via_sp)
16395 {
16396 spe_save_area_ptr = frame_reg_rtx;
16397 spe_offset = info->spe_gp_save_offset + sp_offset;
16398 }
16399 else
16400 {
16401 /* Make r11 point to the start of the SPE save area. We need
16402 to be careful here if r11 is holding the static chain. If
16403 it is, then temporarily save it in r0. We would use r0 as
 16404 our base register here, but a base register of r0 in a load or
 16405 store is read by the hardware as the constant zero rather than
 16406 the contents of r0, so it cannot be used for this. */
16407 int ool_adjust = (saving_GPRs_inline
16408 ? 0
16409 : (info->first_gp_reg_save
16410 - (FIRST_SAVRES_REGISTER+1))*8);
16411 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16412 + sp_offset - ool_adjust);
16413
16414 if (using_static_chain_p)
16415 {
16416 rtx r0 = gen_rtx_REG (Pmode, 0);
16417 gcc_assert (info->first_gp_reg_save > 11);
16418
16419 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16420 }
16421
16422 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16423 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16424 frame_reg_rtx,
16425 GEN_INT (offset)));
16426 /* We need to make sure the move to r11 gets noted for
16427 properly outputting unwind information. */
16428 if (!saving_GPRs_inline)
16429 rs6000_frame_related (insn, frame_reg_rtx, offset,
16430 NULL_RTX, NULL_RTX);
16431 spe_offset = 0;
16432 }
16433
16434 if (saving_GPRs_inline)
16435 {
16436 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16437 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16438 {
16439 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16440 rtx offset, addr, mem;
f676971a 16441
f78c3290
NF
16442 /* We're doing all this to ensure that the offset fits into
 16443 the immediate offset field of 'evstdd'. */
16444 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16445
16446 offset = GEN_INT (reg_size * i + spe_offset);
16447 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16448 mem = gen_rtx_MEM (V2SImode, addr);
16449
16450 insn = emit_move_insn (mem, reg);
16451
16452 rs6000_frame_related (insn, spe_save_area_ptr,
16453 info->spe_gp_save_offset
16454 + sp_offset + reg_size * i,
16455 offset, const0_rtx);
16456 }
16457 }
16458 else
9ebbca7d 16459 {
f78c3290 16460 rtx par;
9ebbca7d 16461
f78c3290
NF
16462 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16463 0, reg_mode,
16464 /*savep=*/true, /*gpr=*/true,
16465 /*exitp=*/false);
16466 insn = emit_insn (par);
16467 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16468 NULL_RTX, NULL_RTX);
9ebbca7d 16469 }
f78c3290
NF
16470
16471
16472 /* Move the static chain pointer back. */
16473 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16474 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16475 }
16476 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16477 {
16478 rtx par;
16479
16480 /* Need to adjust r11 if we saved any FPRs. */
16481 if (info->first_fp_reg_save != 64)
16482 {
16483 rtx r11 = gen_rtx_REG (reg_mode, 11);
16484 rtx offset = GEN_INT (info->total_size
16485 + (-8 * (64-info->first_fp_reg_save)));
16486 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16487 ? sp_reg_rtx : r11);
16488
16489 emit_insn (TARGET_32BIT
16490 ? gen_addsi3 (r11, ptr_reg, offset)
16491 : gen_adddi3 (r11, ptr_reg, offset));
16492 }
16493
16494 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16495 info->gp_save_offset + sp_offset,
16496 reg_mode,
16497 /*savep=*/true, /*gpr=*/true,
16498 /*exitp=*/false);
16499 insn = emit_insn (par);
f676971a 16500 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16501 NULL_RTX, NULL_RTX);
16502 }
f78c3290 16503 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16504 {
308c142a 16505 rtvec p;
9ebbca7d
GK
16506 int i;
16507 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16508 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16509 {
16510 rtx addr, reg, mem;
16511 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16512 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16513 GEN_INT (info->gp_save_offset
16514 + sp_offset
9ebbca7d 16515 + reg_size * i));
0be76840 16516 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16517
16518 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16519 }
16520 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16521 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16522 NULL_RTX, NULL_RTX);
b6c9286a 16523 }
f57fe068 16524 else if (!WORLD_SAVE_P (info))
b6c9286a 16525 {
9ebbca7d
GK
16526 int i;
16527 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16528 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16529 {
16530 rtx addr, reg, mem;
16531 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16532
52ff33d0
NF
16533 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16534 GEN_INT (info->gp_save_offset
16535 + sp_offset
16536 + reg_size * i));
16537 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16538
52ff33d0
NF
16539 insn = emit_move_insn (mem, reg);
16540 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16541 NULL_RTX, NULL_RTX);
16542 }
9ebbca7d
GK
16543 }
16544
83720594
RH
16545 /* ??? There's no need to emit actual instructions here, but it's the
16546 easiest way to get the frame unwind information emitted. */
e3b5732b 16547 if (crtl->calls_eh_return)
83720594 16548 {
78e1b90d
DE
16549 unsigned int i, regno;
16550
fc4767bb
JJ
16551 /* In AIX ABI we need to pretend we save r2 here. */
16552 if (TARGET_AIX)
16553 {
16554 rtx addr, reg, mem;
16555
16556 reg = gen_rtx_REG (reg_mode, 2);
16557 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16558 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16559 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16560
16561 insn = emit_move_insn (mem, reg);
f676971a 16562 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16563 NULL_RTX, NULL_RTX);
16564 PATTERN (insn) = gen_blockage ();
16565 }
16566
83720594
RH
16567 for (i = 0; ; ++i)
16568 {
83720594
RH
16569 regno = EH_RETURN_DATA_REGNO (i);
16570 if (regno == INVALID_REGNUM)
16571 break;
16572
89e7058f
AH
16573 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16574 info->ehrd_offset + sp_offset
16575 + reg_size * (int) i,
16576 info->total_size);
83720594
RH
16577 }
16578 }
16579
9ebbca7d 16580 /* Save CR if we use any that must be preserved. */
f57fe068 16581 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16582 {
16583 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16584 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16585 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16586 /* See the large comment above about why CR2_REGNO is used. */
16587 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16588
9ebbca7d
GK
16589 /* If r12 was used to hold the original sp, copy cr into r0 now
16590 that it's free. */
16591 if (REGNO (frame_reg_rtx) == 12)
16592 {
f8a57be8
GK
16593 rtx set;
16594
9ebbca7d 16595 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16596 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16597 RTX_FRAME_RELATED_P (insn) = 1;
16598 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
bbbbb16a 16599 add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
9ebbca7d
GK
16600 }
16601 insn = emit_move_insn (mem, cr_save_rtx);
16602
f676971a 16603 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16604 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16605 }
16606
f676971a 16607 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16608 for which it was done previously. */
f57fe068 16609 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16610 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16611 {
bcb2d701 16612 if (info->total_size < 32767)
2b2c2fe5 16613 sp_offset = info->total_size;
bcb2d701
EC
16614 else
16615 frame_reg_rtx = frame_ptr_rtx;
16616 rs6000_emit_allocate_stack (info->total_size,
16617 (frame_reg_rtx != sp_reg_rtx
16618 && ((info->altivec_size != 0)
16619 || (info->vrsave_mask != 0)
f78c3290
NF
16620 )),
16621 FALSE);
bcb2d701
EC
16622 if (frame_reg_rtx != sp_reg_rtx)
16623 rs6000_emit_stack_tie ();
2b2c2fe5 16624 }
9ebbca7d
GK
16625
16626 /* Set frame pointer, if needed. */
16627 if (frame_pointer_needed)
16628 {
7d5175e1 16629 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16630 sp_reg_rtx);
16631 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16632 }
9878760c 16633
2b2c2fe5
EC
16634 /* Save AltiVec registers if needed. Save here because the red zone does
16635 not include AltiVec registers. */
16636 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16637 {
16638 int i;
16639
 16640 /* There should be a non-inline version of this, for when we
16641 are saving lots of vector registers. */
16642 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16643 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16644 {
16645 rtx areg, savereg, mem;
16646 int offset;
16647
16648 offset = info->altivec_save_offset + sp_offset
16649 + 16 * (i - info->first_altivec_reg_save);
16650
16651 savereg = gen_rtx_REG (V4SImode, i);
16652
16653 areg = gen_rtx_REG (Pmode, 0);
16654 emit_move_insn (areg, GEN_INT (offset));
16655
16656 /* AltiVec addressing mode is [reg+reg]. */
16657 mem = gen_frame_mem (V4SImode,
16658 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16659
16660 insn = emit_move_insn (mem, savereg);
16661
16662 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16663 areg, GEN_INT (offset));
16664 }
16665 }
16666
16667 /* VRSAVE is a bit vector representing which AltiVec registers
16668 are used. The OS uses this to determine which vector
16669 registers to save on a context switch. We need to save
16670 VRSAVE on the stack frame, add whatever AltiVec registers we
16671 used in this function, and do the corresponding magic in the
16672 epilogue. */
16673
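   /* Illustrative sketch (pseudo-code only, not the exact RTL emitted
      below):

	 r0 = VRSAVE;                        read the special register
	 frame[vrsave_save_offset] = r0;     keep the caller's mask
	                                     (skipped for world-save frames)
	 r0 |= info->vrsave_mask;            add the VRs used in this function
	 VRSAVE = r0;                        publish the combined mask

      The epilogue reloads the saved word so the caller's VRSAVE value is
      restored on return.  */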
16674 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16675 && info->vrsave_mask != 0)
16676 {
16677 rtx reg, mem, vrsave;
16678 int offset;
16679
16680 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16681 as frame_reg_rtx and r11 as the static chain pointer for
16682 nested functions. */
16683 reg = gen_rtx_REG (SImode, 0);
16684 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16685 if (TARGET_MACHO)
16686 emit_insn (gen_get_vrsave_internal (reg));
16687 else
16688 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16689
16690 if (!WORLD_SAVE_P (info))
16691 {
16692 /* Save VRSAVE. */
16693 offset = info->vrsave_save_offset + sp_offset;
16694 mem = gen_frame_mem (SImode,
16695 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16696 GEN_INT (offset)));
16697 insn = emit_move_insn (mem, reg);
16698 }
16699
16700 /* Include the registers in the mask. */
16701 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16702
16703 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16704 }
16705
1db02437 16706 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16707 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16708 || (DEFAULT_ABI == ABI_V4
16709 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16710 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16711 {
16712 /* If emit_load_toc_table will use the link register, we need to save
16713 it. We use R12 for this purpose because emit_load_toc_table
16714 can use register 0. This allows us to use a plain 'blr' to return
16715 from the procedure more often. */
16716 int save_LR_around_toc_setup = (TARGET_ELF
16717 && DEFAULT_ABI != ABI_AIX
16718 && flag_pic
16719 && ! info->lr_save_p
16720 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16721 if (save_LR_around_toc_setup)
16722 {
1de43f85 16723 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16724
c4ad648e 16725 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16726 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16727
c4ad648e 16728 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16729
c4ad648e 16730 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16731 RTX_FRAME_RELATED_P (insn) = 1;
16732 }
16733 else
16734 rs6000_emit_load_toc_table (TRUE);
16735 }
ee890fe2 16736
fcce224d 16737#if TARGET_MACHO
ee890fe2 16738 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16739 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16740 {
1de43f85 16741 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 16742 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 16743
6d0a8091
DJ
16744 /* Save and restore LR locally around this call (in R0). */
16745 if (!info->lr_save_p)
6fb5fa3c 16746 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16747
6fb5fa3c 16748 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16749
6fb5fa3c
DB
16750 emit_move_insn (gen_rtx_REG (Pmode,
16751 RS6000_PIC_OFFSET_TABLE_REGNUM),
16752 lr);
6d0a8091
DJ
16753
16754 if (!info->lr_save_p)
6fb5fa3c 16755 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16756 }
fcce224d 16757#endif
9ebbca7d
GK
16758}
16759
9ebbca7d 16760/* Write function prologue. */
a4f6c312 16761
08c148a8 16762static void
f676971a 16763rs6000_output_function_prologue (FILE *file,
a2369ed3 16764 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16765{
16766 rs6000_stack_t *info = rs6000_stack_info ();
16767
4697a36c
MM
16768 if (TARGET_DEBUG_STACK)
16769 debug_stack_info (info);
9878760c 16770
a4f6c312
SS
16771 /* Write .extern for any function we will call to save and restore
16772 fp values. */
16773 if (info->first_fp_reg_save < 64
16774 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16775 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16776 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16777 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16778
c764f757
RK
16779 /* Write .extern for AIX common mode routines, if needed. */
16780 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16781 {
f6709c70
JW
16782 fputs ("\t.extern __mulh\n", file);
16783 fputs ("\t.extern __mull\n", file);
16784 fputs ("\t.extern __divss\n", file);
16785 fputs ("\t.extern __divus\n", file);
16786 fputs ("\t.extern __quoss\n", file);
16787 fputs ("\t.extern __quous\n", file);
c764f757
RK
16788 common_mode_defined = 1;
16789 }
9878760c 16790
9ebbca7d 16791 if (! HAVE_prologue)
979721f8 16792 {
9ebbca7d 16793 start_sequence ();
9dda4cc8 16794
a4f6c312
SS
16795 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16796 the "toplevel" insn chain. */
2e040219 16797 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16798 rs6000_emit_prologue ();
2e040219 16799 emit_note (NOTE_INSN_DELETED);
178c3eff 16800
a3c9585f 16801 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16802 {
16803 rtx insn;
16804 unsigned addr = 0;
16805 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16806 {
16807 INSN_ADDRESSES_NEW (insn, addr);
16808 addr += 4;
16809 }
16810 }
9dda4cc8 16811
9ebbca7d 16812 if (TARGET_DEBUG_STACK)
a4f6c312 16813 debug_rtx_list (get_insns (), 100);
c9d691e9 16814 final (get_insns (), file, FALSE);
9ebbca7d 16815 end_sequence ();
979721f8
MM
16816 }
16817
9ebbca7d
GK
16818 rs6000_pic_labelno++;
16819}
f676971a 16820
1c9c5e43
AM
16821/* Non-zero if vmx regs are restored before the frame pop, zero if
16822 we restore after the pop when possible. */
16823#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16824
f78c3290
NF
16825/* Reload CR from REG. */
16826
16827static void
16828rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16829{
16830 int count = 0;
16831 int i;
16832
16833 if (using_mfcr_multiple)
16834 {
16835 for (i = 0; i < 8; i++)
16836 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16837 count++;
16838 gcc_assert (count);
16839 }
16840
16841 if (using_mfcr_multiple && count > 1)
16842 {
16843 rtvec p;
16844 int ndx;
16845
16846 p = rtvec_alloc (count);
16847
16848 ndx = 0;
16849 for (i = 0; i < 8; i++)
16850 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16851 {
16852 rtvec r = rtvec_alloc (2);
16853 RTVEC_ELT (r, 0) = reg;
16854 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16855 RTVEC_ELT (p, ndx) =
16856 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16857 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16858 ndx++;
16859 }
16860 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16861 gcc_assert (ndx == count);
16862 }
16863 else
16864 for (i = 0; i < 8; i++)
16865 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16866 {
16867 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16868 CR0_REGNO+i),
16869 reg));
16870 }
16871}
16872
9ebbca7d 16873/* Emit function epilogue as insns.
9878760c 16874
9ebbca7d
GK
16875 At present, dwarf2out_frame_debug_expr doesn't understand
16876 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16877 anywhere in the epilogue. Most of the insns below would in any case
16878 need special notes to explain where r11 is in relation to the stack. */
9878760c 16879
9ebbca7d 16880void
a2369ed3 16881rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16882{
16883 rs6000_stack_t *info;
f78c3290 16884 int restoring_GPRs_inline;
9ebbca7d
GK
16885 int restoring_FPRs_inline;
16886 int using_load_multiple;
d296e02e 16887 int using_mtcr_multiple;
9ebbca7d 16888 int use_backchain_to_restore_sp;
f78c3290
NF
16889 int restore_lr;
16890 int strategy;
9ebbca7d
GK
16891 int sp_offset = 0;
16892 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16893 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16894 enum machine_mode reg_mode = Pmode;
327e5343 16895 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16896 int i;
16897
c19de7aa
AH
16898 info = rs6000_stack_info ();
16899
16900 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16901 {
16902 reg_mode = V2SImode;
16903 reg_size = 8;
16904 }
16905
f78c3290
NF
16906 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16907 /*static_chain_p=*/0, sibcall);
16908 using_load_multiple = strategy & SAVRES_MULTIPLE;
16909 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16910 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16911 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16912 || rs6000_cpu == PROCESSOR_PPC603
16913 || rs6000_cpu == PROCESSOR_PPC750
16914 || optimize_size);
1c9c5e43
AM
16915 /* Restore via the backchain when we have a large frame, since this
16916 is more efficient than an addis, addi pair. The second condition
 16917 here will not trigger at the moment; we don't actually need a
16918 frame pointer for alloca, but the generic parts of the compiler
16919 give us one anyway. */
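   /* For example (illustrative numbers): with a 40000-byte frame the
      caller's sp is a single load away through the back chain word at
      0(r1), whereas recomputing it as sp + 40000 would take an addis/addi
      pair, since 40000 does not fit in a signed 16-bit immediate.  */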
16920 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16921 || info->total_size
16922 + (info->lr_save_p ? info->lr_save_offset : 0)
16923 > 32767
1c9c5e43
AM
16924 || (cfun->calls_alloca
16925 && !frame_pointer_needed));
f78c3290
NF
16926 restore_lr = (info->lr_save_p
16927 && restoring_GPRs_inline
16928 && restoring_FPRs_inline);
9ebbca7d 16929
f57fe068 16930 if (WORLD_SAVE_P (info))
d62294f5
FJ
16931 {
16932 int i, j;
16933 char rname[30];
16934 const char *alloc_rname;
16935 rtvec p;
16936
16937 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
 16938 stack slot (which is not likely to be our caller).
16939 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16940 rest_world is similar, except any R10 parameter is ignored.
16941 The exception-handling stuff that was here in 2.95 is no
16942 longer necessary. */
d62294f5
FJ
16943
16944 p = rtvec_alloc (9
16945 + 1
f676971a 16946 + 32 - info->first_gp_reg_save
c4ad648e
AM
16947 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16948 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16949
e3b5732b 16950 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16951 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16952 alloc_rname = ggc_strdup (rname);
16953
16954 j = 0;
16955 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16956 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16957 gen_rtx_REG (Pmode,
1de43f85 16958 LR_REGNO));
d62294f5 16959 RTVEC_ELT (p, j++)
c4ad648e 16960 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16961 /* The instruction pattern requires a clobber here;
c4ad648e 16962 it is shared with the restVEC helper. */
d62294f5 16963 RTVEC_ELT (p, j++)
c4ad648e 16964 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16965
16966 {
c4ad648e
AM
16967 /* CR register traditionally saved as CR2. */
16968 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16969 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16970 GEN_INT (info->cr_save_offset));
0be76840 16971 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16972
16973 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16974 }
16975
16976 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16977 {
16978 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16979 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16980 GEN_INT (info->gp_save_offset
16981 + reg_size * i));
0be76840 16982 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16983
16984 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16985 }
d62294f5 16986 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16987 {
16988 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16989 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16990 GEN_INT (info->altivec_save_offset
16991 + 16 * i));
0be76840 16992 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16993
16994 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16995 }
d62294f5 16996 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 16997 {
696e45ba
ME
16998 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16999 ? DFmode : SFmode),
17000 info->first_fp_reg_save + i);
c4ad648e
AM
17001 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17002 GEN_INT (info->fp_save_offset
17003 + 8 * i));
696e45ba
ME
17004 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17005 ? DFmode : SFmode), addr);
c4ad648e
AM
17006
17007 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17008 }
d62294f5 17009 RTVEC_ELT (p, j++)
c4ad648e 17010 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 17011 RTVEC_ELT (p, j++)
c4ad648e 17012 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 17013 RTVEC_ELT (p, j++)
c4ad648e 17014 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 17015 RTVEC_ELT (p, j++)
c4ad648e 17016 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 17017 RTVEC_ELT (p, j++)
c4ad648e 17018 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
17019 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17020
17021 return;
17022 }
17023
45b194f8
AM
17024 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17025 if (info->push_p)
2b2c2fe5 17026 sp_offset = info->total_size;
f676971a 17027
e6477eaa
AM
17028 /* Restore AltiVec registers if we must do so before adjusting the
17029 stack. */
17030 if (TARGET_ALTIVEC_ABI
17031 && info->altivec_size != 0
1c9c5e43
AM
17032 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17033 || (DEFAULT_ABI != ABI_V4
17034 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
9aa86737
AH
17035 {
17036 int i;
17037
e6477eaa
AM
17038 if (use_backchain_to_restore_sp)
17039 {
17040 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17041 emit_move_insn (frame_reg_rtx,
17042 gen_rtx_MEM (Pmode, sp_reg_rtx));
17043 sp_offset = 0;
17044 }
1c9c5e43
AM
17045 else if (frame_pointer_needed)
17046 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17047
9aa86737
AH
17048 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17049 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17050 {
17051 rtx addr, areg, mem;
17052
17053 areg = gen_rtx_REG (Pmode, 0);
17054 emit_move_insn
17055 (areg, GEN_INT (info->altivec_save_offset
17056 + sp_offset
17057 + 16 * (i - info->first_altivec_reg_save)));
17058
17059 /* AltiVec addressing mode is [reg+reg]. */
17060 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17061 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
17062
17063 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17064 }
17065 }
17066
e6477eaa
AM
17067 /* Restore VRSAVE if we must do so before adjusting the stack. */
17068 if (TARGET_ALTIVEC
17069 && TARGET_ALTIVEC_VRSAVE
17070 && info->vrsave_mask != 0
1c9c5e43
AM
17071 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17072 || (DEFAULT_ABI != ABI_V4
17073 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
e6477eaa
AM
17074 {
17075 rtx addr, mem, reg;
17076
1c9c5e43 17077 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17078 {
1c9c5e43
AM
17079 if (use_backchain_to_restore_sp)
17080 {
17081 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17082 emit_move_insn (frame_reg_rtx,
17083 gen_rtx_MEM (Pmode, sp_reg_rtx));
17084 sp_offset = 0;
17085 }
17086 else if (frame_pointer_needed)
17087 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
17088 }
17089
17090 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17091 GEN_INT (info->vrsave_save_offset + sp_offset));
17092 mem = gen_frame_mem (SImode, addr);
17093 reg = gen_rtx_REG (SImode, 12);
17094 emit_move_insn (reg, mem);
17095
17096 emit_insn (generate_set_vrsave (reg, info, 1));
17097 }
17098
1c9c5e43
AM
17099 /* If we have a large stack frame, restore the old stack pointer
17100 using the backchain. */
2b2c2fe5
EC
17101 if (use_backchain_to_restore_sp)
17102 {
1c9c5e43 17103 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
17104 {
17105 /* Under V.4, don't reset the stack pointer until after we're done
17106 loading the saved registers. */
17107 if (DEFAULT_ABI == ABI_V4)
17108 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17109
17110 emit_move_insn (frame_reg_rtx,
17111 gen_rtx_MEM (Pmode, sp_reg_rtx));
17112 sp_offset = 0;
17113 }
1c9c5e43
AM
17114 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17115 && DEFAULT_ABI == ABI_V4)
17116 /* frame_reg_rtx has been set up by the altivec restore. */
17117 ;
17118 else
17119 {
17120 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
17121 frame_reg_rtx = sp_reg_rtx;
17122 }
17123 }
17124 /* If we have a frame pointer, we can restore the old stack pointer
17125 from it. */
17126 else if (frame_pointer_needed)
17127 {
17128 frame_reg_rtx = sp_reg_rtx;
17129 if (DEFAULT_ABI == ABI_V4)
17130 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17131
17132 emit_insn (TARGET_32BIT
17133 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17134 GEN_INT (info->total_size))
17135 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17136 GEN_INT (info->total_size)));
17137 sp_offset = 0;
2b2c2fe5 17138 }
45b194f8
AM
17139 else if (info->push_p
17140 && DEFAULT_ABI != ABI_V4
e3b5732b 17141 && !crtl->calls_eh_return)
2b2c2fe5 17142 {
45b194f8
AM
17143 emit_insn (TARGET_32BIT
17144 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
17145 GEN_INT (info->total_size))
17146 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
17147 GEN_INT (info->total_size)));
17148 sp_offset = 0;
2b2c2fe5
EC
17149 }
17150
e6477eaa 17151 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
17152 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17153 && TARGET_ALTIVEC_ABI
e6477eaa
AM
17154 && info->altivec_size != 0
17155 && (DEFAULT_ABI == ABI_V4
17156 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
17157 {
17158 int i;
17159
17160 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17161 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17162 {
17163 rtx addr, areg, mem;
17164
17165 areg = gen_rtx_REG (Pmode, 0);
17166 emit_move_insn
17167 (areg, GEN_INT (info->altivec_save_offset
17168 + sp_offset
17169 + 16 * (i - info->first_altivec_reg_save)));
17170
17171 /* AltiVec addressing mode is [reg+reg]. */
17172 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17173 mem = gen_frame_mem (V4SImode, addr);
17174
17175 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17176 }
17177 }
17178
17179 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
17180 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17181 && TARGET_ALTIVEC
e6477eaa
AM
17182 && TARGET_ALTIVEC_VRSAVE
17183 && info->vrsave_mask != 0
17184 && (DEFAULT_ABI == ABI_V4
17185 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
554c2941
AM
17186 {
17187 rtx addr, mem, reg;
17188
17189 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17190 GEN_INT (info->vrsave_save_offset + sp_offset));
17191 mem = gen_frame_mem (SImode, addr);
17192 reg = gen_rtx_REG (SImode, 12);
17193 emit_move_insn (reg, mem);
17194
17195 emit_insn (generate_set_vrsave (reg, info, 1));
17196 }
17197
f78c3290
NF
17198 /* Get the old lr if we saved it. If we are restoring registers
17199 out-of-line, then the out-of-line routines can do this for us. */
17200 if (restore_lr)
b6c9286a 17201 {
a3170dc6
AH
17202 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17203 info->lr_save_offset + sp_offset);
ba4828e0 17204
9ebbca7d 17205 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17206 }
f676971a 17207
9ebbca7d
GK
17208 /* Get the old cr if we saved it. */
17209 if (info->cr_save_p)
17210 {
17211 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17212 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17213 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17214
9ebbca7d
GK
17215 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17216 }
f676971a 17217
9ebbca7d 17218 /* Set LR here to try to overlap restores below. */
f78c3290 17219 if (restore_lr)
1de43f85 17220 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17221 gen_rtx_REG (Pmode, 0));
f676971a 17222
83720594 17223 /* Load exception handler data registers, if needed. */
e3b5732b 17224 if (crtl->calls_eh_return)
83720594 17225 {
78e1b90d
DE
17226 unsigned int i, regno;
17227
fc4767bb
JJ
17228 if (TARGET_AIX)
17229 {
17230 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17231 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17232 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17233
17234 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17235 }
17236
83720594
RH
17237 for (i = 0; ; ++i)
17238 {
a3170dc6 17239 rtx mem;
83720594
RH
17240
17241 regno = EH_RETURN_DATA_REGNO (i);
17242 if (regno == INVALID_REGNUM)
17243 break;
17244
a3170dc6
AH
17245 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17246 info->ehrd_offset + sp_offset
17247 + reg_size * (int) i);
83720594
RH
17248
17249 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17250 }
17251 }
f676971a 17252
9ebbca7d
GK
17253 /* Restore GPRs. This is done as a PARALLEL if we are using
17254 the load-multiple instructions. */
f78c3290
NF
17255 if (TARGET_SPE_ABI
17256 && info->spe_64bit_regs_used != 0
17257 && info->first_gp_reg_save != 32)
52ff33d0 17258 {
52ff33d0
NF
17259 /* Determine whether we can address all of the registers that need
17260 to be saved with an offset from the stack pointer that fits in
17261 the small const field for SPE memory instructions. */
17262 int spe_regs_addressable_via_sp
f78c3290
NF
17263 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17264 + (32 - info->first_gp_reg_save - 1) * reg_size)
17265 && restoring_GPRs_inline);
52ff33d0
NF
17266 int spe_offset;
17267
17268 if (spe_regs_addressable_via_sp)
45b194f8 17269 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17270 else
17271 {
45b194f8 17272 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17273 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17274 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17275 There's no need to worry here because the static chain is passed
17276 anew to every function. */
f78c3290
NF
17277 int ool_adjust = (restoring_GPRs_inline
17278 ? 0
17279 : (info->first_gp_reg_save
17280 - (FIRST_SAVRES_REGISTER+1))*8);
17281
45b194f8
AM
17282 if (frame_reg_rtx == sp_reg_rtx)
17283 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17284 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17285 GEN_INT (info->spe_gp_save_offset
17286 + sp_offset
17287 - ool_adjust)));
45b194f8
AM
17288 /* Keep the invariant that frame_reg_rtx + sp_offset points
17289 at the top of the stack frame. */
17290 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17291
17292 spe_offset = 0;
17293 }
17294
f78c3290
NF
17295 if (restoring_GPRs_inline)
17296 {
17297 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17298 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17299 {
17300 rtx offset, addr, mem;
52ff33d0 17301
f78c3290
NF
17302 /* We're doing all this to ensure that the immediate offset
17303 fits into the immediate field of 'evldd'. */
17304 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17305
f78c3290
NF
17306 offset = GEN_INT (spe_offset + reg_size * i);
17307 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17308 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17309
f78c3290
NF
17310 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17311 mem);
17312 }
17313 }
17314 else
17315 {
17316 rtx par;
17317
17318 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17319 0, reg_mode,
17320 /*savep=*/false, /*gpr=*/true,
17321 /*exitp=*/true);
17322 emit_jump_insn (par);
17323
17324 /* We don't want anybody else emitting things after we jumped
17325 back. */
17326 return;
17327 }
52ff33d0 17328 }
f78c3290
NF
17329 else if (!restoring_GPRs_inline)
17330 {
17331 /* We are jumping to an out-of-line function. */
17332 bool can_use_exit = info->first_fp_reg_save == 64;
17333 rtx par;
17334
17335 /* Emit stack reset code if we need it. */
17336 if (can_use_exit)
17337 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17338 sp_offset, can_use_exit);
17339 else
17340 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17341 sp_reg_rtx,
17342 GEN_INT (sp_offset - info->fp_size)));
17343
17344 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17345 info->gp_save_offset, reg_mode,
17346 /*savep=*/false, /*gpr=*/true,
17347 /*exitp=*/can_use_exit);
17348
17349 if (can_use_exit)
17350 {
17351 if (info->cr_save_p)
17352 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17353 using_mtcr_multiple);
17354
17355 emit_jump_insn (par);
17356
17357 /* We don't want anybody else emitting things after we jumped
17358 back. */
17359 return;
17360 }
17361 else
17362 emit_insn (par);
17363 }
17364 else if (using_load_multiple)
17365 {
17366 rtvec p;
17367 p = rtvec_alloc (32 - info->first_gp_reg_save);
17368 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17369 {
f676971a
EC
17370 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17371 GEN_INT (info->gp_save_offset
17372 + sp_offset
9ebbca7d 17373 + reg_size * i));
0be76840 17374 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17375
f78c3290
NF
17376 RTVEC_ELT (p, i) =
17377 gen_rtx_SET (VOIDmode,
17378 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17379 mem);
9ebbca7d 17380 }
f78c3290
NF
17381 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17382 }
17383 else
17384 {
17385 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17386 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17387 {
17388 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17389 GEN_INT (info->gp_save_offset
17390 + sp_offset
17391 + reg_size * i));
17392 rtx mem = gen_frame_mem (reg_mode, addr);
17393
17394 emit_move_insn (gen_rtx_REG (reg_mode,
17395 info->first_gp_reg_save + i), mem);
17396 }
17397 }
9878760c 17398
9ebbca7d
GK
17399 /* Restore fpr's if we need to do it without calling a function. */
17400 if (restoring_FPRs_inline)
17401 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17402 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17403 && ! call_used_regs[info->first_fp_reg_save+i]))
17404 {
17405 rtx addr, mem;
17406 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17407 GEN_INT (info->fp_save_offset
17408 + sp_offset
a4f6c312 17409 + 8 * i));
696e45ba
ME
17410 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17411 ? DFmode : SFmode), addr);
9ebbca7d 17412
696e45ba
ME
17413 emit_move_insn (gen_rtx_REG (((TARGET_HARD_FLOAT
17414 && TARGET_DOUBLE_FLOAT)
17415 ? DFmode : SFmode),
9ebbca7d
GK
17416 info->first_fp_reg_save + i),
17417 mem);
17418 }
8d30c4ee 17419
9ebbca7d
GK
17420 /* If we saved cr, restore it here. Just those that were used. */
17421 if (info->cr_save_p)
f78c3290 17422 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17423
9ebbca7d 17424 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17425 have been done. */
f78c3290
NF
17426 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17427 sp_offset, !restoring_FPRs_inline);
b6c9286a 17428
e3b5732b 17429 if (crtl->calls_eh_return)
83720594
RH
17430 {
17431 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17432 emit_insn (TARGET_32BIT
83720594
RH
17433 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17434 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17435 }
17436
9ebbca7d
GK
17437 if (!sibcall)
17438 {
17439 rtvec p;
17440 if (! restoring_FPRs_inline)
f78c3290 17441 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17442 else
17443 p = rtvec_alloc (2);
b6c9286a 17444
e35b9579 17445 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17446 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17447 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17448 : gen_rtx_CLOBBER (VOIDmode,
17449 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17450
17451 /* If we have to restore more than two FP registers, branch to the
17452 restore function. It will return to our caller. */
17453 if (! restoring_FPRs_inline)
17454 {
17455 int i;
f78c3290
NF
17456 rtx sym;
17457
17458 sym = rs6000_savres_routine_sym (info,
17459 /*savep=*/false,
17460 /*gpr=*/false,
17461 /*exitp=*/true);
17462 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17463 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17464 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17465 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17466 {
17467 rtx addr, mem;
17468 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17469 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17470 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17471
f78c3290 17472 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17473 gen_rtx_SET (VOIDmode,
17474 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17475 mem);
b6c9286a
MM
17476 }
17477 }
f676971a 17478
9ebbca7d 17479 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17480 }
9878760c
RK
17481}
17482
17483/* Write function epilogue. */
17484
08c148a8 17485static void
f676971a 17486rs6000_output_function_epilogue (FILE *file,
a2369ed3 17487 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17488{
9ebbca7d 17489 if (! HAVE_epilogue)
9878760c 17490 {
9ebbca7d
GK
17491 rtx insn = get_last_insn ();
17492 /* If the last insn was a BARRIER, we don't have to write anything except
17493 the trace table. */
17494 if (GET_CODE (insn) == NOTE)
17495 insn = prev_nonnote_insn (insn);
17496 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17497 {
9ebbca7d
GK
17498 /* This is slightly ugly, but at least we don't have two
17499 copies of the epilogue-emitting code. */
17500 start_sequence ();
17501
17502 /* A NOTE_INSN_DELETED is supposed to be at the start
17503 and end of the "toplevel" insn chain. */
2e040219 17504 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17505 rs6000_emit_epilogue (FALSE);
2e040219 17506 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17507
a3c9585f 17508 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17509 {
17510 rtx insn;
17511 unsigned addr = 0;
17512 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17513 {
17514 INSN_ADDRESSES_NEW (insn, addr);
17515 addr += 4;
17516 }
17517 }
17518
9ebbca7d 17519 if (TARGET_DEBUG_STACK)
a4f6c312 17520 debug_rtx_list (get_insns (), 100);
c9d691e9 17521 final (get_insns (), file, FALSE);
9ebbca7d 17522 end_sequence ();
4697a36c 17523 }
9878760c 17524 }
b4ac57ab 17525
efdba735
SH
17526#if TARGET_MACHO
17527 macho_branch_islands ();
0e5da0be
GK
17528 /* Mach-O doesn't support labels at the end of objects, so if
17529 it looks like we might want one, insert a NOP. */
17530 {
17531 rtx insn = get_last_insn ();
17532 while (insn
17533 && NOTE_P (insn)
a38e7aa5 17534 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17535 insn = PREV_INSN (insn);
f676971a
EC
17536 if (insn
17537 && (LABEL_P (insn)
0e5da0be 17538 || (NOTE_P (insn)
a38e7aa5 17539 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17540 fputs ("\tnop\n", file);
17541 }
17542#endif
17543
9b30bae2 17544 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17545 on its format.
17546
17547 We don't output a traceback table if -finhibit-size-directive was
17548 used. The documentation for -finhibit-size-directive reads
17549 ``don't output a @code{.size} assembler directive, or anything
17550 else that would cause trouble if the function is split in the
17551 middle, and the two halves are placed at locations far apart in
17552 memory.'' The traceback table has this property, since it
17553 includes the offset from the start of the function to the
4d30c363
MM
17554 traceback table itself.
17555
 17556 The System V.4 PowerPC ABI (and the embedded ABI derived from it) uses a
b6c9286a 17557 different traceback table. */
57ac7be9 17558 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
3c072c6b 17559 && rs6000_traceback != traceback_none && !cfun->is_thunk)
9b30bae2 17560 {
69c75916 17561 const char *fname = NULL;
3ac88239 17562 const char *language_string = lang_hooks.name;
6041bf2f 17563 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17564 int i;
57ac7be9 17565 int optional_tbtab;
8097c268 17566 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17567
17568 if (rs6000_traceback == traceback_full)
17569 optional_tbtab = 1;
17570 else if (rs6000_traceback == traceback_part)
17571 optional_tbtab = 0;
17572 else
17573 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17574
69c75916
AM
17575 if (optional_tbtab)
17576 {
17577 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17578 while (*fname == '.') /* V.4 encodes . in the name */
17579 fname++;
17580
17581 /* Need label immediately before tbtab, so we can compute
17582 its offset from the function start. */
17583 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17584 ASM_OUTPUT_LABEL (file, fname);
17585 }
314fc5a9
ILT
17586
17587 /* The .tbtab pseudo-op can only be used for the first eight
17588 expressions, since it can't handle the possibly variable
17589 length fields that follow. However, if you omit the optional
17590 fields, the assembler outputs zeros for all optional fields
 17591 anyway, giving each variable-length field its minimum length
 17592 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17593 pseudo-op at all. */
17594
17595 /* An all-zero word flags the start of the tbtab, for debuggers
17596 that have to find it by searching forward from the entry
17597 point or from the current pc. */
19d2d16f 17598 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17599
17600 /* Tbtab format type. Use format type 0. */
19d2d16f 17601 fputs ("\t.byte 0,", file);
314fc5a9 17602
5fc921c1
DE
17603 /* Language type. Unfortunately, there does not seem to be any
17604 official way to discover the language being compiled, so we
17605 use language_string.
17606 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17607 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17608 a number, so for now use 9. */
5fc921c1 17609 if (! strcmp (language_string, "GNU C"))
314fc5a9 17610 i = 0;
6de9cd9a 17611 else if (! strcmp (language_string, "GNU F77")
7f62878c 17612 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17613 i = 1;
8b83775b 17614 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17615 i = 2;
5fc921c1
DE
17616 else if (! strcmp (language_string, "GNU Ada"))
17617 i = 3;
56438901
AM
17618 else if (! strcmp (language_string, "GNU C++")
17619 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17620 i = 9;
9517ead8
AG
17621 else if (! strcmp (language_string, "GNU Java"))
17622 i = 13;
5fc921c1
DE
17623 else if (! strcmp (language_string, "GNU Objective-C"))
17624 i = 14;
314fc5a9 17625 else
37409796 17626 gcc_unreachable ();
314fc5a9
ILT
17627 fprintf (file, "%d,", i);
17628
17629 /* 8 single bit fields: global linkage (not set for C extern linkage,
17630 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17631 from start of procedure stored in tbtab, internal function, function
17632 has controlled storage, function has no toc, function uses fp,
17633 function logs/aborts fp operations. */
17634 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17635 fprintf (file, "%d,",
17636 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
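   /* Illustrative example: with a full traceback table requested and at
      least one FPR saved, this prints "34," (0x20 | 0x02); none of the
      other six flag bits are ever set by this statement.  */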
314fc5a9
ILT
17637
17638 /* 6 bitfields: function is interrupt handler, name present in
17639 proc table, function calls alloca, on condition directives
17640 (controls stack walks, 3 bits), saves condition reg, saves
17641 link reg. */
17642 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17643 set up as a frame pointer, even when there is no alloca call. */
17644 fprintf (file, "%d,",
6041bf2f
DE
17645 ((optional_tbtab << 6)
17646 | ((optional_tbtab & frame_pointer_needed) << 5)
17647 | (info->cr_save_p << 1)
17648 | (info->lr_save_p)));
314fc5a9 17649
6041bf2f 17650 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17651 (6 bits). */
17652 fprintf (file, "%d,",
4697a36c 17653 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17654
17655 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17656 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17657
6041bf2f
DE
17658 if (optional_tbtab)
17659 {
17660 /* Compute the parameter info from the function decl argument
17661 list. */
17662 tree decl;
17663 int next_parm_info_bit = 31;
314fc5a9 17664
6041bf2f
DE
17665 for (decl = DECL_ARGUMENTS (current_function_decl);
17666 decl; decl = TREE_CHAIN (decl))
17667 {
17668 rtx parameter = DECL_INCOMING_RTL (decl);
17669 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17670
6041bf2f
DE
17671 if (GET_CODE (parameter) == REG)
17672 {
ebb109ad 17673 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17674 {
17675 int bits;
17676
17677 float_parms++;
17678
37409796
NS
17679 switch (mode)
17680 {
17681 case SFmode:
e41b2a33 17682 case SDmode:
37409796
NS
17683 bits = 0x2;
17684 break;
17685
17686 case DFmode:
7393f7f8 17687 case DDmode:
37409796 17688 case TFmode:
7393f7f8 17689 case TDmode:
37409796
NS
17690 bits = 0x3;
17691 break;
17692
17693 default:
17694 gcc_unreachable ();
17695 }
6041bf2f
DE
17696
17697 /* If only one bit will fit, don't or in this entry. */
17698 if (next_parm_info_bit > 0)
17699 parm_info |= (bits << (next_parm_info_bit - 1));
17700 next_parm_info_bit -= 2;
17701 }
17702 else
17703 {
17704 fixed_parms += ((GET_MODE_SIZE (mode)
17705 + (UNITS_PER_WORD - 1))
17706 / UNITS_PER_WORD);
17707 next_parm_info_bit -= 1;
17708 }
17709 }
17710 }
17711 }
314fc5a9
ILT
17712
17713 /* Number of fixed point parameters. */
17714 /* This is actually the number of words of fixed point parameters; thus
17715 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17716 fprintf (file, "%d,", fixed_parms);
17717
17718 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17719 all on stack. */
17720 /* This is actually the number of fp registers that hold parameters;
17721 and thus the maximum value is 13. */
17722 /* Set parameters on stack bit if parameters are not in their original
17723 registers, regardless of whether they are on the stack? Xlc
17724 seems to set the bit when not optimizing. */
17725 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17726
6041bf2f
DE
17727 if (! optional_tbtab)
17728 return;
17729
314fc5a9
ILT
17730 /* Optional fields follow. Some are variable length. */
17731
17732 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17733 11 double float. */
17734 /* There is an entry for each parameter in a register, in the order that
17735 they occur in the parameter list. Any intervening arguments on the
17736 stack are ignored. If the list overflows a long (max possible length
17737 34 bits) then completely leave off all elements that don't fit. */
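   /* Illustrative example: for a function whose register parameters are
      (double, int, float), the packing loop above produces bits 31-30 = 11
      (double), bit 29 = 0 (fixed), bits 28-27 = 10 (single float), so
      parm_info has the bit pattern 0xD0000000, with fixed_parms == 1 and
      float_parms == 2.  */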
17738 /* Only emit this long if there was at least one parameter. */
17739 if (fixed_parms || float_parms)
17740 fprintf (file, "\t.long %d\n", parm_info);
17741
17742 /* Offset from start of code to tb table. */
19d2d16f 17743 fputs ("\t.long ", file);
314fc5a9 17744 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17745 if (TARGET_AIX)
17746 RS6000_OUTPUT_BASENAME (file, fname);
17747 else
17748 assemble_name (file, fname);
17749 putc ('-', file);
17750 rs6000_output_function_entry (file, fname);
19d2d16f 17751 putc ('\n', file);
314fc5a9
ILT
17752
17753 /* Interrupt handler mask. */
17754 /* Omit this long, since we never set the interrupt handler bit
17755 above. */
17756
17757 /* Number of CTL (controlled storage) anchors. */
17758 /* Omit this long, since the has_ctl bit is never set above. */
17759
17760 /* Displacement into stack of each CTL anchor. */
17761 /* Omit this list of longs, because there are no CTL anchors. */
17762
17763 /* Length of function name. */
69c75916
AM
17764 if (*fname == '*')
17765 ++fname;
296b8152 17766 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17767
17768 /* Function name. */
17769 assemble_string (fname, strlen (fname));
17770
17771 /* Register for alloca automatic storage; this is always reg 31.
17772 Only emit this if the alloca bit was set above. */
17773 if (frame_pointer_needed)
19d2d16f 17774 fputs ("\t.byte 31\n", file);
b1765bde
DE
17775
17776 fputs ("\t.align 2\n", file);
9b30bae2 17777 }
9878760c 17778}
17167fd8 17779\f
a4f6c312
SS
17780/* A C compound statement that outputs the assembler code for a thunk
17781 function, used to implement C++ virtual function calls with
17782 multiple inheritance. The thunk acts as a wrapper around a virtual
17783 function, adjusting the implicit object parameter before handing
17784 control off to the real function.
17785
17786 First, emit code to add the integer DELTA to the location that
17787 contains the incoming first argument. Assume that this argument
17788 contains a pointer, and is the one used to pass the `this' pointer
17789 in C++. This is the incoming argument *before* the function
17790 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17791 values of all other incoming arguments.
17167fd8
MM
17792
17793 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17794 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17795 not touch the return address. Hence returning from FUNCTION will
17796 return to whoever called the current `thunk'.
17167fd8 17797
a4f6c312
SS
17798 The effect must be as if FUNCTION had been called directly with the
17799 adjusted first argument. This macro is responsible for emitting
17800 all of the code for a thunk function; output_function_prologue()
17801 and output_function_epilogue() are not invoked.
17167fd8 17802
a4f6c312
SS
17803 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17804 been extracted from it.) It might possibly be useful on some
17805 targets, but probably not.
17167fd8 17806
a4f6c312
SS
17807 If you do not define this macro, the target-independent code in the
17808 C++ frontend will generate a less efficient heavyweight thunk that
17809 calls FUNCTION instead of jumping to it. The generic approach does
17810 not support varargs. */
17167fd8 17811
3961e8fe 17812static void
f676971a
EC
17813rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17814 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17815 tree function)
17167fd8 17816{
0a2aaacc 17817 rtx this_rtx, insn, funexp;
17167fd8 17818
5b71a4e7 17819 reload_completed = 1;
fe3ad572 17820 epilogue_completed = 1;
56a7189a 17821
5b71a4e7 17822 /* Mark the end of the (empty) prologue. */
2e040219 17823 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17824
5b71a4e7
DE
17825 /* Find the "this" pointer. If the function returns a structure,
17826 the structure return pointer is in r3. */
61f71b34 17827 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17828 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17829 else
0a2aaacc 17830 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17831
5b71a4e7
DE
17832 /* Apply the constant offset, if required. */
17833 if (delta)
17834 {
17835 rtx delta_rtx = GEN_INT (delta);
17836 emit_insn (TARGET_32BIT
0a2aaacc
KG
17837 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17838 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
17839 }
17840
5b71a4e7
DE
17841 /* Apply the offset from the vtable, if required. */
17842 if (vcall_offset)
17167fd8 17843 {
5b71a4e7
DE
17844 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17845 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17846
0a2aaacc 17847 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
eeff9307
JJ
17848 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17849 {
17850 emit_insn (TARGET_32BIT
17851 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17852 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17853 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17854 }
17855 else
17856 {
17857 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17858
17859 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17860 }
5b71a4e7 17861 emit_insn (TARGET_32BIT
0a2aaacc
KG
17862 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17863 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
17864 }
17865
5b71a4e7
DE
17866 /* Generate a tail call to the target function. */
17867 if (!TREE_USED (function))
17868 {
17869 assemble_external (function);
17870 TREE_USED (function) = 1;
17871 }
17872 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17873 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17874
17875#if TARGET_MACHO
ab82a49f 17876 if (MACHOPIC_INDIRECT)
5b71a4e7 17877 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17878#endif
5b71a4e7
DE
17879
17880 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17881 generate sibcall RTL explicitly. */
5b71a4e7
DE
17882 insn = emit_call_insn (
17883 gen_rtx_PARALLEL (VOIDmode,
17884 gen_rtvec (4,
17885 gen_rtx_CALL (VOIDmode,
17886 funexp, const0_rtx),
17887 gen_rtx_USE (VOIDmode, const0_rtx),
17888 gen_rtx_USE (VOIDmode,
17889 gen_rtx_REG (SImode,
1de43f85 17890 LR_REGNO)),
5b71a4e7
DE
17891 gen_rtx_RETURN (VOIDmode))));
17892 SIBLING_CALL_P (insn) = 1;
17893 emit_barrier ();
17894
17895 /* Run just enough of rest_of_compilation to get the insns emitted.
17896 There's not really enough bulk here to make other passes such as
17897 instruction scheduling worthwhile. Note that use_thunk calls
17898 assemble_start_function and assemble_end_function. */
17899 insn = get_insns ();
55e092c4 17900 insn_locators_alloc ();
5b71a4e7
DE
17901 shorten_branches (insn);
17902 final_start_function (insn, file, 1);
c9d691e9 17903 final (insn, file, 1);
5b71a4e7 17904 final_end_function ();
d7087dd2 17905 free_after_compilation (cfun);
5b71a4e7
DE
17906
17907 reload_completed = 0;
fe3ad572 17908 epilogue_completed = 0;
9ebbca7d 17909}
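
/* Illustrative sketch (hypothetical helper, not used elsewhere): the RTL
   built above behaves like the following hand-written adjustor thunk.
   The names example_adjustor_thunk, real_method, EXAMPLE_DELTA and
   EXAMPLE_VCALL_OFFSET stand in for the thunk, FUNCTION, DELTA and
   VCALL_OFFSET respectively.  */

#define EXAMPLE_DELTA 8
#define EXAMPLE_VCALL_OFFSET 16

extern int real_method (void *this_ptr, int arg);

static int
example_adjustor_thunk (void *this_ptr, int arg)
{
  /* Apply the constant offset to the incoming "this" pointer.  */
  char *p = (char *) this_ptr + EXAMPLE_DELTA;

  if (EXAMPLE_VCALL_OFFSET != 0)
    {
      /* Load the vtable pointer from the adjusted object, then add the
         adjustment stored EXAMPLE_VCALL_OFFSET bytes into the vtable
         (pointer-sized on the real target; long is used here only to
         keep the sketch simple).  */
      char *vtable = *(char **) p;
      p += *(long *) (vtable + EXAMPLE_VCALL_OFFSET);
    }

  /* The real thunk jumps to FUNCTION (a sibcall), creating no new frame.  */
  return real_method ((void *) p, arg);
}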
9ebbca7d
GK
17910\f
17911/* A quick summary of the various types of 'constant-pool tables'
17912 under PowerPC:
17913
f676971a 17914 Target Flags Name One table per
9ebbca7d
GK
17915 AIX (none) AIX TOC object file
17916 AIX -mfull-toc AIX TOC object file
17917 AIX -mminimal-toc AIX minimal TOC translation unit
17918 SVR4/EABI (none) SVR4 SDATA object file
17919 SVR4/EABI -fpic SVR4 pic object file
17920 SVR4/EABI -fPIC SVR4 PIC translation unit
17921 SVR4/EABI -mrelocatable EABI TOC function
17922 SVR4/EABI -maix AIX TOC object file
f676971a 17923 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17924 AIX minimal TOC translation unit
17925
17926 Name             Reg.  Set by  entries   contains:
17927                                made by   addrs?  fp?      sum?
17928
17929 AIX TOC          2     crt0    as        Y       option   option
17930 AIX minimal TOC  30    prolog  gcc       Y       Y        option
17931 SVR4 SDATA       13    crt0    gcc       N       Y        N
17932 SVR4 pic         30    prolog  ld        Y       not yet  N
17933 SVR4 PIC         30    prolog  gcc       Y       option   option
17934 EABI TOC         30    prolog  gcc       Y       option   option
17935
17936*/
17937
9ebbca7d
GK
17938/* Hash functions for the hash table. */
17939
17940static unsigned
a2369ed3 17941rs6000_hash_constant (rtx k)
9ebbca7d 17942{
46b33600
RH
17943 enum rtx_code code = GET_CODE (k);
17944 enum machine_mode mode = GET_MODE (k);
17945 unsigned result = (code << 3) ^ mode;
17946 const char *format;
17947 int flen, fidx;
f676971a 17948
46b33600
RH
17949 format = GET_RTX_FORMAT (code);
17950 flen = strlen (format);
17951 fidx = 0;
9ebbca7d 17952
46b33600
RH
17953 switch (code)
17954 {
17955 case LABEL_REF:
17956 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17957
17958 case CONST_DOUBLE:
17959 if (mode != VOIDmode)
17960 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17961 flen = 2;
17962 break;
17963
17964 case CODE_LABEL:
17965 fidx = 3;
17966 break;
17967
17968 default:
17969 break;
17970 }
9ebbca7d
GK
17971
17972 for (; fidx < flen; fidx++)
17973 switch (format[fidx])
17974 {
17975 case 's':
17976 {
17977 unsigned i, len;
17978 const char *str = XSTR (k, fidx);
17979 len = strlen (str);
17980 result = result * 613 + len;
17981 for (i = 0; i < len; i++)
17982 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17983 break;
17984 }
9ebbca7d
GK
17985 case 'u':
17986 case 'e':
17987 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17988 break;
17989 case 'i':
17990 case 'n':
17991 result = result * 613 + (unsigned) XINT (k, fidx);
17992 break;
17993 case 'w':
17994 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17995 result = result * 613 + (unsigned) XWINT (k, fidx);
17996 else
17997 {
17998 size_t i;
9390387d 17999 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
18000 result = result * 613 + (unsigned) (XWINT (k, fidx)
18001 >> CHAR_BIT * i);
18002 }
18003 break;
09501938
DE
18004 case '0':
18005 break;
9ebbca7d 18006 default:
37409796 18007 gcc_unreachable ();
9ebbca7d 18008 }
46b33600 18009
9ebbca7d
GK
18010 return result;
18011}
18012
18013static unsigned
a2369ed3 18014toc_hash_function (const void *hash_entry)
9ebbca7d 18015{
f676971a 18016 const struct toc_hash_struct *thc =
a9098fd0
GK
18017 (const struct toc_hash_struct *) hash_entry;
18018 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18019}
18020
18021/* Compare H1 and H2 for equivalence. */
18022
18023static int
a2369ed3 18024toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18025{
18026 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18027 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18028
a9098fd0
GK
18029 if (((const struct toc_hash_struct *) h1)->key_mode
18030 != ((const struct toc_hash_struct *) h2)->key_mode)
18031 return 0;
18032
5692c7bc 18033 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18034}
18035
28e510bd
MM
18036/* These are the names given by the C++ front-end to vtables, and
18037 vtable-like objects. Ideally, this logic should not be here;
18038 instead, there should be some programmatic way of inquiring as
18039 to whether or not an object is a vtable. */
18040
18041#define VTABLE_NAME_P(NAME) \
9390387d 18042 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18043 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18044 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18045 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18046 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd 18047
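
/* Illustrative usage sketch (hypothetical helper, not used elsewhere):
   the macro keys purely off the mangled-name prefix, so an Itanium-ABI
   vtable symbol matches while an ordinary function symbol does not.
   Note that the expansion reads the local variable `name' regardless of
   the NAME argument, so a variable with that exact name must be in
   scope.  */

static int
example_vtable_name_check (void)
{
  const char *name = "_ZTV3Foo";               /* vtable for Foo */
  int vtable_like = VTABLE_NAME_P (name);      /* 1 */

  name = "_Z3barv";                            /* plain function bar () */
  return vtable_like && !VTABLE_NAME_P (name); /* 1 */
}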
ee06c6a5
DE
18048#ifdef NO_DOLLAR_IN_LABEL
18049/* Return a GGC-allocated character string translating dollar signs in
18050 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
18051
18052const char *
18053rs6000_xcoff_strip_dollar (const char *name)
18054{
18055 char *strip, *p;
18056 int len;
18057
18058 p = strchr (name, '$');
18059
18060 if (p == 0 || p == name)
18061 return name;
18062
18063 len = strlen (name);
18064 strip = (char *) alloca (len + 1);
18065 strcpy (strip, name);
18066 p = strchr (strip, '$');
18067 while (p)
18068 {
18069 *p = '_';
18070 p = strchr (p + 1, '$');
18071 }
18072
18073 return ggc_alloc_string (strip, len);
18074}
18075#endif
18076
28e510bd 18077void
a2369ed3 18078rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18079{
18080 /* Currently C++ toc references to vtables can be emitted before it
18081 is decided whether the vtable is public or private. If this is
18082 the case, then the linker will eventually complain that there is
f676971a 18083 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18084 we emit the TOC reference to reference the symbol and not the
18085 section. */
18086 const char *name = XSTR (x, 0);
54ee9799 18087
f676971a 18088 if (VTABLE_NAME_P (name))
54ee9799
DE
18089 {
18090 RS6000_OUTPUT_BASENAME (file, name);
18091 }
18092 else
18093 assemble_name (file, name);
28e510bd
MM
18094}
18095
a4f6c312
SS
18096/* Output a TOC entry. We derive the entry name from what is being
18097 written. */
9878760c
RK
18098
18099void
a2369ed3 18100output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18101{
18102 char buf[256];
3cce094d 18103 const char *name = buf;
9878760c 18104 rtx base = x;
16fdeb48 18105 HOST_WIDE_INT offset = 0;
9878760c 18106
37409796 18107 gcc_assert (!TARGET_NO_TOC);
4697a36c 18108
9ebbca7d
GK
18109 /* When the linker won't eliminate them, don't output duplicate
18110 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18111 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18112 CODE_LABELs. */
18113 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18114 {
18115 struct toc_hash_struct *h;
18116 void * * found;
f676971a 18117
17211ab5 18118 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18119 time because GGC is not initialized at that point. */
17211ab5 18120 if (toc_hash_table == NULL)
f676971a 18121 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18122 toc_hash_eq, NULL);
18123
5ead67f6 18124 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18125 h->key = x;
a9098fd0 18126 h->key_mode = mode;
9ebbca7d 18127 h->labelno = labelno;
f676971a 18128
bbbbb16a 18129 found = htab_find_slot (toc_hash_table, h, INSERT);
9ebbca7d
GK
18130 if (*found == NULL)
18131 *found = h;
f676971a 18132 else /* This is indeed a duplicate.
9ebbca7d
GK
18133 Set this label equal to that label. */
18134 {
18135 fputs ("\t.set ", file);
18136 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18137 fprintf (file, "%d,", labelno);
18138 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18139 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18140 found)->labelno));
18141 return;
18142 }
18143 }
18144
18145 /* If we're going to put a double constant in the TOC, make sure it's
18146 aligned properly when strict alignment is on. */
ff1720ed
RK
18147 if (GET_CODE (x) == CONST_DOUBLE
18148 && STRICT_ALIGNMENT
a9098fd0 18149 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18150 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18151 ASM_OUTPUT_ALIGN (file, 3);
18152 }
18153
4977bab6 18154 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18155
37c37a57
RK
18156 /* Handle FP constants specially. Note that if we have a minimal
18157 TOC, things we put here aren't actually in the TOC, so we can allow
18158 FP constants. */
00b79d54
BE
18159 if (GET_CODE (x) == CONST_DOUBLE &&
18160 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18161 {
18162 REAL_VALUE_TYPE rv;
18163 long k[4];
18164
18165 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18166 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18167 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18168 else
18169 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18170
18171 if (TARGET_64BIT)
18172 {
18173 if (TARGET_MINIMAL_TOC)
18174 fputs (DOUBLE_INT_ASM_OP, file);
18175 else
18176 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18177 k[0] & 0xffffffff, k[1] & 0xffffffff,
18178 k[2] & 0xffffffff, k[3] & 0xffffffff);
18179 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18180 k[0] & 0xffffffff, k[1] & 0xffffffff,
18181 k[2] & 0xffffffff, k[3] & 0xffffffff);
18182 return;
18183 }
18184 else
18185 {
18186 if (TARGET_MINIMAL_TOC)
18187 fputs ("\t.long ", file);
18188 else
18189 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18190 k[0] & 0xffffffff, k[1] & 0xffffffff,
18191 k[2] & 0xffffffff, k[3] & 0xffffffff);
18192 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18193 k[0] & 0xffffffff, k[1] & 0xffffffff,
18194 k[2] & 0xffffffff, k[3] & 0xffffffff);
18195 return;
18196 }
18197 }
00b79d54
BE
18198 else if (GET_CODE (x) == CONST_DOUBLE &&
18199 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18200 {
042259f2
DE
18201 REAL_VALUE_TYPE rv;
18202 long k[2];
0adc764e 18203
042259f2 18204 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18205
18206 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18207 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18208 else
18209 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18210
13ded975
DE
18211 if (TARGET_64BIT)
18212 {
18213 if (TARGET_MINIMAL_TOC)
2bfcf297 18214 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18215 else
2f0552b6
AM
18216 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18217 k[0] & 0xffffffff, k[1] & 0xffffffff);
18218 fprintf (file, "0x%lx%08lx\n",
18219 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18220 return;
18221 }
1875cc88 18222 else
13ded975
DE
18223 {
18224 if (TARGET_MINIMAL_TOC)
2bfcf297 18225 fputs ("\t.long ", file);
13ded975 18226 else
2f0552b6
AM
18227 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18228 k[0] & 0xffffffff, k[1] & 0xffffffff);
18229 fprintf (file, "0x%lx,0x%lx\n",
18230 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18231 return;
18232 }
9878760c 18233 }
00b79d54
BE
18234 else if (GET_CODE (x) == CONST_DOUBLE &&
18235 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18236 {
042259f2
DE
18237 REAL_VALUE_TYPE rv;
18238 long l;
9878760c 18239
042259f2 18240 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18241 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18242 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18243 else
18244 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18245
31bfaa0b
DE
18246 if (TARGET_64BIT)
18247 {
18248 if (TARGET_MINIMAL_TOC)
2bfcf297 18249 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18250 else
2f0552b6
AM
18251 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18252 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18253 return;
18254 }
042259f2 18255 else
31bfaa0b
DE
18256 {
18257 if (TARGET_MINIMAL_TOC)
2bfcf297 18258 fputs ("\t.long ", file);
31bfaa0b 18259 else
2f0552b6
AM
18260 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18261 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18262 return;
18263 }
042259f2 18264 }
f176e826 18265 else if (GET_MODE (x) == VOIDmode
a9098fd0 18266 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18267 {
e2c953b6 18268 unsigned HOST_WIDE_INT low;
042259f2
DE
18269 HOST_WIDE_INT high;
18270
18271 if (GET_CODE (x) == CONST_DOUBLE)
18272 {
18273 low = CONST_DOUBLE_LOW (x);
18274 high = CONST_DOUBLE_HIGH (x);
18275 }
18276 else
18277#if HOST_BITS_PER_WIDE_INT == 32
18278 {
18279 low = INTVAL (x);
0858c623 18280 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18281 }
18282#else
18283 {
c4ad648e
AM
18284 low = INTVAL (x) & 0xffffffff;
18285 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18286 }
18287#endif
9878760c 18288
a9098fd0
GK
18289 /* TOC entries are always Pmode-sized, but since this
18290 is a bigendian machine then if we're putting smaller
18291 integer constants in the TOC we have to pad them.
18292 (This is still a win over putting the constants in
18293 a separate constant pool, because then we'd have
02a4ec28
FS
18294 to have both a TOC entry _and_ the actual constant.)
18295
18296 For a 32-bit target, CONST_INT values are loaded and shifted
18297 entirely within `low' and can be stored in one TOC entry. */
18298
37409796
NS
18299 /* It would be easy to make this work, but it doesn't now. */
18300 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18301
18302 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18303 {
18304#if HOST_BITS_PER_WIDE_INT == 32
18305 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18306 POINTER_SIZE, &low, &high, 0);
18307#else
18308 low |= high << 32;
18309 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18310 high = (HOST_WIDE_INT) low >> 32;
18311 low &= 0xffffffff;
18312#endif
18313 }
a9098fd0 18314
13ded975
DE
18315 if (TARGET_64BIT)
18316 {
18317 if (TARGET_MINIMAL_TOC)
2bfcf297 18318 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18319 else
2f0552b6
AM
18320 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18321 (long) high & 0xffffffff, (long) low & 0xffffffff);
18322 fprintf (file, "0x%lx%08lx\n",
18323 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18324 return;
18325 }
1875cc88 18326 else
13ded975 18327 {
02a4ec28
FS
18328 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18329 {
18330 if (TARGET_MINIMAL_TOC)
2bfcf297 18331 fputs ("\t.long ", file);
02a4ec28 18332 else
2bfcf297 18333 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18334 (long) high & 0xffffffff, (long) low & 0xffffffff);
18335 fprintf (file, "0x%lx,0x%lx\n",
18336 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18337 }
13ded975 18338 else
02a4ec28
FS
18339 {
18340 if (TARGET_MINIMAL_TOC)
2bfcf297 18341 fputs ("\t.long ", file);
02a4ec28 18342 else
2f0552b6
AM
18343 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18344 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18345 }
13ded975
DE
18346 return;
18347 }
9878760c
RK
18348 }
18349
18350 if (GET_CODE (x) == CONST)
18351 {
37409796 18352 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18353
9878760c
RK
18354 base = XEXP (XEXP (x, 0), 0);
18355 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18356 }
f676971a 18357
37409796
NS
18358 switch (GET_CODE (base))
18359 {
18360 case SYMBOL_REF:
18361 name = XSTR (base, 0);
18362 break;
18363
18364 case LABEL_REF:
18365 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18366 CODE_LABEL_NUMBER (XEXP (base, 0)));
18367 break;
18368
18369 case CODE_LABEL:
18370 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18371 break;
18372
18373 default:
18374 gcc_unreachable ();
18375 }
9878760c 18376
1875cc88 18377 if (TARGET_MINIMAL_TOC)
2bfcf297 18378 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18379 else
18380 {
5773a50f
DE
18381 fputs ("\t.tc ", file);
18382 RS6000_OUTPUT_BASENAME (file, name);
9878760c 18383
1875cc88 18384 if (offset < 0)
16fdeb48 18385 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18386 else if (offset)
16fdeb48 18387 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18388
19d2d16f 18389 fputs ("[TC],", file);
1875cc88 18390 }
581bc4de
MM
18391
18392 /* Currently C++ toc references to vtables can be emitted before it
18393 is decided whether the vtable is public or private. If this is
18394 the case, then the linker will eventually complain that there is
18395 a TOC reference to an unknown section. Thus, for vtables only,
18396 we emit the TOC reference to reference the symbol and not the
18397 section. */
28e510bd 18398 if (VTABLE_NAME_P (name))
581bc4de 18399 {
54ee9799 18400 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18401 if (offset < 0)
16fdeb48 18402 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18403 else if (offset > 0)
16fdeb48 18404 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18405 }
18406 else
18407 output_addr_const (file, x);
19d2d16f 18408 putc ('\n', file);
9878760c
RK
18409}
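
/* Illustrative sketch (hypothetical helper, not used elsewhere): what the
   64-bit DFmode branch of output_toc above emits for the constant 1.0
   when a full TOC is in use.  k0/k1 are the high and low target words of
   the IEEE double 1.0 (0x3ff00000 and 0x00000000), as
   REAL_VALUE_TO_TARGET_DOUBLE would deliver them.  */

static void
example_toc_double_entry (FILE *file)
{
  long k0 = 0x3ff00000, k1 = 0x00000000;

  /* Prints:  .tc FD_3ff00000_0[TC],0x3ff0000000000000  */
  fprintf (file, "\t.tc FD_%lx_%lx[TC],", k0 & 0xffffffff, k1 & 0xffffffff);
  fprintf (file, "0x%lx%08lx\n", k0 & 0xffffffff, k1 & 0xffffffff);
}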
18410\f
18411/* Output an assembler pseudo-op to write an ASCII string of N characters
18412 starting at P to FILE.
18413
18414 On the RS/6000, we have to do this using the .byte operation and
18415 write out special characters outside the quoted string.
18416 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18417 so we must artificially break them up early. */
9878760c
RK
18418
18419void
a2369ed3 18420output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18421{
18422 char c;
18423 int i, count_string;
d330fd93
KG
18424 const char *for_string = "\t.byte \"";
18425 const char *for_decimal = "\t.byte ";
18426 const char *to_close = NULL;
9878760c
RK
18427
18428 count_string = 0;
18429 for (i = 0; i < n; i++)
18430 {
18431 c = *p++;
18432 if (c >= ' ' && c < 0177)
18433 {
18434 if (for_string)
18435 fputs (for_string, file);
18436 putc (c, file);
18437
18438 /* Write two quotes to get one. */
18439 if (c == '"')
18440 {
18441 putc (c, file);
18442 ++count_string;
18443 }
18444
18445 for_string = NULL;
18446 for_decimal = "\"\n\t.byte ";
18447 to_close = "\"\n";
18448 ++count_string;
18449
18450 if (count_string >= 512)
18451 {
18452 fputs (to_close, file);
18453
18454 for_string = "\t.byte \"";
18455 for_decimal = "\t.byte ";
18456 to_close = NULL;
18457 count_string = 0;
18458 }
18459 }
18460 else
18461 {
18462 if (for_decimal)
18463 fputs (for_decimal, file);
18464 fprintf (file, "%d", c);
18465
18466 for_string = "\n\t.byte \"";
18467 for_decimal = ", ";
18468 to_close = "\n";
18469 count_string = 0;
18470 }
18471 }
18472
18473 /* Now close the string if we have written one. Then end the line. */
18474 if (to_close)
9ebbca7d 18475 fputs (to_close, file);
9878760c
RK
18476}
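
/* Illustrative usage sketch (hypothetical helper, not used elsewhere):
   for the four bytes 'H', 'i', '\n', '\0', output_ascii above keeps the
   printable run inside a quoted .byte string and falls back to decimal
   for the rest, producing

	.byte "Hi"
	.byte 10, 0
*/

static void
example_output_ascii (FILE *file)
{
  output_ascii (file, "Hi\n", 4);  /* three characters plus the trailing NUL */
}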
18477\f
18478/* Generate a unique section name for FILENAME for a section type
18479 represented by SECTION_DESC. Output goes into BUF.
18480
18481 SECTION_DESC can be any string, as long as it is different for each
18482 possible section type.
18483
18484 We name the section in the same manner as xlc. The name begins with an
18485 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18486 names) with the last period replaced by the string SECTION_DESC. If
18487 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18488 the name. */
9878760c
RK
18489
18490void
f676971a 18491rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18492 const char *section_desc)
9878760c 18493{
9ebbca7d 18494 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18495 char *p;
18496 int len;
9878760c
RK
18497
18498 after_last_slash = filename;
18499 for (q = filename; *q; q++)
11e5fe42
RK
18500 {
18501 if (*q == '/')
18502 after_last_slash = q + 1;
18503 else if (*q == '.')
18504 last_period = q;
18505 }
9878760c 18506
11e5fe42 18507 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18508 *buf = (char *) xmalloc (len);
9878760c
RK
18509
18510 p = *buf;
18511 *p++ = '_';
18512
18513 for (q = after_last_slash; *q; q++)
18514 {
11e5fe42 18515 if (q == last_period)
c4ad648e 18516 {
9878760c
RK
18517 strcpy (p, section_desc);
18518 p += strlen (section_desc);
e3981aab 18519 break;
c4ad648e 18520 }
9878760c 18521
e9a780ec 18522 else if (ISALNUM (*q))
c4ad648e 18523 *p++ = *q;
9878760c
RK
18524 }
18525
11e5fe42 18526 if (last_period == 0)
9878760c
RK
18527 strcpy (p, section_desc);
18528 else
18529 *p = '\0';
18530}
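
/* Illustrative usage sketch (hypothetical helper, not used elsewhere):
   following the xlc convention described above, the filename "src/foo.c"
   with section descriptor "ro_" yields "_fooro_": the directory is
   stripped and the last period is replaced by the descriptor.  The
   buffer is xmalloc'd, so the caller must free it.  */

static void
example_gen_section_name (void)
{
  char *buf;

  rs6000_gen_section_name (&buf, "src/foo.c", "ro_");
  /* buf now holds "_fooro_".  */
  free (buf);
}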
e165f3f0 18531\f
a4f6c312 18532/* Emit profile function. */
411707f4 18533
411707f4 18534void
a2369ed3 18535output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18536{
858081ad
AH
18537 /* Non-standard profiling for kernels, which just saves LR then calls
18538 _mcount without worrying about arg saves. The idea is to change
18539 the function prologue as little as possible as it isn't easy to
18540 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18541 if (TARGET_PROFILE_KERNEL)
18542 return;
18543
8480e480
CC
18544 if (DEFAULT_ABI == ABI_AIX)
18545 {
9739c90c
JJ
18546#ifndef NO_PROFILE_COUNTERS
18547# define NO_PROFILE_COUNTERS 0
18548#endif
f676971a 18549 if (NO_PROFILE_COUNTERS)
bbbbb16a
ILT
18550 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18551 LCT_NORMAL, VOIDmode, 0);
9739c90c
JJ
18552 else
18553 {
18554 char buf[30];
18555 const char *label_name;
18556 rtx fun;
411707f4 18557
9739c90c
JJ
18558 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18559 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18560 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18561
bbbbb16a
ILT
18562 emit_library_call (init_one_libfunc (RS6000_MCOUNT),
18563 LCT_NORMAL, VOIDmode, 1, fun, Pmode);
9739c90c 18564 }
8480e480 18565 }
ee890fe2
SS
18566 else if (DEFAULT_ABI == ABI_DARWIN)
18567 {
d5fa86ba 18568 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18569 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18570
18571 /* Be conservative and always set this, at least for now. */
e3b5732b 18572 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18573
18574#if TARGET_MACHO
18575 /* For PIC code, set up a stub and collect the caller's address
18576 from r0, which is where the prologue puts it. */
11abc112 18577 if (MACHOPIC_INDIRECT
e3b5732b 18578 && crtl->uses_pic_offset_table)
11abc112 18579 caller_addr_regno = 0;
ee890fe2
SS
18580#endif
18581 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
bbbbb16a 18582 LCT_NORMAL, VOIDmode, 1,
ee890fe2
SS
18583 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18584 }
411707f4
CC
18585}
18586
a4f6c312 18587/* Write function profiler code. */
e165f3f0
RK
18588
18589void
a2369ed3 18590output_function_profiler (FILE *file, int labelno)
e165f3f0 18591{
3daf36a4 18592 char buf[100];
e165f3f0 18593
38c1f2d7 18594 switch (DEFAULT_ABI)
3daf36a4 18595 {
38c1f2d7 18596 default:
37409796 18597 gcc_unreachable ();
38c1f2d7
MM
18598
18599 case ABI_V4:
09eeeacb
AM
18600 if (!TARGET_32BIT)
18601 {
d4ee4d25 18602 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18603 return;
18604 }
ffcfcb5f 18605 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18606 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18607 if (NO_PROFILE_COUNTERS)
18608 {
18609 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18610 reg_names[0], reg_names[1]);
18611 }
18612 else if (TARGET_SECURE_PLT && flag_pic)
18613 {
18614 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18615 reg_names[0], reg_names[1]);
18616 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18617 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18618 reg_names[12], reg_names[12]);
18619 assemble_name (file, buf);
18620 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18621 assemble_name (file, buf);
18622 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18623 }
18624 else if (flag_pic == 1)
38c1f2d7 18625 {
dfdfa60f 18626 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18627 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18628 reg_names[0], reg_names[1]);
17167fd8 18629 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18630 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18631 assemble_name (file, buf);
17167fd8 18632 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18633 }
9ebbca7d 18634 else if (flag_pic > 1)
38c1f2d7 18635 {
71625f3d
AM
18636 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18637 reg_names[0], reg_names[1]);
9ebbca7d 18638 /* Now, we need to get the address of the label. */
71625f3d 18639 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18640 assemble_name (file, buf);
9ebbca7d
GK
18641 fputs ("-.\n1:", file);
18642 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18643 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18644 reg_names[0], reg_names[11]);
18645 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18646 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18647 }
38c1f2d7
MM
18648 else
18649 {
17167fd8 18650 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18651 assemble_name (file, buf);
dfdfa60f 18652 fputs ("@ha\n", file);
71625f3d
AM
18653 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18654 reg_names[0], reg_names[1]);
a260abc9 18655 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18656 assemble_name (file, buf);
17167fd8 18657 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18658 }
18659
50d440bc 18660 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18661 fprintf (file, "\tbl %s%s\n",
18662 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18663 break;
18664
18665 case ABI_AIX:
ee890fe2 18666 case ABI_DARWIN:
ffcfcb5f
AM
18667 if (!TARGET_PROFILE_KERNEL)
18668 {
a3c9585f 18669 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18670 }
18671 else
18672 {
37409796 18673 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18674
18675 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18676 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18677
6de9cd9a 18678 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18679 {
18680 asm_fprintf (file, "\tstd %s,24(%s)\n",
18681 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18682 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18683 asm_fprintf (file, "\tld %s,24(%s)\n",
18684 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18685 }
18686 else
18687 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18688 }
38c1f2d7
MM
18689 break;
18690 }
e165f3f0 18691}
a251ffd0 18692
b54cf83a 18693\f
44cd321e
PS
18694
18695/* The following variable holds the last issued insn. */
18696
18697static rtx last_scheduled_insn;
18698
18699/* The following variable helps to balance the issuing of load and
18700 store instructions. */
18701
18702static int load_store_pendulum;
18703
b54cf83a
DE
18704/* Power4 load update and store update instructions are cracked into a
18705 load or store and an integer insn which are executed in the same cycle.
18706 Branches have their own dispatch slot which does not count against the
18707 GCC issue rate, but it changes the program flow so there are no other
18708 instructions to issue in this cycle. */
18709
18710static int
f676971a
EC
18711rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18712 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18713 rtx insn, int more)
b54cf83a 18714{
44cd321e 18715 last_scheduled_insn = insn;
b54cf83a
DE
18716 if (GET_CODE (PATTERN (insn)) == USE
18717 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18718 {
18719 cached_can_issue_more = more;
18720 return cached_can_issue_more;
18721 }
18722
18723 if (insn_terminates_group_p (insn, current_group))
18724 {
18725 cached_can_issue_more = 0;
18726 return cached_can_issue_more;
18727 }
b54cf83a 18728
d296e02e
AP
18729 /* If the insn is not recognized it has no reservation; leave the remaining issue count unchanged. */
18730 if (recog_memoized (insn) < 0)
18731 return more;
18732
ec507f2d 18733 if (rs6000_sched_groups)
b54cf83a 18734 {
cbe26ab8 18735 if (is_microcoded_insn (insn))
44cd321e 18736 cached_can_issue_more = 0;
cbe26ab8 18737 else if (is_cracked_insn (insn))
44cd321e
PS
18738 cached_can_issue_more = more > 2 ? more - 2 : 0;
18739 else
18740 cached_can_issue_more = more - 1;
18741
18742 return cached_can_issue_more;
b54cf83a 18743 }
165b263e 18744
d296e02e
AP
18745 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18746 return 0;
18747
44cd321e
PS
18748 cached_can_issue_more = more - 1;
18749 return cached_can_issue_more;
b54cf83a
DE
18750}
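
/* Illustrative sketch (hypothetical helper, not used elsewhere): for the
   dispatch-group targets handled above, the remaining-slot accounting
   reduces to the following: a microcoded insn closes the group, a
   cracked insn consumes two issue slots, and any other insn consumes
   one.  */

static int
example_slots_left (int more, int microcoded, int cracked)
{
  if (microcoded)
    return 0;
  if (cracked)
    return more > 2 ? more - 2 : 0;
  return more - 1;
}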
18751
a251ffd0
TG
18752/* Adjust the cost of a scheduling dependency.  Return the new cost of
18753 the dependency LINK between INSN and DEP_INSN.  COST is the current cost. */
18754
c237e94a 18755static int
0a4f0294 18756rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18757{
44cd321e 18758 enum attr_type attr_type;
a251ffd0 18759
44cd321e 18760 if (! recog_memoized (insn))
a251ffd0
TG
18761 return 0;
18762
44cd321e 18763 switch (REG_NOTE_KIND (link))
a251ffd0 18764 {
44cd321e
PS
18765 case REG_DEP_TRUE:
18766 {
18767 /* Data dependency; DEP_INSN writes a register that INSN reads
18768 some cycles later. */
18769
18770 /* Separate a load from a narrower, dependent store. */
18771 if (rs6000_sched_groups
18772 && GET_CODE (PATTERN (insn)) == SET
18773 && GET_CODE (PATTERN (dep_insn)) == SET
18774 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18775 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18776 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18777 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18778 return cost + 14;
18779
18780 attr_type = get_attr_type (insn);
18781
18782 switch (attr_type)
18783 {
18784 case TYPE_JMPREG:
18785 /* Tell the first scheduling pass about the latency between
18786 a mtctr and bctr (and mtlr and br/blr). The first
18787 scheduling pass will not know about this latency since
18788 the mtctr instruction, which has the latency associated
18789 to it, will be generated by reload. */
18790 return TARGET_POWER ? 5 : 4;
18791 case TYPE_BRANCH:
18792 /* Leave some extra cycles between a compare and its
18793 dependent branch, to inhibit expensive mispredicts. */
18794 if ((rs6000_cpu_attr == CPU_PPC603
18795 || rs6000_cpu_attr == CPU_PPC604
18796 || rs6000_cpu_attr == CPU_PPC604E
18797 || rs6000_cpu_attr == CPU_PPC620
18798 || rs6000_cpu_attr == CPU_PPC630
18799 || rs6000_cpu_attr == CPU_PPC750
18800 || rs6000_cpu_attr == CPU_PPC7400
18801 || rs6000_cpu_attr == CPU_PPC7450
18802 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18803 || rs6000_cpu_attr == CPU_POWER5
18804 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18805 && recog_memoized (dep_insn)
18806 && (INSN_CODE (dep_insn) >= 0))
982afe02 18807
44cd321e
PS
18808 switch (get_attr_type (dep_insn))
18809 {
18810 case TYPE_CMP:
18811 case TYPE_COMPARE:
18812 case TYPE_DELAYED_COMPARE:
18813 case TYPE_IMUL_COMPARE:
18814 case TYPE_LMUL_COMPARE:
18815 case TYPE_FPCOMPARE:
18816 case TYPE_CR_LOGICAL:
18817 case TYPE_DELAYED_CR:
18818 return cost + 2;
18819 default:
18820 break;
18821 }
18822 break;
18823
18824 case TYPE_STORE:
18825 case TYPE_STORE_U:
18826 case TYPE_STORE_UX:
18827 case TYPE_FPSTORE:
18828 case TYPE_FPSTORE_U:
18829 case TYPE_FPSTORE_UX:
18830 if ((rs6000_cpu == PROCESSOR_POWER6)
18831 && recog_memoized (dep_insn)
18832 && (INSN_CODE (dep_insn) >= 0))
18833 {
18834
18835 if (GET_CODE (PATTERN (insn)) != SET)
18836 /* If this happens, we have to extend this to schedule
18837 optimally. Return default for now. */
18838 return cost;
18839
18840 /* Adjust the cost for the case where the value written
18841 by a fixed point operation is used as the address
18842 gen value on a store. */
18843 switch (get_attr_type (dep_insn))
18844 {
18845 case TYPE_LOAD:
18846 case TYPE_LOAD_U:
18847 case TYPE_LOAD_UX:
18848 case TYPE_CNTLZ:
18849 {
18850 if (! store_data_bypass_p (dep_insn, insn))
18851 return 4;
18852 break;
18853 }
18854 case TYPE_LOAD_EXT:
18855 case TYPE_LOAD_EXT_U:
18856 case TYPE_LOAD_EXT_UX:
18857 case TYPE_VAR_SHIFT_ROTATE:
18858 case TYPE_VAR_DELAYED_COMPARE:
18859 {
18860 if (! store_data_bypass_p (dep_insn, insn))
18861 return 6;
18862 break;
18863 }
18864 case TYPE_INTEGER:
18865 case TYPE_COMPARE:
18866 case TYPE_FAST_COMPARE:
18867 case TYPE_EXTS:
18868 case TYPE_SHIFT:
18869 case TYPE_INSERT_WORD:
18870 case TYPE_INSERT_DWORD:
18871 case TYPE_FPLOAD_U:
18872 case TYPE_FPLOAD_UX:
18873 case TYPE_STORE_U:
18874 case TYPE_STORE_UX:
18875 case TYPE_FPSTORE_U:
18876 case TYPE_FPSTORE_UX:
18877 {
18878 if (! store_data_bypass_p (dep_insn, insn))
18879 return 3;
18880 break;
18881 }
18882 case TYPE_IMUL:
18883 case TYPE_IMUL2:
18884 case TYPE_IMUL3:
18885 case TYPE_LMUL:
18886 case TYPE_IMUL_COMPARE:
18887 case TYPE_LMUL_COMPARE:
18888 {
18889 if (! store_data_bypass_p (dep_insn, insn))
18890 return 17;
18891 break;
18892 }
18893 case TYPE_IDIV:
18894 {
18895 if (! store_data_bypass_p (dep_insn, insn))
18896 return 45;
18897 break;
18898 }
18899 case TYPE_LDIV:
18900 {
18901 if (! store_data_bypass_p (dep_insn, insn))
18902 return 57;
18903 break;
18904 }
18905 default:
18906 break;
18907 }
18908 }
18909 break;
18910
18911 case TYPE_LOAD:
18912 case TYPE_LOAD_U:
18913 case TYPE_LOAD_UX:
18914 case TYPE_LOAD_EXT:
18915 case TYPE_LOAD_EXT_U:
18916 case TYPE_LOAD_EXT_UX:
18917 if ((rs6000_cpu == PROCESSOR_POWER6)
18918 && recog_memoized (dep_insn)
18919 && (INSN_CODE (dep_insn) >= 0))
18920 {
18921
18922 /* Adjust the cost for the case where the value written
18923 by a fixed point instruction is used within the address
18924 gen portion of a subsequent load(u)(x) */
18925 switch (get_attr_type (dep_insn))
18926 {
18927 case TYPE_LOAD:
18928 case TYPE_LOAD_U:
18929 case TYPE_LOAD_UX:
18930 case TYPE_CNTLZ:
18931 {
18932 if (set_to_load_agen (dep_insn, insn))
18933 return 4;
18934 break;
18935 }
18936 case TYPE_LOAD_EXT:
18937 case TYPE_LOAD_EXT_U:
18938 case TYPE_LOAD_EXT_UX:
18939 case TYPE_VAR_SHIFT_ROTATE:
18940 case TYPE_VAR_DELAYED_COMPARE:
18941 {
18942 if (set_to_load_agen (dep_insn, insn))
18943 return 6;
18944 break;
18945 }
18946 case TYPE_INTEGER:
18947 case TYPE_COMPARE:
18948 case TYPE_FAST_COMPARE:
18949 case TYPE_EXTS:
18950 case TYPE_SHIFT:
18951 case TYPE_INSERT_WORD:
18952 case TYPE_INSERT_DWORD:
18953 case TYPE_FPLOAD_U:
18954 case TYPE_FPLOAD_UX:
18955 case TYPE_STORE_U:
18956 case TYPE_STORE_UX:
18957 case TYPE_FPSTORE_U:
18958 case TYPE_FPSTORE_UX:
18959 {
18960 if (set_to_load_agen (dep_insn, insn))
18961 return 3;
18962 break;
18963 }
18964 case TYPE_IMUL:
18965 case TYPE_IMUL2:
18966 case TYPE_IMUL3:
18967 case TYPE_LMUL:
18968 case TYPE_IMUL_COMPARE:
18969 case TYPE_LMUL_COMPARE:
18970 {
18971 if (set_to_load_agen (dep_insn, insn))
18972 return 17;
18973 break;
18974 }
18975 case TYPE_IDIV:
18976 {
18977 if (set_to_load_agen (dep_insn, insn))
18978 return 45;
18979 break;
18980 }
18981 case TYPE_LDIV:
18982 {
18983 if (set_to_load_agen (dep_insn, insn))
18984 return 57;
18985 break;
18986 }
18987 default:
18988 break;
18989 }
18990 }
18991 break;
18992
18993 case TYPE_FPLOAD:
18994 if ((rs6000_cpu == PROCESSOR_POWER6)
18995 && recog_memoized (dep_insn)
18996 && (INSN_CODE (dep_insn) >= 0)
18997 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18998 return 2;
18999
19000 default:
19001 break;
19002 }
c9dbf840 19003
a251ffd0 19004 /* Fall out to return default cost. */
44cd321e
PS
19005 }
19006 break;
19007
19008 case REG_DEP_OUTPUT:
19009 /* Output dependency; DEP_INSN writes a register that INSN writes some
19010 cycles later. */
19011 if ((rs6000_cpu == PROCESSOR_POWER6)
19012 && recog_memoized (dep_insn)
19013 && (INSN_CODE (dep_insn) >= 0))
19014 {
19015 attr_type = get_attr_type (insn);
19016
19017 switch (attr_type)
19018 {
19019 case TYPE_FP:
19020 if (get_attr_type (dep_insn) == TYPE_FP)
19021 return 1;
19022 break;
19023 case TYPE_FPLOAD:
19024 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19025 return 2;
19026 break;
19027 default:
19028 break;
19029 }
19030 }
19031 case REG_DEP_ANTI:
19032 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19033 cycles later. */
19034 return 0;
19035
19036 default:
19037 gcc_unreachable ();
a251ffd0
TG
19038 }
19039
19040 return cost;
19041}
b6c9286a 19042
cbe26ab8 19043/* Return true if INSN is microcoded.
839a4992 19044 Return false otherwise. */
cbe26ab8
DN
19045
19046static bool
19047is_microcoded_insn (rtx insn)
19048{
19049 if (!insn || !INSN_P (insn)
19050 || GET_CODE (PATTERN (insn)) == USE
19051 || GET_CODE (PATTERN (insn)) == CLOBBER)
19052 return false;
19053
d296e02e
AP
19054 if (rs6000_cpu_attr == CPU_CELL)
19055 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19056
ec507f2d 19057 if (rs6000_sched_groups)
cbe26ab8
DN
19058 {
19059 enum attr_type type = get_attr_type (insn);
19060 if (type == TYPE_LOAD_EXT_U
19061 || type == TYPE_LOAD_EXT_UX
19062 || type == TYPE_LOAD_UX
19063 || type == TYPE_STORE_UX
19064 || type == TYPE_MFCR)
c4ad648e 19065 return true;
cbe26ab8
DN
19066 }
19067
19068 return false;
19069}
19070
cbe26ab8
DN
19071/* The function returns true if INSN is cracked into 2 instructions
19072 by the processor (and therefore occupies 2 issue slots). */
19073
19074static bool
19075is_cracked_insn (rtx insn)
19076{
19077 if (!insn || !INSN_P (insn)
19078 || GET_CODE (PATTERN (insn)) == USE
19079 || GET_CODE (PATTERN (insn)) == CLOBBER)
19080 return false;
19081
ec507f2d 19082 if (rs6000_sched_groups)
cbe26ab8
DN
19083 {
19084 enum attr_type type = get_attr_type (insn);
19085 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19086 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19087 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19088 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19089 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19090 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19091 || type == TYPE_IDIV || type == TYPE_LDIV
19092 || type == TYPE_INSERT_WORD)
19093 return true;
cbe26ab8
DN
19094 }
19095
19096 return false;
19097}
19098
19099/* The function returns true if INSN can be issued only from
a3c9585f 19100 the branch slot. */
cbe26ab8
DN
19101
19102static bool
19103is_branch_slot_insn (rtx insn)
19104{
19105 if (!insn || !INSN_P (insn)
19106 || GET_CODE (PATTERN (insn)) == USE
19107 || GET_CODE (PATTERN (insn)) == CLOBBER)
19108 return false;
19109
ec507f2d 19110 if (rs6000_sched_groups)
cbe26ab8
DN
19111 {
19112 enum attr_type type = get_attr_type (insn);
19113 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19114 return true;
cbe26ab8
DN
19115 return false;
19116 }
19117
19118 return false;
19119}
79ae11c4 19120
44cd321e
PS
19121/* Return true if OUT_INSN sets a value that is used in the
19122 address generation computation of IN_INSN. */
19123static bool
19124set_to_load_agen (rtx out_insn, rtx in_insn)
19125{
19126 rtx out_set, in_set;
19127
19128 /* For performance reasons, only handle the simple case where
19129 both loads are a single_set. */
19130 out_set = single_set (out_insn);
19131 if (out_set)
19132 {
19133 in_set = single_set (in_insn);
19134 if (in_set)
19135 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19136 }
19137
19138 return false;
19139}
19140
19141/* Return true if the target storage location of store INSN1 is
19142 adjacent to the target storage location of store INSN2, i.e. the
19143 two stores touch adjacent memory locations. */
19144
19145static bool
19146adjacent_mem_locations (rtx insn1, rtx insn2)
19147{
19148
e3a0e200
PB
19149 rtx a = get_store_dest (PATTERN (insn1));
19150 rtx b = get_store_dest (PATTERN (insn2));
19151
44cd321e
PS
19152 if ((GET_CODE (XEXP (a, 0)) == REG
19153 || (GET_CODE (XEXP (a, 0)) == PLUS
19154 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19155 && (GET_CODE (XEXP (b, 0)) == REG
19156 || (GET_CODE (XEXP (b, 0)) == PLUS
19157 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19158 {
f98e8938 19159 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19160 rtx reg0, reg1;
44cd321e
PS
19161
19162 if (GET_CODE (XEXP (a, 0)) == PLUS)
19163 {
19164 reg0 = XEXP (XEXP (a, 0), 0);
19165 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19166 }
19167 else
19168 reg0 = XEXP (a, 0);
19169
19170 if (GET_CODE (XEXP (b, 0)) == PLUS)
19171 {
19172 reg1 = XEXP (XEXP (b, 0), 0);
19173 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19174 }
19175 else
19176 reg1 = XEXP (b, 0);
19177
19178 val_diff = val1 - val0;
19179
19180 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19181 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19182 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19183 }
19184
19185 return false;
19186}
19187
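
/* Illustrative sketch (hypothetical helper, not used elsewhere): once the
   base registers have been checked for equality and the offsets and MEM
   sizes extracted, the adjacency test above is just this arithmetic: the
   second access must start exactly where the first one ends, in either
   order.  */

static int
example_offsets_adjacent (long off_a, long size_a, long off_b, long size_b)
{
  long diff = off_b - off_a;

  return diff == size_a || -diff == size_b;
}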
a4f6c312 19188/* Update the integer scheduling priority INSN_PRIORITY (INSN).
19189 Increase the priority to execute INSN earlier, reduce it to
19190 execute INSN later.  Do not define this hook if you do not need
19191 to adjust the scheduling priorities of insns. */
bef84347 19193
c237e94a 19194static int
a2369ed3 19195rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19196{
a4f6c312
SS
19197 /* On machines (like the 750) which have asymmetric integer units,
19198 where one integer unit can do multiply and divides and the other
19199 can't, reduce the priority of multiply/divide so it is scheduled
19200 before other integer operations. */
bef84347
VM
19201
19202#if 0
2c3c49de 19203 if (! INSN_P (insn))
bef84347
VM
19204 return priority;
19205
19206 if (GET_CODE (PATTERN (insn)) == USE)
19207 return priority;
19208
19209 switch (rs6000_cpu_attr) {
19210 case CPU_PPC750:
19211 switch (get_attr_type (insn))
19212 {
19213 default:
19214 break;
19215
19216 case TYPE_IMUL:
19217 case TYPE_IDIV:
3cb999d8
DE
19218 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19219 priority, priority);
bef84347
VM
19220 if (priority >= 0 && priority < 0x01000000)
19221 priority >>= 3;
19222 break;
19223 }
19224 }
19225#endif
19226
44cd321e 19227 if (insn_must_be_first_in_group (insn)
79ae11c4 19228 && reload_completed
f676971a 19229 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19230 && rs6000_sched_restricted_insns_priority)
19231 {
19232
c4ad648e
AM
19233 /* Prioritize insns that can be dispatched only in the first
19234 dispatch slot. */
79ae11c4 19235 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19236 /* Attach highest priority to insn. This means that in
19237 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19238 precede 'priority' (critical path) considerations. */
f676971a 19239 return current_sched_info->sched_max_insns_priority;
79ae11c4 19240 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19241 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19242 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19243 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19244 return (priority + 1);
19245 }
79ae11c4 19246
44cd321e
PS
19247 if (rs6000_cpu == PROCESSOR_POWER6
19248 && ((load_store_pendulum == -2 && is_load_insn (insn))
19249 || (load_store_pendulum == 2 && is_store_insn (insn))))
19250 /* Attach highest priority to insn if the scheduler has just issued two
19251 stores and this instruction is a load, or two loads and this instruction
19252 is a store. Power6 wants loads and stores scheduled alternately
19253 when possible */
19254 return current_sched_info->sched_max_insns_priority;
19255
bef84347
VM
19256 return priority;
19257}
19258
d296e02e
AP
19259/* Return true if the instruction is nonpipelined on the Cell. */
19260static bool
19261is_nonpipeline_insn (rtx insn)
19262{
19263 enum attr_type type;
19264 if (!insn || !INSN_P (insn)
19265 || GET_CODE (PATTERN (insn)) == USE
19266 || GET_CODE (PATTERN (insn)) == CLOBBER)
19267 return false;
19268
19269 type = get_attr_type (insn);
19270 if (type == TYPE_IMUL
19271 || type == TYPE_IMUL2
19272 || type == TYPE_IMUL3
19273 || type == TYPE_LMUL
19274 || type == TYPE_IDIV
19275 || type == TYPE_LDIV
19276 || type == TYPE_SDIV
19277 || type == TYPE_DDIV
19278 || type == TYPE_SSQRT
19279 || type == TYPE_DSQRT
19280 || type == TYPE_MFCR
19281 || type == TYPE_MFCRF
19282 || type == TYPE_MFJMPR)
19283 {
19284 return true;
19285 }
19286 return false;
19287}
19288
19289
a4f6c312
SS
19290/* Return how many instructions the machine can issue per cycle. */
19291
c237e94a 19292static int
863d938c 19293rs6000_issue_rate (void)
b6c9286a 19294{
3317bab1
DE
19295 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19296 if (!reload_completed)
19297 return 1;
19298
b6c9286a 19299 switch (rs6000_cpu_attr) {
3cb999d8
DE
19300 case CPU_RIOS1: /* ? */
19301 case CPU_RS64A:
19302 case CPU_PPC601: /* ? */
ed947a96 19303 case CPU_PPC7450:
3cb999d8 19304 return 3;
b54cf83a 19305 case CPU_PPC440:
b6c9286a 19306 case CPU_PPC603:
bef84347 19307 case CPU_PPC750:
ed947a96 19308 case CPU_PPC7400:
be12c2b0 19309 case CPU_PPC8540:
d296e02e 19310 case CPU_CELL:
fa41c305
EW
19311 case CPU_PPCE300C2:
19312 case CPU_PPCE300C3:
edae5fe3 19313 case CPU_PPCE500MC:
f676971a 19314 return 2;
3cb999d8 19315 case CPU_RIOS2:
b6c9286a 19316 case CPU_PPC604:
19684119 19317 case CPU_PPC604E:
b6c9286a 19318 case CPU_PPC620:
3cb999d8 19319 case CPU_PPC630:
b6c9286a 19320 return 4;
cbe26ab8 19321 case CPU_POWER4:
ec507f2d 19322 case CPU_POWER5:
44cd321e 19323 case CPU_POWER6:
cbe26ab8 19324 return 5;
b6c9286a
MM
19325 default:
19326 return 1;
19327 }
19328}
19329
be12c2b0
VM
19330/* Return how many instructions to look ahead for better insn
19331 scheduling. */
19332
19333static int
863d938c 19334rs6000_use_sched_lookahead (void)
be12c2b0
VM
19335{
19336 if (rs6000_cpu_attr == CPU_PPC8540)
19337 return 4;
d296e02e
AP
19338 if (rs6000_cpu_attr == CPU_CELL)
19339 return (reload_completed ? 8 : 0);
be12c2b0
VM
19340 return 0;
19341}
19342
d296e02e
AP
19343/* We are choosing an insn from the ready queue.  Return nonzero if INSN can be chosen. */
19344static int
19345rs6000_use_sched_lookahead_guard (rtx insn)
19346{
19347 if (rs6000_cpu_attr != CPU_CELL)
19348 return 1;
19349
19350 if (insn == NULL_RTX || !INSN_P (insn))
19351 abort ();
982afe02 19352
d296e02e
AP
19353 if (!reload_completed
19354 || is_nonpipeline_insn (insn)
19355 || is_microcoded_insn (insn))
19356 return 0;
19357
19358 return 1;
19359}
19360
569fa502
DN
19361/* Determine if PAT refers to memory. */
19362
19363static bool
19364is_mem_ref (rtx pat)
19365{
19366 const char * fmt;
19367 int i, j;
19368 bool ret = false;
19369
1de59bbd
DE
19370 /* stack_tie does not produce any real memory traffic. */
19371 if (GET_CODE (pat) == UNSPEC
19372 && XINT (pat, 1) == UNSPEC_TIE)
19373 return false;
19374
569fa502
DN
19375 if (GET_CODE (pat) == MEM)
19376 return true;
19377
19378 /* Recursively process the pattern. */
19379 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19380
19381 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19382 {
19383 if (fmt[i] == 'e')
19384 ret |= is_mem_ref (XEXP (pat, i));
19385 else if (fmt[i] == 'E')
19386 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19387 ret |= is_mem_ref (XVECEXP (pat, i, j));
19388 }
19389
19390 return ret;
19391}
19392
19393/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19394
569fa502
DN
19395static bool
19396is_load_insn1 (rtx pat)
19397{
19398 if (!pat || pat == NULL_RTX)
19399 return false;
19400
19401 if (GET_CODE (pat) == SET)
19402 return is_mem_ref (SET_SRC (pat));
19403
19404 if (GET_CODE (pat) == PARALLEL)
19405 {
19406 int i;
19407
19408 for (i = 0; i < XVECLEN (pat, 0); i++)
19409 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19410 return true;
19411 }
19412
19413 return false;
19414}
19415
19416/* Determine if INSN loads from memory. */
19417
19418static bool
19419is_load_insn (rtx insn)
19420{
19421 if (!insn || !INSN_P (insn))
19422 return false;
19423
19424 if (GET_CODE (insn) == CALL_INSN)
19425 return false;
19426
19427 return is_load_insn1 (PATTERN (insn));
19428}
19429
19430/* Determine if PAT is a PATTERN of a store insn. */
19431
19432static bool
19433is_store_insn1 (rtx pat)
19434{
19435 if (!pat || pat == NULL_RTX)
19436 return false;
19437
19438 if (GET_CODE (pat) == SET)
19439 return is_mem_ref (SET_DEST (pat));
19440
19441 if (GET_CODE (pat) == PARALLEL)
19442 {
19443 int i;
19444
19445 for (i = 0; i < XVECLEN (pat, 0); i++)
19446 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19447 return true;
19448 }
19449
19450 return false;
19451}
19452
19453/* Determine if INSN stores to memory. */
19454
19455static bool
19456is_store_insn (rtx insn)
19457{
19458 if (!insn || !INSN_P (insn))
19459 return false;
19460
19461 return is_store_insn1 (PATTERN (insn));
19462}
19463
e3a0e200
PB
19464/* Return the dest of a store insn. */
19465
19466static rtx
19467get_store_dest (rtx pat)
19468{
19469 gcc_assert (is_store_insn1 (pat));
19470
19471 if (GET_CODE (pat) == SET)
19472 return SET_DEST (pat);
19473 else if (GET_CODE (pat) == PARALLEL)
19474 {
19475 int i;
19476
19477 for (i = 0; i < XVECLEN (pat, 0); i++)
19478 {
19479 rtx inner_pat = XVECEXP (pat, 0, i);
19480 if (GET_CODE (inner_pat) == SET
19481 && is_mem_ref (SET_DEST (inner_pat)))
19482 return inner_pat;
19483 }
19484 }
19485 /* We shouldn't get here, because we should have either a simple
19486 store insn or a store with update which are covered above. */
19487 gcc_unreachable();
19488}
19489
569fa502
DN
19490/* Returns whether the dependence between INSN and NEXT is considered
19491 costly by the given target. */
19492
19493static bool
b198261f 19494rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19495{
b198261f
MK
19496 rtx insn;
19497 rtx next;
19498
aabcd309 19499 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19500 allow all dependent insns in the same group.
569fa502
DN
19501 This is the most aggressive option. */
19502 if (rs6000_sched_costly_dep == no_dep_costly)
19503 return false;
19504
f676971a 19505 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19506 do not allow dependent instructions in the same group.
19507 This is the most conservative option. */
19508 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19509 return true;
569fa502 19510
b198261f
MK
19511 insn = DEP_PRO (dep);
19512 next = DEP_CON (dep);
19513
f676971a
EC
19514 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19515 && is_load_insn (next)
569fa502
DN
19516 && is_store_insn (insn))
19517 /* Prevent load after store in the same group. */
19518 return true;
19519
19520 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19521 && is_load_insn (next)
569fa502 19522 && is_store_insn (insn)
e2f6ff94 19523 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19524 /* Prevent load after store in the same group if it is a true
19525 dependence. */
569fa502 19526 return true;
f676971a
EC
19527
19528 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19529 and will not be scheduled in the same group. */
19530 if (rs6000_sched_costly_dep <= max_dep_latency
19531 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19532 return true;
19533
19534 return false;
19535}
19536
f676971a 19537/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19538 skipping any "non-active" insns - insns that will not actually occupy
19539 an issue slot. Return NULL_RTX if such an insn is not found. */
19540
19541static rtx
19542get_next_active_insn (rtx insn, rtx tail)
19543{
f489aff8 19544 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19545 return NULL_RTX;
19546
f489aff8 19547 while (1)
cbe26ab8 19548 {
f489aff8
AM
19549 insn = NEXT_INSN (insn);
19550 if (insn == NULL_RTX || insn == tail)
19551 return NULL_RTX;
cbe26ab8 19552
f489aff8
AM
19553 if (CALL_P (insn)
19554 || JUMP_P (insn)
19555 || (NONJUMP_INSN_P (insn)
19556 && GET_CODE (PATTERN (insn)) != USE
19557 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19558 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19559 break;
19560 }
19561 return insn;
cbe26ab8
DN
19562}
19563
44cd321e
PS
19564/* We are about to begin issuing insns for this clock cycle. */
19565
19566static int
19567rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19568 rtx *ready ATTRIBUTE_UNUSED,
19569 int *pn_ready ATTRIBUTE_UNUSED,
19570 int clock_var ATTRIBUTE_UNUSED)
19571{
d296e02e
AP
19572 int n_ready = *pn_ready;
19573
44cd321e
PS
19574 if (sched_verbose)
19575 fprintf (dump, "// rs6000_sched_reorder :\n");
19576
d296e02e
AP
19577 /* Reorder the ready list if the second-to-last ready insn
19578 is a non-pipelined insn. */
19579 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19580 {
19581 if (is_nonpipeline_insn (ready[n_ready - 1])
19582 && (recog_memoized (ready[n_ready - 2]) > 0))
19583 /* Simply swap first two insns. */
19584 {
19585 rtx tmp = ready[n_ready - 1];
19586 ready[n_ready - 1] = ready[n_ready - 2];
19587 ready[n_ready - 2] = tmp;
19588 }
19589 }
19590
44cd321e
PS
19591 if (rs6000_cpu == PROCESSOR_POWER6)
19592 load_store_pendulum = 0;
19593
19594 return rs6000_issue_rate ();
19595}
19596
19597/* Like rs6000_sched_reorder, but called after issuing each insn. */
19598
19599static int
19600rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19601 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19602{
19603 if (sched_verbose)
19604 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19605
19606 /* For Power6, we need to handle some special cases to try and keep the
19607 store queue from overflowing and triggering expensive flushes.
19608
19609 This code monitors how load and store instructions are being issued
19610 and skews the ready list one way or the other to increase the likelihood
19611 that a desired instruction is issued at the proper time.
19612
19613 A couple of things are done. First, we maintain a "load_store_pendulum"
19614 to track the current state of load/store issue.
19615
19616 - If the pendulum is at zero, then no loads or stores have been
19617 issued in the current cycle so we do nothing.
19618
19619 - If the pendulum is 1, then a single load has been issued in this
19620 cycle and we attempt to locate another load in the ready list to
19621 issue with it.
19622
2f8e468b 19623 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19624 issued in this cycle, so we increase the priority of the first load
19625 in the ready list to increase its likelihood of being chosen first
19626 in the next cycle.
19627
19628 - If the pendulum is -1, then a single store has been issued in this
19629 cycle and we attempt to locate another store in the ready list to
19630 issue with it, preferring a store to an adjacent memory location to
19631 facilitate store pairing in the store queue.
19632
19633 - If the pendulum is 2, then two loads have already been
19634 issued in this cycle, so we increase the priority of the first store
19635 in the ready list to increase its likelihood of being chosen first
19636 in the next cycle.
19637
19638 - If the pendulum < -2 or > 2, then do nothing.
19639
19640 Note: This code covers the most common scenarios. There exist non
19641 load/store instructions which make use of the LSU and which
19642 would need to be accounted for to strictly model the behavior
19643 of the machine. Those instructions are currently unaccounted
19644 for to help minimize compile time overhead of this code.
19645 */
19646 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19647 {
19648 int pos;
19649 int i;
19650 rtx tmp;
19651
19652 if (is_store_insn (last_scheduled_insn))
19653 /* Issuing a store, swing the load_store_pendulum to the left */
19654 load_store_pendulum--;
19655 else if (is_load_insn (last_scheduled_insn))
19656 /* Issuing a load, swing the load_store_pendulum to the right */
19657 load_store_pendulum++;
19658 else
19659 return cached_can_issue_more;
19660
19661 /* If the pendulum is balanced, or there is only one instruction on
19662 the ready list, then all is well, so return. */
19663 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19664 return cached_can_issue_more;
19665
19666 if (load_store_pendulum == 1)
19667 {
19668 /* A load has been issued in this cycle. Scan the ready list
19669 for another load to issue with it */
19670 pos = *pn_ready-1;
19671
19672 while (pos >= 0)
19673 {
19674 if (is_load_insn (ready[pos]))
19675 {
19676 /* Found a load. Move it to the head of the ready list,
19677 and adjust its priority so that it is more likely to
19678 stay there. */
19679 tmp = ready[pos];
19680 for (i=pos; i<*pn_ready-1; i++)
19681 ready[i] = ready[i + 1];
19682 ready[*pn_ready-1] = tmp;
e855c69d
AB
19683
19684 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19685 INSN_PRIORITY (tmp)++;
19686 break;
19687 }
19688 pos--;
19689 }
19690 }
19691 else if (load_store_pendulum == -2)
19692 {
19693 /* Two stores have been issued in this cycle. Increase the
19694 priority of the first load in the ready list to favor it for
19695 issuing in the next cycle. */
19696 pos = *pn_ready-1;
19697
19698 while (pos >= 0)
19699 {
19700 if (is_load_insn (ready[pos])
e855c69d
AB
19701 && !sel_sched_p ()
19702 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19703 {
19704 INSN_PRIORITY (ready[pos])++;
19705
19706 /* Adjust the pendulum to account for the fact that a load
19707 was found and increased in priority. This is to prevent
19708 increasing the priority of multiple loads */
19709 load_store_pendulum--;
19710
19711 break;
19712 }
19713 pos--;
19714 }
19715 }
19716 else if (load_store_pendulum == -1)
19717 {
19718 /* A store has been issued in this cycle. Scan the ready list for
19719 another store to issue with it, preferring a store to an adjacent
19720 memory location */
19721 int first_store_pos = -1;
19722
19723 pos = *pn_ready-1;
19724
19725 while (pos >= 0)
19726 {
19727 if (is_store_insn (ready[pos]))
19728 {
19729 /* Maintain the index of the first store found on the
19730 list */
19731 if (first_store_pos == -1)
19732 first_store_pos = pos;
19733
19734 if (is_store_insn (last_scheduled_insn)
19735 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19736 {
19737 /* Found an adjacent store. Move it to the head of the
 19738 ready list, and adjust its priority so that it is
19739 more likely to stay there */
19740 tmp = ready[pos];
19741 for (i=pos; i<*pn_ready-1; i++)
19742 ready[i] = ready[i + 1];
19743 ready[*pn_ready-1] = tmp;
e855c69d
AB
19744
19745 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19746 INSN_PRIORITY (tmp)++;
e855c69d 19747
44cd321e
PS
19748 first_store_pos = -1;
19749
19750 break;
19751 };
19752 }
19753 pos--;
19754 }
19755
19756 if (first_store_pos >= 0)
19757 {
19758 /* An adjacent store wasn't found, but a non-adjacent store was,
19759 so move the non-adjacent store to the front of the ready
19760 list, and adjust its priority so that it is more likely to
19761 stay there. */
19762 tmp = ready[first_store_pos];
19763 for (i=first_store_pos; i<*pn_ready-1; i++)
19764 ready[i] = ready[i + 1];
19765 ready[*pn_ready-1] = tmp;
e855c69d 19766 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19767 INSN_PRIORITY (tmp)++;
19768 }
19769 }
19770 else if (load_store_pendulum == 2)
19771 {
19772 /* Two loads have been issued in this cycle. Increase the priority
19773 of the first store in the ready list to favor it for issuing in
19774 the next cycle. */
19775 pos = *pn_ready-1;
19776
19777 while (pos >= 0)
19778 {
19779 if (is_store_insn (ready[pos])
e855c69d
AB
19780 && !sel_sched_p ()
19781 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19782 {
19783 INSN_PRIORITY (ready[pos])++;
19784
19785 /* Adjust the pendulum to account for the fact that a store
19786 was found and increased in priority. This is to prevent
19787 increasing the priority of multiple stores */
19788 load_store_pendulum++;
19789
19790 break;
19791 }
19792 pos--;
19793 }
19794 }
19795 }
19796
19797 return cached_can_issue_more;
19798}
19799
839a4992 19800/* Return whether the presence of INSN causes a dispatch group termination
19801 of group WHICH_GROUP.
19802
19803 If WHICH_GROUP == current_group, this function will return true if INSN
19804 causes the termination of the current group (i.e, the dispatch group to
19805 which INSN belongs). This means that INSN will be the last insn in the
19806 group it belongs to.
19807
19808 If WHICH_GROUP == previous_group, this function will return true if INSN
19809 causes the termination of the previous group (i.e, the dispatch group that
19810 precedes the group to which INSN belongs). This means that INSN will be
 19811 the first insn in the group it belongs to. */
19812
19813static bool
19814insn_terminates_group_p (rtx insn, enum group_termination which_group)
19815{
44cd321e 19816 bool first, last;
cbe26ab8
DN
19817
19818 if (! insn)
19819 return false;
569fa502 19820
44cd321e
PS
19821 first = insn_must_be_first_in_group (insn);
19822 last = insn_must_be_last_in_group (insn);
cbe26ab8 19823
44cd321e 19824 if (first && last)
cbe26ab8
DN
19825 return true;
19826
19827 if (which_group == current_group)
44cd321e 19828 return last;
cbe26ab8 19829 else if (which_group == previous_group)
44cd321e
PS
19830 return first;
19831
19832 return false;
19833}
19834
19835
19836static bool
19837insn_must_be_first_in_group (rtx insn)
19838{
19839 enum attr_type type;
19840
19841 if (!insn
19842 || insn == NULL_RTX
19843 || GET_CODE (insn) == NOTE
19844 || GET_CODE (PATTERN (insn)) == USE
19845 || GET_CODE (PATTERN (insn)) == CLOBBER)
19846 return false;
19847
19848 switch (rs6000_cpu)
cbe26ab8 19849 {
44cd321e
PS
19850 case PROCESSOR_POWER5:
19851 if (is_cracked_insn (insn))
19852 return true;
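      /* Fall through: POWER5 insns are also subject to the POWER4 checks below.  */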
19853 case PROCESSOR_POWER4:
19854 if (is_microcoded_insn (insn))
19855 return true;
19856
19857 if (!rs6000_sched_groups)
19858 return false;
19859
19860 type = get_attr_type (insn);
19861
19862 switch (type)
19863 {
19864 case TYPE_MFCR:
19865 case TYPE_MFCRF:
19866 case TYPE_MTCR:
19867 case TYPE_DELAYED_CR:
19868 case TYPE_CR_LOGICAL:
19869 case TYPE_MTJMPR:
19870 case TYPE_MFJMPR:
19871 case TYPE_IDIV:
19872 case TYPE_LDIV:
19873 case TYPE_LOAD_L:
19874 case TYPE_STORE_C:
19875 case TYPE_ISYNC:
19876 case TYPE_SYNC:
19877 return true;
19878 default:
19879 break;
19880 }
19881 break;
19882 case PROCESSOR_POWER6:
19883 type = get_attr_type (insn);
19884
19885 switch (type)
19886 {
19887 case TYPE_INSERT_DWORD:
19888 case TYPE_EXTS:
19889 case TYPE_CNTLZ:
19890 case TYPE_SHIFT:
19891 case TYPE_VAR_SHIFT_ROTATE:
19892 case TYPE_TRAP:
19893 case TYPE_IMUL:
19894 case TYPE_IMUL2:
19895 case TYPE_IMUL3:
19896 case TYPE_LMUL:
19897 case TYPE_IDIV:
19898 case TYPE_INSERT_WORD:
19899 case TYPE_DELAYED_COMPARE:
19900 case TYPE_IMUL_COMPARE:
19901 case TYPE_LMUL_COMPARE:
19902 case TYPE_FPCOMPARE:
19903 case TYPE_MFCR:
19904 case TYPE_MTCR:
19905 case TYPE_MFJMPR:
19906 case TYPE_MTJMPR:
19907 case TYPE_ISYNC:
19908 case TYPE_SYNC:
19909 case TYPE_LOAD_L:
19910 case TYPE_STORE_C:
19911 case TYPE_LOAD_U:
19912 case TYPE_LOAD_UX:
19913 case TYPE_LOAD_EXT_UX:
19914 case TYPE_STORE_U:
19915 case TYPE_STORE_UX:
19916 case TYPE_FPLOAD_U:
19917 case TYPE_FPLOAD_UX:
19918 case TYPE_FPSTORE_U:
19919 case TYPE_FPSTORE_UX:
19920 return true;
19921 default:
19922 break;
19923 }
19924 break;
19925 default:
19926 break;
19927 }
19928
19929 return false;
19930}
19931
19932static bool
19933insn_must_be_last_in_group (rtx insn)
19934{
19935 enum attr_type type;
19936
19937 if (!insn
19938 || insn == NULL_RTX
19939 || GET_CODE (insn) == NOTE
19940 || GET_CODE (PATTERN (insn)) == USE
19941 || GET_CODE (PATTERN (insn)) == CLOBBER)
19942 return false;
19943
19944 switch (rs6000_cpu) {
19945 case PROCESSOR_POWER4:
19946 case PROCESSOR_POWER5:
19947 if (is_microcoded_insn (insn))
19948 return true;
19949
19950 if (is_branch_slot_insn (insn))
19951 return true;
19952
19953 break;
19954 case PROCESSOR_POWER6:
19955 type = get_attr_type (insn);
19956
19957 switch (type)
19958 {
19959 case TYPE_EXTS:
19960 case TYPE_CNTLZ:
19961 case TYPE_SHIFT:
19962 case TYPE_VAR_SHIFT_ROTATE:
19963 case TYPE_TRAP:
19964 case TYPE_IMUL:
19965 case TYPE_IMUL2:
19966 case TYPE_IMUL3:
19967 case TYPE_LMUL:
19968 case TYPE_IDIV:
19969 case TYPE_DELAYED_COMPARE:
19970 case TYPE_IMUL_COMPARE:
19971 case TYPE_LMUL_COMPARE:
19972 case TYPE_FPCOMPARE:
19973 case TYPE_MFCR:
19974 case TYPE_MTCR:
19975 case TYPE_MFJMPR:
19976 case TYPE_MTJMPR:
19977 case TYPE_ISYNC:
19978 case TYPE_SYNC:
19979 case TYPE_LOAD_L:
19980 case TYPE_STORE_C:
19981 return true;
19982 default:
19983 break;
cbe26ab8 19984 }
44cd321e
PS
19985 break;
19986 default:
19987 break;
19988 }
cbe26ab8
DN
19989
19990 return false;
19991}
19992
839a4992 19993/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
19994 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19995
19996static bool
19997is_costly_group (rtx *group_insns, rtx next_insn)
19998{
19999 int i;
cbe26ab8
DN
20000 int issue_rate = rs6000_issue_rate ();
20001
20002 for (i = 0; i < issue_rate; i++)
20003 {
e2f6ff94
MK
20004 sd_iterator_def sd_it;
20005 dep_t dep;
cbe26ab8 20006 rtx insn = group_insns[i];
b198261f 20007
cbe26ab8 20008 if (!insn)
c4ad648e 20009 continue;
b198261f 20010
e2f6ff94 20011 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 20012 {
b198261f
MK
20013 rtx next = DEP_CON (dep);
20014
20015 if (next == next_insn
20016 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20017 return true;
c4ad648e 20018 }
cbe26ab8
DN
20019 }
20020
20021 return false;
20022}
20023
f676971a 20024/* Utility of the function redefine_groups.
20025 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20026 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20027 to keep it "far" (in a separate group) from GROUP_INSNS, following
20028 one of the following schemes, depending on the value of the flag
 20029 -minsert-sched-nops=X:
20030 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 20031 in order to force NEXT_INSN into a separate group.
20032 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20033 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
20034 insertion (has a group just ended, how many vacant issue slots remain in the
20035 last group, and how many dispatch groups were encountered so far). */
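/* For example (illustrative only): -minsert-sched-nops=2 selects scheme (2)
   above and emits exactly two nops before NEXT_INSN, whereas
   -minsert-sched-nops=regroup_exact selects scheme (1) and emits however many
   nops are needed to push NEXT_INSN into a new dispatch group.  */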
20036
f676971a 20037static int
c4ad648e
AM
20038force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20039 rtx next_insn, bool *group_end, int can_issue_more,
20040 int *group_count)
cbe26ab8
DN
20041{
20042 rtx nop;
20043 bool force;
20044 int issue_rate = rs6000_issue_rate ();
20045 bool end = *group_end;
20046 int i;
20047
20048 if (next_insn == NULL_RTX)
20049 return can_issue_more;
20050
20051 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20052 return can_issue_more;
20053
20054 force = is_costly_group (group_insns, next_insn);
20055 if (!force)
20056 return can_issue_more;
20057
20058 if (sched_verbose > 6)
20059 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 20060 *group_count ,can_issue_more);
cbe26ab8
DN
20061
20062 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20063 {
20064 if (*group_end)
c4ad648e 20065 can_issue_more = 0;
cbe26ab8
DN
20066
20067 /* Since only a branch can be issued in the last issue_slot, it is
20068 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20069 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20070 in this case the last nop will start a new group and the branch
20071 will be forced to the new group. */
cbe26ab8 20072 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20073 can_issue_more--;
cbe26ab8
DN
20074
20075 while (can_issue_more > 0)
c4ad648e 20076 {
9390387d 20077 nop = gen_nop ();
c4ad648e
AM
20078 emit_insn_before (nop, next_insn);
20079 can_issue_more--;
20080 }
cbe26ab8
DN
20081
20082 *group_end = true;
20083 return 0;
f676971a 20084 }
cbe26ab8
DN
20085
20086 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20087 {
20088 int n_nops = rs6000_sched_insert_nops;
20089
f676971a 20090 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20091 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20092 if (can_issue_more == 0)
c4ad648e 20093 can_issue_more = issue_rate;
cbe26ab8
DN
20094 can_issue_more--;
20095 if (can_issue_more == 0)
c4ad648e
AM
20096 {
20097 can_issue_more = issue_rate - 1;
20098 (*group_count)++;
20099 end = true;
20100 for (i = 0; i < issue_rate; i++)
20101 {
20102 group_insns[i] = 0;
20103 }
20104 }
cbe26ab8
DN
20105
20106 while (n_nops > 0)
c4ad648e
AM
20107 {
20108 nop = gen_nop ();
20109 emit_insn_before (nop, next_insn);
20110 if (can_issue_more == issue_rate - 1) /* new group begins */
20111 end = false;
20112 can_issue_more--;
20113 if (can_issue_more == 0)
20114 {
20115 can_issue_more = issue_rate - 1;
20116 (*group_count)++;
20117 end = true;
20118 for (i = 0; i < issue_rate; i++)
20119 {
20120 group_insns[i] = 0;
20121 }
20122 }
20123 n_nops--;
20124 }
cbe26ab8
DN
20125
20126 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20127 can_issue_more++;
cbe26ab8 20128
c4ad648e
AM
20129 /* Is next_insn going to start a new group? */
20130 *group_end
20131 = (end
cbe26ab8
DN
20132 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20133 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20134 || (can_issue_more < issue_rate &&
c4ad648e 20135 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20136 if (*group_end && end)
c4ad648e 20137 (*group_count)--;
cbe26ab8
DN
20138
20139 if (sched_verbose > 6)
c4ad648e
AM
20140 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20141 *group_count, can_issue_more);
f676971a
EC
20142 return can_issue_more;
20143 }
cbe26ab8
DN
20144
20145 return can_issue_more;
20146}
20147
20148/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20149 with the dispatch groups that the processor dispatcher is expected to
20150 form in practice. It tries to achieve this synchronization by forcing the
20151 estimated processor grouping on the compiler (as opposed to the function
 20152 'pad_groups' which tries to force the scheduler's grouping on the processor).
20153
20154 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20155 examines the (estimated) dispatch groups that will be formed by the processor
20156 dispatcher. It marks these group boundaries to reflect the estimated
20157 processor grouping, overriding the grouping that the scheduler had marked.
20158 Depending on the value of the flag '-minsert-sched-nops' this function can
20159 force certain insns into separate groups or force a certain distance between
20160 them by inserting nops, for example, if there exists a "costly dependence"
20161 between the insns.
20162
20163 The function estimates the group boundaries that the processor will form as
0fa2e4df 20164 follows: It keeps track of how many vacant issue slots are available after
20165 each insn. A subsequent insn will start a new group if one of the following
20166 4 cases applies:
20167 - no more vacant issue slots remain in the current dispatch group.
20168 - only the last issue slot, which is the branch slot, is vacant, but the next
20169 insn is not a branch.
 20170 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
 20171 which means that a cracked insn (which occupies two issue slots) can't be
 20172 issued in this group.
f676971a 20173 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
 20174 start a new group. */
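/* Worked example (illustrative, not from the original sources): assume an
   issue_rate of 4 and that three insns have already been placed in the
   current group, leaving one vacant slot.  If the next insn is cracked and
   therefore needs two slots, the third case above applies and the insn is
   treated as starting a new group.  */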
20175
20176static int
20177redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20178{
20179 rtx insn, next_insn;
20180 int issue_rate;
20181 int can_issue_more;
20182 int slot, i;
20183 bool group_end;
20184 int group_count = 0;
20185 rtx *group_insns;
20186
20187 /* Initialize. */
20188 issue_rate = rs6000_issue_rate ();
5ead67f6 20189 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20190 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20191 {
20192 group_insns[i] = 0;
20193 }
20194 can_issue_more = issue_rate;
20195 slot = 0;
20196 insn = get_next_active_insn (prev_head_insn, tail);
20197 group_end = false;
20198
20199 while (insn != NULL_RTX)
20200 {
20201 slot = (issue_rate - can_issue_more);
20202 group_insns[slot] = insn;
20203 can_issue_more =
c4ad648e 20204 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20205 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20206 can_issue_more = 0;
cbe26ab8
DN
20207
20208 next_insn = get_next_active_insn (insn, tail);
20209 if (next_insn == NULL_RTX)
c4ad648e 20210 return group_count + 1;
cbe26ab8 20211
c4ad648e
AM
20212 /* Is next_insn going to start a new group? */
20213 group_end
20214 = (can_issue_more == 0
20215 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20216 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20217 || (can_issue_more < issue_rate &&
20218 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20219
f676971a 20220 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20221 next_insn, &group_end, can_issue_more,
20222 &group_count);
cbe26ab8
DN
20223
20224 if (group_end)
c4ad648e
AM
20225 {
20226 group_count++;
20227 can_issue_more = 0;
20228 for (i = 0; i < issue_rate; i++)
20229 {
20230 group_insns[i] = 0;
20231 }
20232 }
cbe26ab8
DN
20233
20234 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20235 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20236 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20237 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20238
20239 insn = next_insn;
20240 if (can_issue_more == 0)
c4ad648e
AM
20241 can_issue_more = issue_rate;
20242 } /* while */
cbe26ab8
DN
20243
20244 return group_count;
20245}
20246
20247/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20248 dispatch group boundaries that the scheduler had marked. Pad with nops
20249 any dispatch groups which have vacant issue slots, in order to force the
20250 scheduler's grouping on the processor dispatcher. The function
20251 returns the number of dispatch groups found. */
20252
20253static int
20254pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20255{
20256 rtx insn, next_insn;
20257 rtx nop;
20258 int issue_rate;
20259 int can_issue_more;
20260 int group_end;
20261 int group_count = 0;
20262
20263 /* Initialize issue_rate. */
20264 issue_rate = rs6000_issue_rate ();
20265 can_issue_more = issue_rate;
20266
20267 insn = get_next_active_insn (prev_head_insn, tail);
20268 next_insn = get_next_active_insn (insn, tail);
20269
20270 while (insn != NULL_RTX)
20271 {
20272 can_issue_more =
20273 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20274
20275 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20276
20277 if (next_insn == NULL_RTX)
c4ad648e 20278 break;
cbe26ab8
DN
20279
20280 if (group_end)
c4ad648e
AM
20281 {
20282 /* If the scheduler had marked group termination at this location
e855c69d 20283 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20284 force group termination, pad the group with nops to force group
20285 termination. */
20286 if (can_issue_more
20287 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20288 && !insn_terminates_group_p (insn, current_group)
20289 && !insn_terminates_group_p (next_insn, previous_group))
20290 {
9390387d 20291 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20292 can_issue_more--;
20293
20294 while (can_issue_more)
20295 {
20296 nop = gen_nop ();
20297 emit_insn_before (nop, next_insn);
20298 can_issue_more--;
20299 }
20300 }
20301
20302 can_issue_more = issue_rate;
20303 group_count++;
20304 }
cbe26ab8
DN
20305
20306 insn = next_insn;
20307 next_insn = get_next_active_insn (insn, tail);
20308 }
20309
20310 return group_count;
20311}
20312
44cd321e
PS
20313/* We're beginning a new block. Initialize data structures as necessary. */
20314
20315static void
20316rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20317 int sched_verbose ATTRIBUTE_UNUSED,
20318 int max_ready ATTRIBUTE_UNUSED)
982afe02 20319{
44cd321e
PS
20320 last_scheduled_insn = NULL_RTX;
20321 load_store_pendulum = 0;
20322}
20323
cbe26ab8
DN
20324/* The following function is called at the end of scheduling BB.
 20325 After reload, it inserts nops to enforce the insn group bundling. */
20326
20327static void
38f391a5 20328rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20329{
20330 int n_groups;
20331
20332 if (sched_verbose)
20333 fprintf (dump, "=== Finishing schedule.\n");
20334
ec507f2d 20335 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20336 {
e855c69d
AB
 20337 /* Do not run the sched_finish hook when selective scheduling is enabled. */
20338 if (sel_sched_p ())
20339 return;
20340
cbe26ab8 20341 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20342 return;
cbe26ab8
DN
20343
20344 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20345 n_groups = pad_groups (dump, sched_verbose,
20346 current_sched_info->prev_head,
20347 current_sched_info->next_tail);
cbe26ab8 20348 else
c4ad648e
AM
20349 n_groups = redefine_groups (dump, sched_verbose,
20350 current_sched_info->prev_head,
20351 current_sched_info->next_tail);
cbe26ab8
DN
20352
20353 if (sched_verbose >= 6)
20354 {
20355 fprintf (dump, "ngroups = %d\n", n_groups);
20356 print_rtl (dump, current_sched_info->prev_head);
20357 fprintf (dump, "Done finish_sched\n");
20358 }
20359 }
20360}
e855c69d
AB
20361
20362struct _rs6000_sched_context
20363{
20364 short cached_can_issue_more;
20365 rtx last_scheduled_insn;
20366 int load_store_pendulum;
20367};
20368
20369typedef struct _rs6000_sched_context rs6000_sched_context_def;
20370typedef rs6000_sched_context_def *rs6000_sched_context_t;
20371
20372/* Allocate storage for a new scheduling context. */
20373static void *
20374rs6000_alloc_sched_context (void)
20375{
20376 return xmalloc (sizeof (rs6000_sched_context_def));
20377}
20378
20379/* If CLEAN_P is true, initialize _SC with clean data;
 20380 otherwise initialize it from the global context. */
20381static void
20382rs6000_init_sched_context (void *_sc, bool clean_p)
20383{
20384 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20385
20386 if (clean_p)
20387 {
20388 sc->cached_can_issue_more = 0;
20389 sc->last_scheduled_insn = NULL_RTX;
20390 sc->load_store_pendulum = 0;
20391 }
20392 else
20393 {
20394 sc->cached_can_issue_more = cached_can_issue_more;
20395 sc->last_scheduled_insn = last_scheduled_insn;
20396 sc->load_store_pendulum = load_store_pendulum;
20397 }
20398}
20399
20400/* Sets the global scheduling context to the one pointed to by _SC. */
20401static void
20402rs6000_set_sched_context (void *_sc)
20403{
20404 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20405
20406 gcc_assert (sc != NULL);
20407
20408 cached_can_issue_more = sc->cached_can_issue_more;
20409 last_scheduled_insn = sc->last_scheduled_insn;
20410 load_store_pendulum = sc->load_store_pendulum;
20411}
20412
20413/* Free _SC. */
20414static void
20415rs6000_free_sched_context (void *_sc)
20416{
20417 gcc_assert (_sc != NULL);
20418
20419 free (_sc);
20420}
20421
b6c9286a 20422\f
b6c9286a
MM
20423/* Length in units of the trampoline for entering a nested function. */
20424
20425int
863d938c 20426rs6000_trampoline_size (void)
b6c9286a
MM
20427{
20428 int ret = 0;
20429
20430 switch (DEFAULT_ABI)
20431 {
20432 default:
37409796 20433 gcc_unreachable ();
b6c9286a
MM
20434
20435 case ABI_AIX:
8f802bfb 20436 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20437 break;
20438
4dabc42d 20439 case ABI_DARWIN:
b6c9286a 20440 case ABI_V4:
03a7e1a5 20441 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20442 break;
b6c9286a
MM
20443 }
20444
20445 return ret;
20446}
20447
20448/* Emit RTL insns to initialize the variable parts of a trampoline.
20449 FNADDR is an RTX for the address of the function's pure code.
20450 CXT is an RTX for the static chain value for the function. */
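/* Illustrative layout of the AIX trampoline built below (32-bit ABI, so
   regsize == 4; for 64-bit each slot is 8 bytes):

	addr + 0*regsize : code address copied from FNADDR's descriptor
	addr + 1*regsize : TOC pointer copied from FNADDR's descriptor
	addr + 2*regsize : static chain value (CXT)  */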
20451
20452void
a2369ed3 20453rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20454{
8bd04c56 20455 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20456 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20457
20458 switch (DEFAULT_ABI)
20459 {
20460 default:
37409796 20461 gcc_unreachable ();
b6c9286a 20462
8bd04c56 20463/* Macros to shorten the code expansions below. */
9613eaff 20464#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20465#define MEM_PLUS(addr,offset) \
9613eaff 20466 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20467
b6c9286a
MM
 20468 /* Under AIX, just build the 3-word function descriptor. */
20469 case ABI_AIX:
8bd04c56 20470 {
9613eaff
SH
20471 rtx fn_reg = gen_reg_rtx (Pmode);
20472 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20473 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20474 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20475 emit_move_insn (MEM_DEREF (addr), fn_reg);
20476 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20477 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20478 }
b6c9286a
MM
20479 break;
20480
4dabc42d
TC
20481 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20482 case ABI_DARWIN:
b6c9286a 20483 case ABI_V4:
9613eaff 20484 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
bbbbb16a 20485 LCT_NORMAL, VOIDmode, 4,
9613eaff 20486 addr, Pmode,
eaf1bcf1 20487 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20488 fnaddr, Pmode,
20489 ctx_reg, Pmode);
b6c9286a 20490 break;
b6c9286a
MM
20491 }
20492
20493 return;
20494}
7509c759
MM
20495
20496\f
91d231cb 20497/* Table of valid machine attributes. */
a4f6c312 20498
91d231cb 20499const struct attribute_spec rs6000_attribute_table[] =
7509c759 20500{
91d231cb 20501 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20502 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20503 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20504 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20505 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20506 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20507#ifdef SUBTARGET_ATTRIBUTE_TABLE
20508 SUBTARGET_ATTRIBUTE_TABLE,
20509#endif
a5c76ee6 20510 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20511};
7509c759 20512
8bb418a3
ZL
20513/* Handle the "altivec" attribute. The attribute may have
20514 arguments as follows:
f676971a 20515
20516 __attribute__((altivec(vector__)))
20517 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20518 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20519
20520 and may appear more than once (e.g., 'vector bool char') in a
20521 given declaration. */
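/* For instance (illustrative only), the AltiVec keyword forms expand roughly
   as follows before they reach this handler:

     vector unsigned int v;  ->  __attribute__((altivec(vector__))) unsigned int v;
     vector bool short b;    ->  __attribute__((altivec(bool__))) unsigned short b;
     vector pixel p;         ->  __attribute__((altivec(pixel__))) unsigned short p;  */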
20522
20523static tree
f90ac3f0
UP
20524rs6000_handle_altivec_attribute (tree *node,
20525 tree name ATTRIBUTE_UNUSED,
20526 tree args,
8bb418a3
ZL
20527 int flags ATTRIBUTE_UNUSED,
20528 bool *no_add_attrs)
20529{
20530 tree type = *node, result = NULL_TREE;
20531 enum machine_mode mode;
20532 int unsigned_p;
20533 char altivec_type
20534 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20535 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20536 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20537 : '?');
8bb418a3
ZL
20538
20539 while (POINTER_TYPE_P (type)
20540 || TREE_CODE (type) == FUNCTION_TYPE
20541 || TREE_CODE (type) == METHOD_TYPE
20542 || TREE_CODE (type) == ARRAY_TYPE)
20543 type = TREE_TYPE (type);
20544
20545 mode = TYPE_MODE (type);
20546
f90ac3f0
UP
20547 /* Check for invalid AltiVec type qualifiers. */
20548 if (type == long_unsigned_type_node || type == long_integer_type_node)
20549 {
20550 if (TARGET_64BIT)
20551 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20552 else if (rs6000_warn_altivec_long)
d4ee4d25 20553 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20554 }
20555 else if (type == long_long_unsigned_type_node
20556 || type == long_long_integer_type_node)
20557 error ("use of %<long long%> in AltiVec types is invalid");
20558 else if (type == double_type_node)
20559 error ("use of %<double%> in AltiVec types is invalid");
20560 else if (type == long_double_type_node)
20561 error ("use of %<long double%> in AltiVec types is invalid");
20562 else if (type == boolean_type_node)
20563 error ("use of boolean types in AltiVec types is invalid");
20564 else if (TREE_CODE (type) == COMPLEX_TYPE)
20565 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20566 else if (DECIMAL_FLOAT_MODE_P (mode))
20567 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20568
20569 switch (altivec_type)
20570 {
20571 case 'v':
8df83eae 20572 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20573 switch (mode)
20574 {
c4ad648e
AM
20575 case SImode:
20576 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20577 break;
20578 case HImode:
20579 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20580 break;
20581 case QImode:
20582 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20583 break;
20584 case SFmode: result = V4SF_type_node; break;
20585 /* If the user says 'vector int bool', we may be handed the 'bool'
20586 attribute _before_ the 'vector' attribute, and so select the
20587 proper type in the 'b' case below. */
20588 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20589 result = type;
20590 default: break;
8bb418a3
ZL
20591 }
20592 break;
20593 case 'b':
20594 switch (mode)
20595 {
c4ad648e
AM
20596 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20597 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20598 case QImode: case V16QImode: result = bool_V16QI_type_node;
20599 default: break;
8bb418a3
ZL
20600 }
20601 break;
20602 case 'p':
20603 switch (mode)
20604 {
c4ad648e
AM
20605 case V8HImode: result = pixel_V8HI_type_node;
20606 default: break;
8bb418a3
ZL
20607 }
20608 default: break;
20609 }
20610
4f538d42
UW
20611 /* Propagate qualifiers attached to the element type
20612 onto the vector type. */
20613 if (result && result != type && TYPE_QUALS (type))
20614 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20615
8bb418a3
ZL
20616 *no_add_attrs = true; /* No need to hang on to the attribute. */
20617
f90ac3f0 20618 if (result)
5dc11954 20619 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20620
20621 return NULL_TREE;
20622}
20623
f18eca82
ZL
20624/* AltiVec defines four built-in scalar types that serve as vector
20625 elements; we must teach the compiler how to mangle them. */
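/* For example (illustrative, assuming the usual Itanium C++ ABI encoding of
   vendor-extended types): a C++ declaration such as

     void foo (__pixel);

   mangles as _Z3foou7__pixel, using the "u7__pixel" string returned below.  */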
20626
20627static const char *
3101faab 20628rs6000_mangle_type (const_tree type)
f18eca82 20629{
608063c3
JB
20630 type = TYPE_MAIN_VARIANT (type);
20631
20632 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20633 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20634 return NULL;
20635
f18eca82
ZL
20636 if (type == bool_char_type_node) return "U6__boolc";
20637 if (type == bool_short_type_node) return "U6__bools";
20638 if (type == pixel_type_node) return "u7__pixel";
20639 if (type == bool_int_type_node) return "U6__booli";
20640
337bde91
DE
20641 /* Mangle IBM extended float long double as `g' (__float128) on
20642 powerpc*-linux where long-double-64 previously was the default. */
20643 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20644 && TARGET_ELF
20645 && TARGET_LONG_DOUBLE_128
20646 && !TARGET_IEEEQUAD)
20647 return "g";
20648
f18eca82
ZL
20649 /* For all other types, use normal C++ mangling. */
20650 return NULL;
20651}
20652
a5c76ee6
ZW
20653/* Handle a "longcall" or "shortcall" attribute; arguments as in
20654 struct attribute_spec.handler. */
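/* Typical usage (illustrative):

     void far_func (void) __attribute__ ((longcall));

   which forces calls to far_func to go through a register instead of relying
   on the callee being within range of a direct "bl".  */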
a4f6c312 20655
91d231cb 20656static tree
f676971a
EC
20657rs6000_handle_longcall_attribute (tree *node, tree name,
20658 tree args ATTRIBUTE_UNUSED,
20659 int flags ATTRIBUTE_UNUSED,
a2369ed3 20660 bool *no_add_attrs)
91d231cb
JM
20661{
20662 if (TREE_CODE (*node) != FUNCTION_TYPE
20663 && TREE_CODE (*node) != FIELD_DECL
20664 && TREE_CODE (*node) != TYPE_DECL)
20665 {
5c498b10 20666 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20667 IDENTIFIER_POINTER (name));
20668 *no_add_attrs = true;
20669 }
6a4cee5f 20670
91d231cb 20671 return NULL_TREE;
7509c759
MM
20672}
20673
a5c76ee6
ZW
20674/* Set longcall attributes on all functions declared when
20675 rs6000_default_long_calls is true. */
20676static void
a2369ed3 20677rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20678{
20679 if (rs6000_default_long_calls
20680 && (TREE_CODE (type) == FUNCTION_TYPE
20681 || TREE_CODE (type) == METHOD_TYPE))
20682 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20683 NULL_TREE,
20684 TYPE_ATTRIBUTES (type));
16d6f994
EC
20685
20686#if TARGET_MACHO
20687 darwin_set_default_type_attributes (type);
20688#endif
a5c76ee6
ZW
20689}
20690
3cb999d8
DE
20691/* Return a reference suitable for calling a function with the
20692 longcall attribute. */
a4f6c312 20693
9390387d 20694rtx
a2369ed3 20695rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20696{
d330fd93 20697 const char *call_name;
6a4cee5f
MM
20698 tree node;
20699
20700 if (GET_CODE (call_ref) != SYMBOL_REF)
20701 return call_ref;
20702
20703 /* System V adds '.' to the internal name, so skip them. */
20704 call_name = XSTR (call_ref, 0);
20705 if (*call_name == '.')
20706 {
20707 while (*call_name == '.')
20708 call_name++;
20709
20710 node = get_identifier (call_name);
39403d82 20711 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20712 }
20713
20714 return force_reg (Pmode, call_ref);
20715}
7509c759 20716\f
77ccdfed
EC
20717#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20718#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20719#endif
20720
20721/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20722 struct attribute_spec.handler. */
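/* Typical usage (illustrative):

     struct __attribute__ ((ms_struct)) S { char c; int i; };

   "ms_struct" requests the Microsoft record layout for S, while "gcc_struct"
   requests the default GCC layout on subtargets where the MS layout would
   otherwise be in effect (see rs6000_ms_bitfield_layout_p below).  */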
20723static tree
20724rs6000_handle_struct_attribute (tree *node, tree name,
20725 tree args ATTRIBUTE_UNUSED,
20726 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20727{
20728 tree *type = NULL;
20729 if (DECL_P (*node))
20730 {
20731 if (TREE_CODE (*node) == TYPE_DECL)
20732 type = &TREE_TYPE (*node);
20733 }
20734 else
20735 type = node;
20736
20737 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20738 || TREE_CODE (*type) == UNION_TYPE)))
20739 {
20740 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20741 *no_add_attrs = true;
20742 }
20743
20744 else if ((is_attribute_p ("ms_struct", name)
20745 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20746 || ((is_attribute_p ("gcc_struct", name)
20747 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20748 {
20749 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20750 IDENTIFIER_POINTER (name));
20751 *no_add_attrs = true;
20752 }
20753
20754 return NULL_TREE;
20755}
20756
20757static bool
3101faab 20758rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20759{
20760 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20761 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20762 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20763}
20764\f
b64a1b53
RH
20765#ifdef USING_ELFOS_H
20766
d6b5193b 20767/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20768
d6b5193b
RS
20769static void
20770rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20771{
20772 if (DEFAULT_ABI == ABI_AIX
20773 && TARGET_MINIMAL_TOC
20774 && !TARGET_RELOCATABLE)
20775 {
20776 if (!toc_initialized)
20777 {
20778 toc_initialized = 1;
20779 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20780 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20781 fprintf (asm_out_file, "\t.tc ");
20782 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20783 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20784 fprintf (asm_out_file, "\n");
20785
20786 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20787 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20788 fprintf (asm_out_file, " = .+32768\n");
20789 }
20790 else
20791 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20792 }
20793 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20794 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20795 else
20796 {
20797 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20798 if (!toc_initialized)
20799 {
20800 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20801 fprintf (asm_out_file, " = .+32768\n");
20802 toc_initialized = 1;
20803 }
20804 }
20805}
20806
20807/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20808
b64a1b53 20809static void
d6b5193b
RS
20810rs6000_elf_asm_init_sections (void)
20811{
20812 toc_section
20813 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20814
20815 sdata2_section
20816 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20817 SDATA2_SECTION_ASM_OP);
20818}
20819
20820/* Implement TARGET_SELECT_RTX_SECTION. */
20821
20822static section *
f676971a 20823rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20824 unsigned HOST_WIDE_INT align)
7509c759 20825{
a9098fd0 20826 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20827 return toc_section;
7509c759 20828 else
d6b5193b 20829 return default_elf_select_rtx_section (mode, x, align);
7509c759 20830}
d9407988 20831\f
d1908feb
JJ
20832/* For a SYMBOL_REF, set generic flags and then perform some
20833 target-specific processing.
20834
20835 When the AIX ABI is requested on a non-AIX system, replace the
20836 function name with the real name (with a leading .) rather than the
20837 function descriptor name. This saves a lot of overriding code to
20838 read the prefixes. */
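/* E.g. (illustrative): when targeting the AIX ABI from an ELF system, a
   FUNCTION_DECL named "foo" gets its SYMBOL_REF string rewritten here to
   ".foo", so that references resolve to the code entry point rather than to
   a function descriptor.  */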
d9407988 20839
fb49053f 20840static void
a2369ed3 20841rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20842{
d1908feb 20843 default_encode_section_info (decl, rtl, first);
b2003250 20844
d1908feb
JJ
20845 if (first
20846 && TREE_CODE (decl) == FUNCTION_DECL
20847 && !TARGET_AIX
20848 && DEFAULT_ABI == ABI_AIX)
d9407988 20849 {
c6a2438a 20850 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20851 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20852 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20853 str[0] = '.';
20854 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20855 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20856 }
d9407988
MM
20857}
20858
21d9bb3f 20859static inline bool
0a2aaacc 20860compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20861{
20862 int len;
20863
0a2aaacc
KG
20864 len = strlen (templ);
20865 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20866 && (section[len] == 0 || section[len] == '.'));
20867}
20868
c1b7d95a 20869bool
3101faab 20870rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20871{
20872 if (rs6000_sdata == SDATA_NONE)
20873 return false;
20874
7482ad25
AF
20875 /* We want to merge strings, so we never consider them small data. */
20876 if (TREE_CODE (decl) == STRING_CST)
20877 return false;
20878
20879 /* Functions are never in the small data area. */
20880 if (TREE_CODE (decl) == FUNCTION_DECL)
20881 return false;
20882
0e5dbd9b
DE
20883 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20884 {
20885 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20886 if (compare_section_name (section, ".sdata")
20887 || compare_section_name (section, ".sdata2")
20888 || compare_section_name (section, ".gnu.linkonce.s")
20889 || compare_section_name (section, ".sbss")
20890 || compare_section_name (section, ".sbss2")
20891 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20892 || strcmp (section, ".PPC.EMB.sdata0") == 0
20893 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20894 return true;
20895 }
20896 else
20897 {
20898 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20899
20900 if (size > 0
307b599c 20901 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20902 /* If it's not public, and we're not going to reference it there,
20903 there's no need to put it in the small data section. */
0e5dbd9b
DE
20904 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20905 return true;
20906 }
20907
20908 return false;
20909}
20910
b91da81f 20911#endif /* USING_ELFOS_H */
aacd3885
RS
20912\f
20913/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20914
aacd3885 20915static bool
3101faab 20916rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20917{
20918 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20919}
a6c2a102 20920\f
000034eb 20921/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20922 ADDR can be effectively incremented by incrementing REG.
20923
20924 r0 is special and we must not select it as an address
20925 register by this routine since our caller will try to
20926 increment the returned register via an "la" instruction. */
000034eb 20927
9390387d 20928rtx
a2369ed3 20929find_addr_reg (rtx addr)
000034eb
DE
20930{
20931 while (GET_CODE (addr) == PLUS)
20932 {
02441cd6
JL
20933 if (GET_CODE (XEXP (addr, 0)) == REG
20934 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20935 addr = XEXP (addr, 0);
02441cd6
JL
20936 else if (GET_CODE (XEXP (addr, 1)) == REG
20937 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20938 addr = XEXP (addr, 1);
20939 else if (CONSTANT_P (XEXP (addr, 0)))
20940 addr = XEXP (addr, 1);
20941 else if (CONSTANT_P (XEXP (addr, 1)))
20942 addr = XEXP (addr, 0);
20943 else
37409796 20944 gcc_unreachable ();
000034eb 20945 }
37409796
NS
20946 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20947 return addr;
000034eb
DE
20948}
20949
a6c2a102 20950void
a2369ed3 20951rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20952{
20953 fatal_insn ("bad address", op);
20954}
c8023011 20955
ee890fe2
SS
20956#if TARGET_MACHO
20957
efdba735 20958static tree branch_island_list = 0;
ee890fe2 20959
efdba735
SH
20960/* Remember to generate a branch island for far calls to the given
20961 function. */
ee890fe2 20962
f676971a 20963static void
c4ad648e
AM
20964add_compiler_branch_island (tree label_name, tree function_name,
20965 int line_number)
ee890fe2 20966{
efdba735 20967 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20968 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20969 TREE_CHAIN (branch_island) = branch_island_list;
20970 branch_island_list = branch_island;
ee890fe2
SS
20971}
20972
efdba735
SH
20973#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20974#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20975#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20976 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20977
efdba735
SH
20978/* Generate far-jump branch islands for everything on the
20979 branch_island_list. Invoked immediately after the last instruction
20980 of the epilogue has been emitted; the branch-islands must be
20981 appended to, and contiguous with, the function body. Mach-O stubs
20982 are generated in machopic_output_stub(). */
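/* In the non-PIC case an island emitted below looks roughly like this
   (illustrative; "L42" and "_foo" stand for the island label and the mangled
   target name):

	L42:	lis   r12,hi16(_foo)
		ori   r12,r12,lo16(_foo)
		mtctr r12
		bctr  */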
ee890fe2 20983
efdba735
SH
20984static void
20985macho_branch_islands (void)
20986{
20987 char tmp_buf[512];
20988 tree branch_island;
20989
20990 for (branch_island = branch_island_list;
20991 branch_island;
20992 branch_island = TREE_CHAIN (branch_island))
20993 {
20994 const char *label =
20995 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20996 const char *name =
11abc112 20997 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20998 char name_buf[512];
20999 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
21000 if (name[0] == '*' || name[0] == '&')
21001 strcpy (name_buf, name+1);
21002 else
21003 {
21004 name_buf[0] = '_';
21005 strcpy (name_buf+1, name);
21006 }
21007 strcpy (tmp_buf, "\n");
21008 strcat (tmp_buf, label);
ee890fe2 21009#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21010 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21011 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21012#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
21013 if (flag_pic)
21014 {
21015 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21016 strcat (tmp_buf, label);
21017 strcat (tmp_buf, "_pic\n");
21018 strcat (tmp_buf, label);
21019 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 21020
efdba735
SH
21021 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21022 strcat (tmp_buf, name_buf);
21023 strcat (tmp_buf, " - ");
21024 strcat (tmp_buf, label);
21025 strcat (tmp_buf, "_pic)\n");
f676971a 21026
efdba735 21027 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 21028
efdba735
SH
21029 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21030 strcat (tmp_buf, name_buf);
21031 strcat (tmp_buf, " - ");
21032 strcat (tmp_buf, label);
21033 strcat (tmp_buf, "_pic)\n");
f676971a 21034
efdba735
SH
21035 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21036 }
21037 else
21038 {
21039 strcat (tmp_buf, ":\nlis r12,hi16(");
21040 strcat (tmp_buf, name_buf);
21041 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21042 strcat (tmp_buf, name_buf);
21043 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21044 }
21045 output_asm_insn (tmp_buf, 0);
ee890fe2 21046#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21047 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21048 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21049#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21050 }
ee890fe2 21051
efdba735 21052 branch_island_list = 0;
ee890fe2
SS
21053}
21054
21055/* NO_PREVIOUS_DEF checks whether the function name is already present
 21056 in the linked list of branch islands. */
21057
efdba735 21058static int
a2369ed3 21059no_previous_def (tree function_name)
ee890fe2 21060{
efdba735
SH
21061 tree branch_island;
21062 for (branch_island = branch_island_list;
21063 branch_island;
21064 branch_island = TREE_CHAIN (branch_island))
21065 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21066 return 0;
21067 return 1;
21068}
21069
21070/* GET_PREV_LABEL gets the label name from the previous definition of
21071 the function. */
21072
efdba735 21073static tree
a2369ed3 21074get_prev_label (tree function_name)
ee890fe2 21075{
efdba735
SH
21076 tree branch_island;
21077 for (branch_island = branch_island_list;
21078 branch_island;
21079 branch_island = TREE_CHAIN (branch_island))
21080 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21081 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21082 return 0;
21083}
21084
75b1b789
MS
21085#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21086#define DARWIN_LINKER_GENERATES_ISLANDS 0
21087#endif
21088
21089/* KEXTs still need branch islands. */
21090#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21091 || flag_mkernel || flag_apple_kext)
21092
ee890fe2 21093/* INSN is either a function call or a millicode call. It may have an
f676971a 21094 unconditional jump in its delay slot.
ee890fe2
SS
21095
21096 CALL_DEST is the routine we are calling. */
21097
21098char *
c4ad648e
AM
21099output_call (rtx insn, rtx *operands, int dest_operand_number,
21100 int cookie_operand_number)
ee890fe2
SS
21101{
21102 static char buf[256];
75b1b789
MS
21103 if (DARWIN_GENERATE_ISLANDS
21104 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21105 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21106 {
21107 tree labelname;
efdba735 21108 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21109
ee890fe2
SS
21110 if (no_previous_def (funname))
21111 {
ee890fe2
SS
21112 rtx label_rtx = gen_label_rtx ();
21113 char *label_buf, temp_buf[256];
21114 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21115 CODE_LABEL_NUMBER (label_rtx));
21116 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21117 labelname = get_identifier (label_buf);
a38e7aa5 21118 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21119 }
21120 else
21121 labelname = get_prev_label (funname);
21122
efdba735
SH
21123 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21124 instruction will reach 'foo', otherwise link as 'bl L42'".
21125 "L42" should be a 'branch island', that will do a far jump to
21126 'foo'. Branch islands are generated in
21127 macho_branch_islands(). */
ee890fe2 21128 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21129 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21130 }
21131 else
efdba735
SH
21132 sprintf (buf, "bl %%z%d", dest_operand_number);
21133 return buf;
ee890fe2
SS
21134}
21135
ee890fe2
SS
21136/* Generate PIC and indirect symbol stubs. */
21137
21138void
a2369ed3 21139machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21140{
21141 unsigned int length;
a4f6c312
SS
21142 char *symbol_name, *lazy_ptr_name;
21143 char *local_label_0;
ee890fe2
SS
21144 static int label = 0;
21145
df56a27f 21146 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21147 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21148
ee890fe2 21149
ee890fe2 21150 length = strlen (symb);
5ead67f6 21151 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21152 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21153
5ead67f6 21154 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21155 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21156
ee890fe2 21157 if (flag_pic == 2)
56c779bc 21158 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21159 else
56c779bc 21160 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21161
21162 if (flag_pic == 2)
21163 {
d974312d
DJ
21164 fprintf (file, "\t.align 5\n");
21165
21166 fprintf (file, "%s:\n", stub);
21167 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21168
876455fa 21169 label++;
5ead67f6 21170 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21171 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21172
ee890fe2
SS
21173 fprintf (file, "\tmflr r0\n");
21174 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21175 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21176 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21177 lazy_ptr_name, local_label_0);
21178 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21179 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21180 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21181 lazy_ptr_name, local_label_0);
21182 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21183 fprintf (file, "\tbctr\n");
21184 }
21185 else
d974312d
DJ
21186 {
21187 fprintf (file, "\t.align 4\n");
21188
21189 fprintf (file, "%s:\n", stub);
21190 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21191
21192 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21193 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21194 (TARGET_64BIT ? "ldu" : "lwzu"),
21195 lazy_ptr_name);
d974312d
DJ
21196 fprintf (file, "\tmtctr r12\n");
21197 fprintf (file, "\tbctr\n");
21198 }
f676971a 21199
56c779bc 21200 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21201 fprintf (file, "%s:\n", lazy_ptr_name);
21202 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21203 fprintf (file, "%sdyld_stub_binding_helper\n",
21204 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21205}
21206
21207/* Legitimize PIC addresses. If the address is already
21208 position-independent, we return ORIG. Newly generated
21209 position-independent addresses go into a reg. This is REG if non
21210 zero, otherwise we allocate register(s) as necessary. */
21211
4fbbe694 21212#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21213
21214rtx
f676971a 21215rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21216 rtx reg)
ee890fe2
SS
21217{
21218 rtx base, offset;
21219
21220 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21221 reg = gen_reg_rtx (Pmode);
21222
21223 if (GET_CODE (orig) == CONST)
21224 {
37409796
NS
21225 rtx reg_temp;
21226
ee890fe2
SS
21227 if (GET_CODE (XEXP (orig, 0)) == PLUS
21228 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21229 return orig;
21230
37409796 21231 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21232
37409796
NS
21233 /* Use a different reg for the intermediate value, as
21234 it will be marked UNCHANGING. */
b3a13419 21235 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21236 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21237 Pmode, reg_temp);
21238 offset =
21239 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21240 Pmode, reg);
bb8df8a6 21241
ee890fe2
SS
21242 if (GET_CODE (offset) == CONST_INT)
21243 {
21244 if (SMALL_INT (offset))
ed8908e7 21245 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21246 else if (! reload_in_progress && ! reload_completed)
21247 offset = force_reg (Pmode, offset);
21248 else
c859cda6
DJ
21249 {
21250 rtx mem = force_const_mem (Pmode, orig);
21251 return machopic_legitimize_pic_address (mem, Pmode, reg);
21252 }
ee890fe2 21253 }
f1c25d3b 21254 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21255 }
21256
21257 /* Fall back on generic machopic code. */
21258 return machopic_legitimize_pic_address (orig, mode, reg);
21259}
21260
c4e18b1c
GK
21261/* Output a .machine directive for the Darwin assembler, and call
21262 the generic start_file routine. */
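/* For example (illustrative): with -mcpu=G5 the { "G5", "ppc970", 0 } entry
   in the table below is selected, so the assembly file begins with

	.machine ppc970  */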
21263
21264static void
21265rs6000_darwin_file_start (void)
21266{
94ff898d 21267 static const struct
c4e18b1c
GK
21268 {
21269 const char *arg;
21270 const char *name;
21271 int if_set;
21272 } mapping[] = {
55dbfb48 21273 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21274 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21275 { "power4", "ppc970", 0 },
21276 { "G5", "ppc970", 0 },
21277 { "7450", "ppc7450", 0 },
21278 { "7400", "ppc7400", MASK_ALTIVEC },
21279 { "G4", "ppc7400", 0 },
21280 { "750", "ppc750", 0 },
21281 { "740", "ppc750", 0 },
21282 { "G3", "ppc750", 0 },
21283 { "604e", "ppc604e", 0 },
21284 { "604", "ppc604", 0 },
21285 { "603e", "ppc603", 0 },
21286 { "603", "ppc603", 0 },
21287 { "601", "ppc601", 0 },
21288 { NULL, "ppc", 0 } };
21289 const char *cpu_id = "";
21290 size_t i;
94ff898d 21291
9390387d 21292 rs6000_file_start ();
192d0f89 21293 darwin_file_start ();
c4e18b1c
GK
21294
21295 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21296 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21297 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21298 && rs6000_select[i].string[0] != '\0')
21299 cpu_id = rs6000_select[i].string;
21300
21301 /* Look through the mapping array. Pick the first name that either
21302 matches the argument, has a bit set in IF_SET that is also set
21303 in the target flags, or has a NULL name. */
21304
21305 i = 0;
21306 while (mapping[i].arg != NULL
21307 && strcmp (mapping[i].arg, cpu_id) != 0
21308 && (mapping[i].if_set & target_flags) == 0)
21309 i++;
21310
21311 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21312}
21313
ee890fe2 21314#endif /* TARGET_MACHO */
7c262518
RH
21315
21316#if TARGET_ELF
9b580a0b
RH
21317static int
21318rs6000_elf_reloc_rw_mask (void)
7c262518 21319{
9b580a0b
RH
21320 if (flag_pic)
21321 return 3;
21322 else if (DEFAULT_ABI == ABI_AIX)
21323 return 2;
21324 else
21325 return 0;
7c262518 21326}
d9f6800d
RH
21327
21328/* Record an element in the table of global constructors. SYMBOL is
21329 a SYMBOL_REF of the function to be called; PRIORITY is a number
21330 between 0 and MAX_INIT_PRIORITY.
21331
21332 This differs from default_named_section_asm_out_constructor in
21333 that we have special handling for -mrelocatable. */
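/* E.g. (illustrative, assuming MAX_INIT_PRIORITY is 65535): a constructor
   registered with priority 65500 is emitted into section ".ctors.00035",
   i.e. the section name carries MAX_INIT_PRIORITY - priority as explained
   in the comment inside the function below.  */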
21334
21335static void
a2369ed3 21336rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21337{
21338 const char *section = ".ctors";
21339 char buf[16];
21340
21341 if (priority != DEFAULT_INIT_PRIORITY)
21342 {
21343 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21344 /* Invert the numbering so the linker puts us in the proper
21345 order; constructors are run from right to left, and the
21346 linker sorts in increasing order. */
21347 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21348 section = buf;
21349 }
21350
d6b5193b 21351 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21352 assemble_align (POINTER_SIZE);
d9f6800d
RH
21353
21354 if (TARGET_RELOCATABLE)
21355 {
21356 fputs ("\t.long (", asm_out_file);
21357 output_addr_const (asm_out_file, symbol);
21358 fputs (")@fixup\n", asm_out_file);
21359 }
21360 else
c8af3574 21361 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21362}
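
/* Editor's illustrative sketch, not part of GCC: how a non-default
   constructor priority turns into the inverted ".ctors.NNNNN" section
   name built above.  Assumes <stdio.h> and the usual MAX_INIT_PRIORITY
   value of 65535; e.g. priority 100 becomes ".ctors.65435", so the
   linker's increasing sort places high-priority entries where the
   right-to-left runtime walk reaches them first.  */

static void
example_ctor_section_name (char *buf, size_t len, unsigned int priority)
{
  snprintf (buf, len, ".ctors.%.5u", 65535 - priority);
}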
21363
21364static void
a2369ed3 21365rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21366{
21367 const char *section = ".dtors";
21368 char buf[16];
21369
21370 if (priority != DEFAULT_INIT_PRIORITY)
21371 {
21372 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21373 /* Invert the numbering so the linker puts us in the proper
21374 order; constructors are run from right to left, and the
21375 linker sorts in increasing order. */
21376 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21377 section = buf;
21378 }
21379
d6b5193b 21380 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21381 assemble_align (POINTER_SIZE);
d9f6800d
RH
21382
21383 if (TARGET_RELOCATABLE)
21384 {
21385 fputs ("\t.long (", asm_out_file);
21386 output_addr_const (asm_out_file, symbol);
21387 fputs (")@fixup\n", asm_out_file);
21388 }
21389 else
c8af3574 21390 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21391}
9739c90c
JJ
21392
21393void
a2369ed3 21394rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21395{
21396 if (TARGET_64BIT)
21397 {
21398 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21399 ASM_OUTPUT_LABEL (file, name);
21400 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21401 rs6000_output_function_entry (file, name);
21402 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21403 if (DOT_SYMBOLS)
9739c90c 21404 {
85b776df 21405 fputs ("\t.size\t", file);
9739c90c 21406 assemble_name (file, name);
85b776df
AM
21407 fputs (",24\n\t.type\t.", file);
21408 assemble_name (file, name);
21409 fputs (",@function\n", file);
21410 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21411 {
21412 fputs ("\t.globl\t.", file);
21413 assemble_name (file, name);
21414 putc ('\n', file);
21415 }
9739c90c 21416 }
85b776df
AM
21417 else
21418 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21419 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21420 rs6000_output_function_entry (file, name);
21421 fputs (":\n", file);
9739c90c
JJ
21422 return;
21423 }
21424
21425 if (TARGET_RELOCATABLE
7f970b70 21426 && !TARGET_SECURE_PLT
e3b5732b 21427 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21428 && uses_TOC ())
9739c90c
JJ
21429 {
21430 char buf[256];
21431
21432 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21433
21434 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21435 fprintf (file, "\t.long ");
21436 assemble_name (file, buf);
21437 putc ('-', file);
21438 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21439 assemble_name (file, buf);
21440 putc ('\n', file);
21441 }
21442
21443 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21444 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21445
21446 if (DEFAULT_ABI == ABI_AIX)
21447 {
21448 const char *desc_name, *orig_name;
21449
21450 orig_name = (*targetm.strip_name_encoding) (name);
21451 desc_name = orig_name;
21452 while (*desc_name == '.')
21453 desc_name++;
21454
21455 if (TREE_PUBLIC (decl))
21456 fprintf (file, "\t.globl %s\n", desc_name);
21457
21458 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21459 fprintf (file, "%s:\n", desc_name);
21460 fprintf (file, "\t.long %s\n", orig_name);
21461 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21462 if (DEFAULT_ABI == ABI_AIX)
21463 fputs ("\t.long 0\n", file);
21464 fprintf (file, "\t.previous\n");
21465 }
21466 ASM_OUTPUT_LABEL (file, name);
21467}
1334b570
AM
21468
21469static void
21470rs6000_elf_end_indicate_exec_stack (void)
21471{
21472 if (TARGET_32BIT)
21473 file_end_indicate_exec_stack ();
21474}
7c262518
RH
21475#endif
21476
cbaaba19 21477#if TARGET_XCOFF
0d5817b2
DE
21478static void
21479rs6000_xcoff_asm_output_anchor (rtx symbol)
21480{
21481 char buffer[100];
21482
21483 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21484 SYMBOL_REF_BLOCK_OFFSET (symbol));
21485 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21486}
21487
7c262518 21488static void
a2369ed3 21489rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21490{
21491 fputs (GLOBAL_ASM_OP, stream);
21492 RS6000_OUTPUT_BASENAME (stream, name);
21493 putc ('\n', stream);
21494}
21495
d6b5193b
RS
21496/* A get_unnamed_section callback, used for read-only sections. DIRECTIVE
21497 points to the section string variable. */
21498
21499static void
21500rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21501{
890f9edf
OH
21502 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21503 *(const char *const *) directive,
21504 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21505}
21506
21507/* Likewise for read-write sections. */
21508
21509static void
21510rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21511{
890f9edf
OH
21512 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21513 *(const char *const *) directive,
21514 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21515}
21516
21517/* A get_unnamed_section callback, used for switching to toc_section. */
21518
21519static void
21520rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21521{
21522 if (TARGET_MINIMAL_TOC)
21523 {
21524 /* toc_section is always selected at least once from
21525 rs6000_xcoff_file_start, so this is guaranteed to
21526 always be defined once and only once in each file. */
21527 if (!toc_initialized)
21528 {
21529 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21530 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21531 toc_initialized = 1;
21532 }
21533 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21534 (TARGET_32BIT ? "" : ",3"));
21535 }
21536 else
21537 fputs ("\t.toc\n", asm_out_file);
21538}
21539
21540/* Implement TARGET_ASM_INIT_SECTIONS. */
21541
21542static void
21543rs6000_xcoff_asm_init_sections (void)
21544{
21545 read_only_data_section
21546 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21547 &xcoff_read_only_section_name);
21548
21549 private_data_section
21550 = get_unnamed_section (SECTION_WRITE,
21551 rs6000_xcoff_output_readwrite_section_asm_op,
21552 &xcoff_private_data_section_name);
21553
21554 read_only_private_data_section
21555 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21556 &xcoff_private_data_section_name);
21557
21558 toc_section
21559 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21560
21561 readonly_data_section = read_only_data_section;
21562 exception_section = data_section;
21563}
21564
9b580a0b
RH
21565static int
21566rs6000_xcoff_reloc_rw_mask (void)
21567{
21568 return 3;
21569}
21570
b275d088 21571static void
c18a5b6c
MM
21572rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21573 tree decl ATTRIBUTE_UNUSED)
7c262518 21574{
0e5dbd9b
DE
21575 int smclass;
21576 static const char * const suffix[3] = { "PR", "RO", "RW" };
21577
21578 if (flags & SECTION_CODE)
21579 smclass = 0;
21580 else if (flags & SECTION_WRITE)
21581 smclass = 2;
21582 else
21583 smclass = 1;
21584
5b5198f7 21585 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21586 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21587 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21588}
ae46c4e0 21589
d6b5193b 21590static section *
f676971a 21591rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21592 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21593{
9b580a0b 21594 if (decl_readonly_section (decl, reloc))
ae46c4e0 21595 {
0e5dbd9b 21596 if (TREE_PUBLIC (decl))
d6b5193b 21597 return read_only_data_section;
ae46c4e0 21598 else
d6b5193b 21599 return read_only_private_data_section;
ae46c4e0
RH
21600 }
21601 else
21602 {
0e5dbd9b 21603 if (TREE_PUBLIC (decl))
d6b5193b 21604 return data_section;
ae46c4e0 21605 else
d6b5193b 21606 return private_data_section;
ae46c4e0
RH
21607 }
21608}
21609
21610static void
a2369ed3 21611rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21612{
21613 const char *name;
ae46c4e0 21614
5b5198f7
DE
21615 /* Use select_section for private and uninitialized data. */
21616 if (!TREE_PUBLIC (decl)
21617 || DECL_COMMON (decl)
0e5dbd9b
DE
21618 || DECL_INITIAL (decl) == NULL_TREE
21619 || DECL_INITIAL (decl) == error_mark_node
21620 || (flag_zero_initialized_in_bss
21621 && initializer_zerop (DECL_INITIAL (decl))))
21622 return;
21623
21624 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21625 name = (*targetm.strip_name_encoding) (name);
21626 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21627}
b64a1b53 21628
fb49053f
RH
21629/* Select section for constant in constant pool.
21630
21631 On RS/6000, all constants are in the private read-only data area.
21632 However, if this is being placed in the TOC it must be output as a
21633 toc entry. */
21634
d6b5193b 21635static section *
f676971a 21636rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21637 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21638{
21639 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21640 return toc_section;
b64a1b53 21641 else
d6b5193b 21642 return read_only_private_data_section;
b64a1b53 21643}
772c5265
RH
21644
21645/* Remove any trailing [DS] or the like from the symbol name. */
21646
21647static const char *
a2369ed3 21648rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21649{
21650 size_t len;
21651 if (*name == '*')
21652 name++;
21653 len = strlen (name);
21654 if (name[len - 1] == ']')
21655 return ggc_alloc_string (name, len - 4);
21656 else
21657 return name;
21658}
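
/* Editor's illustrative sketch, not part of GCC: the same stripping
   rule applied to a plain C string, using only <string.h>.  A leading
   '*' is skipped and a trailing four-character "[XX]" mapping-class
   suffix such as "[DS]" is dropped, so "*foo[DS]" yields the length of
   "foo".  Unlike the real hook, this sketch only reports the resulting
   length instead of allocating a new string.  */

static size_t
example_stripped_length (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  if (len >= 4 && name[len - 1] == ']')
    len -= 4;
  return len;
}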
21659
5add3202
DE
21660/* Section attributes. AIX is always PIC. */
21661
21662static unsigned int
a2369ed3 21663rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21664{
5b5198f7 21665 unsigned int align;
9b580a0b 21666 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21667
21668 /* Align to at least UNIT size. */
21669 if (flags & SECTION_CODE)
21670 align = MIN_UNITS_PER_WORD;
21671 else
21672 /* Increase alignment of large objects if not already stricter. */
21673 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21674 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21675 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21676
21677 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21678}
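
/* Editor's illustrative sketch, not part of GCC: the alignment computed
   above is stored as its base-2 logarithm in the SECTION_ENTSIZE bits,
   so a 16-byte alignment is encoded as 4.  ENTSIZE_MASK is a
   hypothetical stand-in for SECTION_ENTSIZE, and ALIGN is assumed to be
   a power of two, as exact_log2 requires.  */

static unsigned int
example_pack_align (unsigned int flags, unsigned int align,
                    unsigned int entsize_mask)
{
  unsigned int log = 0;

  while ((1u << log) < align)
    log++;
  return flags | (log & entsize_mask);
}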
a5fe455b 21679
1bc7c5b6
ZW
21680/* Output at beginning of assembler file.
21681
21682 Initialize the section names for the RS/6000 at this point.
21683
21684 Specify filename, including full path, to assembler.
21685
21686 We want to go into the TOC section so at least one .toc will be emitted.
21687 Also, in order to output proper .bs/.es pairs, we need at least one static
21688 [RW] section emitted.
21689
21690 Finally, declare mcount when profiling to make the assembler happy. */
21691
21692static void
863d938c 21693rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21694{
21695 rs6000_gen_section_name (&xcoff_bss_section_name,
21696 main_input_filename, ".bss_");
21697 rs6000_gen_section_name (&xcoff_private_data_section_name,
21698 main_input_filename, ".rw_");
21699 rs6000_gen_section_name (&xcoff_read_only_section_name,
21700 main_input_filename, ".ro_");
21701
21702 fputs ("\t.file\t", asm_out_file);
21703 output_quoted_string (asm_out_file, main_input_filename);
21704 fputc ('\n', asm_out_file);
1bc7c5b6 21705 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21706 switch_to_section (private_data_section);
21707 switch_to_section (text_section);
1bc7c5b6
ZW
21708 if (profile_flag)
21709 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21710 rs6000_file_start ();
21711}
21712
a5fe455b
ZW
21713/* Output at end of assembler file.
21714 On the RS/6000, referencing data should automatically pull in text. */
21715
21716static void
863d938c 21717rs6000_xcoff_file_end (void)
a5fe455b 21718{
d6b5193b 21719 switch_to_section (text_section);
a5fe455b 21720 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21721 switch_to_section (data_section);
a5fe455b
ZW
21722 fputs (TARGET_32BIT
21723 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21724 asm_out_file);
21725}
f1384257 21726#endif /* TARGET_XCOFF */
0e5dbd9b 21727
3c50106f
RH
21728/* Compute a (partial) cost for rtx X. Return true if the complete
21729 cost has been computed, and false if subexpressions should be
21730 scanned. In either case, *TOTAL contains the cost result. */
21731
21732static bool
f40751dd
JH
21733rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21734 bool speed)
3c50106f 21735{
f0517163
RS
21736 enum machine_mode mode = GET_MODE (x);
21737
3c50106f
RH
21738 switch (code)
21739 {
30a555d9 21740 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21741 case CONST_INT:
066cd967
DE
21742 if (((outer_code == SET
21743 || outer_code == PLUS
21744 || outer_code == MINUS)
279bb624
DE
21745 && (satisfies_constraint_I (x)
21746 || satisfies_constraint_L (x)))
066cd967 21747 || (outer_code == AND
279bb624
DE
21748 && (satisfies_constraint_K (x)
21749 || (mode == SImode
21750 ? satisfies_constraint_L (x)
21751 : satisfies_constraint_J (x))
1990cd79
AM
21752 || mask_operand (x, mode)
21753 || (mode == DImode
21754 && mask64_operand (x, DImode))))
22e54023 21755 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21756 && (satisfies_constraint_K (x)
21757 || (mode == SImode
21758 ? satisfies_constraint_L (x)
21759 : satisfies_constraint_J (x))))
066cd967
DE
21760 || outer_code == ASHIFT
21761 || outer_code == ASHIFTRT
21762 || outer_code == LSHIFTRT
21763 || outer_code == ROTATE
21764 || outer_code == ROTATERT
d5861a7a 21765 || outer_code == ZERO_EXTRACT
066cd967 21766 || (outer_code == MULT
279bb624 21767 && satisfies_constraint_I (x))
22e54023
DE
21768 || ((outer_code == DIV || outer_code == UDIV
21769 || outer_code == MOD || outer_code == UMOD)
21770 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21771 || (outer_code == COMPARE
279bb624
DE
21772 && (satisfies_constraint_I (x)
21773 || satisfies_constraint_K (x)))
22e54023 21774 || (outer_code == EQ
279bb624
DE
21775 && (satisfies_constraint_I (x)
21776 || satisfies_constraint_K (x)
21777 || (mode == SImode
21778 ? satisfies_constraint_L (x)
21779 : satisfies_constraint_J (x))))
22e54023 21780 || (outer_code == GTU
279bb624 21781 && satisfies_constraint_I (x))
22e54023 21782 || (outer_code == LTU
279bb624 21783 && satisfies_constraint_P (x)))
066cd967
DE
21784 {
21785 *total = 0;
21786 return true;
21787 }
21788 else if ((outer_code == PLUS
4ae234b0 21789 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21790 || (outer_code == MINUS
4ae234b0 21791 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21792 || ((outer_code == SET
21793 || outer_code == IOR
21794 || outer_code == XOR)
21795 && (INTVAL (x)
21796 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21797 {
21798 *total = COSTS_N_INSNS (1);
21799 return true;
21800 }
21801 /* FALLTHRU */
21802
21803 case CONST_DOUBLE:
f6fe3a22 21804 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21805 {
f6fe3a22
DE
21806 if ((outer_code == IOR || outer_code == XOR)
21807 && CONST_DOUBLE_HIGH (x) == 0
21808 && (CONST_DOUBLE_LOW (x)
21809 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21810 {
21811 *total = 0;
21812 return true;
21813 }
21814 else if ((outer_code == AND && and64_2_operand (x, DImode))
21815 || ((outer_code == SET
21816 || outer_code == IOR
21817 || outer_code == XOR)
21818 && CONST_DOUBLE_HIGH (x) == 0))
21819 {
21820 *total = COSTS_N_INSNS (1);
21821 return true;
21822 }
066cd967
DE
21823 }
21824 /* FALLTHRU */
21825
3c50106f 21826 case CONST:
066cd967 21827 case HIGH:
3c50106f 21828 case SYMBOL_REF:
066cd967
DE
21829 case MEM:
21830 /* When optimizing for size, MEM should be slightly more expensive
21831 than generating the address, e.g., (plus (reg) (const)).
c112cf2b 21832 L1 cache latency is about two instructions. */
f40751dd 21833 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21834 return true;
21835
30a555d9
DE
21836 case LABEL_REF:
21837 *total = 0;
21838 return true;
21839
3c50106f 21840 case PLUS:
f0517163 21841 if (mode == DFmode)
066cd967
DE
21842 {
21843 if (GET_CODE (XEXP (x, 0)) == MULT)
21844 {
21845 /* FNMA accounted in outer NEG. */
21846 if (outer_code == NEG)
21847 *total = rs6000_cost->dmul - rs6000_cost->fp;
21848 else
21849 *total = rs6000_cost->dmul;
21850 }
21851 else
21852 *total = rs6000_cost->fp;
21853 }
f0517163 21854 else if (mode == SFmode)
066cd967
DE
21855 {
21856 /* FNMA accounted in outer NEG. */
21857 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21858 *total = 0;
21859 else
21860 *total = rs6000_cost->fp;
21861 }
f0517163 21862 else
066cd967
DE
21863 *total = COSTS_N_INSNS (1);
21864 return false;
3c50106f 21865
52190329 21866 case MINUS:
f0517163 21867 if (mode == DFmode)
066cd967 21868 {
762c919f
JM
21869 if (GET_CODE (XEXP (x, 0)) == MULT
21870 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21871 {
21872 /* FNMA accounted in outer NEG. */
21873 if (outer_code == NEG)
762c919f 21874 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21875 else
21876 *total = rs6000_cost->dmul;
21877 }
21878 else
21879 *total = rs6000_cost->fp;
21880 }
f0517163 21881 else if (mode == SFmode)
066cd967
DE
21882 {
21883 /* FNMA accounted in outer NEG. */
21884 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21885 *total = 0;
21886 else
21887 *total = rs6000_cost->fp;
21888 }
f0517163 21889 else
c4ad648e 21890 *total = COSTS_N_INSNS (1);
066cd967 21891 return false;
3c50106f
RH
21892
21893 case MULT:
c9dbf840 21894 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21895 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21896 {
8b897cfa
RS
21897 if (INTVAL (XEXP (x, 1)) >= -256
21898 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21899 *total = rs6000_cost->mulsi_const9;
8b897cfa 21900 else
06a67bdd 21901 *total = rs6000_cost->mulsi_const;
3c50106f 21902 }
066cd967
DE
21903 /* FMA accounted in outer PLUS/MINUS. */
21904 else if ((mode == DFmode || mode == SFmode)
21905 && (outer_code == PLUS || outer_code == MINUS))
21906 *total = 0;
f0517163 21907 else if (mode == DFmode)
06a67bdd 21908 *total = rs6000_cost->dmul;
f0517163 21909 else if (mode == SFmode)
06a67bdd 21910 *total = rs6000_cost->fp;
f0517163 21911 else if (mode == DImode)
06a67bdd 21912 *total = rs6000_cost->muldi;
8b897cfa 21913 else
06a67bdd 21914 *total = rs6000_cost->mulsi;
066cd967 21915 return false;
3c50106f
RH
21916
21917 case DIV:
21918 case MOD:
f0517163
RS
21919 if (FLOAT_MODE_P (mode))
21920 {
06a67bdd
RS
21921 *total = mode == DFmode ? rs6000_cost->ddiv
21922 : rs6000_cost->sdiv;
066cd967 21923 return false;
f0517163 21924 }
5efb1046 21925 /* FALLTHRU */
3c50106f
RH
21926
21927 case UDIV:
21928 case UMOD:
627b6fe2
DJ
21929 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21930 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21931 {
21932 if (code == DIV || code == MOD)
21933 /* Shift, addze */
21934 *total = COSTS_N_INSNS (2);
21935 else
21936 /* Shift */
21937 *total = COSTS_N_INSNS (1);
21938 }
c4ad648e 21939 else
627b6fe2
DJ
21940 {
21941 if (GET_MODE (XEXP (x, 1)) == DImode)
21942 *total = rs6000_cost->divdi;
21943 else
21944 *total = rs6000_cost->divsi;
21945 }
21946 /* Add in shift and subtract for MOD. */
21947 if (code == MOD || code == UMOD)
21948 *total += COSTS_N_INSNS (2);
066cd967 21949 return false;
3c50106f 21950
32f56aad 21951 case CTZ:
3c50106f
RH
21952 case FFS:
21953 *total = COSTS_N_INSNS (4);
066cd967 21954 return false;
3c50106f 21955
32f56aad
DE
21956 case POPCOUNT:
21957 *total = COSTS_N_INSNS (6);
21958 return false;
21959
06a67bdd 21960 case NOT:
066cd967
DE
21961 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21962 {
21963 *total = 0;
21964 return false;
21965 }
21966 /* FALLTHRU */
21967
21968 case AND:
32f56aad 21969 case CLZ:
066cd967
DE
21970 case IOR:
21971 case XOR:
d5861a7a
DE
21972 case ZERO_EXTRACT:
21973 *total = COSTS_N_INSNS (1);
21974 return false;
21975
066cd967
DE
21976 case ASHIFT:
21977 case ASHIFTRT:
21978 case LSHIFTRT:
21979 case ROTATE:
21980 case ROTATERT:
d5861a7a 21981 /* Handle mul_highpart. */
066cd967
DE
21982 if (outer_code == TRUNCATE
21983 && GET_CODE (XEXP (x, 0)) == MULT)
21984 {
21985 if (mode == DImode)
21986 *total = rs6000_cost->muldi;
21987 else
21988 *total = rs6000_cost->mulsi;
21989 return true;
21990 }
d5861a7a
DE
21991 else if (outer_code == AND)
21992 *total = 0;
21993 else
21994 *total = COSTS_N_INSNS (1);
21995 return false;
21996
21997 case SIGN_EXTEND:
21998 case ZERO_EXTEND:
21999 if (GET_CODE (XEXP (x, 0)) == MEM)
22000 *total = 0;
22001 else
22002 *total = COSTS_N_INSNS (1);
066cd967 22003 return false;
06a67bdd 22004
066cd967
DE
22005 case COMPARE:
22006 case NEG:
22007 case ABS:
22008 if (!FLOAT_MODE_P (mode))
22009 {
22010 *total = COSTS_N_INSNS (1);
22011 return false;
22012 }
22013 /* FALLTHRU */
22014
22015 case FLOAT:
22016 case UNSIGNED_FLOAT:
22017 case FIX:
22018 case UNSIGNED_FIX:
06a67bdd
RS
22019 case FLOAT_TRUNCATE:
22020 *total = rs6000_cost->fp;
066cd967 22021 return false;
06a67bdd 22022
a2af5043
DJ
22023 case FLOAT_EXTEND:
22024 if (mode == DFmode)
22025 *total = 0;
22026 else
22027 *total = rs6000_cost->fp;
22028 return false;
22029
06a67bdd
RS
22030 case UNSPEC:
22031 switch (XINT (x, 1))
22032 {
22033 case UNSPEC_FRSP:
22034 *total = rs6000_cost->fp;
22035 return true;
22036
22037 default:
22038 break;
22039 }
22040 break;
22041
22042 case CALL:
22043 case IF_THEN_ELSE:
f40751dd 22044 if (!speed)
06a67bdd
RS
22045 {
22046 *total = COSTS_N_INSNS (1);
22047 return true;
22048 }
066cd967
DE
22049 else if (FLOAT_MODE_P (mode)
22050 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22051 {
22052 *total = rs6000_cost->fp;
22053 return false;
22054 }
06a67bdd
RS
22055 break;
22056
c0600ecd
DE
22057 case EQ:
22058 case GTU:
22059 case LTU:
22e54023
DE
22060 /* Carry bit requires mode == Pmode.
22061 NEG or PLUS already counted so only add one. */
22062 if (mode == Pmode
22063 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22064 {
22e54023
DE
22065 *total = COSTS_N_INSNS (1);
22066 return true;
22067 }
22068 if (outer_code == SET)
22069 {
22070 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22071 {
22e54023 22072 *total = COSTS_N_INSNS (2);
c0600ecd 22073 return true;
c0600ecd 22074 }
22e54023
DE
22075 else if (mode == Pmode)
22076 {
22077 *total = COSTS_N_INSNS (3);
22078 return false;
22079 }
22080 }
22081 /* FALLTHRU */
22082
22083 case GT:
22084 case LT:
22085 case UNORDERED:
22086 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22087 {
22088 *total = COSTS_N_INSNS (2);
22089 return true;
c0600ecd 22090 }
22e54023
DE
22091 /* CC COMPARE. */
22092 if (outer_code == COMPARE)
22093 {
22094 *total = 0;
22095 return true;
22096 }
22097 break;
c0600ecd 22098
3c50106f 22099 default:
06a67bdd 22100 break;
3c50106f 22101 }
06a67bdd
RS
22102
22103 return false;
3c50106f
RH
22104}
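
/* Editor's illustrative sketch, not part of GCC: the intuition behind
   the CONST_INT handling above, reduced to plain integers with
   hypothetical ranges.  An immediate that fits the instruction's own
   field (a signed 16-bit range here, standing in for constraint "I") is
   free; a constant whose upper half is zero is charged roughly one
   extra instruction; anything wider costs more.  The numbers are
   examples only, not the values rs6000_rtx_costs actually returns.  */

static int
example_const_int_cost (long long value)
{
  if (value >= -32768 && value <= 32767)
    return 0;
  if (value >= 0 && value <= 0xffffffffLL)
    return 1;
  return 2;
}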
22105
34bb030a
DE
22106/* A C expression returning the cost of moving data from a register of class
22107 CLASS1 to one of CLASS2. */
22108
22109int
f676971a 22110rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22111 enum reg_class from, enum reg_class to)
34bb030a
DE
22112{
22113 /* Moves from/to GENERAL_REGS. */
22114 if (reg_classes_intersect_p (to, GENERAL_REGS)
22115 || reg_classes_intersect_p (from, GENERAL_REGS))
22116 {
22117 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22118 from = to;
22119
22120 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22121 return (rs6000_memory_move_cost (mode, from, 0)
22122 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22123
c4ad648e
AM
22124 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22125 shift. */
34bb030a
DE
22126 else if (from == CR_REGS)
22127 return 4;
22128
aafc759a
PH
22129 /* Power6 has slower LR/CTR moves so make them more expensive than
22130 memory in order to bias spills to memory. */
22131 else if (rs6000_cpu == PROCESSOR_POWER6
22132 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22133 return 6 * hard_regno_nregs[0][mode];
22134
34bb030a 22135 else
c4ad648e 22136 /* A move will cost one instruction per GPR moved. */
c8b622ff 22137 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22138 }
22139
c4ad648e 22140 /* Moving between two similar registers is just one instruction. */
34bb030a 22141 else if (reg_classes_intersect_p (to, from))
7393f7f8 22142 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22143
c4ad648e 22144 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22145 else
f676971a 22146 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22147 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22148}
22149
22150/* A C expression returning the cost of moving data of MODE from a register to
22151 or from memory. */
22152
22153int
0a2aaacc 22154rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22155 int in ATTRIBUTE_UNUSED)
34bb030a 22156{
0a2aaacc 22157 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22158 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22159 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22160 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22161 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22162 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22163 else
0a2aaacc 22164 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22165}
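
/* Editor's illustrative sketch, not part of GCC: the shape of the cost
   recursion above, with a reduced register-class enum and hypothetical
   flat numbers.  GPR<->FPR traffic is priced as a store plus a load,
   CR<->GPR as a slower special move, same-class copies as a single
   instruction, and every other pair is routed through the GPRs.  */

enum example_class { EX_GPR, EX_FPR, EX_CR };

static int
example_move_cost (enum example_class from, enum example_class to)
{
  if (from == EX_GPR || to == EX_GPR)
    {
      enum example_class other = (from == EX_GPR) ? to : from;

      if (other == EX_FPR)
        return 4 + 4;
      if (other == EX_CR)
        return 4;
      return 2;
    }
  if (from == to)
    return 2;
  return example_move_cost (EX_GPR, to) + example_move_cost (from, EX_GPR);
}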
22166
9c78b944
DE
22167/* Returns a code for a target-specific builtin that implements
22168 reciprocal of the function, or NULL_TREE if not available. */
22169
22170static tree
22171rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22172 bool sqrt ATTRIBUTE_UNUSED)
22173{
22174 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22175 && flag_finite_math_only && !flag_trapping_math
22176 && flag_unsafe_math_optimizations))
22177 return NULL_TREE;
22178
22179 if (md_fn)
22180 return NULL_TREE;
22181 else
22182 switch (fn)
22183 {
22184 case BUILT_IN_SQRTF:
22185 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22186
22187 default:
22188 return NULL_TREE;
22189 }
22190}
22191
ef765ea9
DE
22192/* Newton-Raphson approximation of single-precision floating point divide n/d.
22193 Assumes no trapping math and finite arguments. */
22194
22195void
9c78b944 22196rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22197{
22198 rtx x0, e0, e1, y1, u0, v0, one;
22199
22200 x0 = gen_reg_rtx (SFmode);
22201 e0 = gen_reg_rtx (SFmode);
22202 e1 = gen_reg_rtx (SFmode);
22203 y1 = gen_reg_rtx (SFmode);
22204 u0 = gen_reg_rtx (SFmode);
22205 v0 = gen_reg_rtx (SFmode);
22206 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22207
22208 /* x0 = 1./d estimate */
22209 emit_insn (gen_rtx_SET (VOIDmode, x0,
22210 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22211 UNSPEC_FRES)));
22212 /* e0 = 1. - d * x0 */
22213 emit_insn (gen_rtx_SET (VOIDmode, e0,
22214 gen_rtx_MINUS (SFmode, one,
22215 gen_rtx_MULT (SFmode, d, x0))));
22216 /* e1 = e0 + e0 * e0 */
22217 emit_insn (gen_rtx_SET (VOIDmode, e1,
22218 gen_rtx_PLUS (SFmode,
22219 gen_rtx_MULT (SFmode, e0, e0), e0)));
22220 /* y1 = x0 + e1 * x0 */
22221 emit_insn (gen_rtx_SET (VOIDmode, y1,
22222 gen_rtx_PLUS (SFmode,
22223 gen_rtx_MULT (SFmode, e1, x0), x0)));
22224 /* u0 = n * y1 */
22225 emit_insn (gen_rtx_SET (VOIDmode, u0,
22226 gen_rtx_MULT (SFmode, n, y1)));
22227 /* v0 = n - d * u0 */
22228 emit_insn (gen_rtx_SET (VOIDmode, v0,
22229 gen_rtx_MINUS (SFmode, n,
22230 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22231 /* dst = u0 + v0 * y1 */
22232 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22233 gen_rtx_PLUS (SFmode,
22234 gen_rtx_MULT (SFmode, v0, y1), u0)));
22235}
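
/* Editor's illustrative sketch, not part of GCC: the refinement emitted
   above, written in plain single-precision arithmetic.  X0 stands in
   for the hardware reciprocal estimate of 1/d; pass any rough estimate
   (the real code obtains it via UNSPEC_FRES).  */

static float
example_swdiv_sf (float n, float d, float x0)
{
  float e0 = 1.0f - d * x0;   /* error of the reciprocal estimate */
  float e1 = e0 + e0 * e0;    /* one refinement step on the error */
  float y1 = x0 + e1 * x0;    /* refined reciprocal of d */
  float u0 = n * y1;          /* first quotient estimate */
  float v0 = n - d * u0;      /* residual of that estimate */

  return u0 + v0 * y1;        /* corrected quotient */
}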
22236
22237/* Newton-Raphson approximation of double-precision floating point divide n/d.
22238 Assumes no trapping math and finite arguments. */
22239
22240void
9c78b944 22241rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22242{
22243 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22244
22245 x0 = gen_reg_rtx (DFmode);
22246 e0 = gen_reg_rtx (DFmode);
22247 e1 = gen_reg_rtx (DFmode);
22248 e2 = gen_reg_rtx (DFmode);
22249 y1 = gen_reg_rtx (DFmode);
22250 y2 = gen_reg_rtx (DFmode);
22251 y3 = gen_reg_rtx (DFmode);
22252 u0 = gen_reg_rtx (DFmode);
22253 v0 = gen_reg_rtx (DFmode);
22254 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22255
22256 /* x0 = 1./d estimate */
22257 emit_insn (gen_rtx_SET (VOIDmode, x0,
22258 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22259 UNSPEC_FRES)));
22260 /* e0 = 1. - d * x0 */
22261 emit_insn (gen_rtx_SET (VOIDmode, e0,
22262 gen_rtx_MINUS (DFmode, one,
22263 gen_rtx_MULT (DFmode, d, x0))));
22264 /* y1 = x0 + e0 * x0 */
22265 emit_insn (gen_rtx_SET (VOIDmode, y1,
22266 gen_rtx_PLUS (DFmode,
22267 gen_rtx_MULT (DFmode, e0, x0), x0)));
22268 /* e1 = e0 * e0 */
22269 emit_insn (gen_rtx_SET (VOIDmode, e1,
22270 gen_rtx_MULT (DFmode, e0, e0)));
22271 /* y2 = y1 + e1 * y1 */
22272 emit_insn (gen_rtx_SET (VOIDmode, y2,
22273 gen_rtx_PLUS (DFmode,
22274 gen_rtx_MULT (DFmode, e1, y1), y1)));
22275 /* e2 = e1 * e1 */
22276 emit_insn (gen_rtx_SET (VOIDmode, e2,
22277 gen_rtx_MULT (DFmode, e1, e1)));
22278 /* y3 = y2 + e2 * y2 */
22279 emit_insn (gen_rtx_SET (VOIDmode, y3,
22280 gen_rtx_PLUS (DFmode,
22281 gen_rtx_MULT (DFmode, e2, y2), y2)));
22282 /* u0 = n * y3 */
22283 emit_insn (gen_rtx_SET (VOIDmode, u0,
22284 gen_rtx_MULT (DFmode, n, y3)));
22285 /* v0 = n - d * u0 */
22286 emit_insn (gen_rtx_SET (VOIDmode, v0,
22287 gen_rtx_MINUS (DFmode, n,
22288 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22289 /* dst = u0 + v0 * y3 */
22290 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22291 gen_rtx_PLUS (DFmode,
22292 gen_rtx_MULT (DFmode, v0, y3), u0)));
22293}
22294
565ef4ba 22295
9c78b944
DE
22296/* Newton-Raphson approximation of single-precision floating point rsqrt.
22297 Assumes no trapping math and finite arguments. */
22298
22299void
22300rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22301{
22302 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22303 half, one, halfthree, c1, cond, label;
22304
22305 x0 = gen_reg_rtx (SFmode);
22306 x1 = gen_reg_rtx (SFmode);
22307 x2 = gen_reg_rtx (SFmode);
22308 y1 = gen_reg_rtx (SFmode);
22309 u0 = gen_reg_rtx (SFmode);
22310 u1 = gen_reg_rtx (SFmode);
22311 u2 = gen_reg_rtx (SFmode);
22312 v0 = gen_reg_rtx (SFmode);
22313 v1 = gen_reg_rtx (SFmode);
22314 v2 = gen_reg_rtx (SFmode);
22315 t0 = gen_reg_rtx (SFmode);
22316 halfthree = gen_reg_rtx (SFmode);
22317 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22318 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22319
22320 /* Check for 0.0, 1.0, NaN and Inf by testing whether src * src == src. */
22321 emit_insn (gen_rtx_SET (VOIDmode, t0,
22322 gen_rtx_MULT (SFmode, src, src)));
22323
22324 emit_insn (gen_rtx_SET (VOIDmode, cond,
22325 gen_rtx_COMPARE (CCFPmode, t0, src)));
22326 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22327 emit_unlikely_jump (c1, label);
22328
22329 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22330 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22331
22332 /* halfthree = 1.5 = 1.0 + 0.5 */
22333 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22334 gen_rtx_PLUS (SFmode, one, half)));
22335
22336 /* x0 = rsqrt estimate */
22337 emit_insn (gen_rtx_SET (VOIDmode, x0,
22338 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22339 UNSPEC_RSQRT)));
22340
22341 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22342 emit_insn (gen_rtx_SET (VOIDmode, y1,
22343 gen_rtx_MINUS (SFmode,
22344 gen_rtx_MULT (SFmode, src, halfthree),
22345 src)));
22346
22347 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22348 emit_insn (gen_rtx_SET (VOIDmode, u0,
22349 gen_rtx_MULT (SFmode, x0, x0)));
22350 emit_insn (gen_rtx_SET (VOIDmode, v0,
22351 gen_rtx_MINUS (SFmode,
22352 halfthree,
22353 gen_rtx_MULT (SFmode, y1, u0))));
22354 emit_insn (gen_rtx_SET (VOIDmode, x1,
22355 gen_rtx_MULT (SFmode, x0, v0)));
22356
22357 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22358 emit_insn (gen_rtx_SET (VOIDmode, u1,
22359 gen_rtx_MULT (SFmode, x1, x1)));
22360 emit_insn (gen_rtx_SET (VOIDmode, v1,
22361 gen_rtx_MINUS (SFmode,
22362 halfthree,
22363 gen_rtx_MULT (SFmode, y1, u1))));
22364 emit_insn (gen_rtx_SET (VOIDmode, x2,
22365 gen_rtx_MULT (SFmode, x1, v1)));
22366
22367 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22368 emit_insn (gen_rtx_SET (VOIDmode, u2,
22369 gen_rtx_MULT (SFmode, x2, x2)));
22370 emit_insn (gen_rtx_SET (VOIDmode, v2,
22371 gen_rtx_MINUS (SFmode,
22372 halfthree,
22373 gen_rtx_MULT (SFmode, y1, u2))));
22374 emit_insn (gen_rtx_SET (VOIDmode, dst,
22375 gen_rtx_MULT (SFmode, x2, v2)));
22376
22377 emit_label (XEXP (label, 0));
22378}
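
/* Editor's illustrative sketch, not part of GCC: the Newton iteration
   emitted above, in plain C.  X0 stands in for the hardware
   UNSPEC_RSQRT estimate, and the early-exit test for 0.0, 1.0, NaN and
   Inf is omitted.  Each step computes x * (1.5 - (0.5*src) * x*x),
   which converges to 1/sqrt(src).  */

static float
example_swrsqrt_sf (float src, float x0)
{
  float y1 = src * 1.5f - src;  /* equals 0.5f * src, one constant fewer */
  float x1 = x0 * (1.5f - y1 * (x0 * x0));
  float x2 = x1 * (1.5f - y1 * (x1 * x1));

  return x2 * (1.5f - y1 * (x2 * x2));
}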
22379
565ef4ba
RS
22380/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22381 target, and SRC is the argument operand. */
22382
22383void
22384rs6000_emit_popcount (rtx dst, rtx src)
22385{
22386 enum machine_mode mode = GET_MODE (dst);
22387 rtx tmp1, tmp2;
22388
22389 tmp1 = gen_reg_rtx (mode);
22390
22391 if (mode == SImode)
22392 {
22393 emit_insn (gen_popcntbsi2 (tmp1, src));
22394 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22395 NULL_RTX, 0);
22396 tmp2 = force_reg (SImode, tmp2);
22397 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22398 }
22399 else
22400 {
22401 emit_insn (gen_popcntbdi2 (tmp1, src));
22402 tmp2 = expand_mult (DImode, tmp1,
22403 GEN_INT ((HOST_WIDE_INT)
22404 0x01010101 << 32 | 0x01010101),
22405 NULL_RTX, 0);
22406 tmp2 = force_reg (DImode, tmp2);
22407 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22408 }
22409}
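
/* Editor's illustrative sketch, not part of GCC: why the multiply and
   shift above produce a population count.  example_popcntb is a
   hypothetical stand-in for the popcntb instruction and leaves each
   byte's bit count in that byte; multiplying by 0x01010101 sums all
   byte lanes into the most significant byte, and the shift by 24
   extracts that sum.  */

static unsigned int
example_popcntb (unsigned int src)
{
  unsigned int result = 0;
  int byte;

  for (byte = 0; byte < 4; byte++)
    {
      unsigned int b = (src >> (8 * byte)) & 0xff;
      unsigned int count = 0;

      while (b)
        {
          count += b & 1;
          b >>= 1;
        }
      result |= count << (8 * byte);
    }
  return result;
}

static unsigned int
example_popcount_si (unsigned int src)
{
  return (example_popcntb (src) * 0x01010101u) >> 24;
}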
22410
22411
22412/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22413 target, and SRC is the argument operand. */
22414
22415void
22416rs6000_emit_parity (rtx dst, rtx src)
22417{
22418 enum machine_mode mode = GET_MODE (dst);
22419 rtx tmp;
22420
22421 tmp = gen_reg_rtx (mode);
22422 if (mode == SImode)
22423 {
22424 /* Is mult+shift >= shift+xor+shift+xor? */
22425 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22426 {
22427 rtx tmp1, tmp2, tmp3, tmp4;
22428
22429 tmp1 = gen_reg_rtx (SImode);
22430 emit_insn (gen_popcntbsi2 (tmp1, src));
22431
22432 tmp2 = gen_reg_rtx (SImode);
22433 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22434 tmp3 = gen_reg_rtx (SImode);
22435 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22436
22437 tmp4 = gen_reg_rtx (SImode);
22438 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22439 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22440 }
22441 else
22442 rs6000_emit_popcount (tmp, src);
22443 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22444 }
22445 else
22446 {
22447 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22448 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22449 {
22450 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22451
22452 tmp1 = gen_reg_rtx (DImode);
22453 emit_insn (gen_popcntbdi2 (tmp1, src));
22454
22455 tmp2 = gen_reg_rtx (DImode);
22456 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22457 tmp3 = gen_reg_rtx (DImode);
22458 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22459
22460 tmp4 = gen_reg_rtx (DImode);
22461 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22462 tmp5 = gen_reg_rtx (DImode);
22463 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22464
22465 tmp6 = gen_reg_rtx (DImode);
22466 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22467 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22468 }
22469 else
22470 rs6000_emit_popcount (tmp, src);
22471 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22472 }
22473}
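
/* Editor's illustrative sketch, not part of GCC: the shift/xor folding
   used by the cheap-multiply path above, in plain C.  The real code
   folds popcntb's per-byte counts; with no popcntb available here, the
   fold simply continues down to a single bit, which yields the same
   parity.  */

static unsigned int
example_parity_si (unsigned int x)
{
  x ^= x >> 16;   /* fold halfwords */
  x ^= x >> 8;    /* fold bytes */
  x ^= x >> 4;    /* keep folding down to a single bit */
  x ^= x >> 2;
  x ^= x >> 1;
  return x & 1;
}
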
22474
ded9bf77
AH
22475/* Return an RTX representing where to find the function value of a
22476 function returning MODE. */
22477static rtx
22478rs6000_complex_function_value (enum machine_mode mode)
22479{
22480 unsigned int regno;
22481 rtx r1, r2;
22482 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22483 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22484
18f63bfa
AH
22485 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22486 regno = FP_ARG_RETURN;
354ed18f
AH
22487 else
22488 {
18f63bfa 22489 regno = GP_ARG_RETURN;
ded9bf77 22490
18f63bfa
AH
22491 /* 32-bit is OK since it'll go in r3/r4. */
22492 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22493 return gen_rtx_REG (mode, regno);
22494 }
22495
18f63bfa
AH
22496 if (inner_bytes >= 8)
22497 return gen_rtx_REG (mode, regno);
22498
ded9bf77
AH
22499 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22500 const0_rtx);
22501 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22502 GEN_INT (inner_bytes));
ded9bf77
AH
22503 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22504}
22505
a6ebc39a
AH
22506/* Define how to find the value returned by a function.
22507 VALTYPE is the data type of the value (as a tree).
22508 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22509 otherwise, FUNC is 0.
22510
22511 On the SPE, both FPs and vectors are returned in r3.
22512
22513 On RS/6000 an integer value is in r3 and a floating-point value is in
22514 fp1, unless -msoft-float. */
22515
22516rtx
586de218 22517rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22518{
22519 enum machine_mode mode;
2a8fa26c 22520 unsigned int regno;
a6ebc39a 22521
594a51fe
SS
22522 /* Special handling for structs in darwin64. */
22523 if (rs6000_darwin64_abi
22524 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22525 && TREE_CODE (valtype) == RECORD_TYPE
22526 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22527 {
22528 CUMULATIVE_ARGS valcum;
22529 rtx valret;
22530
0b5383eb 22531 valcum.words = 0;
594a51fe
SS
22532 valcum.fregno = FP_ARG_MIN_REG;
22533 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22534 /* Do a trial code generation as if this were going to be passed as
22535 an argument; if any part goes in memory, we return NULL. */
22536 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22537 if (valret)
22538 return valret;
22539 /* Otherwise fall through to standard ABI rules. */
22540 }
22541
0e67400a
FJ
22542 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22543 {
22544 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22545 return gen_rtx_PARALLEL (DImode,
22546 gen_rtvec (2,
22547 gen_rtx_EXPR_LIST (VOIDmode,
22548 gen_rtx_REG (SImode, GP_ARG_RETURN),
22549 const0_rtx),
22550 gen_rtx_EXPR_LIST (VOIDmode,
22551 gen_rtx_REG (SImode,
22552 GP_ARG_RETURN + 1),
22553 GEN_INT (4))));
22554 }
0f086e42
FJ
22555 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22556 {
22557 return gen_rtx_PARALLEL (DCmode,
22558 gen_rtvec (4,
22559 gen_rtx_EXPR_LIST (VOIDmode,
22560 gen_rtx_REG (SImode, GP_ARG_RETURN),
22561 const0_rtx),
22562 gen_rtx_EXPR_LIST (VOIDmode,
22563 gen_rtx_REG (SImode,
22564 GP_ARG_RETURN + 1),
22565 GEN_INT (4)),
22566 gen_rtx_EXPR_LIST (VOIDmode,
22567 gen_rtx_REG (SImode,
22568 GP_ARG_RETURN + 2),
22569 GEN_INT (8)),
22570 gen_rtx_EXPR_LIST (VOIDmode,
22571 gen_rtx_REG (SImode,
22572 GP_ARG_RETURN + 3),
22573 GEN_INT (12))));
22574 }
602ea4d3 22575
7348aa7f
FXC
22576 mode = TYPE_MODE (valtype);
22577 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22578 || POINTER_TYPE_P (valtype))
b78d48dd 22579 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22580
e41b2a33
PB
22581 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22582 /* _Decimal128 must use an even/odd register pair. */
22583 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
2c5cac98
ME
22584 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS
22585 && ((TARGET_SINGLE_FLOAT && (mode == SFmode)) || TARGET_DOUBLE_FLOAT))
2a8fa26c 22586 regno = FP_ARG_RETURN;
ded9bf77 22587 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22588 && targetm.calls.split_complex_arg)
ded9bf77 22589 return rs6000_complex_function_value (mode);
44688022 22590 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22591 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22592 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22593 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22594 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22595 && (mode == DFmode || mode == DCmode
22596 || mode == TFmode || mode == TCmode))
18f63bfa 22597 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22598 else
22599 regno = GP_ARG_RETURN;
22600
22601 return gen_rtx_REG (mode, regno);
22602}
22603
ded9bf77
AH
22604/* Define how to find the value returned by a library function
22605 assuming the value has mode MODE. */
22606rtx
22607rs6000_libcall_value (enum machine_mode mode)
22608{
22609 unsigned int regno;
22610
2e6c9641
FJ
22611 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22612 {
22613 /* A long long return value needs to be split in the 32-bit ABI with -mpowerpc64. */
22614 return gen_rtx_PARALLEL (DImode,
22615 gen_rtvec (2,
22616 gen_rtx_EXPR_LIST (VOIDmode,
22617 gen_rtx_REG (SImode, GP_ARG_RETURN),
22618 const0_rtx),
22619 gen_rtx_EXPR_LIST (VOIDmode,
22620 gen_rtx_REG (SImode,
22621 GP_ARG_RETURN + 1),
22622 GEN_INT (4))));
22623 }
22624
e41b2a33
PB
22625 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22626 /* _Decimal128 must use an even/odd register pair. */
22627 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22628 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
22629 && TARGET_HARD_FLOAT && TARGET_FPRS)
22630 regno = FP_ARG_RETURN;
44688022
AM
22631 else if (ALTIVEC_VECTOR_MODE (mode)
22632 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22633 regno = ALTIVEC_ARG_RETURN;
42ba5130 22634 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22635 return rs6000_complex_function_value (mode);
18f63bfa 22636 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22637 && (mode == DFmode || mode == DCmode
22638 || mode == TFmode || mode == TCmode))
18f63bfa 22639 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22640 else
22641 regno = GP_ARG_RETURN;
22642
22643 return gen_rtx_REG (mode, regno);
22644}
22645
d1d0c603
JJ
22646/* Define the offset between two registers, FROM to be eliminated and its
22647 replacement TO, at the start of a routine. */
22648HOST_WIDE_INT
22649rs6000_initial_elimination_offset (int from, int to)
22650{
22651 rs6000_stack_t *info = rs6000_stack_info ();
22652 HOST_WIDE_INT offset;
22653
7d5175e1 22654 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22655 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22656 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22657 {
22658 offset = info->push_p ? 0 : -info->total_size;
22659 if (FRAME_GROWS_DOWNWARD)
5b667039 22660 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22661 }
22662 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22663 offset = FRAME_GROWS_DOWNWARD
5b667039 22664 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22665 : 0;
22666 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22667 offset = info->total_size;
22668 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22669 offset = info->push_p ? info->total_size : 0;
22670 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22671 offset = 0;
22672 else
37409796 22673 gcc_unreachable ();
d1d0c603
JJ
22674
22675 return offset;
22676}
22677
96714395 22678static rtx
a2369ed3 22679rs6000_dwarf_register_span (rtx reg)
96714395 22680{
6cd1d2e2
DJ
22681 rtx parts[8];
22682 int i, words;
22683 unsigned regno = REGNO (reg);
22684 enum machine_mode mode = GET_MODE (reg);
96714395 22685
4d4cbc0e 22686 if (TARGET_SPE
6cd1d2e2 22687 && regno < 32
4d4cbc0e 22688 && (SPE_VECTOR_MODE (GET_MODE (reg))
6cd1d2e2
DJ
22689 || (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode)
22690 && mode != SFmode && mode != SDmode && mode != SCmode)))
4d4cbc0e
AH
22691 ;
22692 else
96714395
AH
22693 return NULL_RTX;
22694
22695 regno = REGNO (reg);
22696
22697 /* The duality of the SPE register size wreaks all kinds of havoc.
22698 This is a way of distinguishing r0 in 32-bits from r0 in
22699 64-bits. */
6cd1d2e2
DJ
22700 words = (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
22701 gcc_assert (words <= 4);
22702 for (i = 0; i < words; i++, regno++)
22703 {
22704 if (BYTES_BIG_ENDIAN)
22705 {
22706 parts[2 * i] = gen_rtx_REG (SImode, regno + 1200);
22707 parts[2 * i + 1] = gen_rtx_REG (SImode, regno);
22708 }
22709 else
22710 {
22711 parts[2 * i] = gen_rtx_REG (SImode, regno);
22712 parts[2 * i + 1] = gen_rtx_REG (SImode, regno + 1200);
22713 }
22714 }
22715
22716 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (words * 2, parts));
96714395
AH
22717}
22718
37ea0b7e
JM
22719/* Fill in sizes for SPE register high parts in the table used by the unwinder. */
22720
22721static void
22722rs6000_init_dwarf_reg_sizes_extra (tree address)
22723{
22724 if (TARGET_SPE)
22725 {
22726 int i;
22727 enum machine_mode mode = TYPE_MODE (char_type_node);
bbbbb16a 22728 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, EXPAND_NORMAL);
37ea0b7e
JM
22729 rtx mem = gen_rtx_MEM (BLKmode, addr);
22730 rtx value = gen_int_mode (4, mode);
22731
22732 for (i = 1201; i < 1232; i++)
22733 {
22734 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22735 HOST_WIDE_INT offset
22736 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22737
22738 emit_move_insn (adjust_address (mem, mode, offset), value);
22739 }
22740 }
22741}
22742
93c9d1ba
AM
22743/* Map internal gcc register numbers to DWARF2 register numbers. */
22744
22745unsigned int
22746rs6000_dbx_register_number (unsigned int regno)
22747{
22748 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22749 return regno;
22750 if (regno == MQ_REGNO)
22751 return 100;
1de43f85 22752 if (regno == LR_REGNO)
93c9d1ba 22753 return 108;
1de43f85 22754 if (regno == CTR_REGNO)
93c9d1ba
AM
22755 return 109;
22756 if (CR_REGNO_P (regno))
22757 return regno - CR0_REGNO + 86;
22758 if (regno == XER_REGNO)
22759 return 101;
22760 if (ALTIVEC_REGNO_P (regno))
22761 return regno - FIRST_ALTIVEC_REGNO + 1124;
22762 if (regno == VRSAVE_REGNO)
22763 return 356;
22764 if (regno == VSCR_REGNO)
22765 return 67;
22766 if (regno == SPE_ACC_REGNO)
22767 return 99;
22768 if (regno == SPEFSCR_REGNO)
22769 return 612;
22770 /* SPE high reg number. We get these values of regno from
22771 rs6000_dwarf_register_span. */
37409796
NS
22772 gcc_assert (regno >= 1200 && regno < 1232);
22773 return regno;
93c9d1ba
AM
22774}
22775
93f90be6 22776/* target hook eh_return_filter_mode */
f676971a 22777static enum machine_mode
93f90be6
FJ
22778rs6000_eh_return_filter_mode (void)
22779{
22780 return TARGET_32BIT ? SImode : word_mode;
22781}
22782
00b79d54
BE
22783/* Target hook for scalar_mode_supported_p. */
22784static bool
22785rs6000_scalar_mode_supported_p (enum machine_mode mode)
22786{
22787 if (DECIMAL_FLOAT_MODE_P (mode))
22788 return true;
22789 else
22790 return default_scalar_mode_supported_p (mode);
22791}
22792
f676971a
EC
22793/* Target hook for vector_mode_supported_p. */
22794static bool
22795rs6000_vector_mode_supported_p (enum machine_mode mode)
22796{
22797
96038623
DE
22798 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22799 return true;
22800
f676971a
EC
22801 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22802 return true;
22803
22804 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22805 return true;
22806
22807 else
22808 return false;
22809}
22810
bb8df8a6
EC
22811/* Target hook for invalid_arg_for_unprototyped_fn. */
22812static const char *
3101faab 22813invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22814{
22815 return (!rs6000_darwin64_abi
22816 && typelist == 0
22817 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22818 && (funcdecl == NULL_TREE
22819 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22820 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22821 ? N_("AltiVec argument passed to unprototyped function")
22822 : NULL;
22823}
22824
3aebbe5f
JJ
22825/* For TARGET_SECURE_PLT 32-bit PIC code we can save the PIC register
22826 setup by using the hidden function __stack_chk_fail_local instead of
22827 calling __stack_chk_fail directly. Otherwise it is better to call
22828 __stack_chk_fail directly. */
22829
22830static tree
22831rs6000_stack_protect_fail (void)
22832{
22833 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22834 ? default_hidden_stack_protect_fail ()
22835 : default_external_stack_protect_fail ();
22836}
22837
c921bad8
AP
22838void
22839rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
22840 int num_operands ATTRIBUTE_UNUSED)
22841{
22842 if (rs6000_warn_cell_microcode)
22843 {
22844 const char *temp;
22845 int insn_code_number = recog_memoized (insn);
22846 location_t location = locator_location (INSN_LOCATOR (insn));
22847
22848 /* Punt on insns we cannot recognize. */
22849 if (insn_code_number < 0)
22850 return;
22851
22852 temp = get_insn_template (insn_code_number, insn);
22853
22854 if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
22855 warning_at (location, OPT_mwarn_cell_microcode,
22856 "emitting microcode insn %s\t[%s] #%d",
22857 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22858 else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
22859 warning_at (location, OPT_mwarn_cell_microcode,
22860 "emitting conditional microcode insn %s\t[%s] #%d",
22861 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22862 }
22863}
22864
17211ab5 22865#include "gt-rs6000.h"