]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rs6000/rs6000.c
re PR middle-end/37601 (gcc-4.4-20080919 ada build failure)
[thirdparty/gcc.git] / gcc / config / rs6000 / rs6000.c
CommitLineData
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
1c9c5e43 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
726a989a 55#include "gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
94f4765c
NF
177/* Nonzero if we want SPE SIMD instructions. */
178int rs6000_spe;
179
a3170dc6
AH
180/* Nonzero if we want SPE ABI extensions. */
181int rs6000_spe_abi;
182
94f4765c
NF
183/* Nonzero to use isel instructions. */
184int rs6000_isel;
185
5da702b1
AH
186/* Nonzero if floating point operations are done in the GPRs. */
187int rs6000_float_gprs = 0;
188
594a51fe
SS
189/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
190int rs6000_darwin64_abi;
191
a0ab749a 192/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 193static GTY(()) int common_mode_defined;
c81bebd7 194
9878760c
RK
195/* Save information from a "cmpxx" operation until the branch or scc is
196 emitted. */
9878760c
RK
197rtx rs6000_compare_op0, rs6000_compare_op1;
198int rs6000_compare_fp_p;
874a0744 199
874a0744
MM
200/* Label number of label created for -mrelocatable, to call to so we can
201 get the address of the GOT section */
202int rs6000_pic_labelno;
c81bebd7 203
b91da81f 204#ifdef USING_ELFOS_H
c81bebd7 205/* Which abi to adhere to */
9739c90c 206const char *rs6000_abi_name;
d9407988
MM
207
208/* Semantics of the small data area */
209enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
210
211/* Which small data model to use */
815cdc52 212const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
213
214/* Counter for labels which are to be placed in .fixup. */
215int fixuplabelno = 0;
874a0744 216#endif
4697a36c 217
c4501e62
JJ
218/* Bit size of immediate TLS offsets and string from which it is decoded. */
219int rs6000_tls_size = 32;
220const char *rs6000_tls_size_string;
221
b6c9286a
MM
222/* ABI enumeration available for subtarget to use. */
223enum rs6000_abi rs6000_current_abi;
224
85b776df
AM
225/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
226int dot_symbols;
227
38c1f2d7 228/* Debug flags */
815cdc52 229const char *rs6000_debug_name;
38c1f2d7
MM
230int rs6000_debug_stack; /* debug stack applications */
231int rs6000_debug_arg; /* debug argument handling */
232
aabcd309 233/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
234bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
235
58646b77
PB
236/* Built in types. */
237
238tree rs6000_builtin_types[RS6000_BTI_MAX];
239tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 240
57ac7be9
AM
241const char *rs6000_traceback_name;
242static enum {
243 traceback_default = 0,
244 traceback_none,
245 traceback_part,
246 traceback_full
247} rs6000_traceback;
248
38c1f2d7
MM
249/* Flag to say the TOC is initialized */
250int toc_initialized;
9ebbca7d 251char toc_label_name[10];
38c1f2d7 252
44cd321e
PS
253/* Cached value of rs6000_variable_issue. This is cached in
254 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
255static short cached_can_issue_more;
256
d6b5193b
RS
257static GTY(()) section *read_only_data_section;
258static GTY(()) section *private_data_section;
259static GTY(()) section *read_only_private_data_section;
260static GTY(()) section *sdata2_section;
261static GTY(()) section *toc_section;
262
a3c9585f
KH
263/* Control alignment for fields within structures. */
264/* String from -malign-XXXXX. */
025d9908
KH
265int rs6000_alignment_flags;
266
78f5898b
AH
267/* True for any options that were explicitly set. */
268struct {
df01da37 269 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 270 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
271 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
272 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
273 bool spe; /* True if -mspe= was used. */
274 bool float_gprs; /* True if -mfloat-gprs= was used. */
275 bool isel; /* True if -misel was used. */
276 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 277 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 278 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
279} rs6000_explicit_options;
280
a3170dc6
AH
281struct builtin_description
282{
283 /* mask is not const because we're going to alter it below. This
284 nonsense will go away when we rewrite the -march infrastructure
285 to give us more target flag bits. */
286 unsigned int mask;
287 const enum insn_code icode;
288 const char *const name;
289 const enum rs6000_builtins code;
290};
8b897cfa
RS
291\f
292/* Target cpu costs. */
293
294struct processor_costs {
c4ad648e 295 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
296 const int mulsi_const; /* cost of SImode multiplication by constant. */
297 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
298 const int muldi; /* cost of DImode multiplication. */
299 const int divsi; /* cost of SImode division. */
300 const int divdi; /* cost of DImode division. */
301 const int fp; /* cost of simple SFmode and DFmode insns. */
302 const int dmul; /* cost of DFmode multiplication (and fmadd). */
303 const int sdiv; /* cost of SFmode division (fdivs). */
304 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
305 const int cache_line_size; /* cache line size in bytes. */
306 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
307 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
308 const int simultaneous_prefetches; /* number of parallel prefetch
309 operations. */
8b897cfa
RS
310};
311
312const struct processor_costs *rs6000_cost;
313
314/* Processor costs (relative to an add) */
315
316/* Instruction size costs on 32bit processors. */
317static const
318struct processor_costs size32_cost = {
06a67bdd
RS
319 COSTS_N_INSNS (1), /* mulsi */
320 COSTS_N_INSNS (1), /* mulsi_const */
321 COSTS_N_INSNS (1), /* mulsi_const9 */
322 COSTS_N_INSNS (1), /* muldi */
323 COSTS_N_INSNS (1), /* divsi */
324 COSTS_N_INSNS (1), /* divdi */
325 COSTS_N_INSNS (1), /* fp */
326 COSTS_N_INSNS (1), /* dmul */
327 COSTS_N_INSNS (1), /* sdiv */
328 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
329 32,
330 0,
331 0,
5f732aba 332 0,
8b897cfa
RS
333};
334
335/* Instruction size costs on 64bit processors. */
336static const
337struct processor_costs size64_cost = {
06a67bdd
RS
338 COSTS_N_INSNS (1), /* mulsi */
339 COSTS_N_INSNS (1), /* mulsi_const */
340 COSTS_N_INSNS (1), /* mulsi_const9 */
341 COSTS_N_INSNS (1), /* muldi */
342 COSTS_N_INSNS (1), /* divsi */
343 COSTS_N_INSNS (1), /* divdi */
344 COSTS_N_INSNS (1), /* fp */
345 COSTS_N_INSNS (1), /* dmul */
346 COSTS_N_INSNS (1), /* sdiv */
347 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
348 128,
349 0,
350 0,
5f732aba 351 0,
8b897cfa
RS
352};
353
354/* Instruction costs on RIOS1 processors. */
355static const
356struct processor_costs rios1_cost = {
06a67bdd
RS
357 COSTS_N_INSNS (5), /* mulsi */
358 COSTS_N_INSNS (4), /* mulsi_const */
359 COSTS_N_INSNS (3), /* mulsi_const9 */
360 COSTS_N_INSNS (5), /* muldi */
361 COSTS_N_INSNS (19), /* divsi */
362 COSTS_N_INSNS (19), /* divdi */
363 COSTS_N_INSNS (2), /* fp */
364 COSTS_N_INSNS (2), /* dmul */
365 COSTS_N_INSNS (19), /* sdiv */
366 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 367 128, /* cache line size */
5f732aba
DE
368 64, /* l1 cache */
369 512, /* l2 cache */
0b11da67 370 0, /* streams */
8b897cfa
RS
371};
372
373/* Instruction costs on RIOS2 processors. */
374static const
375struct processor_costs rios2_cost = {
06a67bdd
RS
376 COSTS_N_INSNS (2), /* mulsi */
377 COSTS_N_INSNS (2), /* mulsi_const */
378 COSTS_N_INSNS (2), /* mulsi_const9 */
379 COSTS_N_INSNS (2), /* muldi */
380 COSTS_N_INSNS (13), /* divsi */
381 COSTS_N_INSNS (13), /* divdi */
382 COSTS_N_INSNS (2), /* fp */
383 COSTS_N_INSNS (2), /* dmul */
384 COSTS_N_INSNS (17), /* sdiv */
385 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 386 256, /* cache line size */
5f732aba
DE
387 256, /* l1 cache */
388 1024, /* l2 cache */
0b11da67 389 0, /* streams */
8b897cfa
RS
390};
391
392/* Instruction costs on RS64A processors. */
393static const
394struct processor_costs rs64a_cost = {
06a67bdd
RS
395 COSTS_N_INSNS (20), /* mulsi */
396 COSTS_N_INSNS (12), /* mulsi_const */
397 COSTS_N_INSNS (8), /* mulsi_const9 */
398 COSTS_N_INSNS (34), /* muldi */
399 COSTS_N_INSNS (65), /* divsi */
400 COSTS_N_INSNS (67), /* divdi */
401 COSTS_N_INSNS (4), /* fp */
402 COSTS_N_INSNS (4), /* dmul */
403 COSTS_N_INSNS (31), /* sdiv */
404 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 405 128, /* cache line size */
5f732aba
DE
406 128, /* l1 cache */
407 2048, /* l2 cache */
0b11da67 408 1, /* streams */
8b897cfa
RS
409};
410
411/* Instruction costs on MPCCORE processors. */
412static const
413struct processor_costs mpccore_cost = {
06a67bdd
RS
414 COSTS_N_INSNS (2), /* mulsi */
415 COSTS_N_INSNS (2), /* mulsi_const */
416 COSTS_N_INSNS (2), /* mulsi_const9 */
417 COSTS_N_INSNS (2), /* muldi */
418 COSTS_N_INSNS (6), /* divsi */
419 COSTS_N_INSNS (6), /* divdi */
420 COSTS_N_INSNS (4), /* fp */
421 COSTS_N_INSNS (5), /* dmul */
422 COSTS_N_INSNS (10), /* sdiv */
423 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 424 32, /* cache line size */
5f732aba
DE
425 4, /* l1 cache */
426 16, /* l2 cache */
0b11da67 427 1, /* streams */
8b897cfa
RS
428};
429
430/* Instruction costs on PPC403 processors. */
431static const
432struct processor_costs ppc403_cost = {
06a67bdd
RS
433 COSTS_N_INSNS (4), /* mulsi */
434 COSTS_N_INSNS (4), /* mulsi_const */
435 COSTS_N_INSNS (4), /* mulsi_const9 */
436 COSTS_N_INSNS (4), /* muldi */
437 COSTS_N_INSNS (33), /* divsi */
438 COSTS_N_INSNS (33), /* divdi */
439 COSTS_N_INSNS (11), /* fp */
440 COSTS_N_INSNS (11), /* dmul */
441 COSTS_N_INSNS (11), /* sdiv */
442 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 443 32, /* cache line size */
5f732aba
DE
444 4, /* l1 cache */
445 16, /* l2 cache */
0b11da67 446 1, /* streams */
8b897cfa
RS
447};
448
449/* Instruction costs on PPC405 processors. */
450static const
451struct processor_costs ppc405_cost = {
06a67bdd
RS
452 COSTS_N_INSNS (5), /* mulsi */
453 COSTS_N_INSNS (4), /* mulsi_const */
454 COSTS_N_INSNS (3), /* mulsi_const9 */
455 COSTS_N_INSNS (5), /* muldi */
456 COSTS_N_INSNS (35), /* divsi */
457 COSTS_N_INSNS (35), /* divdi */
458 COSTS_N_INSNS (11), /* fp */
459 COSTS_N_INSNS (11), /* dmul */
460 COSTS_N_INSNS (11), /* sdiv */
461 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 462 32, /* cache line size */
5f732aba
DE
463 16, /* l1 cache */
464 128, /* l2 cache */
0b11da67 465 1, /* streams */
8b897cfa
RS
466};
467
468/* Instruction costs on PPC440 processors. */
469static const
470struct processor_costs ppc440_cost = {
06a67bdd
RS
471 COSTS_N_INSNS (3), /* mulsi */
472 COSTS_N_INSNS (2), /* mulsi_const */
473 COSTS_N_INSNS (2), /* mulsi_const9 */
474 COSTS_N_INSNS (3), /* muldi */
475 COSTS_N_INSNS (34), /* divsi */
476 COSTS_N_INSNS (34), /* divdi */
477 COSTS_N_INSNS (5), /* fp */
478 COSTS_N_INSNS (5), /* dmul */
479 COSTS_N_INSNS (19), /* sdiv */
480 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 481 32, /* cache line size */
5f732aba
DE
482 32, /* l1 cache */
483 256, /* l2 cache */
0b11da67 484 1, /* streams */
8b897cfa
RS
485};
486
487/* Instruction costs on PPC601 processors. */
488static const
489struct processor_costs ppc601_cost = {
06a67bdd
RS
490 COSTS_N_INSNS (5), /* mulsi */
491 COSTS_N_INSNS (5), /* mulsi_const */
492 COSTS_N_INSNS (5), /* mulsi_const9 */
493 COSTS_N_INSNS (5), /* muldi */
494 COSTS_N_INSNS (36), /* divsi */
495 COSTS_N_INSNS (36), /* divdi */
496 COSTS_N_INSNS (4), /* fp */
497 COSTS_N_INSNS (5), /* dmul */
498 COSTS_N_INSNS (17), /* sdiv */
499 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 500 32, /* cache line size */
5f732aba
DE
501 32, /* l1 cache */
502 256, /* l2 cache */
0b11da67 503 1, /* streams */
8b897cfa
RS
504};
505
506/* Instruction costs on PPC603 processors. */
507static const
508struct processor_costs ppc603_cost = {
06a67bdd
RS
509 COSTS_N_INSNS (5), /* mulsi */
510 COSTS_N_INSNS (3), /* mulsi_const */
511 COSTS_N_INSNS (2), /* mulsi_const9 */
512 COSTS_N_INSNS (5), /* muldi */
513 COSTS_N_INSNS (37), /* divsi */
514 COSTS_N_INSNS (37), /* divdi */
515 COSTS_N_INSNS (3), /* fp */
516 COSTS_N_INSNS (4), /* dmul */
517 COSTS_N_INSNS (18), /* sdiv */
518 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 519 32, /* cache line size */
5f732aba
DE
520 8, /* l1 cache */
521 64, /* l2 cache */
0b11da67 522 1, /* streams */
8b897cfa
RS
523};
524
525/* Instruction costs on PPC604 processors. */
526static const
527struct processor_costs ppc604_cost = {
06a67bdd
RS
528 COSTS_N_INSNS (4), /* mulsi */
529 COSTS_N_INSNS (4), /* mulsi_const */
530 COSTS_N_INSNS (4), /* mulsi_const9 */
531 COSTS_N_INSNS (4), /* muldi */
532 COSTS_N_INSNS (20), /* divsi */
533 COSTS_N_INSNS (20), /* divdi */
534 COSTS_N_INSNS (3), /* fp */
535 COSTS_N_INSNS (3), /* dmul */
536 COSTS_N_INSNS (18), /* sdiv */
537 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 538 32, /* cache line size */
5f732aba
DE
539 16, /* l1 cache */
540 512, /* l2 cache */
0b11da67 541 1, /* streams */
8b897cfa
RS
542};
543
544/* Instruction costs on PPC604e processors. */
545static const
546struct processor_costs ppc604e_cost = {
06a67bdd
RS
547 COSTS_N_INSNS (2), /* mulsi */
548 COSTS_N_INSNS (2), /* mulsi_const */
549 COSTS_N_INSNS (2), /* mulsi_const9 */
550 COSTS_N_INSNS (2), /* muldi */
551 COSTS_N_INSNS (20), /* divsi */
552 COSTS_N_INSNS (20), /* divdi */
553 COSTS_N_INSNS (3), /* fp */
554 COSTS_N_INSNS (3), /* dmul */
555 COSTS_N_INSNS (18), /* sdiv */
556 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 557 32, /* cache line size */
5f732aba
DE
558 32, /* l1 cache */
559 1024, /* l2 cache */
0b11da67 560 1, /* streams */
8b897cfa
RS
561};
562
f0517163 563/* Instruction costs on PPC620 processors. */
8b897cfa
RS
564static const
565struct processor_costs ppc620_cost = {
06a67bdd
RS
566 COSTS_N_INSNS (5), /* mulsi */
567 COSTS_N_INSNS (4), /* mulsi_const */
568 COSTS_N_INSNS (3), /* mulsi_const9 */
569 COSTS_N_INSNS (7), /* muldi */
570 COSTS_N_INSNS (21), /* divsi */
571 COSTS_N_INSNS (37), /* divdi */
572 COSTS_N_INSNS (3), /* fp */
573 COSTS_N_INSNS (3), /* dmul */
574 COSTS_N_INSNS (18), /* sdiv */
575 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 576 128, /* cache line size */
5f732aba
DE
577 32, /* l1 cache */
578 1024, /* l2 cache */
0b11da67 579 1, /* streams */
f0517163
RS
580};
581
582/* Instruction costs on PPC630 processors. */
583static const
584struct processor_costs ppc630_cost = {
06a67bdd
RS
585 COSTS_N_INSNS (5), /* mulsi */
586 COSTS_N_INSNS (4), /* mulsi_const */
587 COSTS_N_INSNS (3), /* mulsi_const9 */
588 COSTS_N_INSNS (7), /* muldi */
589 COSTS_N_INSNS (21), /* divsi */
590 COSTS_N_INSNS (37), /* divdi */
591 COSTS_N_INSNS (3), /* fp */
592 COSTS_N_INSNS (3), /* dmul */
593 COSTS_N_INSNS (17), /* sdiv */
594 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 595 128, /* cache line size */
5f732aba
DE
596 64, /* l1 cache */
597 1024, /* l2 cache */
0b11da67 598 1, /* streams */
8b897cfa
RS
599};
600
d296e02e
AP
601/* Instruction costs on Cell processor. */
602/* COSTS_N_INSNS (1) ~ one add. */
603static const
604struct processor_costs ppccell_cost = {
605 COSTS_N_INSNS (9/2)+2, /* mulsi */
606 COSTS_N_INSNS (6/2), /* mulsi_const */
607 COSTS_N_INSNS (6/2), /* mulsi_const9 */
608 COSTS_N_INSNS (15/2)+2, /* muldi */
609 COSTS_N_INSNS (38/2), /* divsi */
610 COSTS_N_INSNS (70/2), /* divdi */
611 COSTS_N_INSNS (10/2), /* fp */
612 COSTS_N_INSNS (10/2), /* dmul */
613 COSTS_N_INSNS (74/2), /* sdiv */
614 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 615 128, /* cache line size */
5f732aba
DE
616 32, /* l1 cache */
617 512, /* l2 cache */
618 6, /* streams */
d296e02e
AP
619};
620
8b897cfa
RS
621/* Instruction costs on PPC750 and PPC7400 processors. */
622static const
623struct processor_costs ppc750_cost = {
06a67bdd
RS
624 COSTS_N_INSNS (5), /* mulsi */
625 COSTS_N_INSNS (3), /* mulsi_const */
626 COSTS_N_INSNS (2), /* mulsi_const9 */
627 COSTS_N_INSNS (5), /* muldi */
628 COSTS_N_INSNS (17), /* divsi */
629 COSTS_N_INSNS (17), /* divdi */
630 COSTS_N_INSNS (3), /* fp */
631 COSTS_N_INSNS (3), /* dmul */
632 COSTS_N_INSNS (17), /* sdiv */
633 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 634 32, /* cache line size */
5f732aba
DE
635 32, /* l1 cache */
636 512, /* l2 cache */
0b11da67 637 1, /* streams */
8b897cfa
RS
638};
639
640/* Instruction costs on PPC7450 processors. */
641static const
642struct processor_costs ppc7450_cost = {
06a67bdd
RS
643 COSTS_N_INSNS (4), /* mulsi */
644 COSTS_N_INSNS (3), /* mulsi_const */
645 COSTS_N_INSNS (3), /* mulsi_const9 */
646 COSTS_N_INSNS (4), /* muldi */
647 COSTS_N_INSNS (23), /* divsi */
648 COSTS_N_INSNS (23), /* divdi */
649 COSTS_N_INSNS (5), /* fp */
650 COSTS_N_INSNS (5), /* dmul */
651 COSTS_N_INSNS (21), /* sdiv */
652 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 653 32, /* cache line size */
5f732aba
DE
654 32, /* l1 cache */
655 1024, /* l2 cache */
0b11da67 656 1, /* streams */
8b897cfa 657};
a3170dc6 658
8b897cfa
RS
659/* Instruction costs on PPC8540 processors. */
660static const
661struct processor_costs ppc8540_cost = {
06a67bdd
RS
662 COSTS_N_INSNS (4), /* mulsi */
663 COSTS_N_INSNS (4), /* mulsi_const */
664 COSTS_N_INSNS (4), /* mulsi_const9 */
665 COSTS_N_INSNS (4), /* muldi */
666 COSTS_N_INSNS (19), /* divsi */
667 COSTS_N_INSNS (19), /* divdi */
668 COSTS_N_INSNS (4), /* fp */
669 COSTS_N_INSNS (4), /* dmul */
670 COSTS_N_INSNS (29), /* sdiv */
671 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 672 32, /* cache line size */
5f732aba
DE
673 32, /* l1 cache */
674 256, /* l2 cache */
0b11da67 675 1, /* prefetch streams /*/
8b897cfa
RS
676};
677
fa41c305
EW
678/* Instruction costs on E300C2 and E300C3 cores. */
679static const
680struct processor_costs ppce300c2c3_cost = {
681 COSTS_N_INSNS (4), /* mulsi */
682 COSTS_N_INSNS (4), /* mulsi_const */
683 COSTS_N_INSNS (4), /* mulsi_const9 */
684 COSTS_N_INSNS (4), /* muldi */
685 COSTS_N_INSNS (19), /* divsi */
686 COSTS_N_INSNS (19), /* divdi */
687 COSTS_N_INSNS (3), /* fp */
688 COSTS_N_INSNS (4), /* dmul */
689 COSTS_N_INSNS (18), /* sdiv */
690 COSTS_N_INSNS (33), /* ddiv */
642639ce 691 32,
a19b7d46
EW
692 16, /* l1 cache */
693 16, /* l2 cache */
642639ce 694 1, /* prefetch streams /*/
fa41c305
EW
695};
696
edae5fe3
DE
697/* Instruction costs on PPCE500MC processors. */
698static const
699struct processor_costs ppce500mc_cost = {
700 COSTS_N_INSNS (4), /* mulsi */
701 COSTS_N_INSNS (4), /* mulsi_const */
702 COSTS_N_INSNS (4), /* mulsi_const9 */
703 COSTS_N_INSNS (4), /* muldi */
704 COSTS_N_INSNS (14), /* divsi */
705 COSTS_N_INSNS (14), /* divdi */
706 COSTS_N_INSNS (8), /* fp */
707 COSTS_N_INSNS (10), /* dmul */
708 COSTS_N_INSNS (36), /* sdiv */
709 COSTS_N_INSNS (66), /* ddiv */
710 64, /* cache line size */
711 32, /* l1 cache */
712 128, /* l2 cache */
713 1, /* prefetch streams /*/
714};
715
8b897cfa
RS
716/* Instruction costs on POWER4 and POWER5 processors. */
717static const
718struct processor_costs power4_cost = {
06a67bdd
RS
719 COSTS_N_INSNS (3), /* mulsi */
720 COSTS_N_INSNS (2), /* mulsi_const */
721 COSTS_N_INSNS (2), /* mulsi_const9 */
722 COSTS_N_INSNS (4), /* muldi */
723 COSTS_N_INSNS (18), /* divsi */
724 COSTS_N_INSNS (34), /* divdi */
725 COSTS_N_INSNS (3), /* fp */
726 COSTS_N_INSNS (3), /* dmul */
727 COSTS_N_INSNS (17), /* sdiv */
728 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 729 128, /* cache line size */
5f732aba
DE
730 32, /* l1 cache */
731 1024, /* l2 cache */
0b11da67 732 8, /* prefetch streams /*/
8b897cfa
RS
733};
734
44cd321e
PS
735/* Instruction costs on POWER6 processors. */
736static const
737struct processor_costs power6_cost = {
738 COSTS_N_INSNS (8), /* mulsi */
739 COSTS_N_INSNS (8), /* mulsi_const */
740 COSTS_N_INSNS (8), /* mulsi_const9 */
741 COSTS_N_INSNS (8), /* muldi */
742 COSTS_N_INSNS (22), /* divsi */
743 COSTS_N_INSNS (28), /* divdi */
744 COSTS_N_INSNS (3), /* fp */
745 COSTS_N_INSNS (3), /* dmul */
746 COSTS_N_INSNS (13), /* sdiv */
747 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 748 128, /* cache line size */
5f732aba
DE
749 64, /* l1 cache */
750 2048, /* l2 cache */
0b11da67 751 16, /* prefetch streams */
44cd321e
PS
752};
753
8b897cfa 754\f
a2369ed3 755static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 756static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 757static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
758static void rs6000_emit_stack_tie (void);
759static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 760static bool spe_func_has_64bit_regs_p (void);
b20a9cca 761static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 762 int, HOST_WIDE_INT);
a2369ed3 763static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
f78c3290 764static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
a2369ed3
DJ
765static unsigned rs6000_hash_constant (rtx);
766static unsigned toc_hash_function (const void *);
767static int toc_hash_eq (const void *, const void *);
a2369ed3 768static bool constant_pool_expr_p (rtx);
d04b6e6e 769static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
770static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
771static struct machine_function * rs6000_init_machine_status (void);
772static bool rs6000_assemble_integer (rtx, unsigned int, int);
f78c3290 773static bool no_global_regs_above (int, bool);
5add3202 774#ifdef HAVE_GAS_HIDDEN
a2369ed3 775static void rs6000_assemble_visibility (tree, int);
5add3202 776#endif
a2369ed3
DJ
777static int rs6000_ra_ever_killed (void);
778static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 779static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 780static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 781static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 782static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 783static const char *rs6000_mangle_type (const_tree);
b86fe7b4 784extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 785static void rs6000_set_default_type_attributes (tree);
f78c3290
NF
786static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
787static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
788static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
789 enum machine_mode, bool, bool, bool);
52ff33d0 790static bool rs6000_reg_live_or_pic_offset_p (int);
f78c3290
NF
791static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
792static void rs6000_restore_saved_cr (rtx, int);
a2369ed3
DJ
793static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
794static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
795static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
796 tree);
a2369ed3 797static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 798static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 799static void rs6000_file_start (void);
7c262518 800#if TARGET_ELF
9b580a0b 801static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
802static void rs6000_elf_asm_out_constructor (rtx, int);
803static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 804static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 805static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
806static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
807 unsigned HOST_WIDE_INT);
a56d7372 808static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 809 ATTRIBUTE_UNUSED;
7c262518 810#endif
3101faab 811static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
812static void rs6000_alloc_sdmode_stack_slot (void);
813static void rs6000_instantiate_decls (void);
cbaaba19 814#if TARGET_XCOFF
0d5817b2 815static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 816static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 817static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 818static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 819static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 820static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 821 unsigned HOST_WIDE_INT);
d6b5193b
RS
822static void rs6000_xcoff_unique_section (tree, int);
823static section *rs6000_xcoff_select_rtx_section
824 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
825static const char * rs6000_xcoff_strip_name_encoding (const char *);
826static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
827static void rs6000_xcoff_file_start (void);
828static void rs6000_xcoff_file_end (void);
f1384257 829#endif
a2369ed3 830static int rs6000_variable_issue (FILE *, int, rtx, int);
f40751dd 831static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
a2369ed3 832static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 833static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 834static bool is_microcoded_insn (rtx);
d296e02e 835static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
836static bool is_cracked_insn (rtx);
837static bool is_branch_slot_insn (rtx);
44cd321e 838static bool is_load_insn (rtx);
e3a0e200 839static rtx get_store_dest (rtx pat);
44cd321e
PS
840static bool is_store_insn (rtx);
841static bool set_to_load_agen (rtx,rtx);
982afe02 842static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
843static int rs6000_adjust_priority (rtx, int);
844static int rs6000_issue_rate (void);
b198261f 845static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
846static rtx get_next_active_insn (rtx, rtx);
847static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
848static bool insn_must_be_first_in_group (rtx);
849static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
850static bool is_costly_group (rtx *, rtx);
851static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
852static int redefine_groups (FILE *, int, rtx, rtx);
853static int pad_groups (FILE *, int, rtx, rtx);
854static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
855static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
856static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 857static int rs6000_use_sched_lookahead (void);
d296e02e 858static int rs6000_use_sched_lookahead_guard (rtx);
e855c69d
AB
859static void * rs6000_alloc_sched_context (void);
860static void rs6000_init_sched_context (void *, bool);
861static void rs6000_set_sched_context (void *);
862static void rs6000_free_sched_context (void *);
9c78b944 863static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 864static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
865static tree rs6000_builtin_mul_widen_even (tree);
866static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 867static tree rs6000_builtin_conversion (enum tree_code, tree);
0fca40f5 868static tree rs6000_builtin_vec_perm (tree, tree *);
a2369ed3 869
58646b77 870static void def_builtin (int, const char *, tree, int);
3101faab 871static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
872static void rs6000_init_builtins (void);
873static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
874static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
875static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
876static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
877static void altivec_init_builtins (void);
878static void rs6000_common_init_builtins (void);
c15c90bb 879static void rs6000_init_libfuncs (void);
a2369ed3 880
96038623
DE
881static void paired_init_builtins (void);
882static rtx paired_expand_builtin (tree, rtx, bool *);
883static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
884static rtx paired_expand_stv_builtin (enum insn_code, tree);
885static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
886
b20a9cca
AM
887static void enable_mask_for_builtins (struct builtin_description *, int,
888 enum rs6000_builtins,
889 enum rs6000_builtins);
7c62e993 890static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
891static void spe_init_builtins (void);
892static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 893static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
894static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
895static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
896static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
897static rs6000_stack_t *rs6000_stack_info (void);
898static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
899
900static rtx altivec_expand_builtin (tree, rtx, bool *);
901static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
902static rtx altivec_expand_st_builtin (tree, rtx, bool *);
903static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
904static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 905static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 906 const char *, tree, rtx);
a2369ed3 907static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
908static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
909static rtx altivec_expand_vec_set_builtin (tree);
910static rtx altivec_expand_vec_ext_builtin (tree, rtx);
911static int get_element_number (tree, tree);
78f5898b 912static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 913static void rs6000_parse_tls_size_option (void);
5da702b1 914static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
915static int first_altivec_reg_to_save (void);
916static unsigned int compute_vrsave_mask (void);
9390387d 917static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
918static void is_altivec_return_reg (rtx, void *);
919static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
920int easy_vector_constant (rtx, enum machine_mode);
3101faab 921static bool rs6000_is_opaque_type (const_tree);
a2369ed3 922static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 923static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 924static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 925static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
926static rtx rs6000_tls_get_addr (void);
927static rtx rs6000_got_sym (void);
9390387d 928static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
929static const char *rs6000_get_some_local_dynamic_name (void);
930static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 931static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 932static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 933 enum machine_mode, tree);
0b5383eb
DJ
934static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
935 HOST_WIDE_INT);
936static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
937 tree, HOST_WIDE_INT);
938static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
939 HOST_WIDE_INT,
940 rtx[], int *);
941static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
942 const_tree, HOST_WIDE_INT,
943 rtx[], int *);
944static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 945static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 946static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
947static void setup_incoming_varargs (CUMULATIVE_ARGS *,
948 enum machine_mode, tree,
949 int *, int);
8cd5a4e0 950static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 951 const_tree, bool);
78a52f11
RH
952static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
953 tree, bool);
3101faab 954static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
955#if TARGET_MACHO
956static void macho_branch_islands (void);
efdba735
SH
957static int no_previous_def (tree function_name);
958static tree get_prev_label (tree function_name);
c4e18b1c 959static void rs6000_darwin_file_start (void);
efdba735
SH
960#endif
961
c35d187f 962static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 963static void rs6000_va_start (tree, rtx);
726a989a 964static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
586de218 965static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 966static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 967static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 968static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 969 enum machine_mode);
94ff898d 970static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
971 enum machine_mode);
972static int get_vsel_insn (enum machine_mode);
973static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 974static tree rs6000_stack_protect_fail (void);
21213b4c
DP
975
976const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
977static enum machine_mode rs6000_eh_return_filter_mode (void);
978
17211ab5
GK
979/* Hash table stuff for keeping track of TOC entries. */
980
981struct toc_hash_struct GTY(())
982{
983 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
984 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
985 rtx key;
986 enum machine_mode key_mode;
987 int labelno;
988};
989
990static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
991\f
992/* Default register names. */
993char rs6000_reg_names[][8] =
994{
802a0058
MM
995 "0", "1", "2", "3", "4", "5", "6", "7",
996 "8", "9", "10", "11", "12", "13", "14", "15",
997 "16", "17", "18", "19", "20", "21", "22", "23",
998 "24", "25", "26", "27", "28", "29", "30", "31",
999 "0", "1", "2", "3", "4", "5", "6", "7",
1000 "8", "9", "10", "11", "12", "13", "14", "15",
1001 "16", "17", "18", "19", "20", "21", "22", "23",
1002 "24", "25", "26", "27", "28", "29", "30", "31",
1003 "mq", "lr", "ctr","ap",
1004 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
1005 "xer",
1006 /* AltiVec registers. */
0cd5e3a1
AH
1007 "0", "1", "2", "3", "4", "5", "6", "7",
1008 "8", "9", "10", "11", "12", "13", "14", "15",
1009 "16", "17", "18", "19", "20", "21", "22", "23",
1010 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
1011 "vrsave", "vscr",
1012 /* SPE registers. */
7d5175e1
JJ
1013 "spe_acc", "spefscr",
1014 /* Soft frame pointer. */
1015 "sfp"
c81bebd7
MM
1016};
1017
1018#ifdef TARGET_REGNAMES
8b60264b 1019static const char alt_reg_names[][8] =
c81bebd7 1020{
802a0058
MM
1021 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
1022 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
1023 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
1024 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
1025 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
1026 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
1027 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
1028 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
1029 "mq", "lr", "ctr", "ap",
1030 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 1031 "xer",
59a4c851 1032 /* AltiVec registers. */
0ac081f6 1033 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1034 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1035 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1036 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1037 "vrsave", "vscr",
1038 /* SPE registers. */
7d5175e1
JJ
1039 "spe_acc", "spefscr",
1040 /* Soft frame pointer. */
1041 "sfp"
c81bebd7
MM
1042};
1043#endif
9878760c 1044\f
daf11973
MM
1045#ifndef MASK_STRICT_ALIGN
1046#define MASK_STRICT_ALIGN 0
1047#endif
ffcfcb5f
AM
1048#ifndef TARGET_PROFILE_KERNEL
1049#define TARGET_PROFILE_KERNEL 0
1050#endif
3961e8fe
RH
1051
1052/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1053#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
1054\f
1055/* Initialize the GCC target structure. */
91d231cb
JM
1056#undef TARGET_ATTRIBUTE_TABLE
1057#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1058#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1059#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1060
301d03af
RS
1061#undef TARGET_ASM_ALIGNED_DI_OP
1062#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1063
1064/* Default unaligned ops are only provided for ELF. Find the ops needed
1065 for non-ELF systems. */
1066#ifndef OBJECT_FORMAT_ELF
cbaaba19 1067#if TARGET_XCOFF
ae6c1efd 1068/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1069 64-bit targets. */
1070#undef TARGET_ASM_UNALIGNED_HI_OP
1071#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1072#undef TARGET_ASM_UNALIGNED_SI_OP
1073#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1074#undef TARGET_ASM_UNALIGNED_DI_OP
1075#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1076#else
1077/* For Darwin. */
1078#undef TARGET_ASM_UNALIGNED_HI_OP
1079#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1080#undef TARGET_ASM_UNALIGNED_SI_OP
1081#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1082#undef TARGET_ASM_UNALIGNED_DI_OP
1083#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1084#undef TARGET_ASM_ALIGNED_DI_OP
1085#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1086#endif
1087#endif
1088
1089/* This hook deals with fixups for relocatable code and DI-mode objects
1090 in 64-bit code. */
1091#undef TARGET_ASM_INTEGER
1092#define TARGET_ASM_INTEGER rs6000_assemble_integer
1093
93638d7a
AM
1094#ifdef HAVE_GAS_HIDDEN
1095#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1096#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1097#endif
1098
c4501e62
JJ
1099#undef TARGET_HAVE_TLS
1100#define TARGET_HAVE_TLS HAVE_AS_TLS
1101
1102#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1103#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1104
08c148a8
NB
1105#undef TARGET_ASM_FUNCTION_PROLOGUE
1106#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1107#undef TARGET_ASM_FUNCTION_EPILOGUE
1108#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1109
b54cf83a
DE
1110#undef TARGET_SCHED_VARIABLE_ISSUE
1111#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1112
c237e94a
ZW
1113#undef TARGET_SCHED_ISSUE_RATE
1114#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1115#undef TARGET_SCHED_ADJUST_COST
1116#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1117#undef TARGET_SCHED_ADJUST_PRIORITY
1118#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1119#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1120#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1121#undef TARGET_SCHED_INIT
1122#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1123#undef TARGET_SCHED_FINISH
1124#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1125#undef TARGET_SCHED_REORDER
1126#define TARGET_SCHED_REORDER rs6000_sched_reorder
1127#undef TARGET_SCHED_REORDER2
1128#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1129
be12c2b0
VM
1130#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1131#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1132
d296e02e
AP
1133#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1134#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1135
e855c69d
AB
1136#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
1137#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
1138#undef TARGET_SCHED_INIT_SCHED_CONTEXT
1139#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
1140#undef TARGET_SCHED_SET_SCHED_CONTEXT
1141#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
1142#undef TARGET_SCHED_FREE_SCHED_CONTEXT
1143#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context
1144
7ccf35ed
DN
1145#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1146#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1147#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1148#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1149#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1150#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1151#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1152#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
0fca40f5
IR
1153#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
1154#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm
7ccf35ed 1155
5b900a4c
DN
1156#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1157#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1158
0ac081f6
AH
1159#undef TARGET_INIT_BUILTINS
1160#define TARGET_INIT_BUILTINS rs6000_init_builtins
1161
1162#undef TARGET_EXPAND_BUILTIN
1163#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1164
608063c3
JB
1165#undef TARGET_MANGLE_TYPE
1166#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1167
c15c90bb
ZW
1168#undef TARGET_INIT_LIBFUNCS
1169#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1170
f1384257 1171#if TARGET_MACHO
0e5dbd9b 1172#undef TARGET_BINDS_LOCAL_P
31920d83 1173#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1174#endif
0e5dbd9b 1175
77ccdfed
EC
1176#undef TARGET_MS_BITFIELD_LAYOUT_P
1177#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1178
3961e8fe
RH
1179#undef TARGET_ASM_OUTPUT_MI_THUNK
1180#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1181
3961e8fe 1182#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1183#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1184
4977bab6
ZW
1185#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1186#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1187
2e3f0db6
DJ
1188#undef TARGET_INVALID_WITHIN_DOLOOP
1189#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1190
3c50106f
RH
1191#undef TARGET_RTX_COSTS
1192#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67 1193#undef TARGET_ADDRESS_COST
f40751dd 1194#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3c50106f 1195
c8e4f0e9 1196#undef TARGET_VECTOR_OPAQUE_P
58646b77 1197#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1198
96714395
AH
1199#undef TARGET_DWARF_REGISTER_SPAN
1200#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1201
37ea0b7e
JM
1202#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1203#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1204
c6e8c921
GK
1205/* On rs6000, function arguments are promoted, as are function return
1206 values. */
1207#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1208#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1209#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1210#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1211
c6e8c921
GK
1212#undef TARGET_RETURN_IN_MEMORY
1213#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1214
1215#undef TARGET_SETUP_INCOMING_VARARGS
1216#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1217
1218/* Always strict argument naming on rs6000. */
1219#undef TARGET_STRICT_ARGUMENT_NAMING
1220#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1221#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1222#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1223#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1224#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1225#undef TARGET_MUST_PASS_IN_STACK
1226#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1227#undef TARGET_PASS_BY_REFERENCE
1228#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1229#undef TARGET_ARG_PARTIAL_BYTES
1230#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1231
c35d187f
RH
1232#undef TARGET_BUILD_BUILTIN_VA_LIST
1233#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1234
d7bd8aeb
JJ
1235#undef TARGET_EXPAND_BUILTIN_VA_START
1236#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1237
cd3ce9b4
JM
1238#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1239#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1240
93f90be6
FJ
1241#undef TARGET_EH_RETURN_FILTER_MODE
1242#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1243
00b79d54
BE
1244#undef TARGET_SCALAR_MODE_SUPPORTED_P
1245#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1246
f676971a
EC
1247#undef TARGET_VECTOR_MODE_SUPPORTED_P
1248#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1249
4d3e6fae
FJ
1250#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1251#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1252
78f5898b
AH
1253#undef TARGET_HANDLE_OPTION
1254#define TARGET_HANDLE_OPTION rs6000_handle_option
1255
1256#undef TARGET_DEFAULT_TARGET_FLAGS
1257#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1258 (TARGET_DEFAULT)
78f5898b 1259
3aebbe5f
JJ
1260#undef TARGET_STACK_PROTECT_FAIL
1261#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1262
445cf5eb
JM
1263/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1264 The PowerPC architecture requires only weak consistency among
1265 processors--that is, memory accesses between processors need not be
1266 sequentially consistent and memory accesses among processors can occur
1267 in any order. The ability to order memory accesses weakly provides
1268 opportunities for more efficient use of the system bus. Unless a
1269 dependency exists, the 604e allows read operations to precede store
1270 operations. */
1271#undef TARGET_RELAXED_ORDERING
1272#define TARGET_RELAXED_ORDERING true
1273
fdbe66f2
EB
1274#ifdef HAVE_AS_TLS
1275#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1276#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1277#endif
1278
aacd3885
RS
1279/* Use a 32-bit anchor range. This leads to sequences like:
1280
1281 addis tmp,anchor,high
1282 add dest,tmp,low
1283
1284 where tmp itself acts as an anchor, and can be shared between
1285 accesses to the same 64k page. */
1286#undef TARGET_MIN_ANCHOR_OFFSET
1287#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1288#undef TARGET_MAX_ANCHOR_OFFSET
1289#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1290#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1291#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1292
9c78b944
DE
1293#undef TARGET_BUILTIN_RECIPROCAL
1294#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1295
e41b2a33
PB
1296#undef TARGET_EXPAND_TO_RTL_HOOK
1297#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1298
1299#undef TARGET_INSTANTIATE_DECLS
1300#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1301
f6897b10 1302struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1303\f
0d1fbc8c
AH
1304
1305/* Value is 1 if hard register REGNO can hold a value of machine-mode
1306 MODE. */
1307static int
1308rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1309{
1310 /* The GPRs can hold any mode, but values bigger than one register
1311 cannot go past R31. */
1312 if (INT_REGNO_P (regno))
1313 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1314
a5a97921 1315 /* The float registers can only hold floating modes and DImode.
7393f7f8 1316 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1317 if (FP_REGNO_P (regno))
1318 return
96038623 1319 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1320 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1321 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1322 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1323 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1324 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1325 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1326
1327 /* The CR register can only hold CC modes. */
1328 if (CR_REGNO_P (regno))
1329 return GET_MODE_CLASS (mode) == MODE_CC;
1330
1331 if (XER_REGNO_P (regno))
1332 return mode == PSImode;
1333
1334 /* AltiVec only in AldyVec registers. */
1335 if (ALTIVEC_REGNO_P (regno))
1336 return ALTIVEC_VECTOR_MODE (mode);
1337
1338 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1339 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1340 return 1;
1341
1342 /* We cannot put TImode anywhere except general register and it must be
1343 able to fit within the register set. */
1344
1345 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1346}
1347
1348/* Initialize rs6000_hard_regno_mode_ok_p table. */
1349static void
1350rs6000_init_hard_regno_mode_ok (void)
1351{
1352 int r, m;
1353
1354 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1355 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1356 if (rs6000_hard_regno_mode_ok (r, m))
1357 rs6000_hard_regno_mode_ok_p[m][r] = true;
1358}
1359
e4cad568
GK
1360#if TARGET_MACHO
1361/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1362
1363static void
1364darwin_rs6000_override_options (void)
1365{
1366 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1367 off. */
1368 rs6000_altivec_abi = 1;
1369 TARGET_ALTIVEC_VRSAVE = 1;
1370 if (DEFAULT_ABI == ABI_DARWIN)
1371 {
1372 if (MACHO_DYNAMIC_NO_PIC_P)
1373 {
1374 if (flag_pic)
1375 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1376 flag_pic = 0;
1377 }
1378 else if (flag_pic == 1)
1379 {
1380 flag_pic = 2;
1381 }
1382 }
1383 if (TARGET_64BIT && ! TARGET_POWERPC64)
1384 {
1385 target_flags |= MASK_POWERPC64;
1386 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1387 }
1388 if (flag_mkernel)
1389 {
1390 rs6000_default_long_calls = 1;
1391 target_flags |= MASK_SOFT_FLOAT;
1392 }
1393
1394 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1395 Altivec. */
1396 if (!flag_mkernel && !flag_apple_kext
1397 && TARGET_64BIT
1398 && ! (target_flags_explicit & MASK_ALTIVEC))
1399 target_flags |= MASK_ALTIVEC;
1400
1401 /* Unless the user (not the configurer) has explicitly overridden
1402 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1403 G4 unless targetting the kernel. */
1404 if (!flag_mkernel
1405 && !flag_apple_kext
1406 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1407 && ! (target_flags_explicit & MASK_ALTIVEC)
1408 && ! rs6000_select[1].string)
1409 {
1410 target_flags |= MASK_ALTIVEC;
1411 }
1412}
1413#endif
1414
c1e55850
GK
1415/* If not otherwise specified by a target, make 'long double' equivalent to
1416 'double'. */
1417
1418#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1419#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1420#endif
1421
5248c961
RK
1422/* Override command line options. Mostly we process the processor
1423 type and sometimes adjust other TARGET_ options. */
1424
1425void
d779d0dc 1426rs6000_override_options (const char *default_cpu)
5248c961 1427{
c4d38ccb 1428 size_t i, j;
8e3f41e7 1429 struct rs6000_cpu_select *ptr;
66188a7e 1430 int set_masks;
5248c961 1431
66188a7e 1432 /* Simplifications for entries below. */
85638c0d 1433
66188a7e
GK
1434 enum {
1435 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1436 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1437 };
85638c0d 1438
66188a7e
GK
1439 /* This table occasionally claims that a processor does not support
1440 a particular feature even though it does, but the feature is slower
1441 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1442 complete description of the processor's support.
66188a7e
GK
1443
1444 Please keep this list in order, and don't forget to update the
1445 documentation in invoke.texi when adding a new processor or
1446 flag. */
5248c961
RK
1447 static struct ptt
1448 {
8b60264b
KG
1449 const char *const name; /* Canonical processor name. */
1450 const enum processor_type processor; /* Processor type enum value. */
1451 const int target_enable; /* Target flags to enable. */
8b60264b 1452 } const processor_target_table[]
66188a7e 1453 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1454 {"403", PROCESSOR_PPC403,
66188a7e 1455 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1456 {"405", PROCESSOR_PPC405,
716019c0
JM
1457 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1458 {"405fp", PROCESSOR_PPC405,
1459 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1460 {"440", PROCESSOR_PPC440,
716019c0
JM
1461 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1462 {"440fp", PROCESSOR_PPC440,
1463 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1464 {"464", PROCESSOR_PPC440,
1465 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1466 {"464fp", PROCESSOR_PPC440,
1467 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1468 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1469 {"601", PROCESSOR_PPC601,
66188a7e
GK
1470 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1471 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1472 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1473 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1474 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1475 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1476 {"620", PROCESSOR_PPC620,
1477 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1478 {"630", PROCESSOR_PPC630,
1479 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1480 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1481 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1482 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1483 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1484 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1485 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1486 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1487 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1488 /* 8548 has a dummy entry for now. */
a45bce6e 1489 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1490 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1491 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1492 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1493 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1494 {"970", PROCESSOR_POWER4,
66188a7e 1495 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1496 {"cell", PROCESSOR_CELL,
1497 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1498 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1499 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1500 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1501 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1502 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1503 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1504 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1505 {"power2", PROCESSOR_POWER,
1506 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1507 {"power3", PROCESSOR_PPC630,
1508 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1509 {"power4", PROCESSOR_POWER4,
9a8d7941 1510 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1511 | MASK_MFCRF},
ec507f2d 1512 {"power5", PROCESSOR_POWER5,
9a8d7941 1513 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1514 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1515 {"power5+", PROCESSOR_POWER5,
9a8d7941 1516 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1517 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1518 {"power6", PROCESSOR_POWER6,
0783d48d
DE
1519 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1520 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1521 {"power6x", PROCESSOR_POWER6,
0783d48d
DE
1522 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1523 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1524 | MASK_MFPGPR},
d40c9e33
PB
1525 {"power7", PROCESSOR_POWER5,
1526 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1527 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
66188a7e
GK
1528 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1529 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1530 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1531 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1532 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1533 {"rios2", PROCESSOR_RIOS2,
1534 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1535 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1536 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1537 {"rs64", PROCESSOR_RS64A,
1538 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1539 };
5248c961 1540
ca7558fc 1541 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1542
66188a7e
GK
1543 /* Some OSs don't support saving the high part of 64-bit registers on
1544 context switch. Other OSs don't support saving Altivec registers.
1545 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1546 settings; if the user wants either, the user must explicitly specify
1547 them and we won't interfere with the user's specification. */
1548
1549 enum {
1550 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1551 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1552 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1553 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1554 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1555 };
0d1fbc8c 1556
c4ad648e 1557 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1558#ifdef OS_MISSING_POWERPC64
1559 if (OS_MISSING_POWERPC64)
1560 set_masks &= ~MASK_POWERPC64;
1561#endif
1562#ifdef OS_MISSING_ALTIVEC
1563 if (OS_MISSING_ALTIVEC)
1564 set_masks &= ~MASK_ALTIVEC;
1565#endif
1566
768875a8
AM
1567 /* Don't override by the processor default if given explicitly. */
1568 set_masks &= ~target_flags_explicit;
957211c3 1569
a4f6c312 1570 /* Identify the processor type. */
8e3f41e7 1571 rs6000_select[0].string = default_cpu;
3cb999d8 1572 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1573
b6a1cbae 1574 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1575 {
8e3f41e7
MM
1576 ptr = &rs6000_select[i];
1577 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1578 {
8e3f41e7
MM
1579 for (j = 0; j < ptt_size; j++)
1580 if (! strcmp (ptr->string, processor_target_table[j].name))
1581 {
1582 if (ptr->set_tune_p)
1583 rs6000_cpu = processor_target_table[j].processor;
1584
1585 if (ptr->set_arch_p)
1586 {
66188a7e
GK
1587 target_flags &= ~set_masks;
1588 target_flags |= (processor_target_table[j].target_enable
1589 & set_masks);
8e3f41e7
MM
1590 }
1591 break;
1592 }
1593
4406229e 1594 if (j == ptt_size)
8e3f41e7 1595 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1596 }
1597 }
8a61d227 1598
edae5fe3
DE
1599 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1600 && !rs6000_explicit_options.isel)
a3170dc6
AH
1601 rs6000_isel = 1;
1602
edae5fe3
DE
1603 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1604 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1605 {
1606 if (TARGET_ALTIVEC)
1607 error ("AltiVec not supported in this target");
1608 if (TARGET_SPE)
1609 error ("Spe not supported in this target");
1610 }
1611
dff9f1b6
DE
1612 /* If we are optimizing big endian systems for space, use the load/store
1613 multiple and string instructions. */
ef792183 1614 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1615 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1616
a4f6c312
SS
1617 /* Don't allow -mmultiple or -mstring on little endian systems
1618 unless the cpu is a 750, because the hardware doesn't support the
1619 instructions used in little endian mode, and causes an alignment
1620 trap. The 750 does not cause an alignment trap (except when the
1621 target is unaligned). */
bef84347 1622
b21fb038 1623 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1624 {
1625 if (TARGET_MULTIPLE)
1626 {
1627 target_flags &= ~MASK_MULTIPLE;
b21fb038 1628 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1629 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1630 }
1631
1632 if (TARGET_STRING)
1633 {
1634 target_flags &= ~MASK_STRING;
b21fb038 1635 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1636 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1637 }
1638 }
3933e0e1 1639
38c1f2d7
MM
1640 /* Set debug flags */
1641 if (rs6000_debug_name)
1642 {
bfc79d3b 1643 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1644 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1645 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1646 rs6000_debug_stack = 1;
bfc79d3b 1647 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1648 rs6000_debug_arg = 1;
1649 else
c725bd79 1650 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1651 }
1652
57ac7be9
AM
1653 if (rs6000_traceback_name)
1654 {
1655 if (! strncmp (rs6000_traceback_name, "full", 4))
1656 rs6000_traceback = traceback_full;
1657 else if (! strncmp (rs6000_traceback_name, "part", 4))
1658 rs6000_traceback = traceback_part;
1659 else if (! strncmp (rs6000_traceback_name, "no", 2))
1660 rs6000_traceback = traceback_none;
1661 else
9e637a26 1662 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1663 rs6000_traceback_name);
1664 }
1665
78f5898b
AH
1666 if (!rs6000_explicit_options.long_double)
1667 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1668
602ea4d3 1669#ifndef POWERPC_LINUX
d3603e8c 1670 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1671 rs6000_ieeequad = 1;
1672#endif
1673
0db747be
DE
1674 /* Enable Altivec ABI for AIX -maltivec. */
1675 if (TARGET_XCOFF && TARGET_ALTIVEC)
1676 rs6000_altivec_abi = 1;
1677
a2db2771
JJ
1678 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1679 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1680 be explicitly overridden in either case. */
1681 if (TARGET_ELF)
6d0ef01e 1682 {
a2db2771
JJ
1683 if (!rs6000_explicit_options.altivec_abi
1684 && (TARGET_64BIT || TARGET_ALTIVEC))
1685 rs6000_altivec_abi = 1;
1686
1687 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1688 if (!rs6000_explicit_options.vrsave)
1689 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1690 }
1691
594a51fe
SS
1692 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1693 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1694 {
1695 rs6000_darwin64_abi = 1;
9c7956fd 1696#if TARGET_MACHO
6ac49599 1697 darwin_one_byte_bool = 1;
9c7956fd 1698#endif
d9168963
SS
1699 /* Default to natural alignment, for better performance. */
1700 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1701 }
1702
194c524a
DE
1703 /* Place FP constants in the constant pool instead of TOC
1704 if section anchors enabled. */
1705 if (flag_section_anchors)
1706 TARGET_NO_FP_IN_TOC = 1;
1707
c4501e62
JJ
1708 /* Handle -mtls-size option. */
1709 rs6000_parse_tls_size_option ();
1710
a7ae18e2
AH
1711#ifdef SUBTARGET_OVERRIDE_OPTIONS
1712 SUBTARGET_OVERRIDE_OPTIONS;
1713#endif
1714#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1715 SUBSUBTARGET_OVERRIDE_OPTIONS;
1716#endif
4d4cbc0e
AH
1717#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1718 SUB3TARGET_OVERRIDE_OPTIONS;
1719#endif
a7ae18e2 1720
edae5fe3 1721 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1722 {
edae5fe3 1723 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1724 MASK_STRING above when optimizing for size. */
1725 if ((target_flags & MASK_STRING) != 0)
1726 target_flags = target_flags & ~MASK_STRING;
1727 }
1728 else if (rs6000_select[1].string != NULL)
1729 {
1730 /* For the powerpc-eabispe configuration, we set all these by
1731 default, so let's unset them if we manually set another
1732 CPU that is not the E500. */
a2db2771 1733 if (!rs6000_explicit_options.spe_abi)
5da702b1 1734 rs6000_spe_abi = 0;
78f5898b 1735 if (!rs6000_explicit_options.spe)
5da702b1 1736 rs6000_spe = 0;
78f5898b 1737 if (!rs6000_explicit_options.float_gprs)
5da702b1 1738 rs6000_float_gprs = 0;
78f5898b 1739 if (!rs6000_explicit_options.isel)
5da702b1
AH
1740 rs6000_isel = 0;
1741 }
b5044283 1742
eca0d5e8
JM
1743 /* Detect invalid option combinations with E500. */
1744 CHECK_E500_OPTIONS;
1745
ec507f2d 1746 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1747 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1748 && rs6000_cpu != PROCESSOR_POWER6
1749 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1750 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1751 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1752 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1753 || rs6000_cpu == PROCESSOR_POWER5
1754 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1755
ec507f2d
DE
1756 rs6000_sched_restricted_insns_priority
1757 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1758
569fa502 1759 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1760 rs6000_sched_costly_dep
1761 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1762
569fa502
DN
1763 if (rs6000_sched_costly_dep_str)
1764 {
f676971a 1765 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1766 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1767 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1768 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1769 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1770 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1771 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1772 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1773 else
c4ad648e 1774 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1775 }
1776
1777 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1778 rs6000_sched_insert_nops
1779 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1780
cbe26ab8
DN
1781 if (rs6000_sched_insert_nops_str)
1782 {
1783 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1784 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1785 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1786 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1787 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1788 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1789 else
c4ad648e 1790 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1791 }
1792
c81bebd7 1793#ifdef TARGET_REGNAMES
a4f6c312
SS
1794 /* If the user desires alternate register names, copy in the
1795 alternate names now. */
c81bebd7 1796 if (TARGET_REGNAMES)
4e135bdd 1797 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1798#endif
1799
df01da37 1800 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1801 If -maix-struct-return or -msvr4-struct-return was explicitly
1802 used, don't override with the ABI default. */
df01da37
DE
1803 if (!rs6000_explicit_options.aix_struct_ret)
1804 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1805
602ea4d3 1806 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1807 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1808
f676971a 1809 if (TARGET_TOC)
9ebbca7d 1810 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1811
301d03af
RS
1812 /* We can only guarantee the availability of DI pseudo-ops when
1813 assembling for 64-bit targets. */
ae6c1efd 1814 if (!TARGET_64BIT)
301d03af
RS
1815 {
1816 targetm.asm_out.aligned_op.di = NULL;
1817 targetm.asm_out.unaligned_op.di = NULL;
1818 }
1819
1494c534
DE
1820 /* Set branch target alignment, if not optimizing for size. */
1821 if (!optimize_size)
1822 {
d296e02e
AP
1823 /* Cell wants to be aligned 8byte for dual issue. */
1824 if (rs6000_cpu == PROCESSOR_CELL)
1825 {
1826 if (align_functions <= 0)
1827 align_functions = 8;
1828 if (align_jumps <= 0)
1829 align_jumps = 8;
1830 if (align_loops <= 0)
1831 align_loops = 8;
1832 }
44cd321e 1833 if (rs6000_align_branch_targets)
1494c534
DE
1834 {
1835 if (align_functions <= 0)
1836 align_functions = 16;
1837 if (align_jumps <= 0)
1838 align_jumps = 16;
1839 if (align_loops <= 0)
1840 align_loops = 16;
1841 }
1842 if (align_jumps_max_skip <= 0)
1843 align_jumps_max_skip = 15;
1844 if (align_loops_max_skip <= 0)
1845 align_loops_max_skip = 15;
1846 }
2792d578 1847
71f123ca
FS
1848 /* Arrange to save and restore machine status around nested functions. */
1849 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1850
1851 /* We should always be splitting complex arguments, but we can't break
1852 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1853 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1854 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1855
1856 /* Initialize rs6000_cost with the appropriate target costs. */
1857 if (optimize_size)
1858 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1859 else
1860 switch (rs6000_cpu)
1861 {
1862 case PROCESSOR_RIOS1:
1863 rs6000_cost = &rios1_cost;
1864 break;
1865
1866 case PROCESSOR_RIOS2:
1867 rs6000_cost = &rios2_cost;
1868 break;
1869
1870 case PROCESSOR_RS64A:
1871 rs6000_cost = &rs64a_cost;
1872 break;
1873
1874 case PROCESSOR_MPCCORE:
1875 rs6000_cost = &mpccore_cost;
1876 break;
1877
1878 case PROCESSOR_PPC403:
1879 rs6000_cost = &ppc403_cost;
1880 break;
1881
1882 case PROCESSOR_PPC405:
1883 rs6000_cost = &ppc405_cost;
1884 break;
1885
1886 case PROCESSOR_PPC440:
1887 rs6000_cost = &ppc440_cost;
1888 break;
1889
1890 case PROCESSOR_PPC601:
1891 rs6000_cost = &ppc601_cost;
1892 break;
1893
1894 case PROCESSOR_PPC603:
1895 rs6000_cost = &ppc603_cost;
1896 break;
1897
1898 case PROCESSOR_PPC604:
1899 rs6000_cost = &ppc604_cost;
1900 break;
1901
1902 case PROCESSOR_PPC604e:
1903 rs6000_cost = &ppc604e_cost;
1904 break;
1905
1906 case PROCESSOR_PPC620:
8b897cfa
RS
1907 rs6000_cost = &ppc620_cost;
1908 break;
1909
f0517163
RS
1910 case PROCESSOR_PPC630:
1911 rs6000_cost = &ppc630_cost;
1912 break;
1913
982afe02 1914 case PROCESSOR_CELL:
d296e02e
AP
1915 rs6000_cost = &ppccell_cost;
1916 break;
1917
8b897cfa
RS
1918 case PROCESSOR_PPC750:
1919 case PROCESSOR_PPC7400:
1920 rs6000_cost = &ppc750_cost;
1921 break;
1922
1923 case PROCESSOR_PPC7450:
1924 rs6000_cost = &ppc7450_cost;
1925 break;
1926
1927 case PROCESSOR_PPC8540:
1928 rs6000_cost = &ppc8540_cost;
1929 break;
1930
fa41c305
EW
1931 case PROCESSOR_PPCE300C2:
1932 case PROCESSOR_PPCE300C3:
1933 rs6000_cost = &ppce300c2c3_cost;
1934 break;
1935
edae5fe3
DE
1936 case PROCESSOR_PPCE500MC:
1937 rs6000_cost = &ppce500mc_cost;
1938 break;
1939
8b897cfa
RS
1940 case PROCESSOR_POWER4:
1941 case PROCESSOR_POWER5:
1942 rs6000_cost = &power4_cost;
1943 break;
1944
44cd321e
PS
1945 case PROCESSOR_POWER6:
1946 rs6000_cost = &power6_cost;
1947 break;
1948
8b897cfa 1949 default:
37409796 1950 gcc_unreachable ();
8b897cfa 1951 }
0b11da67
DE
1952
1953 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1954 set_param_value ("simultaneous-prefetches",
1955 rs6000_cost->simultaneous_prefetches);
1956 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1957 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1958 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1959 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1960 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1961 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1962
1963 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1964 can be optimized to ap = __builtin_next_arg (0). */
1965 if (DEFAULT_ABI != ABI_V4)
1966 targetm.expand_builtin_va_start = NULL;
696e45ba
ME
1967
1968 /* Set up single/double float flags.
1969 If TARGET_HARD_FLOAT is set, but neither single or double is set,
1970 then set both flags. */
1971 if (TARGET_HARD_FLOAT && TARGET_FPRS
1972 && rs6000_single_float == 0 && rs6000_double_float == 0)
1973 rs6000_single_float = rs6000_double_float = 1;
1974
1975 /* Reset single and double FP flags if target is E500. */
1976 if (TARGET_E500)
1977 {
1978 rs6000_single_float = rs6000_double_float = 0;
1979 if (TARGET_E500_SINGLE)
1980 rs6000_single_float = 1;
1981 if (TARGET_E500_DOUBLE)
1982 rs6000_single_float = rs6000_double_float = 1;
1983 }
1984
1985 rs6000_init_hard_regno_mode_ok ();
5248c961 1986}
5accd822 1987
7ccf35ed
DN
1988/* Implement targetm.vectorize.builtin_mask_for_load. */
1989static tree
1990rs6000_builtin_mask_for_load (void)
1991{
1992 if (TARGET_ALTIVEC)
1993 return altivec_builtin_mask_for_load;
1994 else
1995 return 0;
1996}
1997
7910ae0c
DN
1998/* Implement targetm.vectorize.builtin_conversion.
1999 Returns a decl of a function that implements conversion of an integer vector
2000 into a floating-point vector, or vice-versa. TYPE is the type of the integer
2001 side of the conversion.
2002 Return NULL_TREE if it is not available. */
f57d17f1
TM
2003static tree
2004rs6000_builtin_conversion (enum tree_code code, tree type)
2005{
2006 if (!TARGET_ALTIVEC)
2007 return NULL_TREE;
982afe02 2008
f57d17f1
TM
2009 switch (code)
2010 {
7910ae0c
DN
2011 case FIX_TRUNC_EXPR:
2012 switch (TYPE_MODE (type))
2013 {
2014 case V4SImode:
2015 return TYPE_UNSIGNED (type)
2016 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2017 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2018 default:
2019 return NULL_TREE;
2020 }
2021
f57d17f1
TM
2022 case FLOAT_EXPR:
2023 switch (TYPE_MODE (type))
2024 {
2025 case V4SImode:
7910ae0c
DN
2026 return TYPE_UNSIGNED (type)
2027 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2028 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2029 default:
2030 return NULL_TREE;
2031 }
7910ae0c 2032
f57d17f1
TM
2033 default:
2034 return NULL_TREE;
2035 }
2036}
2037
89d67cca
DN
2038/* Implement targetm.vectorize.builtin_mul_widen_even. */
2039static tree
2040rs6000_builtin_mul_widen_even (tree type)
2041{
2042 if (!TARGET_ALTIVEC)
2043 return NULL_TREE;
2044
2045 switch (TYPE_MODE (type))
2046 {
2047 case V8HImode:
7910ae0c
DN
2048 return TYPE_UNSIGNED (type)
2049 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2050 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2051
2052 case V16QImode:
7910ae0c
DN
2053 return TYPE_UNSIGNED (type)
2054 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2055 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2056 default:
2057 return NULL_TREE;
2058 }
2059}
2060
2061/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2062static tree
2063rs6000_builtin_mul_widen_odd (tree type)
2064{
2065 if (!TARGET_ALTIVEC)
2066 return NULL_TREE;
2067
2068 switch (TYPE_MODE (type))
2069 {
2070 case V8HImode:
7910ae0c
DN
2071 return TYPE_UNSIGNED (type)
2072 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2073 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2074
2075 case V16QImode:
7910ae0c
DN
2076 return TYPE_UNSIGNED (type)
2077 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2078 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2079 default:
2080 return NULL_TREE;
2081 }
2082}
2083
5b900a4c
DN
2084
2085/* Return true iff, data reference of TYPE can reach vector alignment (16)
2086 after applying N number of iterations. This routine does not determine
2087 how may iterations are required to reach desired alignment. */
2088
2089static bool
3101faab 2090rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2091{
2092 if (is_packed)
2093 return false;
2094
2095 if (TARGET_32BIT)
2096 {
2097 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2098 return true;
2099
2100 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2101 return true;
2102
2103 return false;
2104 }
2105 else
2106 {
2107 if (TARGET_MACHO)
2108 return false;
2109
2110 /* Assuming that all other types are naturally aligned. CHECKME! */
2111 return true;
2112 }
2113}
2114
0fca40f5
IR
2115/* Implement targetm.vectorize.builtin_vec_perm. */
2116tree
2117rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2118{
2119 tree d;
2120
2121 *mask_element_type = unsigned_char_type_node;
2122
2123 switch (TYPE_MODE (type))
2124 {
2125 case V16QImode:
2126 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2127 break;
2128
2129 case V8HImode:
2130 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2131 break;
2132
2133 case V4SImode:
2134 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2135 break;
2136
2137 case V4SFmode:
2138 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2139 break;
2140
2141 default:
2142 return NULL_TREE;
2143 }
2144
2145 gcc_assert (d);
2146 return d;
2147}
2148
5da702b1
AH
2149/* Handle generic options of the form -mfoo=yes/no.
2150 NAME is the option name.
2151 VALUE is the option value.
2152 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2153 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2154static void
5da702b1 2155rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2156{
5da702b1 2157 if (value == 0)
993f19a8 2158 return;
5da702b1
AH
2159 else if (!strcmp (value, "yes"))
2160 *flag = 1;
2161 else if (!strcmp (value, "no"))
2162 *flag = 0;
08b57fb3 2163 else
5da702b1 2164 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2165}
2166
c4501e62
JJ
2167/* Validate and record the size specified with the -mtls-size option. */
2168
2169static void
863d938c 2170rs6000_parse_tls_size_option (void)
c4501e62
JJ
2171{
2172 if (rs6000_tls_size_string == 0)
2173 return;
2174 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2175 rs6000_tls_size = 16;
2176 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2177 rs6000_tls_size = 32;
2178 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2179 rs6000_tls_size = 64;
2180 else
9e637a26 2181 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2182}
2183
5accd822 2184void
a2369ed3 2185optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2186{
2e3f0db6
DJ
2187 if (DEFAULT_ABI == ABI_DARWIN)
2188 /* The Darwin libraries never set errno, so we might as well
2189 avoid calling them when that's the only reason we would. */
2190 flag_errno_math = 0;
59d6560b
DE
2191
2192 /* Double growth factor to counter reduced min jump length. */
2193 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2194
2195 /* Enable section anchors by default.
2196 Skip section anchors for Objective C and Objective C++
2f3b0d4a
ST
2197 until front-ends fixed. */
2198 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
d6cc6ec9 2199 flag_section_anchors = 2;
5accd822 2200}
78f5898b 2201
0bb7b92e
ME
2202static enum fpu_type_t
2203rs6000_parse_fpu_option (const char *option)
2204{
2205 if (!strcmp("none", option)) return FPU_NONE;
2206 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2207 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2208 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2209 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2210 error("unknown value %s for -mfpu", option);
2211 return FPU_NONE;
2212}
2213
78f5898b
AH
2214/* Implement TARGET_HANDLE_OPTION. */
2215
2216static bool
2217rs6000_handle_option (size_t code, const char *arg, int value)
2218{
0bb7b92e
ME
2219 enum fpu_type_t fpu_type = FPU_NONE;
2220
78f5898b
AH
2221 switch (code)
2222 {
2223 case OPT_mno_power:
2224 target_flags &= ~(MASK_POWER | MASK_POWER2
2225 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2226 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2227 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2228 break;
2229 case OPT_mno_powerpc:
2230 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2231 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2232 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2233 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2234 break;
2235 case OPT_mfull_toc:
d2894ab5
DE
2236 target_flags &= ~MASK_MINIMAL_TOC;
2237 TARGET_NO_FP_IN_TOC = 0;
2238 TARGET_NO_SUM_IN_TOC = 0;
2239 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2240#ifdef TARGET_USES_SYSV4_OPT
2241 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2242 just the same as -mminimal-toc. */
2243 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2244 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2245#endif
2246 break;
2247
2248#ifdef TARGET_USES_SYSV4_OPT
2249 case OPT_mtoc:
2250 /* Make -mtoc behave like -mminimal-toc. */
2251 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2252 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2253 break;
2254#endif
2255
2256#ifdef TARGET_USES_AIX64_OPT
2257 case OPT_maix64:
2258#else
2259 case OPT_m64:
2260#endif
2c9c9afd
AM
2261 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2262 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2263 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2264 break;
2265
2266#ifdef TARGET_USES_AIX64_OPT
2267 case OPT_maix32:
2268#else
2269 case OPT_m32:
2270#endif
2271 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2272 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2273 break;
2274
2275 case OPT_minsert_sched_nops_:
2276 rs6000_sched_insert_nops_str = arg;
2277 break;
2278
2279 case OPT_mminimal_toc:
2280 if (value == 1)
2281 {
d2894ab5
DE
2282 TARGET_NO_FP_IN_TOC = 0;
2283 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2284 }
2285 break;
2286
2287 case OPT_mpower:
2288 if (value == 1)
c2dba4ab
AH
2289 {
2290 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2291 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2292 }
78f5898b
AH
2293 break;
2294
2295 case OPT_mpower2:
2296 if (value == 1)
c2dba4ab
AH
2297 {
2298 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2299 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2300 }
78f5898b
AH
2301 break;
2302
2303 case OPT_mpowerpc_gpopt:
2304 case OPT_mpowerpc_gfxopt:
2305 if (value == 1)
c2dba4ab
AH
2306 {
2307 target_flags |= MASK_POWERPC;
2308 target_flags_explicit |= MASK_POWERPC;
2309 }
78f5898b
AH
2310 break;
2311
df01da37
DE
2312 case OPT_maix_struct_return:
2313 case OPT_msvr4_struct_return:
2314 rs6000_explicit_options.aix_struct_ret = true;
2315 break;
2316
78f5898b 2317 case OPT_mvrsave_:
a2db2771 2318 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2319 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2320 break;
78f5898b 2321
94f4765c
NF
2322 case OPT_misel:
2323 rs6000_explicit_options.isel = true;
2324 rs6000_isel = value;
2325 break;
2326
78f5898b
AH
2327 case OPT_misel_:
2328 rs6000_explicit_options.isel = true;
2329 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2330 break;
2331
94f4765c
NF
2332 case OPT_mspe:
2333 rs6000_explicit_options.spe = true;
2334 rs6000_spe = value;
2335 break;
2336
78f5898b
AH
2337 case OPT_mspe_:
2338 rs6000_explicit_options.spe = true;
2339 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2340 break;
2341
2342 case OPT_mdebug_:
2343 rs6000_debug_name = arg;
2344 break;
2345
2346#ifdef TARGET_USES_SYSV4_OPT
2347 case OPT_mcall_:
2348 rs6000_abi_name = arg;
2349 break;
2350
2351 case OPT_msdata_:
2352 rs6000_sdata_name = arg;
2353 break;
2354
2355 case OPT_mtls_size_:
2356 rs6000_tls_size_string = arg;
2357 break;
2358
2359 case OPT_mrelocatable:
2360 if (value == 1)
c2dba4ab 2361 {
e0bf274f
AM
2362 target_flags |= MASK_MINIMAL_TOC;
2363 target_flags_explicit |= MASK_MINIMAL_TOC;
2364 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2365 }
78f5898b
AH
2366 break;
2367
2368 case OPT_mrelocatable_lib:
2369 if (value == 1)
c2dba4ab 2370 {
e0bf274f
AM
2371 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2372 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2373 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2374 }
78f5898b 2375 else
c2dba4ab
AH
2376 {
2377 target_flags &= ~MASK_RELOCATABLE;
2378 target_flags_explicit |= MASK_RELOCATABLE;
2379 }
78f5898b
AH
2380 break;
2381#endif
2382
2383 case OPT_mabi_:
78f5898b
AH
2384 if (!strcmp (arg, "altivec"))
2385 {
a2db2771 2386 rs6000_explicit_options.altivec_abi = true;
78f5898b 2387 rs6000_altivec_abi = 1;
a2db2771
JJ
2388
2389 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2390 rs6000_spe_abi = 0;
2391 }
2392 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2393 {
a2db2771 2394 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2395 rs6000_altivec_abi = 0;
2396 }
78f5898b
AH
2397 else if (! strcmp (arg, "spe"))
2398 {
a2db2771 2399 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2400 rs6000_spe_abi = 1;
2401 rs6000_altivec_abi = 0;
2402 if (!TARGET_SPE_ABI)
2403 error ("not configured for ABI: '%s'", arg);
2404 }
2405 else if (! strcmp (arg, "no-spe"))
d3603e8c 2406 {
a2db2771 2407 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2408 rs6000_spe_abi = 0;
2409 }
78f5898b
AH
2410
2411 /* These are here for testing during development only, do not
2412 document in the manual please. */
2413 else if (! strcmp (arg, "d64"))
2414 {
2415 rs6000_darwin64_abi = 1;
2416 warning (0, "Using darwin64 ABI");
2417 }
2418 else if (! strcmp (arg, "d32"))
2419 {
2420 rs6000_darwin64_abi = 0;
2421 warning (0, "Using old darwin ABI");
2422 }
2423
602ea4d3
JJ
2424 else if (! strcmp (arg, "ibmlongdouble"))
2425 {
d3603e8c 2426 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2427 rs6000_ieeequad = 0;
2428 warning (0, "Using IBM extended precision long double");
2429 }
2430 else if (! strcmp (arg, "ieeelongdouble"))
2431 {
d3603e8c 2432 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2433 rs6000_ieeequad = 1;
2434 warning (0, "Using IEEE extended precision long double");
2435 }
2436
78f5898b
AH
2437 else
2438 {
2439 error ("unknown ABI specified: '%s'", arg);
2440 return false;
2441 }
2442 break;
2443
2444 case OPT_mcpu_:
2445 rs6000_select[1].string = arg;
2446 break;
2447
2448 case OPT_mtune_:
2449 rs6000_select[2].string = arg;
2450 break;
2451
2452 case OPT_mtraceback_:
2453 rs6000_traceback_name = arg;
2454 break;
2455
2456 case OPT_mfloat_gprs_:
2457 rs6000_explicit_options.float_gprs = true;
2458 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2459 rs6000_float_gprs = 1;
2460 else if (! strcmp (arg, "double"))
2461 rs6000_float_gprs = 2;
2462 else if (! strcmp (arg, "no"))
2463 rs6000_float_gprs = 0;
2464 else
2465 {
2466 error ("invalid option for -mfloat-gprs: '%s'", arg);
2467 return false;
2468 }
2469 break;
2470
2471 case OPT_mlong_double_:
2472 rs6000_explicit_options.long_double = true;
2473 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2474 if (value != 64 && value != 128)
2475 {
2476 error ("Unknown switch -mlong-double-%s", arg);
2477 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2478 return false;
2479 }
2480 else
2481 rs6000_long_double_type_size = value;
2482 break;
2483
2484 case OPT_msched_costly_dep_:
2485 rs6000_sched_costly_dep_str = arg;
2486 break;
2487
2488 case OPT_malign_:
2489 rs6000_explicit_options.alignment = true;
2490 if (! strcmp (arg, "power"))
2491 {
2492 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2493 some C library functions, so warn about it. The flag may be
2494 useful for performance studies from time to time though, so
2495 don't disable it entirely. */
2496 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2497 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2498 " it is incompatible with the installed C and C++ libraries");
2499 rs6000_alignment_flags = MASK_ALIGN_POWER;
2500 }
2501 else if (! strcmp (arg, "natural"))
2502 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2503 else
2504 {
2505 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2506 return false;
2507 }
2508 break;
696e45ba
ME
2509
2510 case OPT_msingle_float:
2511 if (!TARGET_SINGLE_FPU)
2512 warning (0, "-msingle-float option equivalent to -mhard-float");
2513 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2514 rs6000_double_float = 0;
2515 target_flags &= ~MASK_SOFT_FLOAT;
2516 target_flags_explicit |= MASK_SOFT_FLOAT;
2517 break;
2518
2519 case OPT_mdouble_float:
2520 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2521 rs6000_single_float = 1;
2522 target_flags &= ~MASK_SOFT_FLOAT;
2523 target_flags_explicit |= MASK_SOFT_FLOAT;
2524 break;
2525
2526 case OPT_msimple_fpu:
2527 if (!TARGET_SINGLE_FPU)
2528 warning (0, "-msimple-fpu option ignored");
2529 break;
2530
2531 case OPT_mhard_float:
2532 /* -mhard_float implies -msingle-float and -mdouble-float. */
2533 rs6000_single_float = rs6000_double_float = 1;
2534 break;
2535
2536 case OPT_msoft_float:
2537 /* -msoft_float implies -mnosingle-float and -mnodouble-float. */
2538 rs6000_single_float = rs6000_double_float = 0;
2539 break;
0bb7b92e
ME
2540
2541 case OPT_mfpu_:
2542 fpu_type = rs6000_parse_fpu_option(arg);
2543 if (fpu_type != FPU_NONE)
2544 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2545 {
2546 target_flags &= ~MASK_SOFT_FLOAT;
2547 target_flags_explicit |= MASK_SOFT_FLOAT;
2548 rs6000_xilinx_fpu = 1;
2549 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2550 rs6000_single_float = 1;
2551 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2552 rs6000_single_float = rs6000_double_float = 1;
2553 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2554 rs6000_simple_fpu = 1;
2555 }
2556 else
2557 {
2558 /* -mfpu=none is equivalent to -msoft-float */
2559 target_flags |= MASK_SOFT_FLOAT;
2560 target_flags_explicit |= MASK_SOFT_FLOAT;
2561 rs6000_single_float = rs6000_double_float = 0;
2562 }
2563 break;
78f5898b
AH
2564 }
2565 return true;
2566}
3cfa4909
MM
2567\f
2568/* Do anything needed at the start of the asm file. */
2569
1bc7c5b6 2570static void
863d938c 2571rs6000_file_start (void)
3cfa4909 2572{
c4d38ccb 2573 size_t i;
3cfa4909 2574 char buffer[80];
d330fd93 2575 const char *start = buffer;
3cfa4909 2576 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2577 const char *default_cpu = TARGET_CPU_DEFAULT;
2578 FILE *file = asm_out_file;
2579
2580 default_file_start ();
2581
2582#ifdef TARGET_BI_ARCH
2583 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2584 default_cpu = 0;
2585#endif
3cfa4909
MM
2586
2587 if (flag_verbose_asm)
2588 {
2589 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2590 rs6000_select[0].string = default_cpu;
2591
b6a1cbae 2592 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2593 {
2594 ptr = &rs6000_select[i];
2595 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2596 {
2597 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2598 start = "";
2599 }
2600 }
2601
9c6b4ed9 2602 if (PPC405_ERRATUM77)
b0bfee6e 2603 {
9c6b4ed9 2604 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2605 start = "";
2606 }
b0bfee6e 2607
b91da81f 2608#ifdef USING_ELFOS_H
3cfa4909
MM
2609 switch (rs6000_sdata)
2610 {
2611 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2612 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2613 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2614 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2615 }
2616
2617 if (rs6000_sdata && g_switch_value)
2618 {
307b599c
MK
2619 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2620 g_switch_value);
3cfa4909
MM
2621 start = "";
2622 }
2623#endif
2624
2625 if (*start == '\0')
949ea356 2626 putc ('\n', file);
3cfa4909 2627 }
b723e82f 2628
e51917ae
JM
2629#ifdef HAVE_AS_GNU_ATTRIBUTE
2630 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2631 {
2632 fprintf (file, "\t.gnu_attribute 4, %d\n",
696e45ba
ME
2633 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2634 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2635 : 2));
aaa42494
DJ
2636 fprintf (file, "\t.gnu_attribute 8, %d\n",
2637 (TARGET_ALTIVEC_ABI ? 2
2638 : TARGET_SPE_ABI ? 3
2639 : 1));
2640 }
e51917ae
JM
2641#endif
2642
b723e82f
JJ
2643 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2644 {
d6b5193b
RS
2645 switch_to_section (toc_section);
2646 switch_to_section (text_section);
b723e82f 2647 }
3cfa4909 2648}
c4e18b1c 2649
5248c961 2650\f
a0ab749a 2651/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2652
2653int
863d938c 2654direct_return (void)
9878760c 2655{
4697a36c
MM
2656 if (reload_completed)
2657 {
2658 rs6000_stack_t *info = rs6000_stack_info ();
2659
2660 if (info->first_gp_reg_save == 32
2661 && info->first_fp_reg_save == 64
00b960c7 2662 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2663 && ! info->lr_save_p
2664 && ! info->cr_save_p
00b960c7 2665 && info->vrsave_mask == 0
c81fc13e 2666 && ! info->push_p)
4697a36c
MM
2667 return 1;
2668 }
2669
2670 return 0;
9878760c
RK
2671}
2672
4e74d8ec
MM
2673/* Return the number of instructions it takes to form a constant in an
2674 integer register. */
2675
48d72335 2676int
a2369ed3 2677num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2678{
2679 /* signed constant loadable with {cal|addi} */
547b216d 2680 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2681 return 1;
2682
4e74d8ec 2683 /* constant loadable with {cau|addis} */
547b216d
DE
2684 else if ((value & 0xffff) == 0
2685 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2686 return 1;
2687
5f59ecb7 2688#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2689 else if (TARGET_POWERPC64)
4e74d8ec 2690 {
a65c591c
DE
2691 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2692 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2693
a65c591c 2694 if (high == 0 || high == -1)
4e74d8ec
MM
2695 return 2;
2696
a65c591c 2697 high >>= 1;
4e74d8ec 2698
a65c591c 2699 if (low == 0)
4e74d8ec 2700 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2701 else
2702 return (num_insns_constant_wide (high)
e396202a 2703 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2704 }
2705#endif
2706
2707 else
2708 return 2;
2709}
2710
2711int
a2369ed3 2712num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2713{
37409796 2714 HOST_WIDE_INT low, high;
bb8df8a6 2715
37409796 2716 switch (GET_CODE (op))
0d30d435 2717 {
37409796 2718 case CONST_INT:
0d30d435 2719#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2720 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2721 && mask64_operand (op, mode))
c4ad648e 2722 return 2;
0d30d435
DE
2723 else
2724#endif
2725 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2726
37409796 2727 case CONST_DOUBLE:
e41b2a33 2728 if (mode == SFmode || mode == SDmode)
37409796
NS
2729 {
2730 long l;
2731 REAL_VALUE_TYPE rv;
bb8df8a6 2732
37409796 2733 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2734 if (DECIMAL_FLOAT_MODE_P (mode))
2735 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2736 else
2737 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2738 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2739 }
a260abc9 2740
37409796
NS
2741 if (mode == VOIDmode || mode == DImode)
2742 {
2743 high = CONST_DOUBLE_HIGH (op);
2744 low = CONST_DOUBLE_LOW (op);
2745 }
2746 else
2747 {
2748 long l[2];
2749 REAL_VALUE_TYPE rv;
bb8df8a6 2750
37409796 2751 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2752 if (DECIMAL_FLOAT_MODE_P (mode))
2753 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2754 else
2755 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2756 high = l[WORDS_BIG_ENDIAN == 0];
2757 low = l[WORDS_BIG_ENDIAN != 0];
2758 }
47ad8c61 2759
37409796
NS
2760 if (TARGET_32BIT)
2761 return (num_insns_constant_wide (low)
2762 + num_insns_constant_wide (high));
2763 else
2764 {
2765 if ((high == 0 && low >= 0)
2766 || (high == -1 && low < 0))
2767 return num_insns_constant_wide (low);
bb8df8a6 2768
1990cd79 2769 else if (mask64_operand (op, mode))
37409796 2770 return 2;
bb8df8a6 2771
37409796
NS
2772 else if (low == 0)
2773 return num_insns_constant_wide (high) + 1;
bb8df8a6 2774
37409796
NS
2775 else
2776 return (num_insns_constant_wide (high)
2777 + num_insns_constant_wide (low) + 1);
2778 }
bb8df8a6 2779
37409796
NS
2780 default:
2781 gcc_unreachable ();
4e74d8ec 2782 }
4e74d8ec
MM
2783}
2784
0972012c
RS
2785/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2786 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2787 corresponding element of the vector, but for V4SFmode and V2SFmode,
2788 the corresponding "float" is interpreted as an SImode integer. */
2789
847535b6 2790HOST_WIDE_INT
0972012c
RS
2791const_vector_elt_as_int (rtx op, unsigned int elt)
2792{
2793 rtx tmp = CONST_VECTOR_ELT (op, elt);
2794 if (GET_MODE (op) == V4SFmode
2795 || GET_MODE (op) == V2SFmode)
2796 tmp = gen_lowpart (SImode, tmp);
2797 return INTVAL (tmp);
2798}
452a7d36 2799
77ccdfed 2800/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2801 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2802 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2803 all items are set to the same value and contain COPIES replicas of the
2804 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2805 operand and the others are set to the value of the operand's msb. */
2806
2807static bool
2808vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2809{
66180ff3
PB
2810 enum machine_mode mode = GET_MODE (op);
2811 enum machine_mode inner = GET_MODE_INNER (mode);
2812
2813 unsigned i;
2814 unsigned nunits = GET_MODE_NUNITS (mode);
2815 unsigned bitsize = GET_MODE_BITSIZE (inner);
2816 unsigned mask = GET_MODE_MASK (inner);
2817
0972012c 2818 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2819 HOST_WIDE_INT splat_val = val;
2820 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2821
2822 /* Construct the value to be splatted, if possible. If not, return 0. */
2823 for (i = 2; i <= copies; i *= 2)
452a7d36 2824 {
66180ff3
PB
2825 HOST_WIDE_INT small_val;
2826 bitsize /= 2;
2827 small_val = splat_val >> bitsize;
2828 mask >>= bitsize;
2829 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2830 return false;
2831 splat_val = small_val;
2832 }
c4ad648e 2833
66180ff3
PB
2834 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2835 if (EASY_VECTOR_15 (splat_val))
2836 ;
2837
2838 /* Also check if we can splat, and then add the result to itself. Do so if
2839 the value is positive, of if the splat instruction is using OP's mode;
2840 for splat_val < 0, the splat and the add should use the same mode. */
2841 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2842 && (splat_val >= 0 || (step == 1 && copies == 1)))
2843 ;
2844
2845 else
2846 return false;
2847
2848 /* Check if VAL is present in every STEP-th element, and the
2849 other elements are filled with its most significant bit. */
2850 for (i = 0; i < nunits - 1; ++i)
2851 {
2852 HOST_WIDE_INT desired_val;
2853 if (((i + 1) & (step - 1)) == 0)
2854 desired_val = val;
2855 else
2856 desired_val = msb_val;
2857
0972012c 2858 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2859 return false;
452a7d36 2860 }
66180ff3
PB
2861
2862 return true;
452a7d36
HP
2863}
2864
69ef87e2 2865
77ccdfed 2866/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2867 with a vspltisb, vspltish or vspltisw. */
2868
2869bool
2870easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2871{
66180ff3 2872 unsigned step, copies;
d744e06e 2873
66180ff3
PB
2874 if (mode == VOIDmode)
2875 mode = GET_MODE (op);
2876 else if (mode != GET_MODE (op))
2877 return false;
d744e06e 2878
66180ff3
PB
2879 /* Start with a vspltisw. */
2880 step = GET_MODE_NUNITS (mode) / 4;
2881 copies = 1;
2882
2883 if (vspltis_constant (op, step, copies))
2884 return true;
2885
2886 /* Then try with a vspltish. */
2887 if (step == 1)
2888 copies <<= 1;
2889 else
2890 step >>= 1;
2891
2892 if (vspltis_constant (op, step, copies))
2893 return true;
2894
2895 /* And finally a vspltisb. */
2896 if (step == 1)
2897 copies <<= 1;
2898 else
2899 step >>= 1;
2900
2901 if (vspltis_constant (op, step, copies))
2902 return true;
2903
2904 return false;
d744e06e
AH
2905}
2906
66180ff3
PB
2907/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2908 result is OP. Abort if it is not possible. */
d744e06e 2909
f676971a 2910rtx
66180ff3 2911gen_easy_altivec_constant (rtx op)
452a7d36 2912{
66180ff3
PB
2913 enum machine_mode mode = GET_MODE (op);
2914 int nunits = GET_MODE_NUNITS (mode);
2915 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2916 unsigned step = nunits / 4;
2917 unsigned copies = 1;
2918
2919 /* Start with a vspltisw. */
2920 if (vspltis_constant (op, step, copies))
2921 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2922
2923 /* Then try with a vspltish. */
2924 if (step == 1)
2925 copies <<= 1;
2926 else
2927 step >>= 1;
2928
2929 if (vspltis_constant (op, step, copies))
2930 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2931
2932 /* And finally a vspltisb. */
2933 if (step == 1)
2934 copies <<= 1;
2935 else
2936 step >>= 1;
2937
2938 if (vspltis_constant (op, step, copies))
2939 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2940
2941 gcc_unreachable ();
d744e06e
AH
2942}
2943
2944const char *
a2369ed3 2945output_vec_const_move (rtx *operands)
d744e06e
AH
2946{
2947 int cst, cst2;
2948 enum machine_mode mode;
2949 rtx dest, vec;
2950
2951 dest = operands[0];
2952 vec = operands[1];
d744e06e 2953 mode = GET_MODE (dest);
69ef87e2 2954
d744e06e
AH
2955 if (TARGET_ALTIVEC)
2956 {
66180ff3 2957 rtx splat_vec;
d744e06e
AH
2958 if (zero_constant (vec, mode))
2959 return "vxor %0,%0,%0";
37409796 2960
66180ff3
PB
2961 splat_vec = gen_easy_altivec_constant (vec);
2962 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2963 operands[1] = XEXP (splat_vec, 0);
2964 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2965 return "#";
bb8df8a6 2966
66180ff3 2967 switch (GET_MODE (splat_vec))
98ef3137 2968 {
37409796 2969 case V4SImode:
66180ff3 2970 return "vspltisw %0,%1";
c4ad648e 2971
37409796 2972 case V8HImode:
66180ff3 2973 return "vspltish %0,%1";
c4ad648e 2974
37409796 2975 case V16QImode:
66180ff3 2976 return "vspltisb %0,%1";
bb8df8a6 2977
37409796
NS
2978 default:
2979 gcc_unreachable ();
98ef3137 2980 }
69ef87e2
AH
2981 }
2982
37409796 2983 gcc_assert (TARGET_SPE);
bb8df8a6 2984
37409796
NS
2985 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2986 pattern of V1DI, V4HI, and V2SF.
2987
2988 FIXME: We should probably return # and add post reload
2989 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2990 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2991 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2992 operands[1] = CONST_VECTOR_ELT (vec, 0);
2993 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2994 if (cst == cst2)
2995 return "li %0,%1\n\tevmergelo %0,%0,%0";
2996 else
2997 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2998}
2999
f5027409
RE
3000/* Initialize TARGET of vector PAIRED to VALS. */
3001
3002void
3003paired_expand_vector_init (rtx target, rtx vals)
3004{
3005 enum machine_mode mode = GET_MODE (target);
3006 int n_elts = GET_MODE_NUNITS (mode);
3007 int n_var = 0;
0a2aaacc 3008 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
3009 int i;
3010
3011 for (i = 0; i < n_elts; ++i)
3012 {
3013 x = XVECEXP (vals, 0, i);
3014 if (!CONSTANT_P (x))
3015 ++n_var;
3016 }
3017 if (n_var == 0)
3018 {
3019 /* Load from constant pool. */
3020 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3021 return;
3022 }
3023
3024 if (n_var == 2)
3025 {
3026 /* The vector is initialized only with non-constants. */
0a2aaacc 3027 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
3028 XVECEXP (vals, 0, 1));
3029
0a2aaacc 3030 emit_move_insn (target, new_rtx);
f5027409
RE
3031 return;
3032 }
3033
3034 /* One field is non-constant and the other one is a constant. Load the
3035 constant from the constant pool and use ps_merge instruction to
3036 construct the whole vector. */
3037 op1 = XVECEXP (vals, 0, 0);
3038 op2 = XVECEXP (vals, 0, 1);
3039
3040 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3041
3042 tmp = gen_reg_rtx (GET_MODE (constant_op));
3043 emit_move_insn (tmp, constant_op);
3044
3045 if (CONSTANT_P (op1))
0a2aaacc 3046 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 3047 else
0a2aaacc 3048 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 3049
0a2aaacc 3050 emit_move_insn (target, new_rtx);
f5027409
RE
3051}
3052
e2e95f45
RE
3053void
3054paired_expand_vector_move (rtx operands[])
3055{
3056 rtx op0 = operands[0], op1 = operands[1];
3057
3058 emit_move_insn (op0, op1);
3059}
3060
3061/* Emit vector compare for code RCODE. DEST is destination, OP1 and
3062 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3063 operands for the relation operation COND. This is a recursive
3064 function. */
3065
3066static void
3067paired_emit_vector_compare (enum rtx_code rcode,
3068 rtx dest, rtx op0, rtx op1,
3069 rtx cc_op0, rtx cc_op1)
3070{
3071 rtx tmp = gen_reg_rtx (V2SFmode);
3072 rtx tmp1, max, min, equal_zero;
3073
3074 gcc_assert (TARGET_PAIRED_FLOAT);
3075 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3076
3077 switch (rcode)
3078 {
3079 case LT:
3080 case LTU:
3081 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3082 return;
3083 case GE:
3084 case GEU:
3085 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3086 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3087 return;
3088 case LE:
3089 case LEU:
3090 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3091 return;
3092 case GT:
3093 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3094 return;
3095 case EQ:
3096 tmp1 = gen_reg_rtx (V2SFmode);
3097 max = gen_reg_rtx (V2SFmode);
3098 min = gen_reg_rtx (V2SFmode);
3099 equal_zero = gen_reg_rtx (V2SFmode);
3100
3101 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3102 emit_insn (gen_selv2sf4
3103 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3104 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3105 emit_insn (gen_selv2sf4
3106 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3107 emit_insn (gen_subv2sf3 (tmp1, min, max));
3108 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3109 return;
3110 case NE:
3111 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3112 return;
3113 case UNLE:
3114 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3115 return;
3116 case UNLT:
3117 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3118 return;
3119 case UNGE:
3120 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3121 return;
3122 case UNGT:
3123 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3124 return;
3125 default:
3126 gcc_unreachable ();
3127 }
3128
3129 return;
3130}
3131
3132/* Emit vector conditional expression.
3133 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3134 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3135
3136int
3137paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3138 rtx cond, rtx cc_op0, rtx cc_op1)
3139{
3140 enum rtx_code rcode = GET_CODE (cond);
3141
3142 if (!TARGET_PAIRED_FLOAT)
3143 return 0;
3144
3145 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3146
3147 return 1;
3148}
3149
7a4eca66
DE
3150/* Initialize vector TARGET to VALS. */
3151
3152void
3153rs6000_expand_vector_init (rtx target, rtx vals)
3154{
3155 enum machine_mode mode = GET_MODE (target);
3156 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3157 int n_elts = GET_MODE_NUNITS (mode);
3158 int n_var = 0, one_var = -1;
3159 bool all_same = true, all_const_zero = true;
3160 rtx x, mem;
3161 int i;
3162
3163 for (i = 0; i < n_elts; ++i)
3164 {
3165 x = XVECEXP (vals, 0, i);
3166 if (!CONSTANT_P (x))
3167 ++n_var, one_var = i;
3168 else if (x != CONST0_RTX (inner_mode))
3169 all_const_zero = false;
3170
3171 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3172 all_same = false;
3173 }
3174
3175 if (n_var == 0)
3176 {
501fb355 3177 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3178 if (mode != V4SFmode && all_const_zero)
3179 {
3180 /* Zero register. */
3181 emit_insn (gen_rtx_SET (VOIDmode, target,
3182 gen_rtx_XOR (mode, target, target)));
3183 return;
3184 }
501fb355 3185 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3186 {
3187 /* Splat immediate. */
501fb355 3188 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3189 return;
3190 }
3191 else if (all_same)
3192 ; /* Splat vector element. */
3193 else
3194 {
3195 /* Load from constant pool. */
501fb355 3196 emit_move_insn (target, const_vec);
7a4eca66
DE
3197 return;
3198 }
3199 }
3200
3201 /* Store value to stack temp. Load vector element. Splat. */
3202 if (all_same)
3203 {
3204 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3205 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3206 XVECEXP (vals, 0, 0));
3207 x = gen_rtx_UNSPEC (VOIDmode,
3208 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3209 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3210 gen_rtvec (2,
3211 gen_rtx_SET (VOIDmode,
3212 target, mem),
3213 x)));
3214 x = gen_rtx_VEC_SELECT (inner_mode, target,
3215 gen_rtx_PARALLEL (VOIDmode,
3216 gen_rtvec (1, const0_rtx)));
3217 emit_insn (gen_rtx_SET (VOIDmode, target,
3218 gen_rtx_VEC_DUPLICATE (mode, x)));
3219 return;
3220 }
3221
3222 /* One field is non-constant. Load constant then overwrite
3223 varying field. */
3224 if (n_var == 1)
3225 {
3226 rtx copy = copy_rtx (vals);
3227
57b51d4d 3228 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3229 varying element. */
3230 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3231 rs6000_expand_vector_init (target, copy);
3232
3233 /* Insert variable. */
3234 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3235 return;
3236 }
3237
3238 /* Construct the vector in memory one field at a time
3239 and load the whole vector. */
3240 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3241 for (i = 0; i < n_elts; i++)
3242 emit_move_insn (adjust_address_nv (mem, inner_mode,
3243 i * GET_MODE_SIZE (inner_mode)),
3244 XVECEXP (vals, 0, i));
3245 emit_move_insn (target, mem);
3246}
3247
3248/* Set field ELT of TARGET to VAL. */
3249
3250void
3251rs6000_expand_vector_set (rtx target, rtx val, int elt)
3252{
3253 enum machine_mode mode = GET_MODE (target);
3254 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3255 rtx reg = gen_reg_rtx (mode);
3256 rtx mask, mem, x;
3257 int width = GET_MODE_SIZE (inner_mode);
3258 int i;
3259
3260 /* Load single variable value. */
3261 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3262 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3263 x = gen_rtx_UNSPEC (VOIDmode,
3264 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3265 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3266 gen_rtvec (2,
3267 gen_rtx_SET (VOIDmode,
3268 reg, mem),
3269 x)));
3270
3271 /* Linear sequence. */
3272 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3273 for (i = 0; i < 16; ++i)
3274 XVECEXP (mask, 0, i) = GEN_INT (i);
3275
3276 /* Set permute mask to insert element into target. */
3277 for (i = 0; i < width; ++i)
3278 XVECEXP (mask, 0, elt*width + i)
3279 = GEN_INT (i + 0x10);
3280 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3281 x = gen_rtx_UNSPEC (mode,
3282 gen_rtvec (3, target, reg,
3283 force_reg (V16QImode, x)),
3284 UNSPEC_VPERM);
3285 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3286}
3287
3288/* Extract field ELT from VEC into TARGET. */
3289
3290void
3291rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3292{
3293 enum machine_mode mode = GET_MODE (vec);
3294 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3295 rtx mem, x;
3296
3297 /* Allocate mode-sized buffer. */
3298 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3299
3300 /* Add offset to field within buffer matching vector element. */
3301 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3302
3303 /* Store single field into mode-sized buffer. */
3304 x = gen_rtx_UNSPEC (VOIDmode,
3305 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3306 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3307 gen_rtvec (2,
3308 gen_rtx_SET (VOIDmode,
3309 mem, vec),
3310 x)));
3311 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3312}
3313
0ba1b2ff
AM
3314/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3315 implement ANDing by the mask IN. */
3316void
a2369ed3 3317build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3318{
3319#if HOST_BITS_PER_WIDE_INT >= 64
3320 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3321 int shift;
3322
37409796 3323 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3324
3325 c = INTVAL (in);
3326 if (c & 1)
3327 {
3328 /* Assume c initially something like 0x00fff000000fffff. The idea
3329 is to rotate the word so that the middle ^^^^^^ group of zeros
3330 is at the MS end and can be cleared with an rldicl mask. We then
3331 rotate back and clear off the MS ^^ group of zeros with a
3332 second rldicl. */
3333 c = ~c; /* c == 0xff000ffffff00000 */
3334 lsb = c & -c; /* lsb == 0x0000000000100000 */
3335 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3336 c = ~c; /* c == 0x00fff000000fffff */
3337 c &= -lsb; /* c == 0x00fff00000000000 */
3338 lsb = c & -c; /* lsb == 0x0000100000000000 */
3339 c = ~c; /* c == 0xff000fffffffffff */
3340 c &= -lsb; /* c == 0xff00000000000000 */
3341 shift = 0;
3342 while ((lsb >>= 1) != 0)
3343 shift++; /* shift == 44 on exit from loop */
3344 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3345 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3346 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3347 }
3348 else
0ba1b2ff
AM
3349 {
3350 /* Assume c initially something like 0xff000f0000000000. The idea
3351 is to rotate the word so that the ^^^ middle group of zeros
3352 is at the LS end and can be cleared with an rldicr mask. We then
3353 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3354 a second rldicr. */
3355 lsb = c & -c; /* lsb == 0x0000010000000000 */
3356 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3357 c = ~c; /* c == 0x00fff0ffffffffff */
3358 c &= -lsb; /* c == 0x00fff00000000000 */
3359 lsb = c & -c; /* lsb == 0x0000100000000000 */
3360 c = ~c; /* c == 0xff000fffffffffff */
3361 c &= -lsb; /* c == 0xff00000000000000 */
3362 shift = 0;
3363 while ((lsb >>= 1) != 0)
3364 shift++; /* shift == 44 on exit from loop */
3365 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3366 m1 >>= shift; /* m1 == 0x0000000000000fff */
3367 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3368 }
3369
3370 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3371 masks will be all 1's. We are guaranteed more than one transition. */
3372 out[0] = GEN_INT (64 - shift);
3373 out[1] = GEN_INT (m1);
3374 out[2] = GEN_INT (shift);
3375 out[3] = GEN_INT (m2);
3376#else
045572c7
GK
3377 (void)in;
3378 (void)out;
37409796 3379 gcc_unreachable ();
0ba1b2ff 3380#endif
a260abc9
DE
3381}
3382
54b695e7 3383/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3384
3385bool
54b695e7
AH
3386invalid_e500_subreg (rtx op, enum machine_mode mode)
3387{
61c76239
JM
3388 if (TARGET_E500_DOUBLE)
3389 {
17caeff2 3390 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3391 subreg:TI and reg:TF. Decimal float modes are like integer
3392 modes (only low part of each register used) for this
3393 purpose. */
61c76239 3394 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3395 && (mode == SImode || mode == DImode || mode == TImode
3396 || mode == DDmode || mode == TDmode)
61c76239 3397 && REG_P (SUBREG_REG (op))
17caeff2 3398 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3399 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3400 return true;
3401
17caeff2
JM
3402 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3403 reg:TI. */
61c76239 3404 if (GET_CODE (op) == SUBREG
4f011e1e 3405 && (mode == DFmode || mode == TFmode)
61c76239 3406 && REG_P (SUBREG_REG (op))
17caeff2 3407 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3408 || GET_MODE (SUBREG_REG (op)) == TImode
3409 || GET_MODE (SUBREG_REG (op)) == DDmode
3410 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3411 return true;
3412 }
54b695e7 3413
61c76239
JM
3414 if (TARGET_SPE
3415 && GET_CODE (op) == SUBREG
3416 && mode == SImode
54b695e7 3417 && REG_P (SUBREG_REG (op))
14502dad 3418 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3419 return true;
3420
3421 return false;
3422}
3423
58182de3 3424/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3425 field is an FP double while the FP fields remain word aligned. */
3426
19d66194 3427unsigned int
fa5b0972
AM
3428rs6000_special_round_type_align (tree type, unsigned int computed,
3429 unsigned int specified)
95727fb8 3430{
fa5b0972 3431 unsigned int align = MAX (computed, specified);
95727fb8 3432 tree field = TYPE_FIELDS (type);
95727fb8 3433
bb8df8a6 3434 /* Skip all non field decls */
85962ac8 3435 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3436 field = TREE_CHAIN (field);
3437
fa5b0972
AM
3438 if (field != NULL && field != type)
3439 {
3440 type = TREE_TYPE (field);
3441 while (TREE_CODE (type) == ARRAY_TYPE)
3442 type = TREE_TYPE (type);
3443
3444 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3445 align = MAX (align, 64);
3446 }
95727fb8 3447
fa5b0972 3448 return align;
95727fb8
AP
3449}
3450
58182de3
GK
3451/* Darwin increases record alignment to the natural alignment of
3452 the first field. */
3453
3454unsigned int
3455darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3456 unsigned int specified)
3457{
3458 unsigned int align = MAX (computed, specified);
3459
3460 if (TYPE_PACKED (type))
3461 return align;
3462
3463 /* Find the first field, looking down into aggregates. */
3464 do {
3465 tree field = TYPE_FIELDS (type);
3466 /* Skip all non field decls */
3467 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3468 field = TREE_CHAIN (field);
3469 if (! field)
3470 break;
3471 type = TREE_TYPE (field);
3472 while (TREE_CODE (type) == ARRAY_TYPE)
3473 type = TREE_TYPE (type);
3474 } while (AGGREGATE_TYPE_P (type));
3475
3476 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3477 align = MAX (align, TYPE_ALIGN (type));
3478
3479 return align;
3480}
3481
a4f6c312 3482/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3483
3484int
f676971a 3485small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3486 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3487{
38c1f2d7 3488#if TARGET_ELF
5f59ecb7 3489 rtx sym_ref;
7509c759 3490
d9407988 3491 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3492 return 0;
a54d04b7 3493
f607bc57 3494 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3495 return 0;
3496
2aa42e6e
NF
3497 /* Vector and float memory instructions have a limited offset on the
3498 SPE, so using a vector or float variable directly as an operand is
3499 not useful. */
3500 if (TARGET_SPE
3501 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3502 return 0;
3503
88228c4b
MM
3504 if (GET_CODE (op) == SYMBOL_REF)
3505 sym_ref = op;
3506
3507 else if (GET_CODE (op) != CONST
3508 || GET_CODE (XEXP (op, 0)) != PLUS
3509 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3510 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3511 return 0;
3512
88228c4b 3513 else
dbf55e53
MM
3514 {
3515 rtx sum = XEXP (op, 0);
3516 HOST_WIDE_INT summand;
3517
3518 /* We have to be careful here, because it is the referenced address
c4ad648e 3519 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3520 summand = INTVAL (XEXP (sum, 1));
307b599c 3521 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3522 return 0;
dbf55e53
MM
3523
3524 sym_ref = XEXP (sum, 0);
3525 }
88228c4b 3526
20bfcd69 3527 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3528#else
3529 return 0;
3530#endif
7509c759 3531}
46c07df8 3532
3a1f863f 3533/* Return true if either operand is a general purpose register. */
46c07df8 3534
3a1f863f
DE
3535bool
3536gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3537{
3a1f863f
DE
3538 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3539 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3540}
3541
9ebbca7d 3542\f
4d588c14
RH
3543/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3544
4d588c14 3545static bool
a2369ed3 3546constant_pool_expr_p (rtx op)
9ebbca7d 3547{
2e4316da
RS
3548 rtx base, offset;
3549
3550 split_const (op, &base, &offset);
3551 return (GET_CODE (base) == SYMBOL_REF
3552 && CONSTANT_POOL_ADDRESS_P (base)
3553 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
9ebbca7d
GK
3554}
3555
48d72335 3556bool
a2369ed3 3557toc_relative_expr_p (rtx op)
9ebbca7d 3558{
2e4316da
RS
3559 rtx base, offset;
3560
3561 if (GET_CODE (op) != CONST)
3562 return false;
3563
3564 split_const (op, &base, &offset);
3565 return (GET_CODE (base) == UNSPEC
3566 && XINT (base, 1) == UNSPEC_TOCREL);
4d588c14
RH
3567}
3568
4d588c14 3569bool
a2369ed3 3570legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3571{
3572 return (TARGET_TOC
3573 && GET_CODE (x) == PLUS
3574 && GET_CODE (XEXP (x, 0)) == REG
3575 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2e4316da 3576 && toc_relative_expr_p (XEXP (x, 1)));
4d588c14
RH
3577}
3578
d04b6e6e
EB
3579static bool
3580legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3581{
3582 return (DEFAULT_ABI == ABI_V4
3583 && !flag_pic && !TARGET_TOC
3584 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3585 && small_data_operand (x, mode));
3586}
3587
60cdabab
DE
3588/* SPE offset addressing is limited to 5-bits worth of double words. */
3589#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3590
76d2b81d
DJ
3591bool
3592rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3593{
3594 unsigned HOST_WIDE_INT offset, extra;
3595
3596 if (GET_CODE (x) != PLUS)
3597 return false;
3598 if (GET_CODE (XEXP (x, 0)) != REG)
3599 return false;
3600 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3601 return false;
60cdabab
DE
3602 if (legitimate_constant_pool_address_p (x))
3603 return true;
4d588c14
RH
3604 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3605 return false;
3606
3607 offset = INTVAL (XEXP (x, 1));
3608 extra = 0;
3609 switch (mode)
3610 {
3611 case V16QImode:
3612 case V8HImode:
3613 case V4SFmode:
3614 case V4SImode:
7a4eca66 3615 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3616 constant offset zero should not occur due to canonicalization. */
3617 return false;
4d588c14
RH
3618
3619 case V4HImode:
3620 case V2SImode:
3621 case V1DImode:
3622 case V2SFmode:
d42a3bae 3623 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3624 constant offset zero should not occur due to canonicalization. */
d42a3bae 3625 if (TARGET_PAIRED_FLOAT)
1a23970d 3626 return false;
4d588c14
RH
3627 /* SPE vector modes. */
3628 return SPE_CONST_OFFSET_OK (offset);
3629
3630 case DFmode:
4d4cbc0e
AH
3631 if (TARGET_E500_DOUBLE)
3632 return SPE_CONST_OFFSET_OK (offset);
3633
4f011e1e 3634 case DDmode:
4d588c14 3635 case DImode:
54b695e7
AH
3636 /* On e500v2, we may have:
3637
3638 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3639
3640 Which gets addressed with evldd instructions. */
3641 if (TARGET_E500_DOUBLE)
3642 return SPE_CONST_OFFSET_OK (offset);
3643
7393f7f8 3644 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3645 extra = 4;
3646 else if (offset & 3)
3647 return false;
3648 break;
3649
3650 case TFmode:
17caeff2
JM
3651 if (TARGET_E500_DOUBLE)
3652 return (SPE_CONST_OFFSET_OK (offset)
3653 && SPE_CONST_OFFSET_OK (offset + 8));
3654
4f011e1e 3655 case TDmode:
4d588c14 3656 case TImode:
7393f7f8 3657 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3658 extra = 12;
3659 else if (offset & 3)
3660 return false;
3661 else
3662 extra = 8;
3663 break;
3664
3665 default:
3666 break;
3667 }
3668
b1917422
AM
3669 offset += 0x8000;
3670 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3671}
3672
6fb5fa3c 3673bool
a2369ed3 3674legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3675{
3676 rtx op0, op1;
3677
3678 if (GET_CODE (x) != PLUS)
3679 return false;
850e8d3d 3680
4d588c14
RH
3681 op0 = XEXP (x, 0);
3682 op1 = XEXP (x, 1);
3683
bf00cc0f 3684 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3685 replaced with proper base and index regs. */
3686 if (!strict
3687 && reload_in_progress
3688 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3689 && REG_P (op1))
3690 return true;
3691
3692 return (REG_P (op0) && REG_P (op1)
3693 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3694 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3695 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3696 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3697}
3698
48d72335 3699inline bool
a2369ed3 3700legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3701{
3702 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3703}
3704
48d72335 3705bool
4c81e946
FJ
3706macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3707{
c4ad648e 3708 if (!TARGET_MACHO || !flag_pic
9390387d 3709 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3710 return false;
3711 x = XEXP (x, 0);
4c81e946
FJ
3712
3713 if (GET_CODE (x) != LO_SUM)
3714 return false;
3715 if (GET_CODE (XEXP (x, 0)) != REG)
3716 return false;
3717 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3718 return false;
3719 x = XEXP (x, 1);
3720
3721 return CONSTANT_P (x);
3722}
3723
4d588c14 3724static bool
a2369ed3 3725legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3726{
3727 if (GET_CODE (x) != LO_SUM)
3728 return false;
3729 if (GET_CODE (XEXP (x, 0)) != REG)
3730 return false;
3731 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3732 return false;
54b695e7 3733 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3734 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3735 || mode == DDmode || mode == TDmode
17caeff2 3736 || mode == DImode))
f82f556d 3737 return false;
4d588c14
RH
3738 x = XEXP (x, 1);
3739
8622e235 3740 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3741 {
a29077da 3742 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3743 return false;
3744 if (TARGET_TOC)
3745 return false;
3746 if (GET_MODE_NUNITS (mode) != 1)
3747 return false;
5e5f01b9 3748 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3749 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
696e45ba 3750 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4d4447b5 3751 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3752 return false;
3753
3754 return CONSTANT_P (x);
3755 }
3756
3757 return false;
3758}
3759
3760
9ebbca7d
GK
3761/* Try machine-dependent ways of modifying an illegitimate address
3762 to be legitimate. If we find one, return the new, valid address.
3763 This is used from only one place: `memory_address' in explow.c.
3764
a4f6c312
SS
3765 OLDX is the address as it was before break_out_memory_refs was
3766 called. In some cases it is useful to look at this to decide what
3767 needs to be done.
9ebbca7d 3768
a4f6c312 3769 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3770
a4f6c312
SS
3771 It is always safe for this function to do nothing. It exists to
3772 recognize opportunities to optimize the output.
9ebbca7d
GK
3773
3774 On RS/6000, first check for the sum of a register with a constant
3775 integer that is out of range. If so, generate code to add the
3776 constant with the low-order 16 bits masked to the register and force
3777 this result into another register (this can be done with `cau').
3778 Then generate an address of REG+(CONST&0xffff), allowing for the
3779 possibility of bit 16 being a one.
3780
3781 Then check for the sum of a register and something not constant, try to
3782 load the other things into a register and return the sum. */
4d588c14 3783
9ebbca7d 3784rtx
a2369ed3
DJ
3785rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3786 enum machine_mode mode)
0ac081f6 3787{
c4501e62
JJ
3788 if (GET_CODE (x) == SYMBOL_REF)
3789 {
3790 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3791 if (model != 0)
3792 return rs6000_legitimize_tls_address (x, model);
3793 }
3794
f676971a 3795 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3796 && GET_CODE (XEXP (x, 0)) == REG
3797 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3798 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3799 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3800 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3801 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3802 || mode == DImode || mode == DDmode
3803 || mode == TDmode))))
f676971a 3804 {
9ebbca7d
GK
3805 HOST_WIDE_INT high_int, low_int;
3806 rtx sum;
a65c591c
DE
3807 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3808 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3809 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3810 GEN_INT (high_int)), 0);
3811 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3812 }
f676971a 3813 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3814 && GET_CODE (XEXP (x, 0)) == REG
3815 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3816 && GET_MODE_NUNITS (mode) == 1
696e45ba 3817 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
a3170dc6 3818 || TARGET_POWERPC64
efc05e3c 3819 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3820 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3821 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3822 && mode != TImode
3823 && mode != TFmode
3824 && mode != TDmode)
9ebbca7d
GK
3825 {
3826 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3827 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3828 }
0ac081f6
AH
3829 else if (ALTIVEC_VECTOR_MODE (mode))
3830 {
3831 rtx reg;
3832
3833 /* Make sure both operands are registers. */
3834 if (GET_CODE (x) == PLUS)
9f85ed45 3835 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3836 force_reg (Pmode, XEXP (x, 1)));
3837
3838 reg = force_reg (Pmode, x);
3839 return reg;
3840 }
4d4cbc0e 3841 else if (SPE_VECTOR_MODE (mode)
17caeff2 3842 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3843 || mode == DDmode || mode == TDmode
54b695e7 3844 || mode == DImode)))
a3170dc6 3845 {
54b695e7
AH
3846 if (mode == DImode)
3847 return NULL_RTX;
a3170dc6
AH
3848 /* We accept [reg + reg] and [reg + OFFSET]. */
3849
3850 if (GET_CODE (x) == PLUS)
61dd226f
NF
3851 {
3852 rtx op1 = XEXP (x, 0);
3853 rtx op2 = XEXP (x, 1);
3854 rtx y;
3855
3856 op1 = force_reg (Pmode, op1);
3857
3858 if (GET_CODE (op2) != REG
3859 && (GET_CODE (op2) != CONST_INT
3860 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3861 || (GET_MODE_SIZE (mode) > 8
3862 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3863 op2 = force_reg (Pmode, op2);
3864
3865 /* We can't always do [reg + reg] for these, because [reg +
3866 reg + offset] is not a legitimate addressing mode. */
3867 y = gen_rtx_PLUS (Pmode, op1, op2);
3868
4f011e1e 3869 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3870 return force_reg (Pmode, y);
3871 else
3872 return y;
3873 }
a3170dc6
AH
3874
3875 return force_reg (Pmode, x);
3876 }
f1384257
AM
3877 else if (TARGET_ELF
3878 && TARGET_32BIT
3879 && TARGET_NO_TOC
3880 && ! flag_pic
9ebbca7d 3881 && GET_CODE (x) != CONST_INT
f676971a 3882 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3883 && CONSTANT_P (x)
6ac7bf2c
GK
3884 && GET_MODE_NUNITS (mode) == 1
3885 && (GET_MODE_BITSIZE (mode) <= 32
696e45ba 3886 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3887 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3888 {
3889 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3890 emit_insn (gen_elf_high (reg, x));
3891 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3892 }
ee890fe2
SS
3893 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3894 && ! flag_pic
ab82a49f
AP
3895#if TARGET_MACHO
3896 && ! MACHO_DYNAMIC_NO_PIC_P
3897#endif
ee890fe2 3898 && GET_CODE (x) != CONST_INT
f676971a 3899 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3900 && CONSTANT_P (x)
506a7bc8 3901 && GET_MODE_NUNITS (mode) == 1
696e45ba 3902 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4d4447b5 3903 || (mode != DFmode && mode != DDmode))
f676971a 3904 && mode != DImode
ee890fe2
SS
3905 && mode != TImode)
3906 {
3907 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3908 emit_insn (gen_macho_high (reg, x));
3909 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3910 }
f676971a 3911 else if (TARGET_TOC
0cdc04e8 3912 && GET_CODE (x) == SYMBOL_REF
4d588c14 3913 && constant_pool_expr_p (x)
a9098fd0 3914 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3915 {
3916 return create_TOC_reference (x);
3917 }
3918 else
3919 return NULL_RTX;
3920}
258bfae2 3921
fdbe66f2 3922/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3923 We need to emit DTP-relative relocations. */
3924
fdbe66f2 3925static void
c973d557
JJ
3926rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3927{
3928 switch (size)
3929 {
3930 case 4:
3931 fputs ("\t.long\t", file);
3932 break;
3933 case 8:
3934 fputs (DOUBLE_INT_ASM_OP, file);
3935 break;
3936 default:
37409796 3937 gcc_unreachable ();
c973d557
JJ
3938 }
3939 output_addr_const (file, x);
3940 fputs ("@dtprel+0x8000", file);
3941}
3942
c4501e62
JJ
3943/* Construct the SYMBOL_REF for the tls_get_addr function. */
3944
3945static GTY(()) rtx rs6000_tls_symbol;
3946static rtx
863d938c 3947rs6000_tls_get_addr (void)
c4501e62
JJ
3948{
3949 if (!rs6000_tls_symbol)
3950 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3951
3952 return rs6000_tls_symbol;
3953}
3954
3955/* Construct the SYMBOL_REF for TLS GOT references. */
3956
3957static GTY(()) rtx rs6000_got_symbol;
3958static rtx
863d938c 3959rs6000_got_sym (void)
c4501e62
JJ
3960{
3961 if (!rs6000_got_symbol)
3962 {
3963 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3964 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3965 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3966 }
c4501e62
JJ
3967
3968 return rs6000_got_symbol;
3969}
3970
3971/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3972 this (thread-local) address. */
3973
3974static rtx
a2369ed3 3975rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3976{
3977 rtx dest, insn;
3978
3979 dest = gen_reg_rtx (Pmode);
3980 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3981 {
3982 rtx tlsreg;
3983
3984 if (TARGET_64BIT)
3985 {
3986 tlsreg = gen_rtx_REG (Pmode, 13);
3987 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3988 }
3989 else
3990 {
3991 tlsreg = gen_rtx_REG (Pmode, 2);
3992 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3993 }
3994 emit_insn (insn);
3995 }
3996 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3997 {
3998 rtx tlsreg, tmp;
3999
4000 tmp = gen_reg_rtx (Pmode);
4001 if (TARGET_64BIT)
4002 {
4003 tlsreg = gen_rtx_REG (Pmode, 13);
4004 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4005 }
4006 else
4007 {
4008 tlsreg = gen_rtx_REG (Pmode, 2);
4009 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4010 }
4011 emit_insn (insn);
4012 if (TARGET_64BIT)
4013 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4014 else
4015 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4016 emit_insn (insn);
4017 }
4018 else
4019 {
4020 rtx r3, got, tga, tmp1, tmp2, eqv;
4021
4fed8f8f
AM
4022 /* We currently use relocations like @got@tlsgd for tls, which
4023 means the linker will handle allocation of tls entries, placing
4024 them in the .got section. So use a pointer to the .got section,
4025 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4026 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 4027 if (TARGET_64BIT)
972f427b 4028 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
4029 else
4030 {
4031 if (flag_pic == 1)
4032 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4033 else
4034 {
4035 rtx gsym = rs6000_got_sym ();
4036 got = gen_reg_rtx (Pmode);
4037 if (flag_pic == 0)
4038 rs6000_emit_move (got, gsym, Pmode);
4039 else
4040 {
e65a3857 4041 rtx tmp3, mem;
c4501e62
JJ
4042 rtx first, last;
4043
c4501e62
JJ
4044 tmp1 = gen_reg_rtx (Pmode);
4045 tmp2 = gen_reg_rtx (Pmode);
4046 tmp3 = gen_reg_rtx (Pmode);
542a8afa 4047 mem = gen_const_mem (Pmode, tmp1);
c4501e62 4048
e65a3857
DE
4049 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4050 emit_move_insn (tmp1,
1de43f85 4051 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
4052 emit_move_insn (tmp2, mem);
4053 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4054 last = emit_move_insn (got, tmp3);
bd94cb6e 4055 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4056 }
4057 }
4058 }
4059
4060 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4061 {
4062 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4063 tga = rs6000_tls_get_addr ();
4064
4065 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4066 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4067 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4068 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4069 else if (DEFAULT_ABI == ABI_V4)
4070 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4071 else
02135bc1
SB
4072 gcc_unreachable ();
4073
c4501e62 4074 start_sequence ();
c4501e62 4075 insn = emit_call_insn (insn);
becfd6e5 4076 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4077 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4078 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4079 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4080 insn = get_insns ();
4081 end_sequence ();
4082 emit_libcall_block (insn, dest, r3, addr);
4083 }
4084 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4085 {
4086 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4087 tga = rs6000_tls_get_addr ();
4088
4089 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4090 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4091 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4092 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4093 else if (DEFAULT_ABI == ABI_V4)
4094 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4095 else
02135bc1
SB
4096 gcc_unreachable ();
4097
c4501e62 4098 start_sequence ();
c4501e62 4099 insn = emit_call_insn (insn);
becfd6e5 4100 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4101 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4102 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4103 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4104 insn = get_insns ();
4105 end_sequence ();
4106 tmp1 = gen_reg_rtx (Pmode);
4107 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4108 UNSPEC_TLSLD);
4109 emit_libcall_block (insn, tmp1, r3, eqv);
4110 if (rs6000_tls_size == 16)
4111 {
4112 if (TARGET_64BIT)
4113 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4114 else
4115 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4116 }
4117 else if (rs6000_tls_size == 32)
4118 {
4119 tmp2 = gen_reg_rtx (Pmode);
4120 if (TARGET_64BIT)
4121 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4122 else
4123 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4124 emit_insn (insn);
4125 if (TARGET_64BIT)
4126 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4127 else
4128 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4129 }
4130 else
4131 {
4132 tmp2 = gen_reg_rtx (Pmode);
4133 if (TARGET_64BIT)
4134 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4135 else
4136 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4137 emit_insn (insn);
4138 insn = gen_rtx_SET (Pmode, dest,
4139 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4140 }
4141 emit_insn (insn);
4142 }
4143 else
4144 {
a7b376ee 4145 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4146 tmp2 = gen_reg_rtx (Pmode);
4147 if (TARGET_64BIT)
4148 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4149 else
4150 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4151 emit_insn (insn);
4152 if (TARGET_64BIT)
4153 insn = gen_tls_tls_64 (dest, tmp2, addr);
4154 else
4155 insn = gen_tls_tls_32 (dest, tmp2, addr);
4156 emit_insn (insn);
4157 }
4158 }
4159
4160 return dest;
4161}
4162
c4501e62
JJ
4163/* Return 1 if X contains a thread-local symbol. */
4164
4165bool
a2369ed3 4166rs6000_tls_referenced_p (rtx x)
c4501e62 4167{
cd413cab
AP
4168 if (! TARGET_HAVE_TLS)
4169 return false;
4170
c4501e62
JJ
4171 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4172}
4173
4174/* Return 1 if *X is a thread-local symbol. This is the same as
4175 rs6000_tls_symbol_ref except for the type of the unused argument. */
4176
9390387d 4177static int
a2369ed3 4178rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4179{
4180 return RS6000_SYMBOL_REF_TLS_P (*x);
4181}
4182
24ea750e
DJ
4183/* The convention appears to be to define this wherever it is used.
4184 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4185 is now used here. */
4186#ifndef REG_MODE_OK_FOR_BASE_P
4187#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4188#endif
4189
4190/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4191 replace the input X, or the original X if no replacement is called for.
4192 The output parameter *WIN is 1 if the calling macro should goto WIN,
4193 0 if it should not.
4194
4195 For RS/6000, we wish to handle large displacements off a base
4196 register by splitting the addend across an addiu/addis and the mem insn.
4197 This cuts number of extra insns needed from 3 to 1.
4198
4199 On Darwin, we use this to generate code for floating point constants.
4200 A movsf_low is generated so we wind up with 2 instructions rather than 3.
08a6a74b
RS
4201 The Darwin code is inside #if TARGET_MACHO because only then are the
4202 machopic_* functions defined. */
24ea750e 4203rtx
f676971a 4204rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4205 int opnum, int type,
4206 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4207{
f676971a 4208 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4209 if (GET_CODE (x) == PLUS
4210 && GET_CODE (XEXP (x, 0)) == PLUS
4211 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4212 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4213 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4214 {
4215 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4216 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4217 opnum, (enum reload_type)type);
24ea750e
DJ
4218 *win = 1;
4219 return x;
4220 }
3deb2758 4221
24ea750e
DJ
4222#if TARGET_MACHO
4223 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4224 && GET_CODE (x) == LO_SUM
4225 && GET_CODE (XEXP (x, 0)) == PLUS
4226 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4227 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
24ea750e 4228 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
08a6a74b 4229 && machopic_operand_p (XEXP (x, 1)))
24ea750e
DJ
4230 {
4231 /* Result of previous invocation of this function on Darwin
6f317ef3 4232 floating point constant. */
24ea750e 4233 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4234 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4235 opnum, (enum reload_type)type);
24ea750e
DJ
4236 *win = 1;
4237 return x;
4238 }
4239#endif
4937d02d
DE
4240
4241 /* Force ld/std non-word aligned offset into base register by wrapping
4242 in offset 0. */
4243 if (GET_CODE (x) == PLUS
4244 && GET_CODE (XEXP (x, 0)) == REG
4245 && REGNO (XEXP (x, 0)) < 32
4246 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4247 && GET_CODE (XEXP (x, 1)) == CONST_INT
4248 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4249 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4250 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4251 && TARGET_POWERPC64)
4252 {
4253 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4254 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4255 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4256 opnum, (enum reload_type) type);
4257 *win = 1;
4258 return x;
4259 }
4260
24ea750e
DJ
4261 if (GET_CODE (x) == PLUS
4262 && GET_CODE (XEXP (x, 0)) == REG
4263 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4264 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4265 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4266 && !SPE_VECTOR_MODE (mode)
17caeff2 4267 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4268 || mode == DDmode || mode == TDmode
54b695e7 4269 || mode == DImode))
78c875e8 4270 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4271 {
4272 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4273 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4274 HOST_WIDE_INT high
c4ad648e 4275 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4276
4277 /* Check for 32-bit overflow. */
4278 if (high + low != val)
c4ad648e 4279 {
24ea750e
DJ
4280 *win = 0;
4281 return x;
4282 }
4283
4284 /* Reload the high part into a base reg; leave the low part
c4ad648e 4285 in the mem directly. */
24ea750e
DJ
4286
4287 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4288 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4289 GEN_INT (high)),
4290 GEN_INT (low));
24ea750e
DJ
4291
4292 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4293 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4294 opnum, (enum reload_type)type);
24ea750e
DJ
4295 *win = 1;
4296 return x;
4297 }
4937d02d 4298
24ea750e 4299 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4300 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4301 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4302#if TARGET_MACHO
4303 && DEFAULT_ABI == ABI_DARWIN
a29077da 4304 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4305#else
4306 && DEFAULT_ABI == ABI_V4
4307 && !flag_pic
4308#endif
7393f7f8 4309 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4310 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4311 without fprs. */
0d8c1c97 4312 && mode != TFmode
7393f7f8 4313 && mode != TDmode
7b5d92b2 4314 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4315 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
696e45ba 4316 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
24ea750e 4317 {
8308679f 4318#if TARGET_MACHO
a29077da
GK
4319 if (flag_pic)
4320 {
08a6a74b 4321 rtx offset = machopic_gen_offset (x);
a29077da
GK
4322 x = gen_rtx_LO_SUM (GET_MODE (x),
4323 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4324 gen_rtx_HIGH (Pmode, offset)), offset);
4325 }
4326 else
8308679f 4327#endif
a29077da 4328 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4329 gen_rtx_HIGH (Pmode, x), x);
a29077da 4330
24ea750e 4331 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4332 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4333 opnum, (enum reload_type)type);
24ea750e
DJ
4334 *win = 1;
4335 return x;
4336 }
4937d02d 4337
dec1f3aa
DE
4338 /* Reload an offset address wrapped by an AND that represents the
4339 masking of the lower bits. Strip the outer AND and let reload
4340 convert the offset address into an indirect address. */
4341 if (TARGET_ALTIVEC
4342 && ALTIVEC_VECTOR_MODE (mode)
4343 && GET_CODE (x) == AND
4344 && GET_CODE (XEXP (x, 0)) == PLUS
4345 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4346 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4347 && GET_CODE (XEXP (x, 1)) == CONST_INT
4348 && INTVAL (XEXP (x, 1)) == -16)
4349 {
4350 x = XEXP (x, 0);
4351 *win = 1;
4352 return x;
4353 }
4354
24ea750e 4355 if (TARGET_TOC
0cdc04e8 4356 && GET_CODE (x) == SYMBOL_REF
4d588c14 4357 && constant_pool_expr_p (x)
c1f11548 4358 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4359 {
194c524a 4360 x = create_TOC_reference (x);
24ea750e
DJ
4361 *win = 1;
4362 return x;
4363 }
4364 *win = 0;
4365 return x;
f676971a 4366}
24ea750e 4367
258bfae2
FS
4368/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4369 that is a valid memory address for an instruction.
4370 The MODE argument is the machine mode for the MEM expression
4371 that wants to use this address.
4372
4373 On the RS/6000, there are four valid address: a SYMBOL_REF that
4374 refers to a constant pool entry of an address (or the sum of it
4375 plus a constant), a short (16-bit signed) constant plus a register,
4376 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4377 auto-increment. For DFmode, DDmode and DImode with a constant plus
4378 register, we must ensure that both words are addressable or PowerPC64
4379 with offset word aligned.
258bfae2 4380
4d4447b5 4381 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4382 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4383 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4384 during assembly output. */
4385int
a2369ed3 4386rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4387{
850e8d3d
DN
4388 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4389 if (TARGET_ALTIVEC
4390 && ALTIVEC_VECTOR_MODE (mode)
4391 && GET_CODE (x) == AND
4392 && GET_CODE (XEXP (x, 1)) == CONST_INT
4393 && INTVAL (XEXP (x, 1)) == -16)
4394 x = XEXP (x, 0);
4395
c4501e62
JJ
4396 if (RS6000_SYMBOL_REF_TLS_P (x))
4397 return 0;
4d588c14 4398 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4399 return 1;
4400 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4401 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4402 && !SPE_VECTOR_MODE (mode)
429ec7dc 4403 && mode != TFmode
7393f7f8 4404 && mode != TDmode
54b695e7 4405 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4406 && !(TARGET_E500_DOUBLE
4407 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4408 && TARGET_UPDATE
4d588c14 4409 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4410 return 1;
d04b6e6e 4411 if (legitimate_small_data_p (mode, x))
258bfae2 4412 return 1;
4d588c14 4413 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4414 return 1;
4415 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4416 if (! reg_ok_strict
4417 && GET_CODE (x) == PLUS
4418 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4419 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4420 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4421 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4422 return 1;
76d2b81d 4423 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4424 return 1;
4425 if (mode != TImode
76d2b81d 4426 && mode != TFmode
7393f7f8 4427 && mode != TDmode
a3170dc6
AH
4428 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4429 || TARGET_POWERPC64
4f011e1e
JM
4430 || (mode != DFmode && mode != DDmode)
4431 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4432 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4433 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4434 return 1;
6fb5fa3c
DB
4435 if (GET_CODE (x) == PRE_MODIFY
4436 && mode != TImode
4437 && mode != TFmode
4438 && mode != TDmode
696e45ba 4439 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
6fb5fa3c 4440 || TARGET_POWERPC64
4d4447b5 4441 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4442 && (TARGET_POWERPC64 || mode != DImode)
4443 && !ALTIVEC_VECTOR_MODE (mode)
4444 && !SPE_VECTOR_MODE (mode)
4445 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4446 && !(TARGET_E500_DOUBLE
4447 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4448 && TARGET_UPDATE
4449 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4450 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4451 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4452 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4453 return 1;
4d588c14 4454 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4455 return 1;
4456 return 0;
4457}
4d588c14
RH
4458
4459/* Go to LABEL if ADDR (a legitimate address expression)
4460 has an effect that depends on the machine mode it is used for.
4461
4462 On the RS/6000 this is true of all integral offsets (since AltiVec
4463 modes don't allow them) or is a pre-increment or decrement.
4464
4465 ??? Except that due to conceptual problems in offsettable_address_p
4466 we can't really report the problems of integral offsets. So leave
f676971a 4467 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4468 sub-words of a TFmode operand, which is what we had before. */
4469
4470bool
a2369ed3 4471rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4472{
4473 switch (GET_CODE (addr))
4474 {
4475 case PLUS:
4476 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4477 {
4478 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4479 return val + 12 + 0x8000 >= 0x10000;
4480 }
4481 break;
4482
4483 case LO_SUM:
4484 return true;
4485
619fe064 4486 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4487 case PRE_MODIFY:
4488 return TARGET_UPDATE;
4d588c14
RH
4489
4490 default:
4491 break;
4492 }
4493
4494 return false;
4495}
d8ecbcdb 4496
944258eb
RS
4497/* Implement FIND_BASE_TERM. */
4498
4499rtx
4500rs6000_find_base_term (rtx op)
4501{
4502 rtx base, offset;
4503
4504 split_const (op, &base, &offset);
4505 if (GET_CODE (base) == UNSPEC)
4506 switch (XINT (base, 1))
4507 {
4508 case UNSPEC_TOCREL:
4509 case UNSPEC_MACHOPIC_OFFSET:
4510 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4511 for aliasing purposes. */
4512 return XVECEXP (base, 0, 0);
4513 }
4514
4515 return op;
4516}
4517
d04b6e6e
EB
4518/* More elaborate version of recog's offsettable_memref_p predicate
4519 that works around the ??? note of rs6000_mode_dependent_address.
4520 In particular it accepts
4521
4522 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4523
4524 in 32-bit mode, that the recog predicate rejects. */
4525
4526bool
4527rs6000_offsettable_memref_p (rtx op)
4528{
4529 if (!MEM_P (op))
4530 return false;
4531
4532 /* First mimic offsettable_memref_p. */
4533 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4534 return true;
4535
4536 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4537 the latter predicate knows nothing about the mode of the memory
4538 reference and, therefore, assumes that it is the largest supported
4539 mode (TFmode). As a consequence, legitimate offsettable memory
4540 references are rejected. rs6000_legitimate_offset_address_p contains
4541 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4542 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4543}
4544
d8ecbcdb
AH
4545/* Return number of consecutive hard regs needed starting at reg REGNO
4546 to hold something of mode MODE.
4547 This is ordinarily the length in words of a value of mode MODE
4548 but can be less for certain modes in special long registers.
4549
4550 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4551 scalar instructions. The upper 32 bits are only available to the
4552 SIMD instructions.
4553
4554 POWER and PowerPC GPRs hold 32 bits worth;
4555 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
4556
4557int
4558rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4559{
4560 if (FP_REGNO_P (regno))
4561 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4562
4563 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4564 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4565
4566 if (ALTIVEC_REGNO_P (regno))
4567 return
4568 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4569
8521c414
JM
4570 /* The value returned for SCmode in the E500 double case is 2 for
4571 ABI compatibility; storing an SCmode value in a single register
4572 would require function_arg and rs6000_spe_function_arg to handle
4573 SCmode so as to pass the value correctly in a pair of
4574 registers. */
4f011e1e
JM
4575 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4576 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4577 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4578
d8ecbcdb
AH
4579 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4580}
2aa4498c
AH
4581
4582/* Change register usage conditional on target flags. */
4583void
4584rs6000_conditional_register_usage (void)
4585{
4586 int i;
4587
4588 /* Set MQ register fixed (already call_used) if not POWER
4589 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4590 be allocated. */
4591 if (! TARGET_POWER)
4592 fixed_regs[64] = 1;
4593
7c9ac5c0 4594 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4595 if (TARGET_64BIT)
4596 fixed_regs[13] = call_used_regs[13]
4597 = call_really_used_regs[13] = 1;
4598
4599 /* Conditionally disable FPRs. */
4600 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4601 for (i = 32; i < 64; i++)
4602 fixed_regs[i] = call_used_regs[i]
c4ad648e 4603 = call_really_used_regs[i] = 1;
2aa4498c 4604
7c9ac5c0
PH
4605 /* The TOC register is not killed across calls in a way that is
4606 visible to the compiler. */
4607 if (DEFAULT_ABI == ABI_AIX)
4608 call_really_used_regs[2] = 0;
4609
2aa4498c
AH
4610 if (DEFAULT_ABI == ABI_V4
4611 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4612 && flag_pic == 2)
4613 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4614
4615 if (DEFAULT_ABI == ABI_V4
4616 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4617 && flag_pic == 1)
4618 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4619 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4620 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4621
4622 if (DEFAULT_ABI == ABI_DARWIN
4623 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4624 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4625 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4626 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4627
b4db40bf
JJ
4628 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4629 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4630 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4631
2aa4498c
AH
4632 if (TARGET_SPE)
4633 {
4634 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4635 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4636 registers in prologues and epilogues. We no longer use r14
4637 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4638 pool for link-compatibility with older versions of GCC. Once
4639 "old" code has died out, we can return r14 to the allocation
4640 pool. */
4641 fixed_regs[14]
4642 = call_used_regs[14]
4643 = call_really_used_regs[14] = 1;
2aa4498c
AH
4644 }
4645
0db747be 4646 if (!TARGET_ALTIVEC)
2aa4498c
AH
4647 {
4648 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4649 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4650 call_really_used_regs[VRSAVE_REGNO] = 1;
4651 }
4652
0db747be
DE
4653 if (TARGET_ALTIVEC)
4654 global_regs[VSCR_REGNO] = 1;
4655
2aa4498c 4656 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4657 {
4658 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4659 call_used_regs[i] = call_really_used_regs[i] = 1;
4660
4661 /* AIX reserves VR20:31 in non-extended ABI mode. */
4662 if (TARGET_XCOFF)
4663 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4664 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4665 }
2aa4498c 4666}
fb4d4348 4667\f
a4f6c312
SS
4668/* Try to output insns to set TARGET equal to the constant C if it can
4669 be done in less than N insns. Do all computations in MODE.
4670 Returns the place where the output has been placed if it can be
4671 done and the insns have been emitted. If it would take more than N
4672 insns, zero is returned and no insns and emitted. */
2bfcf297
DB
4673
4674rtx
f676971a 4675rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4676 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4677{
af8cb5c5 4678 rtx result, insn, set;
2bfcf297
DB
4679 HOST_WIDE_INT c0, c1;
4680
37409796 4681 switch (mode)
2bfcf297 4682 {
37409796
NS
4683 case QImode:
4684 case HImode:
2bfcf297 4685 if (dest == NULL)
c4ad648e 4686 dest = gen_reg_rtx (mode);
2bfcf297
DB
4687 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4688 return dest;
bb8df8a6 4689
37409796 4690 case SImode:
b3a13419 4691 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4692
d448860e 4693 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4694 GEN_INT (INTVAL (source)
4695 & (~ (HOST_WIDE_INT) 0xffff))));
4696 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4697 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4698 GEN_INT (INTVAL (source) & 0xffff))));
4699 result = dest;
37409796
NS
4700 break;
4701
4702 case DImode:
4703 switch (GET_CODE (source))
af8cb5c5 4704 {
37409796 4705 case CONST_INT:
af8cb5c5
DE
4706 c0 = INTVAL (source);
4707 c1 = -(c0 < 0);
37409796 4708 break;
bb8df8a6 4709
37409796 4710 case CONST_DOUBLE:
2bfcf297 4711#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4712 c0 = CONST_DOUBLE_LOW (source);
4713 c1 = -(c0 < 0);
2bfcf297 4714#else
af8cb5c5
DE
4715 c0 = CONST_DOUBLE_LOW (source);
4716 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4717#endif
37409796
NS
4718 break;
4719
4720 default:
4721 gcc_unreachable ();
af8cb5c5 4722 }
af8cb5c5
DE
4723
4724 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4725 break;
4726
4727 default:
4728 gcc_unreachable ();
2bfcf297 4729 }
2bfcf297 4730
af8cb5c5
DE
4731 insn = get_last_insn ();
4732 set = single_set (insn);
4733 if (! CONSTANT_P (SET_SRC (set)))
4734 set_unique_reg_note (insn, REG_EQUAL, source);
4735
4736 return result;
2bfcf297
DB
4737}
4738
4739/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4740 fall back to a straight forward decomposition. We do this to avoid
4741 exponential run times encountered when looking for longer sequences
4742 with rs6000_emit_set_const. */
4743static rtx
a2369ed3 4744rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4745{
4746 if (!TARGET_POWERPC64)
4747 {
4748 rtx operand1, operand2;
4749
4750 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4751 DImode);
d448860e 4752 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4753 DImode);
4754 emit_move_insn (operand1, GEN_INT (c1));
4755 emit_move_insn (operand2, GEN_INT (c2));
4756 }
4757 else
4758 {
bc06712d 4759 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4760
bc06712d 4761 ud1 = c1 & 0xffff;
f921c9c9 4762 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4763#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4764 c2 = c1 >> 32;
2bfcf297 4765#endif
bc06712d 4766 ud3 = c2 & 0xffff;
f921c9c9 4767 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4768
f676971a 4769 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4770 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4771 {
bc06712d 4772 if (ud1 & 0x8000)
b78d48dd 4773 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4774 else
4775 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4776 }
2bfcf297 4777
f676971a 4778 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4779 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4780 {
bc06712d 4781 if (ud2 & 0x8000)
f676971a 4782 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4783 - 0x80000000));
252b88f7 4784 else
bc06712d
TR
4785 emit_move_insn (dest, GEN_INT (ud2 << 16));
4786 if (ud1 != 0)
d448860e
JH
4787 emit_move_insn (copy_rtx (dest),
4788 gen_rtx_IOR (DImode, copy_rtx (dest),
4789 GEN_INT (ud1)));
252b88f7 4790 }
f676971a 4791 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4792 || (ud4 == 0 && ! (ud3 & 0x8000)))
4793 {
4794 if (ud3 & 0x8000)
f676971a 4795 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4796 - 0x80000000));
4797 else
4798 emit_move_insn (dest, GEN_INT (ud3 << 16));
4799
4800 if (ud2 != 0)
d448860e
JH
4801 emit_move_insn (copy_rtx (dest),
4802 gen_rtx_IOR (DImode, copy_rtx (dest),
4803 GEN_INT (ud2)));
4804 emit_move_insn (copy_rtx (dest),
4805 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4806 GEN_INT (16)));
bc06712d 4807 if (ud1 != 0)
d448860e
JH
4808 emit_move_insn (copy_rtx (dest),
4809 gen_rtx_IOR (DImode, copy_rtx (dest),
4810 GEN_INT (ud1)));
bc06712d 4811 }
f676971a 4812 else
bc06712d
TR
4813 {
4814 if (ud4 & 0x8000)
f676971a 4815 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4816 - 0x80000000));
4817 else
4818 emit_move_insn (dest, GEN_INT (ud4 << 16));
4819
4820 if (ud3 != 0)
d448860e
JH
4821 emit_move_insn (copy_rtx (dest),
4822 gen_rtx_IOR (DImode, copy_rtx (dest),
4823 GEN_INT (ud3)));
2bfcf297 4824
d448860e
JH
4825 emit_move_insn (copy_rtx (dest),
4826 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4827 GEN_INT (32)));
bc06712d 4828 if (ud2 != 0)
d448860e
JH
4829 emit_move_insn (copy_rtx (dest),
4830 gen_rtx_IOR (DImode, copy_rtx (dest),
4831 GEN_INT (ud2 << 16)));
bc06712d 4832 if (ud1 != 0)
d448860e
JH
4833 emit_move_insn (copy_rtx (dest),
4834 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4835 }
4836 }
2bfcf297
DB
4837 return dest;
4838}
4839
76d2b81d 4840/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4841 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4842
4843static void
4844rs6000_eliminate_indexed_memrefs (rtx operands[2])
4845{
4846 if (GET_CODE (operands[0]) == MEM
4847 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4848 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4849 && ! reload_in_progress)
4850 operands[0]
4851 = replace_equiv_address (operands[0],
4852 copy_addr_to_reg (XEXP (operands[0], 0)));
4853
4854 if (GET_CODE (operands[1]) == MEM
4855 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4856 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4857 && ! reload_in_progress)
4858 operands[1]
4859 = replace_equiv_address (operands[1],
4860 copy_addr_to_reg (XEXP (operands[1], 0)));
4861}
4862
fb4d4348
GK
4863/* Emit a move from SOURCE to DEST in mode MODE. */
4864void
a2369ed3 4865rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4866{
4867 rtx operands[2];
4868 operands[0] = dest;
4869 operands[1] = source;
f676971a 4870
fb4d4348
GK
4871 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4872 if (GET_CODE (operands[1]) == CONST_DOUBLE
4873 && ! FLOAT_MODE_P (mode)
4874 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4875 {
4876 /* FIXME. This should never happen. */
4877 /* Since it seems that it does, do the safe thing and convert
4878 to a CONST_INT. */
2496c7bd 4879 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4880 }
37409796
NS
4881 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4882 || FLOAT_MODE_P (mode)
4883 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4884 || CONST_DOUBLE_LOW (operands[1]) < 0)
4885 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4886 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4887
c9e8cb32
DD
4888 /* Check if GCC is setting up a block move that will end up using FP
4889 registers as temporaries. We must make sure this is acceptable. */
4890 if (GET_CODE (operands[0]) == MEM
4891 && GET_CODE (operands[1]) == MEM
4892 && mode == DImode
41543739
GK
4893 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4894 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4895 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4896 ? 32 : MEM_ALIGN (operands[0])))
4897 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4898 ? 32
41543739
GK
4899 : MEM_ALIGN (operands[1]))))
4900 && ! MEM_VOLATILE_P (operands [0])
4901 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4902 {
41543739
GK
4903 emit_move_insn (adjust_address (operands[0], SImode, 0),
4904 adjust_address (operands[1], SImode, 0));
d448860e
JH
4905 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4906 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4907 return;
4908 }
630d42a0 4909
b3a13419 4910 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4911 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4912 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4913
a3170dc6 4914 if (mode == SFmode && ! TARGET_POWERPC
696e45ba 4915 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
ffc14f31 4916 && GET_CODE (operands[0]) == MEM)
fb4d4348 4917 {
ffc14f31
GK
4918 int regnum;
4919
4920 if (reload_in_progress || reload_completed)
4921 regnum = true_regnum (operands[1]);
4922 else if (GET_CODE (operands[1]) == REG)
4923 regnum = REGNO (operands[1]);
4924 else
4925 regnum = -1;
f676971a 4926
fb4d4348
GK
4927 /* If operands[1] is a register, on POWER it may have
4928 double-precision data in it, so truncate it to single
4929 precision. */
4930 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4931 {
4932 rtx newreg;
b3a13419 4933 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4934 : gen_reg_rtx (mode));
fb4d4348
GK
4935 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4936 operands[1] = newreg;
4937 }
4938 }
4939
c4501e62
JJ
4940 /* Recognize the case where operand[1] is a reference to thread-local
4941 data and load its address to a register. */
84f52ebd 4942 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4943 {
84f52ebd
RH
4944 enum tls_model model;
4945 rtx tmp = operands[1];
4946 rtx addend = NULL;
4947
4948 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4949 {
4950 addend = XEXP (XEXP (tmp, 0), 1);
4951 tmp = XEXP (XEXP (tmp, 0), 0);
4952 }
4953
4954 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4955 model = SYMBOL_REF_TLS_MODEL (tmp);
4956 gcc_assert (model != 0);
4957
4958 tmp = rs6000_legitimize_tls_address (tmp, model);
4959 if (addend)
4960 {
4961 tmp = gen_rtx_PLUS (mode, tmp, addend);
4962 tmp = force_operand (tmp, operands[0]);
4963 }
4964 operands[1] = tmp;
c4501e62
JJ
4965 }
4966
8f4e6caf
RH
4967 /* Handle the case where reload calls us with an invalid address. */
4968 if (reload_in_progress && mode == Pmode
69ef87e2 4969 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4970 || ! nonimmediate_operand (operands[0], mode)))
4971 goto emit_set;
4972
a9baceb1
GK
4973 /* 128-bit constant floating-point values on Darwin should really be
4974 loaded as two parts. */
8521c414 4975 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4976 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4977 {
4978 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4979 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4980 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4981 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4982 simplify_gen_subreg (imode, operands[1], mode, 0),
4983 imode);
4984 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4985 GET_MODE_SIZE (imode)),
4986 simplify_gen_subreg (imode, operands[1], mode,
4987 GET_MODE_SIZE (imode)),
4988 imode);
a9baceb1
GK
4989 return;
4990 }
4991
e41b2a33
PB
4992 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4993 cfun->machine->sdmode_stack_slot =
4994 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4995
4996 if (reload_in_progress
4997 && mode == SDmode
4998 && MEM_P (operands[0])
4999 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5000 && REG_P (operands[1]))
5001 {
5002 if (FP_REGNO_P (REGNO (operands[1])))
5003 {
5004 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5005 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5006 emit_insn (gen_movsd_store (mem, operands[1]));
5007 }
5008 else if (INT_REGNO_P (REGNO (operands[1])))
5009 {
5010 rtx mem = adjust_address_nv (operands[0], mode, 4);
5011 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5012 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5013 }
5014 else
5015 gcc_unreachable();
5016 return;
5017 }
5018 if (reload_in_progress
5019 && mode == SDmode
5020 && REG_P (operands[0])
5021 && MEM_P (operands[1])
5022 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5023 {
5024 if (FP_REGNO_P (REGNO (operands[0])))
5025 {
5026 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5027 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5028 emit_insn (gen_movsd_load (operands[0], mem));
5029 }
5030 else if (INT_REGNO_P (REGNO (operands[0])))
5031 {
5032 rtx mem = adjust_address_nv (operands[1], mode, 4);
5033 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5034 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5035 }
5036 else
5037 gcc_unreachable();
5038 return;
5039 }
5040
fb4d4348
GK
5041 /* FIXME: In the long term, this switch statement should go away
5042 and be replaced by a sequence of tests based on things like
5043 mode == Pmode. */
5044 switch (mode)
5045 {
5046 case HImode:
5047 case QImode:
5048 if (CONSTANT_P (operands[1])
5049 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 5050 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
5051 break;
5052
06f4e019 5053 case TFmode:
7393f7f8 5054 case TDmode:
76d2b81d
DJ
5055 rs6000_eliminate_indexed_memrefs (operands);
5056 /* fall through */
5057
fb4d4348 5058 case DFmode:
7393f7f8 5059 case DDmode:
fb4d4348 5060 case SFmode:
e41b2a33 5061 case SDmode:
f676971a 5062 if (CONSTANT_P (operands[1])
fb4d4348 5063 && ! easy_fp_constant (operands[1], mode))
a9098fd0 5064 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5065 break;
f676971a 5066
0ac081f6
AH
5067 case V16QImode:
5068 case V8HImode:
5069 case V4SFmode:
5070 case V4SImode:
a3170dc6
AH
5071 case V4HImode:
5072 case V2SFmode:
5073 case V2SImode:
00a892b8 5074 case V1DImode:
69ef87e2 5075 if (CONSTANT_P (operands[1])
d744e06e 5076 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5077 operands[1] = force_const_mem (mode, operands[1]);
5078 break;
f676971a 5079
fb4d4348 5080 case SImode:
a9098fd0 5081 case DImode:
fb4d4348
GK
5082 /* Use default pattern for address of ELF small data */
5083 if (TARGET_ELF
a9098fd0 5084 && mode == Pmode
f607bc57 5085 && DEFAULT_ABI == ABI_V4
f676971a 5086 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5087 || GET_CODE (operands[1]) == CONST)
5088 && small_data_operand (operands[1], mode))
fb4d4348
GK
5089 {
5090 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5091 return;
5092 }
5093
f607bc57 5094 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5095 && mode == Pmode && mode == SImode
5096 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5097 {
5098 emit_insn (gen_movsi_got (operands[0], operands[1]));
5099 return;
5100 }
5101
ee890fe2 5102 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5103 && TARGET_NO_TOC
5104 && ! flag_pic
a9098fd0 5105 && mode == Pmode
fb4d4348
GK
5106 && CONSTANT_P (operands[1])
5107 && GET_CODE (operands[1]) != HIGH
5108 && GET_CODE (operands[1]) != CONST_INT)
5109 {
b3a13419
ILT
5110 rtx target = (!can_create_pseudo_p ()
5111 ? operands[0]
5112 : gen_reg_rtx (mode));
fb4d4348
GK
5113
5114 /* If this is a function address on -mcall-aixdesc,
5115 convert it to the address of the descriptor. */
5116 if (DEFAULT_ABI == ABI_AIX
5117 && GET_CODE (operands[1]) == SYMBOL_REF
5118 && XSTR (operands[1], 0)[0] == '.')
5119 {
5120 const char *name = XSTR (operands[1], 0);
5121 rtx new_ref;
5122 while (*name == '.')
5123 name++;
5124 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5125 CONSTANT_POOL_ADDRESS_P (new_ref)
5126 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5127 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5128 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5129 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5130 operands[1] = new_ref;
5131 }
7509c759 5132
ee890fe2
SS
5133 if (DEFAULT_ABI == ABI_DARWIN)
5134 {
ab82a49f
AP
5135#if TARGET_MACHO
5136 if (MACHO_DYNAMIC_NO_PIC_P)
5137 {
5138 /* Take care of any required data indirection. */
5139 operands[1] = rs6000_machopic_legitimize_pic_address (
5140 operands[1], mode, operands[0]);
5141 if (operands[0] != operands[1])
5142 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5143 operands[0], operands[1]));
ab82a49f
AP
5144 return;
5145 }
5146#endif
b8a55285
AP
5147 emit_insn (gen_macho_high (target, operands[1]));
5148 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5149 return;
5150 }
5151
fb4d4348
GK
5152 emit_insn (gen_elf_high (target, operands[1]));
5153 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5154 return;
5155 }
5156
a9098fd0
GK
5157 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5158 and we have put it in the TOC, we just need to make a TOC-relative
5159 reference to it. */
5160 if (TARGET_TOC
5161 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5162 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5163 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5164 get_pool_mode (operands[1])))
fb4d4348 5165 {
a9098fd0 5166 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5167 }
a9098fd0
GK
5168 else if (mode == Pmode
5169 && CONSTANT_P (operands[1])
38886f37
AO
5170 && ((GET_CODE (operands[1]) != CONST_INT
5171 && ! easy_fp_constant (operands[1], mode))
5172 || (GET_CODE (operands[1]) == CONST_INT
5173 && num_insns_constant (operands[1], mode) > 2)
5174 || (GET_CODE (operands[0]) == REG
5175 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5176 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5177 && ! legitimate_constant_pool_address_p (operands[1])
5178 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5179 {
5180 /* Emit a USE operation so that the constant isn't deleted if
5181 expensive optimizations are turned on because nobody
5182 references it. This should only be done for operands that
5183 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5184 This should not be done for operands that contain LABEL_REFs.
5185 For now, we just handle the obvious case. */
5186 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5187 emit_use (operands[1]);
fb4d4348 5188
c859cda6 5189#if TARGET_MACHO
ee890fe2 5190 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5191 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5192 {
ee890fe2
SS
5193 operands[1] =
5194 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5195 operands[0]);
5196 if (operands[0] != operands[1])
5197 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5198 return;
5199 }
c859cda6 5200#endif
ee890fe2 5201
fb4d4348
GK
5202 /* If we are to limit the number of things we put in the TOC and
5203 this is a symbol plus a constant we can add in one insn,
5204 just put the symbol in the TOC and add the constant. Don't do
5205 this if reload is in progress. */
5206 if (GET_CODE (operands[1]) == CONST
5207 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5208 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5209 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5210 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5211 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5212 && ! side_effects_p (operands[0]))
5213 {
a4f6c312
SS
5214 rtx sym =
5215 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5216 rtx other = XEXP (XEXP (operands[1], 0), 1);
5217
a9098fd0
GK
5218 sym = force_reg (mode, sym);
5219 if (mode == SImode)
5220 emit_insn (gen_addsi3 (operands[0], sym, other));
5221 else
5222 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5223 return;
5224 }
5225
a9098fd0 5226 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5227
f676971a 5228 if (TARGET_TOC
0cdc04e8 5229 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5230 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5231 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5232 get_pool_constant (XEXP (operands[1], 0)),
5233 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5234 {
ba4828e0 5235 operands[1]
542a8afa 5236 = gen_const_mem (mode,
c4ad648e 5237 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5238 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5239 }
fb4d4348
GK
5240 }
5241 break;
a9098fd0 5242
fb4d4348 5243 case TImode:
76d2b81d
DJ
5244 rs6000_eliminate_indexed_memrefs (operands);
5245
27dc0551
DE
5246 if (TARGET_POWER)
5247 {
5248 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5249 gen_rtvec (2,
5250 gen_rtx_SET (VOIDmode,
5251 operands[0], operands[1]),
5252 gen_rtx_CLOBBER (VOIDmode,
5253 gen_rtx_SCRATCH (SImode)))));
5254 return;
5255 }
fb4d4348
GK
5256 break;
5257
5258 default:
37409796 5259 gcc_unreachable ();
fb4d4348
GK
5260 }
5261
a9098fd0
GK
5262 /* Above, we may have called force_const_mem which may have returned
5263 an invalid address. If we can, fix this up; otherwise, reload will
5264 have to deal with it. */
8f4e6caf
RH
5265 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5266 operands[1] = validize_mem (operands[1]);
a9098fd0 5267
8f4e6caf 5268 emit_set:
fb4d4348
GK
5269 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5270}
4697a36c 5271\f
2858f73a
GK
5272/* Nonzero if we can use a floating-point register to pass this arg. */
5273#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5274 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a 5275 && (CUM)->fregno <= FP_ARG_MAX_REG \
56f4cc04 5276 && TARGET_HARD_FLOAT && TARGET_FPRS)
2858f73a
GK
5277
5278/* Nonzero if we can use an AltiVec register to pass this arg. */
5279#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5280 (ALTIVEC_VECTOR_MODE (MODE) \
5281 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5282 && TARGET_ALTIVEC_ABI \
83953138 5283 && (NAMED))
2858f73a 5284
c6e8c921
GK
5285/* Return a nonzero value to say to return the function value in
5286 memory, just as large structures are always returned. TYPE will be
5287 the data type of the value, and FNTYPE will be the type of the
5288 function doing the returning, or @code{NULL} for libcalls.
5289
5290 The AIX ABI for the RS/6000 specifies that all structures are
5291 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5292 specifies that structures <= 8 bytes are returned in r3/r4, but a
5293 draft put them in memory, and GCC used to implement the draft
df01da37 5294 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5295 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5296 compatibility can change DRAFT_V4_STRUCT_RET to override the
5297 default, and -m switches get the final word. See
5298 rs6000_override_options for more details.
5299
5300 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5301 long double support is enabled. These values are returned in memory.
5302
5303 int_size_in_bytes returns -1 for variable size objects, which go in
5304 memory always. The cast to unsigned makes -1 > 8. */
5305
5306static bool
586de218 5307rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5308{
594a51fe
SS
5309 /* In the darwin64 abi, try to use registers for larger structs
5310 if possible. */
0b5383eb 5311 if (rs6000_darwin64_abi
594a51fe 5312 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5313 && int_size_in_bytes (type) > 0)
5314 {
5315 CUMULATIVE_ARGS valcum;
5316 rtx valret;
5317
5318 valcum.words = 0;
5319 valcum.fregno = FP_ARG_MIN_REG;
5320 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5321 /* Do a trial code generation as if this were going to be passed
5322 as an argument; if any part goes in memory, we return NULL. */
5323 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5324 if (valret)
5325 return false;
5326 /* Otherwise fall through to more conventional ABI rules. */
5327 }
594a51fe 5328
c6e8c921 5329 if (AGGREGATE_TYPE_P (type)
df01da37 5330 && (aix_struct_return
c6e8c921
GK
5331 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5332 return true;
b693336b 5333
bada2eb8
DE
5334 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5335 modes only exist for GCC vector types if -maltivec. */
5336 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5337 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5338 return false;
5339
b693336b
PB
5340 /* Return synthetic vectors in memory. */
5341 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5342 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5343 {
5344 static bool warned_for_return_big_vectors = false;
5345 if (!warned_for_return_big_vectors)
5346 {
d4ee4d25 5347 warning (0, "GCC vector returned by reference: "
b693336b
PB
5348 "non-standard ABI extension with no compatibility guarantee");
5349 warned_for_return_big_vectors = true;
5350 }
5351 return true;
5352 }
5353
602ea4d3 5354 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5355 return true;
ad630bef 5356
c6e8c921
GK
5357 return false;
5358}
5359
4697a36c
MM
5360/* Initialize a variable CUM of type CUMULATIVE_ARGS
5361 for a call to a function whose data type is FNTYPE.
5362 For a library call, FNTYPE is 0.
5363
5364 For incoming args we set the number of arguments in the prototype large
1c20ae99 5365 so we never return a PARALLEL. */
4697a36c
MM
5366
5367void
f676971a 5368init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5369 rtx libname ATTRIBUTE_UNUSED, int incoming,
5370 int libcall, int n_named_args)
4697a36c
MM
5371{
5372 static CUMULATIVE_ARGS zero_cumulative;
5373
5374 *cum = zero_cumulative;
5375 cum->words = 0;
5376 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5377 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5378 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5379 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5380 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5381 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5382 cum->stdarg = fntype
5383 && (TYPE_ARG_TYPES (fntype) != 0
5384 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5385 != void_type_node));
4697a36c 5386
0f6937fe
AM
5387 cum->nargs_prototype = 0;
5388 if (incoming || cum->prototype)
5389 cum->nargs_prototype = n_named_args;
4697a36c 5390
a5c76ee6 5391 /* Check for a longcall attribute. */
3eb4e360
AM
5392 if ((!fntype && rs6000_default_long_calls)
5393 || (fntype
5394 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5395 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5396 cum->call_cookie |= CALL_LONG;
6a4cee5f 5397
4697a36c
MM
5398 if (TARGET_DEBUG_ARG)
5399 {
5400 fprintf (stderr, "\ninit_cumulative_args:");
5401 if (fntype)
5402 {
5403 tree ret_type = TREE_TYPE (fntype);
5404 fprintf (stderr, " ret code = %s,",
5405 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5406 }
5407
6a4cee5f
MM
5408 if (cum->call_cookie & CALL_LONG)
5409 fprintf (stderr, " longcall,");
5410
4697a36c
MM
5411 fprintf (stderr, " proto = %d, nargs = %d\n",
5412 cum->prototype, cum->nargs_prototype);
5413 }
f676971a 5414
c4ad648e
AM
5415 if (fntype
5416 && !TARGET_ALTIVEC
5417 && TARGET_ALTIVEC_ABI
5418 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5419 {
c85ce869 5420 error ("cannot return value in vector register because"
c4ad648e 5421 " altivec instructions are disabled, use -maltivec"
c85ce869 5422 " to enable them");
c4ad648e 5423 }
4697a36c
MM
5424}
5425\f
fe984136
RH
5426/* Return true if TYPE must be passed on the stack and not in registers. */
5427
5428static bool
586de218 5429rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5430{
5431 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5432 return must_pass_in_stack_var_size (mode, type);
5433 else
5434 return must_pass_in_stack_var_size_or_pad (mode, type);
5435}
5436
c229cba9
DE
5437/* If defined, a C expression which determines whether, and in which
5438 direction, to pad out an argument with extra space. The value
5439 should be of type `enum direction': either `upward' to pad above
5440 the argument, `downward' to pad below, or `none' to inhibit
5441 padding.
5442
5443 For the AIX ABI structs are always stored left shifted in their
5444 argument slot. */
5445
9ebbca7d 5446enum direction
586de218 5447function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5448{
6e985040
AM
5449#ifndef AGGREGATE_PADDING_FIXED
5450#define AGGREGATE_PADDING_FIXED 0
5451#endif
5452#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5453#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5454#endif
5455
5456 if (!AGGREGATE_PADDING_FIXED)
5457 {
5458 /* GCC used to pass structures of the same size as integer types as
5459 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5460 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5461 passed padded downward, except that -mstrict-align further
5462 muddied the water in that multi-component structures of 2 and 4
5463 bytes in size were passed padded upward.
5464
5465 The following arranges for best compatibility with previous
5466 versions of gcc, but removes the -mstrict-align dependency. */
5467 if (BYTES_BIG_ENDIAN)
5468 {
5469 HOST_WIDE_INT size = 0;
5470
5471 if (mode == BLKmode)
5472 {
5473 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5474 size = int_size_in_bytes (type);
5475 }
5476 else
5477 size = GET_MODE_SIZE (mode);
5478
5479 if (size == 1 || size == 2 || size == 4)
5480 return downward;
5481 }
5482 return upward;
5483 }
5484
5485 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5486 {
5487 if (type != 0 && AGGREGATE_TYPE_P (type))
5488 return upward;
5489 }
c229cba9 5490
d3704c46
KH
5491 /* Fall back to the default. */
5492 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5493}
5494
b6c9286a 5495/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5496 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5497 PARM_BOUNDARY is used for all arguments.
f676971a 5498
84e9ad15
AM
5499 V.4 wants long longs and doubles to be double word aligned. Just
5500 testing the mode size is a boneheaded way to do this as it means
5501 that other types such as complex int are also double word aligned.
5502 However, we're stuck with this because changing the ABI might break
5503 existing library interfaces.
5504
b693336b
PB
5505 Doubleword align SPE vectors.
5506 Quadword align Altivec vectors.
5507 Quadword align large synthetic vector types. */
b6c9286a
MM
5508
5509int
b693336b 5510function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5511{
84e9ad15
AM
5512 if (DEFAULT_ABI == ABI_V4
5513 && (GET_MODE_SIZE (mode) == 8
5514 || (TARGET_HARD_FLOAT
5515 && TARGET_FPRS
7393f7f8 5516 && (mode == TFmode || mode == TDmode))))
4ed78545 5517 return 64;
ad630bef
DE
5518 else if (SPE_VECTOR_MODE (mode)
5519 || (type && TREE_CODE (type) == VECTOR_TYPE
5520 && int_size_in_bytes (type) >= 8
5521 && int_size_in_bytes (type) < 16))
e1f83b4d 5522 return 64;
ad630bef
DE
5523 else if (ALTIVEC_VECTOR_MODE (mode)
5524 || (type && TREE_CODE (type) == VECTOR_TYPE
5525 && int_size_in_bytes (type) >= 16))
0ac081f6 5526 return 128;
0b5383eb
DJ
5527 else if (rs6000_darwin64_abi && mode == BLKmode
5528 && type && TYPE_ALIGN (type) > 64)
5529 return 128;
9ebbca7d 5530 else
b6c9286a 5531 return PARM_BOUNDARY;
b6c9286a 5532}
c53bdcf5 5533
294bd182
AM
5534/* For a function parm of MODE and TYPE, return the starting word in
5535 the parameter area. NWORDS of the parameter area are already used. */
5536
5537static unsigned int
5538rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5539{
5540 unsigned int align;
5541 unsigned int parm_offset;
5542
5543 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5544 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5545 return nwords + (-(parm_offset + nwords) & align);
5546}
5547
c53bdcf5
AM
5548/* Compute the size (in words) of a function argument. */
5549
5550static unsigned long
5551rs6000_arg_size (enum machine_mode mode, tree type)
5552{
5553 unsigned long size;
5554
5555 if (mode != BLKmode)
5556 size = GET_MODE_SIZE (mode);
5557 else
5558 size = int_size_in_bytes (type);
5559
5560 if (TARGET_32BIT)
5561 return (size + 3) >> 2;
5562 else
5563 return (size + 7) >> 3;
5564}
b6c9286a 5565\f
0b5383eb 5566/* Use this to flush pending int fields. */
594a51fe
SS
5567
5568static void
0b5383eb
DJ
5569rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5570 HOST_WIDE_INT bitpos)
594a51fe 5571{
0b5383eb
DJ
5572 unsigned int startbit, endbit;
5573 int intregs, intoffset;
5574 enum machine_mode mode;
594a51fe 5575
0b5383eb
DJ
5576 if (cum->intoffset == -1)
5577 return;
594a51fe 5578
0b5383eb
DJ
5579 intoffset = cum->intoffset;
5580 cum->intoffset = -1;
5581
5582 if (intoffset % BITS_PER_WORD != 0)
5583 {
5584 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5585 MODE_INT, 0);
5586 if (mode == BLKmode)
594a51fe 5587 {
0b5383eb
DJ
5588 /* We couldn't find an appropriate mode, which happens,
5589 e.g., in packed structs when there are 3 bytes to load.
5590 Back intoffset back to the beginning of the word in this
5591 case. */
5592 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5593 }
594a51fe 5594 }
0b5383eb
DJ
5595
5596 startbit = intoffset & -BITS_PER_WORD;
5597 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5598 intregs = (endbit - startbit) / BITS_PER_WORD;
5599 cum->words += intregs;
5600}
5601
5602/* The darwin64 ABI calls for us to recurse down through structs,
5603 looking for elements passed in registers. Unfortunately, we have
5604 to track int register count here also because of misalignments
5605 in powerpc alignment mode. */
5606
5607static void
5608rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5609 tree type,
5610 HOST_WIDE_INT startbitpos)
5611{
5612 tree f;
5613
5614 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5615 if (TREE_CODE (f) == FIELD_DECL)
5616 {
5617 HOST_WIDE_INT bitpos = startbitpos;
5618 tree ftype = TREE_TYPE (f);
70fb00df
AP
5619 enum machine_mode mode;
5620 if (ftype == error_mark_node)
5621 continue;
5622 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5623
5624 if (DECL_SIZE (f) != 0
5625 && host_integerp (bit_position (f), 1))
5626 bitpos += int_bit_position (f);
5627
5628 /* ??? FIXME: else assume zero offset. */
5629
5630 if (TREE_CODE (ftype) == RECORD_TYPE)
5631 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5632 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5633 {
5634 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5635 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5636 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5637 }
5638 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5639 {
5640 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5641 cum->vregno++;
5642 cum->words += 2;
5643 }
5644 else if (cum->intoffset == -1)
5645 cum->intoffset = bitpos;
5646 }
594a51fe
SS
5647}
5648
4697a36c
MM
5649/* Update the data in CUM to advance over an argument
5650 of mode MODE and data type TYPE.
b2d04ecf
AM
5651 (TYPE is null for libcalls where that information may not be available.)
5652
5653 Note that for args passed by reference, function_arg will be called
5654 with MODE and TYPE set to that of the pointer to the arg, not the arg
5655 itself. */
4697a36c
MM
5656
5657void
f676971a 5658function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5659 tree type, int named, int depth)
4697a36c 5660{
0b5383eb
DJ
5661 int size;
5662
594a51fe
SS
5663 /* Only tick off an argument if we're not recursing. */
5664 if (depth == 0)
5665 cum->nargs_prototype--;
4697a36c 5666
ad630bef
DE
5667 if (TARGET_ALTIVEC_ABI
5668 && (ALTIVEC_VECTOR_MODE (mode)
5669 || (type && TREE_CODE (type) == VECTOR_TYPE
5670 && int_size_in_bytes (type) == 16)))
0ac081f6 5671 {
4ed78545
AM
5672 bool stack = false;
5673
2858f73a 5674 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5675 {
6d0ef01e
HP
5676 cum->vregno++;
5677 if (!TARGET_ALTIVEC)
c85ce869 5678 error ("cannot pass argument in vector register because"
6d0ef01e 5679 " altivec instructions are disabled, use -maltivec"
c85ce869 5680 " to enable them");
4ed78545
AM
5681
5682 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5683 even if it is going to be passed in a vector register.
4ed78545
AM
5684 Darwin does the same for variable-argument functions. */
5685 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5686 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5687 stack = true;
6d0ef01e 5688 }
4ed78545
AM
5689 else
5690 stack = true;
5691
5692 if (stack)
c4ad648e 5693 {
a594a19c 5694 int align;
f676971a 5695
2858f73a
GK
5696 /* Vector parameters must be 16-byte aligned. This places
5697 them at 2 mod 4 in terms of words in 32-bit mode, since
5698 the parameter save area starts at offset 24 from the
5699 stack. In 64-bit mode, they just have to start on an
5700 even word, since the parameter save area is 16-byte
5701 aligned. Space for GPRs is reserved even if the argument
5702 will be passed in memory. */
5703 if (TARGET_32BIT)
4ed78545 5704 align = (2 - cum->words) & 3;
2858f73a
GK
5705 else
5706 align = cum->words & 1;
c53bdcf5 5707 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5708
a594a19c
GK
5709 if (TARGET_DEBUG_ARG)
5710 {
f676971a 5711 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5712 cum->words, align);
5713 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5714 cum->nargs_prototype, cum->prototype,
2858f73a 5715 GET_MODE_NAME (mode));
a594a19c
GK
5716 }
5717 }
0ac081f6 5718 }
a4b0320c 5719 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5720 && !cum->stdarg
5721 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5722 cum->sysv_gregno++;
594a51fe
SS
5723
5724 else if (rs6000_darwin64_abi
5725 && mode == BLKmode
0b5383eb
DJ
5726 && TREE_CODE (type) == RECORD_TYPE
5727 && (size = int_size_in_bytes (type)) > 0)
5728 {
5729 /* Variable sized types have size == -1 and are
5730 treated as if consisting entirely of ints.
5731 Pad to 16 byte boundary if needed. */
5732 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5733 && (cum->words % 2) != 0)
5734 cum->words++;
5735 /* For varargs, we can just go up by the size of the struct. */
5736 if (!named)
5737 cum->words += (size + 7) / 8;
5738 else
5739 {
5740 /* It is tempting to say int register count just goes up by
5741 sizeof(type)/8, but this is wrong in a case such as
5742 { int; double; int; } [powerpc alignment]. We have to
5743 grovel through the fields for these too. */
5744 cum->intoffset = 0;
5745 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5746 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5747 size * BITS_PER_UNIT);
5748 }
5749 }
f607bc57 5750 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5751 {
a3170dc6 5752 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
5753 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5754 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5755 || (mode == TFmode && !TARGET_IEEEQUAD)
5756 || mode == SDmode || mode == DDmode || mode == TDmode))
4697a36c 5757 {
2d83f070
JJ
5758 /* _Decimal128 must use an even/odd register pair. This assumes
5759 that the register number is odd when fregno is odd. */
5760 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5761 cum->fregno++;
5762
5763 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5764 <= FP_ARG_V4_MAX_REG)
602ea4d3 5765 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5766 else
5767 {
602ea4d3 5768 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5769 if (mode == DFmode || mode == TFmode
5770 || mode == DDmode || mode == TDmode)
c4ad648e 5771 cum->words += cum->words & 1;
c53bdcf5 5772 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5773 }
4697a36c 5774 }
4cc833b7
RH
5775 else
5776 {
b2d04ecf 5777 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5778 int gregno = cum->sysv_gregno;
5779
4ed78545
AM
5780 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5781 (r7,r8) or (r9,r10). As does any other 2 word item such
5782 as complex int due to a historical mistake. */
5783 if (n_words == 2)
5784 gregno += (1 - gregno) & 1;
4cc833b7 5785
4ed78545 5786 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5787 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5788 {
4ed78545
AM
5789 /* Long long and SPE vectors are aligned on the stack.
5790 So are other 2 word items such as complex int due to
5791 a historical mistake. */
4cc833b7
RH
5792 if (n_words == 2)
5793 cum->words += cum->words & 1;
5794 cum->words += n_words;
5795 }
4697a36c 5796
4cc833b7
RH
5797 /* Note: continuing to accumulate gregno past when we've started
5798 spilling to the stack indicates the fact that we've started
5799 spilling to the stack to expand_builtin_saveregs. */
5800 cum->sysv_gregno = gregno + n_words;
5801 }
4697a36c 5802
4cc833b7
RH
5803 if (TARGET_DEBUG_ARG)
5804 {
5805 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5806 cum->words, cum->fregno);
5807 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5808 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5809 fprintf (stderr, "mode = %4s, named = %d\n",
5810 GET_MODE_NAME (mode), named);
5811 }
4697a36c
MM
5812 }
5813 else
4cc833b7 5814 {
b2d04ecf 5815 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5816 int start_words = cum->words;
5817 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5818
294bd182 5819 cum->words = align_words + n_words;
4697a36c 5820
ebb109ad 5821 if (SCALAR_FLOAT_MODE_P (mode)
56f4cc04 5822 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5823 {
5824 /* _Decimal128 must be passed in an even/odd float register pair.
5825 This assumes that the register number is odd when fregno is
5826 odd. */
5827 if (mode == TDmode && (cum->fregno % 2) == 1)
5828 cum->fregno++;
5829 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5830 }
4cc833b7
RH
5831
5832 if (TARGET_DEBUG_ARG)
5833 {
5834 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5835 cum->words, cum->fregno);
5836 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5837 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5838 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5839 named, align_words - start_words, depth);
4cc833b7
RH
5840 }
5841 }
4697a36c 5842}
a6c9bed4 5843
f82f556d
AH
5844static rtx
5845spe_build_register_parallel (enum machine_mode mode, int gregno)
5846{
17caeff2 5847 rtx r1, r3, r5, r7;
f82f556d 5848
37409796 5849 switch (mode)
f82f556d 5850 {
37409796 5851 case DFmode:
54b695e7
AH
5852 r1 = gen_rtx_REG (DImode, gregno);
5853 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5854 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5855
5856 case DCmode:
17caeff2 5857 case TFmode:
54b695e7
AH
5858 r1 = gen_rtx_REG (DImode, gregno);
5859 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5860 r3 = gen_rtx_REG (DImode, gregno + 2);
5861 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5862 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5863
17caeff2
JM
5864 case TCmode:
5865 r1 = gen_rtx_REG (DImode, gregno);
5866 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5867 r3 = gen_rtx_REG (DImode, gregno + 2);
5868 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5869 r5 = gen_rtx_REG (DImode, gregno + 4);
5870 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5871 r7 = gen_rtx_REG (DImode, gregno + 6);
5872 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5873 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5874
37409796
NS
5875 default:
5876 gcc_unreachable ();
f82f556d 5877 }
f82f556d 5878}
b78d48dd 5879
f82f556d 5880/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5881static rtx
f676971a 5882rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5883 tree type)
a6c9bed4 5884{
f82f556d
AH
5885 int gregno = cum->sysv_gregno;
5886
5887 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5888 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5889 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5890 || mode == DCmode || mode == TCmode))
f82f556d 5891 {
b5870bee
AH
5892 int n_words = rs6000_arg_size (mode, type);
5893
f82f556d 5894 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5895 if (mode == DFmode)
b5870bee 5896 gregno += (1 - gregno) & 1;
f82f556d 5897
b5870bee
AH
5898 /* Multi-reg args are not split between registers and stack. */
5899 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5900 return NULL_RTX;
5901
5902 return spe_build_register_parallel (mode, gregno);
5903 }
a6c9bed4
AH
5904 if (cum->stdarg)
5905 {
c53bdcf5 5906 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5907
5908 /* SPE vectors are put in odd registers. */
5909 if (n_words == 2 && (gregno & 1) == 0)
5910 gregno += 1;
5911
5912 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5913 {
5914 rtx r1, r2;
5915 enum machine_mode m = SImode;
5916
5917 r1 = gen_rtx_REG (m, gregno);
5918 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5919 r2 = gen_rtx_REG (m, gregno + 1);
5920 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5921 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5922 }
5923 else
b78d48dd 5924 return NULL_RTX;
a6c9bed4
AH
5925 }
5926 else
5927 {
f82f556d
AH
5928 if (gregno <= GP_ARG_MAX_REG)
5929 return gen_rtx_REG (mode, gregno);
a6c9bed4 5930 else
b78d48dd 5931 return NULL_RTX;
a6c9bed4
AH
5932 }
5933}
5934
0b5383eb
DJ
5935/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5936 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5937
0b5383eb 5938static void
bb8df8a6 5939rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5940 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5941{
0b5383eb
DJ
5942 enum machine_mode mode;
5943 unsigned int regno;
5944 unsigned int startbit, endbit;
5945 int this_regno, intregs, intoffset;
5946 rtx reg;
594a51fe 5947
0b5383eb
DJ
5948 if (cum->intoffset == -1)
5949 return;
5950
5951 intoffset = cum->intoffset;
5952 cum->intoffset = -1;
5953
5954 /* If this is the trailing part of a word, try to only load that
5955 much into the register. Otherwise load the whole register. Note
5956 that in the latter case we may pick up unwanted bits. It's not a
5957 problem at the moment but may wish to revisit. */
5958
5959 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5960 {
0b5383eb
DJ
5961 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5962 MODE_INT, 0);
5963 if (mode == BLKmode)
5964 {
5965 /* We couldn't find an appropriate mode, which happens,
5966 e.g., in packed structs when there are 3 bytes to load.
5967 Back intoffset back to the beginning of the word in this
5968 case. */
5969 intoffset = intoffset & -BITS_PER_WORD;
5970 mode = word_mode;
5971 }
5972 }
5973 else
5974 mode = word_mode;
5975
5976 startbit = intoffset & -BITS_PER_WORD;
5977 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5978 intregs = (endbit - startbit) / BITS_PER_WORD;
5979 this_regno = cum->words + intoffset / BITS_PER_WORD;
5980
5981 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5982 cum->use_stack = 1;
bb8df8a6 5983
0b5383eb
DJ
5984 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5985 if (intregs <= 0)
5986 return;
5987
5988 intoffset /= BITS_PER_UNIT;
5989 do
5990 {
5991 regno = GP_ARG_MIN_REG + this_regno;
5992 reg = gen_rtx_REG (mode, regno);
5993 rvec[(*k)++] =
5994 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5995
5996 this_regno += 1;
5997 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5998 mode = word_mode;
5999 intregs -= 1;
6000 }
6001 while (intregs > 0);
6002}
6003
6004/* Recursive workhorse for the following. */
6005
6006static void
586de218 6007rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
6008 HOST_WIDE_INT startbitpos, rtx rvec[],
6009 int *k)
6010{
6011 tree f;
6012
6013 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6014 if (TREE_CODE (f) == FIELD_DECL)
6015 {
6016 HOST_WIDE_INT bitpos = startbitpos;
6017 tree ftype = TREE_TYPE (f);
70fb00df
AP
6018 enum machine_mode mode;
6019 if (ftype == error_mark_node)
6020 continue;
6021 mode = TYPE_MODE (ftype);
0b5383eb
DJ
6022
6023 if (DECL_SIZE (f) != 0
6024 && host_integerp (bit_position (f), 1))
6025 bitpos += int_bit_position (f);
6026
6027 /* ??? FIXME: else assume zero offset. */
6028
6029 if (TREE_CODE (ftype) == RECORD_TYPE)
6030 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6031 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 6032 {
0b5383eb
DJ
6033#if 0
6034 switch (mode)
594a51fe 6035 {
0b5383eb
DJ
6036 case SCmode: mode = SFmode; break;
6037 case DCmode: mode = DFmode; break;
6038 case TCmode: mode = TFmode; break;
6039 default: break;
594a51fe 6040 }
0b5383eb
DJ
6041#endif
6042 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6043 rvec[(*k)++]
bb8df8a6 6044 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
6045 gen_rtx_REG (mode, cum->fregno++),
6046 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 6047 if (mode == TFmode || mode == TDmode)
0b5383eb 6048 cum->fregno++;
594a51fe 6049 }
0b5383eb
DJ
6050 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6051 {
6052 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6053 rvec[(*k)++]
bb8df8a6
EC
6054 = gen_rtx_EXPR_LIST (VOIDmode,
6055 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
6056 GEN_INT (bitpos / BITS_PER_UNIT));
6057 }
6058 else if (cum->intoffset == -1)
6059 cum->intoffset = bitpos;
6060 }
6061}
594a51fe 6062
0b5383eb
DJ
6063/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6064 the register(s) to be used for each field and subfield of a struct
6065 being passed by value, along with the offset of where the
6066 register's value may be found in the block. FP fields go in FP
6067 register, vector fields go in vector registers, and everything
bb8df8a6 6068 else goes in int registers, packed as in memory.
8ff40a74 6069
0b5383eb
DJ
6070 This code is also used for function return values. RETVAL indicates
6071 whether this is the case.
8ff40a74 6072
a4d05547 6073 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6074 calling convention. */
594a51fe 6075
0b5383eb 6076static rtx
586de218 6077rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6078 int named, bool retval)
6079{
6080 rtx rvec[FIRST_PSEUDO_REGISTER];
6081 int k = 1, kbase = 1;
6082 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6083 /* This is a copy; modifications are not visible to our caller. */
6084 CUMULATIVE_ARGS copy_cum = *orig_cum;
6085 CUMULATIVE_ARGS *cum = &copy_cum;
6086
6087 /* Pad to 16 byte boundary if needed. */
6088 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6089 && (cum->words % 2) != 0)
6090 cum->words++;
6091
6092 cum->intoffset = 0;
6093 cum->use_stack = 0;
6094 cum->named = named;
6095
6096 /* Put entries into rvec[] for individual FP and vector fields, and
6097 for the chunks of memory that go in int regs. Note we start at
6098 element 1; 0 is reserved for an indication of using memory, and
6099 may or may not be filled in below. */
6100 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6101 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6102
6103 /* If any part of the struct went on the stack put all of it there.
6104 This hack is because the generic code for
6105 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6106 parts of the struct are not at the beginning. */
6107 if (cum->use_stack)
6108 {
6109 if (retval)
6110 return NULL_RTX; /* doesn't go in registers at all */
6111 kbase = 0;
6112 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6113 }
6114 if (k > 1 || cum->use_stack)
6115 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6116 else
6117 return NULL_RTX;
6118}
6119
b78d48dd
FJ
6120/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6121
6122static rtx
ec6376ab 6123rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6124{
ec6376ab
AM
6125 int n_units;
6126 int i, k;
6127 rtx rvec[GP_ARG_NUM_REG + 1];
6128
6129 if (align_words >= GP_ARG_NUM_REG)
6130 return NULL_RTX;
6131
6132 n_units = rs6000_arg_size (mode, type);
6133
6134 /* Optimize the simple case where the arg fits in one gpr, except in
6135 the case of BLKmode due to assign_parms assuming that registers are
6136 BITS_PER_WORD wide. */
6137 if (n_units == 0
6138 || (n_units == 1 && mode != BLKmode))
6139 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6140
6141 k = 0;
6142 if (align_words + n_units > GP_ARG_NUM_REG)
6143 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6144 using a magic NULL_RTX component.
79773478
AM
6145 This is not strictly correct. Only some of the arg belongs in
6146 memory, not all of it. However, the normal scheme using
6147 function_arg_partial_nregs can result in unusual subregs, eg.
6148 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6149 store the whole arg to memory is often more efficient than code
6150 to store pieces, and we know that space is available in the right
6151 place for the whole arg. */
ec6376ab
AM
6152 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6153
6154 i = 0;
6155 do
36a454e1 6156 {
ec6376ab
AM
6157 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6158 rtx off = GEN_INT (i++ * 4);
6159 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6160 }
ec6376ab
AM
6161 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6162
6163 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6164}
6165
4697a36c
MM
6166/* Determine where to put an argument to a function.
6167 Value is zero to push the argument on the stack,
6168 or a hard register in which to store the argument.
6169
6170 MODE is the argument's machine mode.
6171 TYPE is the data type of the argument (as a tree).
6172 This is null for libcalls where that information may
6173 not be available.
6174 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6175 the preceding args and about the function being called. It is
6176 not modified in this routine.
4697a36c
MM
6177 NAMED is nonzero if this argument is a named parameter
6178 (otherwise it is an extra parameter matching an ellipsis).
6179
6180 On RS/6000 the first eight words of non-FP are normally in registers
6181 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6182 Under V.4, the first 8 FP args are in registers.
6183
6184 If this is floating-point and no prototype is specified, we use
6185 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6186 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6187 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6188 doesn't support PARALLEL anyway.
6189
6190 Note that for args passed by reference, function_arg will be called
6191 with MODE and TYPE set to that of the pointer to the arg, not the arg
6192 itself. */
4697a36c 6193
9390387d 6194rtx
f676971a 6195function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6196 tree type, int named)
4697a36c 6197{
4cc833b7 6198 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6199
a4f6c312
SS
6200 /* Return a marker to indicate whether CR1 needs to set or clear the
6201 bit that V.4 uses to say fp args were passed in registers.
6202 Assume that we don't need the marker for software floating point,
6203 or compiler generated library calls. */
4697a36c
MM
6204 if (mode == VOIDmode)
6205 {
f607bc57 6206 if (abi == ABI_V4
b9599e46 6207 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6208 && (cum->stdarg
6209 || (cum->nargs_prototype < 0
6210 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6211 {
a3170dc6
AH
6212 /* For the SPE, we need to crxor CR6 always. */
6213 if (TARGET_SPE_ABI)
6214 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6215 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6216 return GEN_INT (cum->call_cookie
6217 | ((cum->fregno == FP_ARG_MIN_REG)
6218 ? CALL_V4_SET_FP_ARGS
6219 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6220 }
4697a36c 6221
7509c759 6222 return GEN_INT (cum->call_cookie);
4697a36c
MM
6223 }
6224
0b5383eb
DJ
6225 if (rs6000_darwin64_abi && mode == BLKmode
6226 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6227 {
0b5383eb 6228 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6229 if (rslt != NULL_RTX)
6230 return rslt;
6231 /* Else fall through to usual handling. */
6232 }
6233
2858f73a 6234 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6235 if (TARGET_64BIT && ! cum->prototype)
6236 {
c4ad648e
AM
6237 /* Vector parameters get passed in vector register
6238 and also in GPRs or memory, in absence of prototype. */
6239 int align_words;
6240 rtx slot;
6241 align_words = (cum->words + 1) & ~1;
6242
6243 if (align_words >= GP_ARG_NUM_REG)
6244 {
6245 slot = NULL_RTX;
6246 }
6247 else
6248 {
6249 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6250 }
6251 return gen_rtx_PARALLEL (mode,
6252 gen_rtvec (2,
6253 gen_rtx_EXPR_LIST (VOIDmode,
6254 slot, const0_rtx),
6255 gen_rtx_EXPR_LIST (VOIDmode,
6256 gen_rtx_REG (mode, cum->vregno),
6257 const0_rtx)));
c72d6c26
HP
6258 }
6259 else
6260 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6261 else if (TARGET_ALTIVEC_ABI
6262 && (ALTIVEC_VECTOR_MODE (mode)
6263 || (type && TREE_CODE (type) == VECTOR_TYPE
6264 && int_size_in_bytes (type) == 16)))
0ac081f6 6265 {
2858f73a 6266 if (named || abi == ABI_V4)
a594a19c 6267 return NULL_RTX;
0ac081f6 6268 else
a594a19c
GK
6269 {
6270 /* Vector parameters to varargs functions under AIX or Darwin
6271 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6272 int align, align_words, n_words;
6273 enum machine_mode part_mode;
a594a19c
GK
6274
6275 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6276 2 mod 4 in terms of words in 32-bit mode, since the parameter
6277 save area starts at offset 24 from the stack. In 64-bit mode,
6278 they just have to start on an even word, since the parameter
6279 save area is 16-byte aligned. */
6280 if (TARGET_32BIT)
4ed78545 6281 align = (2 - cum->words) & 3;
2858f73a
GK
6282 else
6283 align = cum->words & 1;
a594a19c
GK
6284 align_words = cum->words + align;
6285
6286 /* Out of registers? Memory, then. */
6287 if (align_words >= GP_ARG_NUM_REG)
6288 return NULL_RTX;
ec6376ab
AM
6289
6290 if (TARGET_32BIT && TARGET_POWERPC64)
6291 return rs6000_mixed_function_arg (mode, type, align_words);
6292
2858f73a
GK
6293 /* The vector value goes in GPRs. Only the part of the
6294 value in GPRs is reported here. */
ec6376ab
AM
6295 part_mode = mode;
6296 n_words = rs6000_arg_size (mode, type);
6297 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6298 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6299 is either wholly in GPRs or half in GPRs and half not. */
6300 part_mode = DImode;
ec6376ab
AM
6301
6302 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6303 }
0ac081f6 6304 }
f82f556d
AH
6305 else if (TARGET_SPE_ABI && TARGET_SPE
6306 && (SPE_VECTOR_MODE (mode)
18f63bfa 6307 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6308 || mode == DCmode
6309 || mode == TFmode
6310 || mode == TCmode))))
a6c9bed4 6311 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6312
f607bc57 6313 else if (abi == ABI_V4)
4697a36c 6314 {
a3170dc6 6315 if (TARGET_HARD_FLOAT && TARGET_FPRS
56f4cc04
DE
6316 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6317 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
cf8e1455
DE
6318 || (mode == TFmode && !TARGET_IEEEQUAD)
6319 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6320 {
2d83f070
JJ
6321 /* _Decimal128 must use an even/odd register pair. This assumes
6322 that the register number is odd when fregno is odd. */
6323 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6324 cum->fregno++;
6325
6326 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6327 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6328 return gen_rtx_REG (mode, cum->fregno);
6329 else
b78d48dd 6330 return NULL_RTX;
4cc833b7
RH
6331 }
6332 else
6333 {
b2d04ecf 6334 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6335 int gregno = cum->sysv_gregno;
6336
4ed78545
AM
6337 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6338 (r7,r8) or (r9,r10). As does any other 2 word item such
6339 as complex int due to a historical mistake. */
6340 if (n_words == 2)
6341 gregno += (1 - gregno) & 1;
4cc833b7 6342
4ed78545 6343 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6344 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6345 return NULL_RTX;
ec6376ab
AM
6346
6347 if (TARGET_32BIT && TARGET_POWERPC64)
6348 return rs6000_mixed_function_arg (mode, type,
6349 gregno - GP_ARG_MIN_REG);
6350 return gen_rtx_REG (mode, gregno);
4cc833b7 6351 }
4697a36c 6352 }
4cc833b7
RH
6353 else
6354 {
294bd182 6355 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6356
2d83f070
JJ
6357 /* _Decimal128 must be passed in an even/odd float register pair.
6358 This assumes that the register number is odd when fregno is odd. */
6359 if (mode == TDmode && (cum->fregno % 2) == 1)
6360 cum->fregno++;
6361
2858f73a 6362 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6363 {
ec6376ab
AM
6364 rtx rvec[GP_ARG_NUM_REG + 1];
6365 rtx r;
6366 int k;
c53bdcf5
AM
6367 bool needs_psave;
6368 enum machine_mode fmode = mode;
c53bdcf5
AM
6369 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6370
6371 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6372 {
c53bdcf5
AM
6373 /* Currently, we only ever need one reg here because complex
6374 doubles are split. */
7393f7f8
BE
6375 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6376 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6377
7393f7f8
BE
6378 /* Long double or _Decimal128 split over regs and memory. */
6379 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6380 }
c53bdcf5
AM
6381
6382 /* Do we also need to pass this arg in the parameter save
6383 area? */
6384 needs_psave = (type
6385 && (cum->nargs_prototype <= 0
6386 || (DEFAULT_ABI == ABI_AIX
de17c25f 6387 && TARGET_XL_COMPAT
c53bdcf5
AM
6388 && align_words >= GP_ARG_NUM_REG)));
6389
6390 if (!needs_psave && mode == fmode)
ec6376ab 6391 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6392
ec6376ab 6393 k = 0;
c53bdcf5
AM
6394 if (needs_psave)
6395 {
ec6376ab 6396 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6397 This piece must come first, before the fprs. */
c53bdcf5
AM
6398 if (align_words < GP_ARG_NUM_REG)
6399 {
6400 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6401
6402 if (align_words + n_words > GP_ARG_NUM_REG
6403 || (TARGET_32BIT && TARGET_POWERPC64))
6404 {
6405 /* If this is partially on the stack, then we only
6406 include the portion actually in registers here. */
6407 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6408 rtx off;
79773478
AM
6409 int i = 0;
6410 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6411 /* Not all of the arg fits in gprs. Say that it
6412 goes in memory too, using a magic NULL_RTX
6413 component. Also see comment in
6414 rs6000_mixed_function_arg for why the normal
6415 function_arg_partial_nregs scheme doesn't work
6416 in this case. */
6417 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6418 const0_rtx);
ec6376ab
AM
6419 do
6420 {
6421 r = gen_rtx_REG (rmode,
6422 GP_ARG_MIN_REG + align_words);
2e6c9641 6423 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6424 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6425 }
6426 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6427 }
6428 else
6429 {
6430 /* The whole arg fits in gprs. */
6431 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6432 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6433 }
c53bdcf5 6434 }
ec6376ab
AM
6435 else
6436 /* It's entirely in memory. */
6437 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6438 }
6439
ec6376ab
AM
6440 /* Describe where this piece goes in the fprs. */
6441 r = gen_rtx_REG (fmode, cum->fregno);
6442 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6443
6444 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6445 }
6446 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6447 {
ec6376ab
AM
6448 if (TARGET_32BIT && TARGET_POWERPC64)
6449 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6450
4eeca74f
AM
6451 if (mode == BLKmode)
6452 mode = Pmode;
6453
b2d04ecf
AM
6454 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6455 }
4cc833b7
RH
6456 else
6457 return NULL_RTX;
4697a36c 6458 }
4697a36c
MM
6459}
6460\f
ec6376ab 6461/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6462 the number of bytes passed in registers. For args passed entirely in
6463 registers or entirely in memory, zero. When an arg is described by a
6464 PARALLEL, perhaps using more than one register type, this function
6465 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6466
78a52f11
RH
6467static int
6468rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6469 tree type, bool named)
4697a36c 6470{
c53bdcf5 6471 int ret = 0;
ec6376ab 6472 int align_words;
c53bdcf5 6473
f607bc57 6474 if (DEFAULT_ABI == ABI_V4)
4697a36c 6475 return 0;
4697a36c 6476
c53bdcf5
AM
6477 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6478 && cum->nargs_prototype >= 0)
6479 return 0;
6480
0b5383eb
DJ
6481 /* In this complicated case we just disable the partial_nregs code. */
6482 if (rs6000_darwin64_abi && mode == BLKmode
6483 && TREE_CODE (type) == RECORD_TYPE
6484 && int_size_in_bytes (type) > 0)
6485 return 0;
6486
294bd182 6487 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6488
79773478
AM
6489 if (USE_FP_FOR_ARG_P (cum, mode, type))
6490 {
fb63c729
AM
6491 /* If we are passing this arg in the fixed parameter save area
6492 (gprs or memory) as well as fprs, then this function should
79773478
AM
6493 return the number of partial bytes passed in the parameter
6494 save area rather than partial bytes passed in fprs. */
6495 if (type
6496 && (cum->nargs_prototype <= 0
6497 || (DEFAULT_ABI == ABI_AIX
6498 && TARGET_XL_COMPAT
6499 && align_words >= GP_ARG_NUM_REG)))
6500 return 0;
6501 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6502 > FP_ARG_MAX_REG + 1)
ac7e839c 6503 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6504 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6505 return 0;
6506 }
6507
ec6376ab
AM
6508 if (align_words < GP_ARG_NUM_REG
6509 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6510 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6511
c53bdcf5 6512 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6513 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6514
c53bdcf5 6515 return ret;
4697a36c
MM
6516}
6517\f
6518/* A C expression that indicates when an argument must be passed by
6519 reference. If nonzero for an argument, a copy of that argument is
6520 made in memory and a pointer to the argument is passed instead of
6521 the argument itself. The pointer is passed in whatever way is
6522 appropriate for passing a pointer to that type.
6523
b2d04ecf
AM
6524 Under V.4, aggregates and long double are passed by reference.
6525
6526 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6527 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6528
6529 As an extension to all ABIs, variable sized types are passed by
6530 reference. */
4697a36c 6531
8cd5a4e0 6532static bool
f676971a 6533rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6534 enum machine_mode mode, const_tree type,
bada2eb8 6535 bool named ATTRIBUTE_UNUSED)
4697a36c 6536{
602ea4d3 6537 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6538 {
6539 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6540 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6541 return 1;
6542 }
6543
6544 if (!type)
6545 return 0;
4697a36c 6546
bada2eb8
DE
6547 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6548 {
6549 if (TARGET_DEBUG_ARG)
6550 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6551 return 1;
6552 }
6553
6554 if (int_size_in_bytes (type) < 0)
6555 {
6556 if (TARGET_DEBUG_ARG)
6557 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6558 return 1;
6559 }
6560
6561 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6562 modes only exist for GCC vector types if -maltivec. */
6563 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6564 {
6565 if (TARGET_DEBUG_ARG)
6566 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6567 return 1;
6568 }
b693336b
PB
6569
6570 /* Pass synthetic vectors in memory. */
bada2eb8 6571 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6572 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6573 {
6574 static bool warned_for_pass_big_vectors = false;
6575 if (TARGET_DEBUG_ARG)
6576 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6577 if (!warned_for_pass_big_vectors)
6578 {
d4ee4d25 6579 warning (0, "GCC vector passed by reference: "
b693336b
PB
6580 "non-standard ABI extension with no compatibility guarantee");
6581 warned_for_pass_big_vectors = true;
6582 }
6583 return 1;
6584 }
6585
b2d04ecf 6586 return 0;
4697a36c 6587}
5985c7a6
FJ
6588
6589static void
2d9db8eb 6590rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6591{
6592 int i;
6593 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6594
6595 if (nregs == 0)
6596 return;
6597
c4ad648e 6598 for (i = 0; i < nregs; i++)
5985c7a6 6599 {
9390387d 6600 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6601 if (reload_completed)
c4ad648e
AM
6602 {
6603 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6604 tem = NULL_RTX;
6605 else
6606 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6607 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6608 }
5985c7a6
FJ
6609 else
6610 tem = replace_equiv_address (tem, XEXP (tem, 0));
6611
37409796 6612 gcc_assert (tem);
5985c7a6
FJ
6613
6614 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6615 }
6616}
4697a36c
MM
6617\f
6618/* Perform any needed actions needed for a function that is receiving a
f676971a 6619 variable number of arguments.
4697a36c
MM
6620
6621 CUM is as above.
6622
6623 MODE and TYPE are the mode and type of the current parameter.
6624
6625 PRETEND_SIZE is a variable that should be set to the amount of stack
6626 that must be pushed by the prolog to pretend that our caller pushed
6627 it.
6628
6629 Normally, this macro will push all remaining incoming registers on the
6630 stack and set PRETEND_SIZE to the length of the registers pushed. */
6631
c6e8c921 6632static void
f676971a 6633setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6634 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6635 int no_rtl)
4697a36c 6636{
4cc833b7
RH
6637 CUMULATIVE_ARGS next_cum;
6638 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6639 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6640 int first_reg_offset;
6641 alias_set_type set;
4697a36c 6642
f31bf321 6643 /* Skip the last named argument. */
d34c5b80 6644 next_cum = *cum;
594a51fe 6645 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6646
f607bc57 6647 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6648 {
5b667039
JJ
6649 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6650
60e2d0ca 6651 if (! no_rtl)
5b667039
JJ
6652 {
6653 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6654 HOST_WIDE_INT offset = 0;
6655
6656 /* Try to optimize the size of the varargs save area.
6657 The ABI requires that ap.reg_save_area is doubleword
6658 aligned, but we don't need to allocate space for all
6659 the bytes, only those to which we actually will save
6660 anything. */
6661 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6662 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6663 if (TARGET_HARD_FLOAT && TARGET_FPRS
6664 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6665 && cfun->va_list_fpr_size)
6666 {
6667 if (gpr_reg_num)
6668 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6669 * UNITS_PER_FP_WORD;
6670 if (cfun->va_list_fpr_size
6671 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6672 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6673 else
6674 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6675 * UNITS_PER_FP_WORD;
6676 }
6677 if (gpr_reg_num)
6678 {
6679 offset = -((first_reg_offset * reg_size) & ~7);
6680 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6681 {
6682 gpr_reg_num = cfun->va_list_gpr_size;
6683 if (reg_size == 4 && (first_reg_offset & 1))
6684 gpr_reg_num++;
6685 }
6686 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6687 }
6688 else if (fpr_size)
6689 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6690 * UNITS_PER_FP_WORD
6691 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6692
5b667039
JJ
6693 if (gpr_size + fpr_size)
6694 {
6695 rtx reg_save_area
6696 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6697 gcc_assert (GET_CODE (reg_save_area) == MEM);
6698 reg_save_area = XEXP (reg_save_area, 0);
6699 if (GET_CODE (reg_save_area) == PLUS)
6700 {
6701 gcc_assert (XEXP (reg_save_area, 0)
6702 == virtual_stack_vars_rtx);
6703 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6704 offset += INTVAL (XEXP (reg_save_area, 1));
6705 }
6706 else
6707 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6708 }
6709
6710 cfun->machine->varargs_save_offset = offset;
6711 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6712 }
4697a36c 6713 }
60e2d0ca 6714 else
4697a36c 6715 {
d34c5b80 6716 first_reg_offset = next_cum.words;
4cc833b7 6717 save_area = virtual_incoming_args_rtx;
4697a36c 6718
fe984136 6719 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6720 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6721 }
4697a36c 6722
dfafc897 6723 set = get_varargs_alias_set ();
9d30f3c1
JJ
6724 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6725 && cfun->va_list_gpr_size)
4cc833b7 6726 {
9d30f3c1
JJ
6727 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6728
6729 if (va_list_gpr_counter_field)
6730 {
6731 /* V4 va_list_gpr_size counts number of registers needed. */
6732 if (nregs > cfun->va_list_gpr_size)
6733 nregs = cfun->va_list_gpr_size;
6734 }
6735 else
6736 {
6737 /* char * va_list instead counts number of bytes needed. */
6738 if (nregs > cfun->va_list_gpr_size / reg_size)
6739 nregs = cfun->va_list_gpr_size / reg_size;
6740 }
6741
dfafc897 6742 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6743 plus_constant (save_area,
13e2e16e
DE
6744 first_reg_offset * reg_size));
6745 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6746 set_mem_alias_set (mem, set);
8ac61af7 6747 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6748
f676971a 6749 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6750 nregs);
4697a36c
MM
6751 }
6752
4697a36c 6753 /* Save FP registers if needed. */
f607bc57 6754 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6755 && TARGET_HARD_FLOAT && TARGET_FPRS
6756 && ! no_rtl
9d30f3c1
JJ
6757 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6758 && cfun->va_list_fpr_size)
4697a36c 6759 {
9d30f3c1 6760 int fregno = next_cum.fregno, nregs;
9ebbca7d 6761 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6762 rtx lab = gen_label_rtx ();
5b667039
JJ
6763 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6764 * UNITS_PER_FP_WORD);
4697a36c 6765
c4ad648e
AM
6766 emit_jump_insn
6767 (gen_rtx_SET (VOIDmode,
6768 pc_rtx,
6769 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6770 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6771 const0_rtx),
39403d82 6772 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6773 pc_rtx)));
6774
9d30f3c1
JJ
6775 for (nregs = 0;
6776 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6777 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6778 {
696e45ba
ME
6779 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6780 ? DFmode : SFmode,
6781 plus_constant (save_area, off));
6782 MEM_NOTRAP_P (mem) = 1;
6783 set_mem_alias_set (mem, set);
6784 set_mem_align (mem, GET_MODE_ALIGNMENT (
6785 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6786 ? DFmode : SFmode));
6787 emit_move_insn (mem, gen_rtx_REG (
6788 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6789 ? DFmode : SFmode, fregno));
4697a36c 6790 }
4cc833b7
RH
6791
6792 emit_label (lab);
4697a36c 6793 }
4697a36c 6794}
4697a36c 6795
dfafc897 6796/* Create the va_list data type. */
2c4974b7 6797
c35d187f
RH
6798static tree
6799rs6000_build_builtin_va_list (void)
dfafc897 6800{
64c2816f 6801 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6802
9ebbca7d
GK
6803 /* For AIX, prefer 'char *' because that's what the system
6804 header files like. */
f607bc57 6805 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6806 return build_pointer_type (char_type_node);
dfafc897 6807
f1e639b1 6808 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6809 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6810
f676971a 6811 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6812 unsigned_char_type_node);
f676971a 6813 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6814 unsigned_char_type_node);
64c2816f
DT
6815 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6816 every user file. */
6817 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6818 short_unsigned_type_node);
dfafc897
FS
6819 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6820 ptr_type_node);
6821 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6822 ptr_type_node);
6823
9d30f3c1
JJ
6824 va_list_gpr_counter_field = f_gpr;
6825 va_list_fpr_counter_field = f_fpr;
6826
dfafc897
FS
6827 DECL_FIELD_CONTEXT (f_gpr) = record;
6828 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6829 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6830 DECL_FIELD_CONTEXT (f_ovf) = record;
6831 DECL_FIELD_CONTEXT (f_sav) = record;
6832
bab45a51
FS
6833 TREE_CHAIN (record) = type_decl;
6834 TYPE_NAME (record) = type_decl;
dfafc897
FS
6835 TYPE_FIELDS (record) = f_gpr;
6836 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6837 TREE_CHAIN (f_fpr) = f_res;
6838 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6839 TREE_CHAIN (f_ovf) = f_sav;
6840
6841 layout_type (record);
6842
6843 /* The correct type is an array type of one element. */
6844 return build_array_type (record, build_index_type (size_zero_node));
6845}
6846
6847/* Implement va_start. */
6848
d7bd8aeb 6849static void
a2369ed3 6850rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6851{
dfafc897 6852 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6853 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6854 tree gpr, fpr, ovf, sav, t;
2c4974b7 6855
dfafc897 6856 /* Only SVR4 needs something special. */
f607bc57 6857 if (DEFAULT_ABI != ABI_V4)
dfafc897 6858 {
e5faf155 6859 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6860 return;
6861 }
6862
973a648b 6863 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6864 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6865 f_res = TREE_CHAIN (f_fpr);
6866 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6867 f_sav = TREE_CHAIN (f_ovf);
6868
872a65b5 6869 valist = build_va_arg_indirect_ref (valist);
47a25a46 6870 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6871 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6872 f_fpr, NULL_TREE);
6873 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6874 f_ovf, NULL_TREE);
6875 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6876 f_sav, NULL_TREE);
dfafc897
FS
6877
6878 /* Count number of gp and fp argument registers used. */
38173d38
JH
6879 words = crtl->args.info.words;
6880 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6881 GP_ARG_NUM_REG);
38173d38 6882 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6883 FP_ARG_NUM_REG);
dfafc897
FS
6884
6885 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6886 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6887 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6888 words, n_gpr, n_fpr);
dfafc897 6889
9d30f3c1
JJ
6890 if (cfun->va_list_gpr_size)
6891 {
726a989a 6892 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6893 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6894 TREE_SIDE_EFFECTS (t) = 1;
6895 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6896 }
58c8adc1 6897
9d30f3c1
JJ
6898 if (cfun->va_list_fpr_size)
6899 {
726a989a 6900 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6901 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6902 TREE_SIDE_EFFECTS (t) = 1;
6903 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6904 }
dfafc897
FS
6905
6906 /* Find the overflow area. */
6907 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6908 if (words != 0)
5be014d5
AP
6909 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6910 size_int (words * UNITS_PER_WORD));
726a989a 6911 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6912 TREE_SIDE_EFFECTS (t) = 1;
6913 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6914
9d30f3c1
JJ
6915 /* If there were no va_arg invocations, don't set up the register
6916 save area. */
6917 if (!cfun->va_list_gpr_size
6918 && !cfun->va_list_fpr_size
6919 && n_gpr < GP_ARG_NUM_REG
6920 && n_fpr < FP_ARG_V4_MAX_REG)
6921 return;
6922
dfafc897
FS
6923 /* Find the register save area. */
6924 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6925 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6926 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6927 size_int (cfun->machine->varargs_save_offset));
726a989a 6928 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6929 TREE_SIDE_EFFECTS (t) = 1;
6930 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6931}
6932
6933/* Implement va_arg. */
6934
23a60a04 6935tree
726a989a
RB
6936rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6937 gimple_seq *post_p)
cd3ce9b4 6938{
cd3ce9b4
JM
6939 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6940 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6941 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6942 tree lab_false, lab_over, addr;
6943 int align;
6944 tree ptrtype = build_pointer_type (type);
7393f7f8 6945 int regalign = 0;
726a989a 6946 gimple stmt;
cd3ce9b4 6947
08b0dc1b
RH
6948 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6949 {
6950 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6951 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6952 }
6953
cd3ce9b4
JM
6954 if (DEFAULT_ABI != ABI_V4)
6955 {
08b0dc1b 6956 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6957 {
6958 tree elem_type = TREE_TYPE (type);
6959 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6960 int elem_size = GET_MODE_SIZE (elem_mode);
6961
6962 if (elem_size < UNITS_PER_WORD)
6963 {
23a60a04 6964 tree real_part, imag_part;
726a989a 6965 gimple_seq post = NULL;
cd3ce9b4 6966
23a60a04
JM
6967 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6968 &post);
6969 /* Copy the value into a temporary, lest the formal temporary
6970 be reused out from under us. */
6971 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 6972 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 6973
23a60a04
JM
6974 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6975 post_p);
cd3ce9b4 6976
47a25a46 6977 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6978 }
6979 }
6980
23a60a04 6981 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6982 }
6983
6984 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6985 f_fpr = TREE_CHAIN (f_gpr);
6986 f_res = TREE_CHAIN (f_fpr);
6987 f_ovf = TREE_CHAIN (f_res);
6988 f_sav = TREE_CHAIN (f_ovf);
6989
872a65b5 6990 valist = build_va_arg_indirect_ref (valist);
47a25a46 6991 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6992 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6993 f_fpr, NULL_TREE);
6994 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6995 f_ovf, NULL_TREE);
6996 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6997 f_sav, NULL_TREE);
cd3ce9b4
JM
6998
6999 size = int_size_in_bytes (type);
7000 rsize = (size + 3) / 4;
7001 align = 1;
7002
08b0dc1b 7003 if (TARGET_HARD_FLOAT && TARGET_FPRS
696e45ba
ME
7004 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7005 || (TARGET_DOUBLE_FLOAT
7006 && (TYPE_MODE (type) == DFmode
7007 || TYPE_MODE (type) == TFmode
7008 || TYPE_MODE (type) == SDmode
7009 || TYPE_MODE (type) == DDmode
7010 || TYPE_MODE (type) == TDmode))))
cd3ce9b4
JM
7011 {
7012 /* FP args go in FP registers, if present. */
cd3ce9b4 7013 reg = fpr;
602ea4d3 7014 n_reg = (size + 7) / 8;
696e45ba
ME
7015 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7016 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
e41b2a33 7017 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
7018 align = 8;
7019 }
7020 else
7021 {
7022 /* Otherwise into GP registers. */
cd3ce9b4
JM
7023 reg = gpr;
7024 n_reg = rsize;
7025 sav_ofs = 0;
7026 sav_scale = 4;
7027 if (n_reg == 2)
7028 align = 8;
7029 }
7030
7031 /* Pull the value out of the saved registers.... */
7032
7033 lab_over = NULL;
7034 addr = create_tmp_var (ptr_type_node, "addr");
7035 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7036
7037 /* AltiVec vectors never go in registers when -mabi=altivec. */
7038 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7039 align = 16;
7040 else
7041 {
7042 lab_false = create_artificial_label ();
7043 lab_over = create_artificial_label ();
7044
7045 /* Long long and SPE vectors are aligned in the registers.
7046 As are any other 2 gpr item such as complex int due to a
7047 historical mistake. */
7048 u = reg;
602ea4d3 7049 if (n_reg == 2 && reg == gpr)
cd3ce9b4 7050 {
7393f7f8 7051 regalign = 1;
726a989a 7052 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7053 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
7054 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7055 unshare_expr (reg), u);
cd3ce9b4 7056 }
7393f7f8
BE
7057 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7058 reg number is 0 for f1, so we want to make it odd. */
7059 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7060 {
7061 regalign = 1;
726a989a 7062 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 7063 build_int_cst (TREE_TYPE (reg), 1));
726a989a 7064 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 7065 }
cd3ce9b4 7066
95674810 7067 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
7068 t = build2 (GE_EXPR, boolean_type_node, u, t);
7069 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7070 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7071 gimplify_and_add (t, pre_p);
7072
7073 t = sav;
7074 if (sav_ofs)
5be014d5 7075 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 7076
726a989a 7077 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 7078 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
7079 u = fold_convert (sizetype, u);
7080 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7081 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7082
e41b2a33
PB
7083 /* _Decimal32 varargs are located in the second word of the 64-bit
7084 FP register for 32-bit binaries. */
4f011e1e
JM
7085 if (!TARGET_POWERPC64
7086 && TARGET_HARD_FLOAT && TARGET_FPRS
7087 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7088 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7089
726a989a 7090 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7091
726a989a 7092 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7093
726a989a
RB
7094 stmt = gimple_build_label (lab_false);
7095 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7096
7393f7f8 7097 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7098 {
7099 /* Ensure that we don't find any more args in regs.
7393f7f8 7100 Alignment has taken care of for special cases. */
726a989a 7101 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7102 }
7103 }
7104
7105 /* ... otherwise out of the overflow area. */
7106
7107 /* Care for on-stack alignment if needed. */
7108 t = ovf;
7109 if (align != 1)
7110 {
5be014d5
AP
7111 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7112 t = fold_convert (sizetype, t);
4a90aeeb 7113 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7114 size_int (-align));
7115 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7116 }
7117 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7118
726a989a 7119 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7120
5be014d5 7121 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7122 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7123
7124 if (lab_over)
7125 {
726a989a
RB
7126 stmt = gimple_build_label (lab_over);
7127 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7128 }
7129
0cfbc62b
JM
7130 if (STRICT_ALIGNMENT
7131 && (TYPE_ALIGN (type)
7132 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7133 {
7134 /* The value (of type complex double, for example) may not be
7135 aligned in memory in the saved registers, so copy via a
7136 temporary. (This is the same code as used for SPARC.) */
7137 tree tmp = create_tmp_var (type, "va_arg_tmp");
7138 tree dest_addr = build_fold_addr_expr (tmp);
7139
5039610b
SL
7140 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7141 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7142
7143 gimplify_and_add (copy, pre_p);
7144 addr = dest_addr;
7145 }
7146
08b0dc1b 7147 addr = fold_convert (ptrtype, addr);
872a65b5 7148 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7149}
7150
0ac081f6
AH
7151/* Builtins. */
7152
58646b77
PB
7153static void
7154def_builtin (int mask, const char *name, tree type, int code)
7155{
96038623 7156 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7157 {
7158 if (rs6000_builtin_decls[code])
7159 abort ();
7160
7161 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7162 add_builtin_function (name, type, code, BUILT_IN_MD,
7163 NULL, NULL_TREE);
58646b77
PB
7164 }
7165}
0ac081f6 7166
24408032
AH
7167/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7168
2212663f 7169static const struct builtin_description bdesc_3arg[] =
24408032
AH
7170{
7171 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7172 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7173 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7174 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7175 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7176 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7177 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7178 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7179 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7180 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7181 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7182 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7183 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7184 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7185 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7186 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7187 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7188 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7189 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7190 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7191 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7192 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7193 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7194
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7210
7211 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7212 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7213 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7214 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7215 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7216 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7217 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7218 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7219 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7220};
2212663f 7221
95385cbb
AH
7222/* DST operations: void foo (void *, const int, const char). */
7223
7224static const struct builtin_description bdesc_dst[] =
7225{
7226 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7227 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7228 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7229 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7230
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7235};
7236
2212663f 7237/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7238
a3170dc6 7239static struct builtin_description bdesc_2arg[] =
0ac081f6 7240{
f18c054f
DB
7241 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7242 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7243 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7244 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7245 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7246 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7247 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7248 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7249 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7250 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7251 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7252 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7253 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7254 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7255 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7256 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7257 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7258 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7259 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7260 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7261 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7262 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7263 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7264 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7265 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7266 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7267 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7268 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7269 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7270 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7271 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7272 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7273 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7274 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7275 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7277 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7278 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7279 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7280 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7281 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7282 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7283 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7285 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7286 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7290 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7291 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7292 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7293 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7294 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7295 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7296 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7304 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7305 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7306 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7307 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7308 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7309 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7310 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7311 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7312 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7313 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7314 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7315 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7316 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7317 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7318 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7319 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7320 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7321 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7322 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7323 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7324 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7325 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7326 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7327 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7328 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7329 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7330 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7331 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7332 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7335 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7336 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7337 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7338 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7339 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7340 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7341 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7342 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7343 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7345 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7347 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7348 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7349 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7350 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7351 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7352
58646b77
PB
7353 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7354 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7355 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7356 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7357 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7358 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7359 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7360 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7361 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7362 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7363 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7364 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7365 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7366 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7367 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7368 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7369 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7370 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7371 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7372 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7373 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7374 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7375 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7376 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7377 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7378 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7480
96038623
DE
7481 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7482 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7483 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7484 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7485 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7486 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7487 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7488 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7489 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7490 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7491
a3170dc6
AH
7492 /* Place holder, leave as first spe builtin. */
7493 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7494 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7495 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7496 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7497 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7498 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7499 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7500 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7501 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7502 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7503 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7504 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7505 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7506 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7507 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7508 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7509 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7510 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7511 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7512 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7513 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7514 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7515 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7516 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7517 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7518 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7519 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7520 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7521 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7522 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7523 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7524 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7525 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7526 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7527 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7528 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7529 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7530 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7531 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7532 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7533 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7534 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7535 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7536 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7537 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7538 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7539 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7540 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7541 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7542 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7543 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7544 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7545 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7546 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7547 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7548 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7549 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7550 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7551 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7552 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7553 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7554 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7555 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7556 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7557 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7558 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7559 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7560 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7561 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7562 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7563 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7564 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7565 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7566 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7567 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7568 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7569 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7570 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7571 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7572 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7573 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7574 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7575 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7576 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7577 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7578 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7579 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7580 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7581 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7582 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7583 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7584 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7585 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7586 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7587 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7588 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7589 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7590 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7591 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7592 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7593 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7594 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7595 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7596 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7597 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7598 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7599 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7600 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7601 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7602
7603 /* SPE binary operations expecting a 5-bit unsigned literal. */
7604 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7605
7606 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7607 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7608 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7609 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7610 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7611 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7612 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7613 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7614 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7615 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7616 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7617 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7618 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7619 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7620 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7621 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7622 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7623 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7624 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7625 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7626 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7627 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7628 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7629 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7630 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7631 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7632
7633 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7634 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7635};
7636
7637/* AltiVec predicates. */
7638
7639struct builtin_description_predicates
7640{
7641 const unsigned int mask;
7642 const enum insn_code icode;
7643 const char *opcode;
7644 const char *const name;
7645 const enum rs6000_builtins code;
7646};
7647
7648static const struct builtin_description_predicates bdesc_altivec_preds[] =
7649{
7650 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7651 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7652 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7653 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7654 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7655 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7656 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7657 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7658 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7659 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7660 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7661 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7662 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7663
7664 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7665 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7666 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7667};
24408032 7668
a3170dc6
AH
7669/* SPE predicates. */
7670static struct builtin_description bdesc_spe_predicates[] =
7671{
7672 /* Place-holder. Leave as first. */
7673 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7674 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7675 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7676 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7677 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7678 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7679 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7680 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7681 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7682 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7683 /* Place-holder. Leave as last. */
7684 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7685};
7686
7687/* SPE evsel predicates. */
7688static struct builtin_description bdesc_spe_evsel[] =
7689{
7690 /* Place-holder. Leave as first. */
7691 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7692 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7693 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7694 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7695 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7696 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7697 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7698 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7699 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7700 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7701 /* Place-holder. Leave as last. */
7702 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7703};
7704
96038623
DE
7705/* PAIRED predicates. */
7706static const struct builtin_description bdesc_paired_preds[] =
7707{
7708 /* Place-holder. Leave as first. */
7709 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7710 /* Place-holder. Leave as last. */
7711 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7712};
7713
b6d08ca1 7714/* ABS* operations. */
100c4561
AH
7715
7716static const struct builtin_description bdesc_abs[] =
7717{
7718 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7719 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7720 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7721 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7722 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7723 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7724 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7725};
7726
617e0e1d
DB
7727/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7728 foo (VECa). */
24408032 7729
a3170dc6 7730static struct builtin_description bdesc_1arg[] =
2212663f 7731{
617e0e1d
DB
7732 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7733 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7734 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7735 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7736 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7737 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7738 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7739 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7740 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7741 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7742 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7743 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7744 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7745 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7746 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7747 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7748 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7749
58646b77
PB
7750 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7751 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7752 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7753 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7754 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7755 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7756 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7757 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7758 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7759 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7760 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7761 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7762 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7763 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7764 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7765 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7766 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7767 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7768 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7769
a3170dc6
AH
7770 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7771 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7772 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7773 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7774 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7775 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7776 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7777 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7778 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7779 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7780 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7781 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7782 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7783 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7784 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7785 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7786 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7787 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7788 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7789 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7790 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7791 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7792 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7793 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7794 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7795 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7796 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7797 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7798 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7799 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7800
7801 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7802 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7803
7804 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7805 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7806 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7807 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7808 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7809};
7810
7811static rtx
5039610b 7812rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7813{
7814 rtx pat;
5039610b 7815 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7816 rtx op0 = expand_normal (arg0);
2212663f
DB
7817 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7818 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7819
0559cc77
DE
7820 if (icode == CODE_FOR_nothing)
7821 /* Builtin not supported on this processor. */
7822 return 0;
7823
20e26713
AH
7824 /* If we got invalid arguments bail out before generating bad rtl. */
7825 if (arg0 == error_mark_node)
9a171fcd 7826 return const0_rtx;
20e26713 7827
0559cc77
DE
7828 if (icode == CODE_FOR_altivec_vspltisb
7829 || icode == CODE_FOR_altivec_vspltish
7830 || icode == CODE_FOR_altivec_vspltisw
7831 || icode == CODE_FOR_spe_evsplatfi
7832 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7833 {
7834 /* Only allow 5-bit *signed* literals. */
b44140e7 7835 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7836 || INTVAL (op0) > 15
7837 || INTVAL (op0) < -16)
b44140e7
AH
7838 {
7839 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7840 return const0_rtx;
b44140e7 7841 }
b44140e7
AH
7842 }
7843
c62f2db5 7844 if (target == 0
2212663f
DB
7845 || GET_MODE (target) != tmode
7846 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7847 target = gen_reg_rtx (tmode);
7848
7849 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7850 op0 = copy_to_mode_reg (mode0, op0);
7851
7852 pat = GEN_FCN (icode) (target, op0);
7853 if (! pat)
7854 return 0;
7855 emit_insn (pat);
0ac081f6 7856
2212663f
DB
7857 return target;
7858}
ae4b4a02 7859
100c4561 7860static rtx
5039610b 7861altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7862{
7863 rtx pat, scratch1, scratch2;
5039610b 7864 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7865 rtx op0 = expand_normal (arg0);
100c4561
AH
7866 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7867 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7868
7869 /* If we have invalid arguments, bail out before generating bad rtl. */
7870 if (arg0 == error_mark_node)
9a171fcd 7871 return const0_rtx;
100c4561
AH
7872
7873 if (target == 0
7874 || GET_MODE (target) != tmode
7875 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7876 target = gen_reg_rtx (tmode);
7877
7878 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7879 op0 = copy_to_mode_reg (mode0, op0);
7880
7881 scratch1 = gen_reg_rtx (mode0);
7882 scratch2 = gen_reg_rtx (mode0);
7883
7884 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7885 if (! pat)
7886 return 0;
7887 emit_insn (pat);
7888
7889 return target;
7890}
7891
0ac081f6 7892static rtx
5039610b 7893rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7894{
7895 rtx pat;
5039610b
SL
7896 tree arg0 = CALL_EXPR_ARG (exp, 0);
7897 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7898 rtx op0 = expand_normal (arg0);
7899 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7900 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7901 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7902 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7903
0559cc77
DE
7904 if (icode == CODE_FOR_nothing)
7905 /* Builtin not supported on this processor. */
7906 return 0;
7907
20e26713
AH
7908 /* If we got invalid arguments bail out before generating bad rtl. */
7909 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7910 return const0_rtx;
20e26713 7911
0559cc77
DE
7912 if (icode == CODE_FOR_altivec_vcfux
7913 || icode == CODE_FOR_altivec_vcfsx
7914 || icode == CODE_FOR_altivec_vctsxs
7915 || icode == CODE_FOR_altivec_vctuxs
7916 || icode == CODE_FOR_altivec_vspltb
7917 || icode == CODE_FOR_altivec_vsplth
7918 || icode == CODE_FOR_altivec_vspltw
7919 || icode == CODE_FOR_spe_evaddiw
7920 || icode == CODE_FOR_spe_evldd
7921 || icode == CODE_FOR_spe_evldh
7922 || icode == CODE_FOR_spe_evldw
7923 || icode == CODE_FOR_spe_evlhhesplat
7924 || icode == CODE_FOR_spe_evlhhossplat
7925 || icode == CODE_FOR_spe_evlhhousplat
7926 || icode == CODE_FOR_spe_evlwhe
7927 || icode == CODE_FOR_spe_evlwhos
7928 || icode == CODE_FOR_spe_evlwhou
7929 || icode == CODE_FOR_spe_evlwhsplat
7930 || icode == CODE_FOR_spe_evlwwsplat
7931 || icode == CODE_FOR_spe_evrlwi
7932 || icode == CODE_FOR_spe_evslwi
7933 || icode == CODE_FOR_spe_evsrwis
f5119d10 7934 || icode == CODE_FOR_spe_evsubifw
0559cc77 7935 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7936 {
7937 /* Only allow 5-bit unsigned literals. */
8bb418a3 7938 STRIP_NOPS (arg1);
b44140e7
AH
7939 if (TREE_CODE (arg1) != INTEGER_CST
7940 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7941 {
7942 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7943 return const0_rtx;
b44140e7 7944 }
b44140e7
AH
7945 }
7946
c62f2db5 7947 if (target == 0
0ac081f6
AH
7948 || GET_MODE (target) != tmode
7949 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7950 target = gen_reg_rtx (tmode);
7951
7952 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7953 op0 = copy_to_mode_reg (mode0, op0);
7954 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7955 op1 = copy_to_mode_reg (mode1, op1);
7956
7957 pat = GEN_FCN (icode) (target, op0, op1);
7958 if (! pat)
7959 return 0;
7960 emit_insn (pat);
7961
7962 return target;
7963}
6525c0e7 7964
ae4b4a02 7965static rtx
f676971a 7966altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7967 tree exp, rtx target)
ae4b4a02
AH
7968{
7969 rtx pat, scratch;
5039610b
SL
7970 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7971 tree arg0 = CALL_EXPR_ARG (exp, 1);
7972 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7973 rtx op0 = expand_normal (arg0);
7974 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7975 enum machine_mode tmode = SImode;
7976 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7977 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7978 int cr6_form_int;
7979
7980 if (TREE_CODE (cr6_form) != INTEGER_CST)
7981 {
7982 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7983 return const0_rtx;
ae4b4a02
AH
7984 }
7985 else
7986 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7987
37409796 7988 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7989
7990 /* If we have invalid arguments, bail out before generating bad rtl. */
7991 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7992 return const0_rtx;
ae4b4a02
AH
7993
7994 if (target == 0
7995 || GET_MODE (target) != tmode
7996 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7997 target = gen_reg_rtx (tmode);
7998
7999 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8000 op0 = copy_to_mode_reg (mode0, op0);
8001 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8002 op1 = copy_to_mode_reg (mode1, op1);
8003
8004 scratch = gen_reg_rtx (mode0);
8005
8006 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 8007 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
8008 if (! pat)
8009 return 0;
8010 emit_insn (pat);
8011
8012 /* The vec_any* and vec_all* predicates use the same opcodes for two
8013 different operations, but the bits in CR6 will be different
8014 depending on what information we want. So we have to play tricks
8015 with CR6 to get the right bits out.
8016
8017 If you think this is disgusting, look at the specs for the
8018 AltiVec predicates. */
8019
c4ad648e
AM
8020 switch (cr6_form_int)
8021 {
8022 case 0:
8023 emit_insn (gen_cr6_test_for_zero (target));
8024 break;
8025 case 1:
8026 emit_insn (gen_cr6_test_for_zero_reverse (target));
8027 break;
8028 case 2:
8029 emit_insn (gen_cr6_test_for_lt (target));
8030 break;
8031 case 3:
8032 emit_insn (gen_cr6_test_for_lt_reverse (target));
8033 break;
8034 default:
8035 error ("argument 1 of __builtin_altivec_predicate is out of range");
8036 break;
8037 }
ae4b4a02
AH
8038
8039 return target;
8040}
8041
96038623
DE
8042static rtx
8043paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8044{
8045 rtx pat, addr;
8046 tree arg0 = CALL_EXPR_ARG (exp, 0);
8047 tree arg1 = CALL_EXPR_ARG (exp, 1);
8048 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8049 enum machine_mode mode0 = Pmode;
8050 enum machine_mode mode1 = Pmode;
8051 rtx op0 = expand_normal (arg0);
8052 rtx op1 = expand_normal (arg1);
8053
8054 if (icode == CODE_FOR_nothing)
8055 /* Builtin not supported on this processor. */
8056 return 0;
8057
8058 /* If we got invalid arguments bail out before generating bad rtl. */
8059 if (arg0 == error_mark_node || arg1 == error_mark_node)
8060 return const0_rtx;
8061
8062 if (target == 0
8063 || GET_MODE (target) != tmode
8064 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8065 target = gen_reg_rtx (tmode);
8066
8067 op1 = copy_to_mode_reg (mode1, op1);
8068
8069 if (op0 == const0_rtx)
8070 {
8071 addr = gen_rtx_MEM (tmode, op1);
8072 }
8073 else
8074 {
8075 op0 = copy_to_mode_reg (mode0, op0);
8076 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8077 }
8078
8079 pat = GEN_FCN (icode) (target, addr);
8080
8081 if (! pat)
8082 return 0;
8083 emit_insn (pat);
8084
8085 return target;
8086}
8087
b4a62fa0 8088static rtx
0b61703c 8089altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
b4a62fa0
SB
8090{
8091 rtx pat, addr;
5039610b
SL
8092 tree arg0 = CALL_EXPR_ARG (exp, 0);
8093 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
8094 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8095 enum machine_mode mode0 = Pmode;
8096 enum machine_mode mode1 = Pmode;
84217346
MD
8097 rtx op0 = expand_normal (arg0);
8098 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
8099
8100 if (icode == CODE_FOR_nothing)
8101 /* Builtin not supported on this processor. */
8102 return 0;
8103
8104 /* If we got invalid arguments bail out before generating bad rtl. */
8105 if (arg0 == error_mark_node || arg1 == error_mark_node)
8106 return const0_rtx;
8107
8108 if (target == 0
8109 || GET_MODE (target) != tmode
8110 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8111 target = gen_reg_rtx (tmode);
8112
f676971a 8113 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
8114
8115 if (op0 == const0_rtx)
8116 {
0b61703c 8117 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
b4a62fa0
SB
8118 }
8119 else
8120 {
8121 op0 = copy_to_mode_reg (mode0, op0);
0b61703c 8122 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
b4a62fa0
SB
8123 }
8124
8125 pat = GEN_FCN (icode) (target, addr);
8126
8127 if (! pat)
8128 return 0;
8129 emit_insn (pat);
8130
8131 return target;
8132}
8133
61bea3b0 8134static rtx
5039610b 8135spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8136{
5039610b
SL
8137 tree arg0 = CALL_EXPR_ARG (exp, 0);
8138 tree arg1 = CALL_EXPR_ARG (exp, 1);
8139 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8140 rtx op0 = expand_normal (arg0);
8141 rtx op1 = expand_normal (arg1);
8142 rtx op2 = expand_normal (arg2);
61bea3b0
AH
8143 rtx pat;
8144 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8145 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8146 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8147
8148 /* Invalid arguments. Bail before doing anything stoopid! */
8149 if (arg0 == error_mark_node
8150 || arg1 == error_mark_node
8151 || arg2 == error_mark_node)
8152 return const0_rtx;
8153
8154 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8155 op0 = copy_to_mode_reg (mode2, op0);
8156 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8157 op1 = copy_to_mode_reg (mode0, op1);
8158 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8159 op2 = copy_to_mode_reg (mode1, op2);
8160
8161 pat = GEN_FCN (icode) (op1, op2, op0);
8162 if (pat)
8163 emit_insn (pat);
8164 return NULL_RTX;
8165}
8166
96038623
DE
8167static rtx
8168paired_expand_stv_builtin (enum insn_code icode, tree exp)
8169{
8170 tree arg0 = CALL_EXPR_ARG (exp, 0);
8171 tree arg1 = CALL_EXPR_ARG (exp, 1);
8172 tree arg2 = CALL_EXPR_ARG (exp, 2);
8173 rtx op0 = expand_normal (arg0);
8174 rtx op1 = expand_normal (arg1);
8175 rtx op2 = expand_normal (arg2);
8176 rtx pat, addr;
8177 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8178 enum machine_mode mode1 = Pmode;
8179 enum machine_mode mode2 = Pmode;
8180
8181 /* Invalid arguments. Bail before doing anything stoopid! */
8182 if (arg0 == error_mark_node
8183 || arg1 == error_mark_node
8184 || arg2 == error_mark_node)
8185 return const0_rtx;
8186
8187 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8188 op0 = copy_to_mode_reg (tmode, op0);
8189
8190 op2 = copy_to_mode_reg (mode2, op2);
8191
8192 if (op1 == const0_rtx)
8193 {
8194 addr = gen_rtx_MEM (tmode, op2);
8195 }
8196 else
8197 {
8198 op1 = copy_to_mode_reg (mode1, op1);
8199 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8200 }
8201
8202 pat = GEN_FCN (icode) (addr, op0);
8203 if (pat)
8204 emit_insn (pat);
8205 return NULL_RTX;
8206}
8207
6525c0e7 8208static rtx
5039610b 8209altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8210{
5039610b
SL
8211 tree arg0 = CALL_EXPR_ARG (exp, 0);
8212 tree arg1 = CALL_EXPR_ARG (exp, 1);
8213 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8214 rtx op0 = expand_normal (arg0);
8215 rtx op1 = expand_normal (arg1);
8216 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8217 rtx pat, addr;
8218 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8219 enum machine_mode mode1 = Pmode;
8220 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8221
8222 /* Invalid arguments. Bail before doing anything stoopid! */
8223 if (arg0 == error_mark_node
8224 || arg1 == error_mark_node
8225 || arg2 == error_mark_node)
9a171fcd 8226 return const0_rtx;
6525c0e7 8227
b4a62fa0
SB
8228 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8229 op0 = copy_to_mode_reg (tmode, op0);
8230
f676971a 8231 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8232
8233 if (op1 == const0_rtx)
8234 {
8235 addr = gen_rtx_MEM (tmode, op2);
8236 }
8237 else
8238 {
8239 op1 = copy_to_mode_reg (mode1, op1);
8240 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8241 }
6525c0e7 8242
b4a62fa0 8243 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8244 if (pat)
8245 emit_insn (pat);
8246 return NULL_RTX;
8247}
8248
2212663f 8249static rtx
5039610b 8250rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8251{
8252 rtx pat;
5039610b
SL
8253 tree arg0 = CALL_EXPR_ARG (exp, 0);
8254 tree arg1 = CALL_EXPR_ARG (exp, 1);
8255 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8256 rtx op0 = expand_normal (arg0);
8257 rtx op1 = expand_normal (arg1);
8258 rtx op2 = expand_normal (arg2);
2212663f
DB
8259 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8260 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8261 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8262 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8263
774b5662
DE
8264 if (icode == CODE_FOR_nothing)
8265 /* Builtin not supported on this processor. */
8266 return 0;
8267
20e26713
AH
8268 /* If we got invalid arguments bail out before generating bad rtl. */
8269 if (arg0 == error_mark_node
8270 || arg1 == error_mark_node
8271 || arg2 == error_mark_node)
9a171fcd 8272 return const0_rtx;
20e26713 8273
aba5fb01
NS
8274 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8275 || icode == CODE_FOR_altivec_vsldoi_v4si
8276 || icode == CODE_FOR_altivec_vsldoi_v8hi
8277 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8278 {
8279 /* Only allow 4-bit unsigned literals. */
8bb418a3 8280 STRIP_NOPS (arg2);
b44140e7
AH
8281 if (TREE_CODE (arg2) != INTEGER_CST
8282 || TREE_INT_CST_LOW (arg2) & ~0xf)
8283 {
8284 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8285 return const0_rtx;
b44140e7 8286 }
b44140e7
AH
8287 }
8288
c62f2db5 8289 if (target == 0
2212663f
DB
8290 || GET_MODE (target) != tmode
8291 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8292 target = gen_reg_rtx (tmode);
8293
8294 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8295 op0 = copy_to_mode_reg (mode0, op0);
8296 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8297 op1 = copy_to_mode_reg (mode1, op1);
8298 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8299 op2 = copy_to_mode_reg (mode2, op2);
8300
49e39588
RE
8301 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8302 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8303 else
8304 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8305 if (! pat)
8306 return 0;
8307 emit_insn (pat);
8308
8309 return target;
8310}
92898235 8311
3a9b8c7e 8312/* Expand the lvx builtins. */
0ac081f6 8313static rtx
a2369ed3 8314altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8315{
5039610b 8316 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8317 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8318 tree arg0;
8319 enum machine_mode tmode, mode0;
7c3abc73 8320 rtx pat, op0;
3a9b8c7e 8321 enum insn_code icode;
92898235 8322
0ac081f6
AH
8323 switch (fcode)
8324 {
f18c054f 8325 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8326 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8327 break;
f18c054f 8328 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8329 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8330 break;
8331 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8332 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8333 break;
8334 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8335 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8336 break;
8337 default:
8338 *expandedp = false;
8339 return NULL_RTX;
8340 }
0ac081f6 8341
3a9b8c7e 8342 *expandedp = true;
f18c054f 8343
5039610b 8344 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8345 op0 = expand_normal (arg0);
3a9b8c7e
AH
8346 tmode = insn_data[icode].operand[0].mode;
8347 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8348
3a9b8c7e
AH
8349 if (target == 0
8350 || GET_MODE (target) != tmode
8351 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8352 target = gen_reg_rtx (tmode);
24408032 8353
3a9b8c7e
AH
8354 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8355 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8356
3a9b8c7e
AH
8357 pat = GEN_FCN (icode) (target, op0);
8358 if (! pat)
8359 return 0;
8360 emit_insn (pat);
8361 return target;
8362}
f18c054f 8363
3a9b8c7e
AH
8364/* Expand the stvx builtins. */
8365static rtx
f676971a 8366altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8367 bool *expandedp)
3a9b8c7e 8368{
5039610b 8369 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8370 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8371 tree arg0, arg1;
8372 enum machine_mode mode0, mode1;
7c3abc73 8373 rtx pat, op0, op1;
3a9b8c7e 8374 enum insn_code icode;
f18c054f 8375
3a9b8c7e
AH
8376 switch (fcode)
8377 {
8378 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8379 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8380 break;
8381 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8382 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8383 break;
8384 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8385 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8386 break;
8387 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8388 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8389 break;
8390 default:
8391 *expandedp = false;
8392 return NULL_RTX;
8393 }
24408032 8394
5039610b
SL
8395 arg0 = CALL_EXPR_ARG (exp, 0);
8396 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8397 op0 = expand_normal (arg0);
8398 op1 = expand_normal (arg1);
3a9b8c7e
AH
8399 mode0 = insn_data[icode].operand[0].mode;
8400 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8401
3a9b8c7e
AH
8402 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8403 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8404 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8405 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8406
3a9b8c7e
AH
8407 pat = GEN_FCN (icode) (op0, op1);
8408 if (pat)
8409 emit_insn (pat);
f18c054f 8410
3a9b8c7e
AH
8411 *expandedp = true;
8412 return NULL_RTX;
8413}
f18c054f 8414
3a9b8c7e
AH
8415/* Expand the dst builtins. */
8416static rtx
f676971a 8417altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8418 bool *expandedp)
3a9b8c7e 8419{
5039610b 8420 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8421 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8422 tree arg0, arg1, arg2;
8423 enum machine_mode mode0, mode1, mode2;
7c3abc73 8424 rtx pat, op0, op1, op2;
586de218 8425 const struct builtin_description *d;
a3170dc6 8426 size_t i;
f18c054f 8427
3a9b8c7e 8428 *expandedp = false;
f18c054f 8429
3a9b8c7e 8430 /* Handle DST variants. */
586de218 8431 d = bdesc_dst;
3a9b8c7e
AH
8432 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8433 if (d->code == fcode)
8434 {
5039610b
SL
8435 arg0 = CALL_EXPR_ARG (exp, 0);
8436 arg1 = CALL_EXPR_ARG (exp, 1);
8437 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8438 op0 = expand_normal (arg0);
8439 op1 = expand_normal (arg1);
8440 op2 = expand_normal (arg2);
3a9b8c7e
AH
8441 mode0 = insn_data[d->icode].operand[0].mode;
8442 mode1 = insn_data[d->icode].operand[1].mode;
8443 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8444
3a9b8c7e
AH
8445 /* Invalid arguments, bail out before generating bad rtl. */
8446 if (arg0 == error_mark_node
8447 || arg1 == error_mark_node
8448 || arg2 == error_mark_node)
8449 return const0_rtx;
f18c054f 8450
86e7df90 8451 *expandedp = true;
8bb418a3 8452 STRIP_NOPS (arg2);
3a9b8c7e
AH
8453 if (TREE_CODE (arg2) != INTEGER_CST
8454 || TREE_INT_CST_LOW (arg2) & ~0x3)
8455 {
9e637a26 8456 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8457 return const0_rtx;
8458 }
f18c054f 8459
3a9b8c7e 8460 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8461 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8462 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8463 op1 = copy_to_mode_reg (mode1, op1);
24408032 8464
3a9b8c7e
AH
8465 pat = GEN_FCN (d->icode) (op0, op1, op2);
8466 if (pat != 0)
8467 emit_insn (pat);
f18c054f 8468
3a9b8c7e
AH
8469 return NULL_RTX;
8470 }
f18c054f 8471
3a9b8c7e
AH
8472 return NULL_RTX;
8473}
24408032 8474
7a4eca66
DE
8475/* Expand vec_init builtin. */
8476static rtx
5039610b 8477altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8478{
8479 enum machine_mode tmode = TYPE_MODE (type);
8480 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8481 int i, n_elt = GET_MODE_NUNITS (tmode);
8482 rtvec v = rtvec_alloc (n_elt);
8483
8484 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8485 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8486
5039610b 8487 for (i = 0; i < n_elt; ++i)
7a4eca66 8488 {
5039610b 8489 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8490 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8491 }
8492
7a4eca66
DE
8493 if (!target || !register_operand (target, tmode))
8494 target = gen_reg_rtx (tmode);
8495
8496 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8497 return target;
8498}
8499
8500/* Return the integer constant in ARG. Constrain it to be in the range
8501 of the subparts of VEC_TYPE; issue an error if not. */
8502
8503static int
8504get_element_number (tree vec_type, tree arg)
8505{
8506 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8507
8508 if (!host_integerp (arg, 1)
8509 || (elt = tree_low_cst (arg, 1), elt > max))
8510 {
8511 error ("selector must be an integer constant in the range 0..%wi", max);
8512 return 0;
8513 }
8514
8515 return elt;
8516}
8517
8518/* Expand vec_set builtin. */
8519static rtx
5039610b 8520altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8521{
8522 enum machine_mode tmode, mode1;
8523 tree arg0, arg1, arg2;
8524 int elt;
8525 rtx op0, op1;
8526
5039610b
SL
8527 arg0 = CALL_EXPR_ARG (exp, 0);
8528 arg1 = CALL_EXPR_ARG (exp, 1);
8529 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8530
8531 tmode = TYPE_MODE (TREE_TYPE (arg0));
8532 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8533 gcc_assert (VECTOR_MODE_P (tmode));
8534
8535 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8536 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8537 elt = get_element_number (TREE_TYPE (arg0), arg2);
8538
8539 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8540 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8541
8542 op0 = force_reg (tmode, op0);
8543 op1 = force_reg (mode1, op1);
8544
8545 rs6000_expand_vector_set (op0, op1, elt);
8546
8547 return op0;
8548}
8549
8550/* Expand vec_ext builtin. */
8551static rtx
5039610b 8552altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8553{
8554 enum machine_mode tmode, mode0;
8555 tree arg0, arg1;
8556 int elt;
8557 rtx op0;
8558
5039610b
SL
8559 arg0 = CALL_EXPR_ARG (exp, 0);
8560 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8561
84217346 8562 op0 = expand_normal (arg0);
7a4eca66
DE
8563 elt = get_element_number (TREE_TYPE (arg0), arg1);
8564
8565 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8566 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8567 gcc_assert (VECTOR_MODE_P (mode0));
8568
8569 op0 = force_reg (mode0, op0);
8570
8571 if (optimize || !target || !register_operand (target, tmode))
8572 target = gen_reg_rtx (tmode);
8573
8574 rs6000_expand_vector_extract (target, op0, elt);
8575
8576 return target;
8577}
8578
3a9b8c7e
AH
8579/* Expand the builtin in EXP and store the result in TARGET. Store
8580 true in *EXPANDEDP if we found a builtin to expand. */
8581static rtx
a2369ed3 8582altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8583{
586de218
KG
8584 const struct builtin_description *d;
8585 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8586 size_t i;
8587 enum insn_code icode;
5039610b 8588 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8589 tree arg0;
8590 rtx op0, pat;
8591 enum machine_mode tmode, mode0;
3a9b8c7e 8592 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8593
58646b77
PB
8594 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8595 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8596 {
8597 *expandedp = true;
ea40ba9c 8598 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8599 return const0_rtx;
8600 }
8601
3a9b8c7e
AH
8602 target = altivec_expand_ld_builtin (exp, target, expandedp);
8603 if (*expandedp)
8604 return target;
0ac081f6 8605
3a9b8c7e
AH
8606 target = altivec_expand_st_builtin (exp, target, expandedp);
8607 if (*expandedp)
8608 return target;
8609
8610 target = altivec_expand_dst_builtin (exp, target, expandedp);
8611 if (*expandedp)
8612 return target;
8613
8614 *expandedp = true;
95385cbb 8615
3a9b8c7e
AH
8616 switch (fcode)
8617 {
6525c0e7 8618 case ALTIVEC_BUILTIN_STVX:
5039610b 8619 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8620 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8621 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8622 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8623 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8624 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8625 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8626 case ALTIVEC_BUILTIN_STVXL:
5039610b 8627 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8628
0b61703c
AP
8629 case ALTIVEC_BUILTIN_STVLX:
8630 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8631 case ALTIVEC_BUILTIN_STVLXL:
8632 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8633 case ALTIVEC_BUILTIN_STVRX:
8634 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8635 case ALTIVEC_BUILTIN_STVRXL:
8636 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8637
95385cbb
AH
8638 case ALTIVEC_BUILTIN_MFVSCR:
8639 icode = CODE_FOR_altivec_mfvscr;
8640 tmode = insn_data[icode].operand[0].mode;
8641
8642 if (target == 0
8643 || GET_MODE (target) != tmode
8644 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8645 target = gen_reg_rtx (tmode);
f676971a 8646
95385cbb 8647 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8648 if (! pat)
8649 return 0;
8650 emit_insn (pat);
95385cbb
AH
8651 return target;
8652
8653 case ALTIVEC_BUILTIN_MTVSCR:
8654 icode = CODE_FOR_altivec_mtvscr;
5039610b 8655 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8656 op0 = expand_normal (arg0);
95385cbb
AH
8657 mode0 = insn_data[icode].operand[0].mode;
8658
8659 /* If we got invalid arguments bail out before generating bad rtl. */
8660 if (arg0 == error_mark_node)
9a171fcd 8661 return const0_rtx;
95385cbb
AH
8662
8663 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8664 op0 = copy_to_mode_reg (mode0, op0);
8665
8666 pat = GEN_FCN (icode) (op0);
8667 if (pat)
8668 emit_insn (pat);
8669 return NULL_RTX;
3a9b8c7e 8670
95385cbb
AH
8671 case ALTIVEC_BUILTIN_DSSALL:
8672 emit_insn (gen_altivec_dssall ());
8673 return NULL_RTX;
8674
8675 case ALTIVEC_BUILTIN_DSS:
8676 icode = CODE_FOR_altivec_dss;
5039610b 8677 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8678 STRIP_NOPS (arg0);
84217346 8679 op0 = expand_normal (arg0);
95385cbb
AH
8680 mode0 = insn_data[icode].operand[0].mode;
8681
8682 /* If we got invalid arguments bail out before generating bad rtl. */
8683 if (arg0 == error_mark_node)
9a171fcd 8684 return const0_rtx;
95385cbb 8685
b44140e7
AH
8686 if (TREE_CODE (arg0) != INTEGER_CST
8687 || TREE_INT_CST_LOW (arg0) & ~0x3)
8688 {
8689 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8690 return const0_rtx;
b44140e7
AH
8691 }
8692
95385cbb
AH
8693 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8694 op0 = copy_to_mode_reg (mode0, op0);
8695
8696 emit_insn (gen_altivec_dss (op0));
0ac081f6 8697 return NULL_RTX;
7a4eca66
DE
8698
8699 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8700 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8701 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8702 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8703 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8704
8705 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8706 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8707 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8708 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8709 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8710
8711 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8712 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8713 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8714 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8715 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8716
8717 default:
8718 break;
8719 /* Fall through. */
0ac081f6 8720 }
24408032 8721
100c4561 8722 /* Expand abs* operations. */
586de218 8723 d = bdesc_abs;
ca7558fc 8724 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8725 if (d->code == fcode)
5039610b 8726 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8727
ae4b4a02 8728 /* Expand the AltiVec predicates. */
586de218 8729 dp = bdesc_altivec_preds;
ca7558fc 8730 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8731 if (dp->code == fcode)
c4ad648e 8732 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8733 exp, target);
ae4b4a02 8734
6525c0e7
AH
8735 /* LV* are funky. We initialized them differently. */
8736 switch (fcode)
8737 {
8738 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8739 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
0b61703c 8740 exp, target, false);
6525c0e7 8741 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8742 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
0b61703c 8743 exp, target, false);
6525c0e7 8744 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8745 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
0b61703c 8746 exp, target, false);
6525c0e7 8747 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8748 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
0b61703c 8749 exp, target, false);
6525c0e7 8750 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8751 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
0b61703c 8752 exp, target, false);
6525c0e7 8753 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8754 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
0b61703c 8755 exp, target, false);
6525c0e7 8756 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8757 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
0b61703c
AP
8758 exp, target, false);
8759 case ALTIVEC_BUILTIN_LVLX:
8760 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8761 exp, target, true);
8762 case ALTIVEC_BUILTIN_LVLXL:
8763 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8764 exp, target, true);
8765 case ALTIVEC_BUILTIN_LVRX:
8766 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8767 exp, target, true);
8768 case ALTIVEC_BUILTIN_LVRXL:
8769 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8770 exp, target, true);
6525c0e7
AH
8771 default:
8772 break;
8773 /* Fall through. */
8774 }
95385cbb 8775
92898235 8776 *expandedp = false;
0ac081f6
AH
8777 return NULL_RTX;
8778}
8779
96038623
DE
8780/* Expand the builtin in EXP and store the result in TARGET. Store
8781 true in *EXPANDEDP if we found a builtin to expand. */
8782static rtx
8783paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8784{
8785 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8786 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8787 const struct builtin_description *d;
96038623
DE
8788 size_t i;
8789
8790 *expandedp = true;
8791
8792 switch (fcode)
8793 {
8794 case PAIRED_BUILTIN_STX:
8795 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8796 case PAIRED_BUILTIN_LX:
8797 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8798 default:
8799 break;
8800 /* Fall through. */
8801 }
8802
8803 /* Expand the paired predicates. */
23a651fc 8804 d = bdesc_paired_preds;
96038623
DE
8805 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8806 if (d->code == fcode)
8807 return paired_expand_predicate_builtin (d->icode, exp, target);
8808
8809 *expandedp = false;
8810 return NULL_RTX;
8811}
8812
a3170dc6
AH
8813/* Binops that need to be initialized manually, but can be expanded
8814 automagically by rs6000_expand_binop_builtin. */
8815static struct builtin_description bdesc_2arg_spe[] =
8816{
8817 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8818 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8819 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8820 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8821 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8822 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8823 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8824 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8825 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8826 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8827 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8828 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8829 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8830 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8831 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8832 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8833 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8834 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8835 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8836 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8837 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8838 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8839};
8840
8841/* Expand the builtin in EXP and store the result in TARGET. Store
8842 true in *EXPANDEDP if we found a builtin to expand.
8843
8844 This expands the SPE builtins that are not simple unary and binary
8845 operations. */
8846static rtx
a2369ed3 8847spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8848{
5039610b 8849 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8850 tree arg1, arg0;
8851 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8852 enum insn_code icode;
8853 enum machine_mode tmode, mode0;
8854 rtx pat, op0;
8855 struct builtin_description *d;
8856 size_t i;
8857
8858 *expandedp = true;
8859
8860 /* Syntax check for a 5-bit unsigned immediate. */
8861 switch (fcode)
8862 {
8863 case SPE_BUILTIN_EVSTDD:
8864 case SPE_BUILTIN_EVSTDH:
8865 case SPE_BUILTIN_EVSTDW:
8866 case SPE_BUILTIN_EVSTWHE:
8867 case SPE_BUILTIN_EVSTWHO:
8868 case SPE_BUILTIN_EVSTWWE:
8869 case SPE_BUILTIN_EVSTWWO:
5039610b 8870 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8871 if (TREE_CODE (arg1) != INTEGER_CST
8872 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8873 {
8874 error ("argument 2 must be a 5-bit unsigned literal");
8875 return const0_rtx;
8876 }
8877 break;
8878 default:
8879 break;
8880 }
8881
00332c9f
AH
8882 /* The evsplat*i instructions are not quite generic. */
8883 switch (fcode)
8884 {
8885 case SPE_BUILTIN_EVSPLATFI:
8886 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8887 exp, target);
00332c9f
AH
8888 case SPE_BUILTIN_EVSPLATI:
8889 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8890 exp, target);
00332c9f
AH
8891 default:
8892 break;
8893 }
8894
a3170dc6
AH
8895 d = (struct builtin_description *) bdesc_2arg_spe;
8896 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8897 if (d->code == fcode)
5039610b 8898 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8899
8900 d = (struct builtin_description *) bdesc_spe_predicates;
8901 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8902 if (d->code == fcode)
5039610b 8903 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8904
8905 d = (struct builtin_description *) bdesc_spe_evsel;
8906 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8907 if (d->code == fcode)
5039610b 8908 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8909
8910 switch (fcode)
8911 {
8912 case SPE_BUILTIN_EVSTDDX:
5039610b 8913 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8914 case SPE_BUILTIN_EVSTDHX:
5039610b 8915 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8916 case SPE_BUILTIN_EVSTDWX:
5039610b 8917 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8918 case SPE_BUILTIN_EVSTWHEX:
5039610b 8919 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8920 case SPE_BUILTIN_EVSTWHOX:
5039610b 8921 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8922 case SPE_BUILTIN_EVSTWWEX:
5039610b 8923 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8924 case SPE_BUILTIN_EVSTWWOX:
5039610b 8925 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8926 case SPE_BUILTIN_EVSTDD:
5039610b 8927 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8928 case SPE_BUILTIN_EVSTDH:
5039610b 8929 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8930 case SPE_BUILTIN_EVSTDW:
5039610b 8931 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8932 case SPE_BUILTIN_EVSTWHE:
5039610b 8933 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8934 case SPE_BUILTIN_EVSTWHO:
5039610b 8935 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8936 case SPE_BUILTIN_EVSTWWE:
5039610b 8937 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8938 case SPE_BUILTIN_EVSTWWO:
5039610b 8939 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8940 case SPE_BUILTIN_MFSPEFSCR:
8941 icode = CODE_FOR_spe_mfspefscr;
8942 tmode = insn_data[icode].operand[0].mode;
8943
8944 if (target == 0
8945 || GET_MODE (target) != tmode
8946 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8947 target = gen_reg_rtx (tmode);
f676971a 8948
a3170dc6
AH
8949 pat = GEN_FCN (icode) (target);
8950 if (! pat)
8951 return 0;
8952 emit_insn (pat);
8953 return target;
8954 case SPE_BUILTIN_MTSPEFSCR:
8955 icode = CODE_FOR_spe_mtspefscr;
5039610b 8956 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8957 op0 = expand_normal (arg0);
a3170dc6
AH
8958 mode0 = insn_data[icode].operand[0].mode;
8959
8960 if (arg0 == error_mark_node)
8961 return const0_rtx;
8962
8963 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8964 op0 = copy_to_mode_reg (mode0, op0);
8965
8966 pat = GEN_FCN (icode) (op0);
8967 if (pat)
8968 emit_insn (pat);
8969 return NULL_RTX;
8970 default:
8971 break;
8972 }
8973
8974 *expandedp = false;
8975 return NULL_RTX;
8976}
8977
96038623
DE
8978static rtx
8979paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8980{
8981 rtx pat, scratch, tmp;
8982 tree form = CALL_EXPR_ARG (exp, 0);
8983 tree arg0 = CALL_EXPR_ARG (exp, 1);
8984 tree arg1 = CALL_EXPR_ARG (exp, 2);
8985 rtx op0 = expand_normal (arg0);
8986 rtx op1 = expand_normal (arg1);
8987 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8988 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8989 int form_int;
8990 enum rtx_code code;
8991
8992 if (TREE_CODE (form) != INTEGER_CST)
8993 {
8994 error ("argument 1 of __builtin_paired_predicate must be a constant");
8995 return const0_rtx;
8996 }
8997 else
8998 form_int = TREE_INT_CST_LOW (form);
8999
9000 gcc_assert (mode0 == mode1);
9001
9002 if (arg0 == error_mark_node || arg1 == error_mark_node)
9003 return const0_rtx;
9004
9005 if (target == 0
9006 || GET_MODE (target) != SImode
9007 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9008 target = gen_reg_rtx (SImode);
9009 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9010 op0 = copy_to_mode_reg (mode0, op0);
9011 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9012 op1 = copy_to_mode_reg (mode1, op1);
9013
9014 scratch = gen_reg_rtx (CCFPmode);
9015
9016 pat = GEN_FCN (icode) (scratch, op0, op1);
9017 if (!pat)
9018 return const0_rtx;
9019
9020 emit_insn (pat);
9021
9022 switch (form_int)
9023 {
9024 /* LT bit. */
9025 case 0:
9026 code = LT;
9027 break;
9028 /* GT bit. */
9029 case 1:
9030 code = GT;
9031 break;
9032 /* EQ bit. */
9033 case 2:
9034 code = EQ;
9035 break;
9036 /* UN bit. */
9037 case 3:
9038 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9039 return target;
9040 default:
9041 error ("argument 1 of __builtin_paired_predicate is out of range");
9042 return const0_rtx;
9043 }
9044
9045 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9046 emit_move_insn (target, tmp);
9047 return target;
9048}
9049
a3170dc6 9050static rtx
5039610b 9051spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9052{
9053 rtx pat, scratch, tmp;
5039610b
SL
9054 tree form = CALL_EXPR_ARG (exp, 0);
9055 tree arg0 = CALL_EXPR_ARG (exp, 1);
9056 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
9057 rtx op0 = expand_normal (arg0);
9058 rtx op1 = expand_normal (arg1);
a3170dc6
AH
9059 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9060 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9061 int form_int;
9062 enum rtx_code code;
9063
9064 if (TREE_CODE (form) != INTEGER_CST)
9065 {
9066 error ("argument 1 of __builtin_spe_predicate must be a constant");
9067 return const0_rtx;
9068 }
9069 else
9070 form_int = TREE_INT_CST_LOW (form);
9071
37409796 9072 gcc_assert (mode0 == mode1);
a3170dc6
AH
9073
9074 if (arg0 == error_mark_node || arg1 == error_mark_node)
9075 return const0_rtx;
9076
9077 if (target == 0
9078 || GET_MODE (target) != SImode
9079 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9080 target = gen_reg_rtx (SImode);
9081
9082 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9083 op0 = copy_to_mode_reg (mode0, op0);
9084 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9085 op1 = copy_to_mode_reg (mode1, op1);
9086
9087 scratch = gen_reg_rtx (CCmode);
9088
9089 pat = GEN_FCN (icode) (scratch, op0, op1);
9090 if (! pat)
9091 return const0_rtx;
9092 emit_insn (pat);
9093
9094 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9095 _lower_. We use one compare, but look in different bits of the
9096 CR for each variant.
9097
9098 There are 2 elements in each SPE simd type (upper/lower). The CR
9099 bits are set as follows:
9100
9101 BIT0 | BIT 1 | BIT 2 | BIT 3
9102 U | L | (U | L) | (U & L)
9103
9104 So, for an "all" relationship, BIT 3 would be set.
9105 For an "any" relationship, BIT 2 would be set. Etc.
9106
9107 Following traditional nomenclature, these bits map to:
9108
9109 BIT0 | BIT 1 | BIT 2 | BIT 3
9110 LT | GT | EQ | OV
9111
9112 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
9113 */
9114
9115 switch (form_int)
9116 {
9117 /* All variant. OV bit. */
9118 case 0:
9119 /* We need to get to the OV bit, which is the ORDERED bit. We
9120 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9121 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9122 So let's just use another pattern. */
9123 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9124 return target;
9125 /* Any variant. EQ bit. */
9126 case 1:
9127 code = EQ;
9128 break;
9129 /* Upper variant. LT bit. */
9130 case 2:
9131 code = LT;
9132 break;
9133 /* Lower variant. GT bit. */
9134 case 3:
9135 code = GT;
9136 break;
9137 default:
9138 error ("argument 1 of __builtin_spe_predicate is out of range");
9139 return const0_rtx;
9140 }
9141
9142 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9143 emit_move_insn (target, tmp);
9144
9145 return target;
9146}
9147
9148/* The evsel builtins look like this:
9149
9150 e = __builtin_spe_evsel_OP (a, b, c, d);
9151
9152 and work like this:
9153
9154 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9155 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9156*/
9157
9158static rtx
5039610b 9159spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9160{
9161 rtx pat, scratch;
5039610b
SL
9162 tree arg0 = CALL_EXPR_ARG (exp, 0);
9163 tree arg1 = CALL_EXPR_ARG (exp, 1);
9164 tree arg2 = CALL_EXPR_ARG (exp, 2);
9165 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9166 rtx op0 = expand_normal (arg0);
9167 rtx op1 = expand_normal (arg1);
9168 rtx op2 = expand_normal (arg2);
9169 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9170 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9171 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9172
37409796 9173 gcc_assert (mode0 == mode1);
a3170dc6
AH
9174
9175 if (arg0 == error_mark_node || arg1 == error_mark_node
9176 || arg2 == error_mark_node || arg3 == error_mark_node)
9177 return const0_rtx;
9178
9179 if (target == 0
9180 || GET_MODE (target) != mode0
9181 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9182 target = gen_reg_rtx (mode0);
9183
9184 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9185 op0 = copy_to_mode_reg (mode0, op0);
9186 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9187 op1 = copy_to_mode_reg (mode0, op1);
9188 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9189 op2 = copy_to_mode_reg (mode0, op2);
9190 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9191 op3 = copy_to_mode_reg (mode0, op3);
9192
9193 /* Generate the compare. */
9194 scratch = gen_reg_rtx (CCmode);
9195 pat = GEN_FCN (icode) (scratch, op0, op1);
9196 if (! pat)
9197 return const0_rtx;
9198 emit_insn (pat);
9199
9200 if (mode0 == V2SImode)
9201 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9202 else
9203 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9204
9205 return target;
9206}
9207
0ac081f6
AH
9208/* Expand an expression EXP that calls a built-in function,
9209 with result going to TARGET if that's convenient
9210 (and in mode MODE if that's convenient).
9211 SUBTARGET may be used as the target for computing one of EXP's operands.
9212 IGNORE is nonzero if the value is to be ignored. */
9213
9214static rtx
a2369ed3 9215rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9216 enum machine_mode mode ATTRIBUTE_UNUSED,
9217 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9218{
5039610b 9219 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9220 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9221 const struct builtin_description *d;
92898235
AH
9222 size_t i;
9223 rtx ret;
9224 bool success;
f676971a 9225
9c78b944
DE
9226 if (fcode == RS6000_BUILTIN_RECIP)
9227 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9228
9229 if (fcode == RS6000_BUILTIN_RECIPF)
9230 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9231
9232 if (fcode == RS6000_BUILTIN_RSQRTF)
9233 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9234
7ccf35ed
DN
9235 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9236 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9237 {
9238 int icode = (int) CODE_FOR_altivec_lvsr;
9239 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9240 enum machine_mode mode = insn_data[icode].operand[1].mode;
9241 tree arg;
9242 rtx op, addr, pat;
9243
37409796 9244 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9245
5039610b 9246 arg = CALL_EXPR_ARG (exp, 0);
37409796 9247 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9248 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9249 addr = memory_address (mode, op);
9250 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9251 op = addr;
9252 else
9253 {
9254 /* For the load case need to negate the address. */
9255 op = gen_reg_rtx (GET_MODE (addr));
9256 emit_insn (gen_rtx_SET (VOIDmode, op,
9257 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9258 }
7ccf35ed
DN
9259 op = gen_rtx_MEM (mode, op);
9260
9261 if (target == 0
9262 || GET_MODE (target) != tmode
9263 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9264 target = gen_reg_rtx (tmode);
9265
9266 /*pat = gen_altivec_lvsr (target, op);*/
9267 pat = GEN_FCN (icode) (target, op);
9268 if (!pat)
9269 return 0;
9270 emit_insn (pat);
9271
9272 return target;
9273 }
5039610b
SL
9274
9275 /* FIXME: There's got to be a nicer way to handle this case than
9276 constructing a new CALL_EXPR. */
f57d17f1 9277 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9278 || fcode == ALTIVEC_BUILTIN_VCFSX
9279 || fcode == ALTIVEC_BUILTIN_VCTUXS
9280 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9281 {
5039610b
SL
9282 if (call_expr_nargs (exp) == 1)
9283 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9284 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9285 }
7ccf35ed 9286
0ac081f6 9287 if (TARGET_ALTIVEC)
92898235
AH
9288 {
9289 ret = altivec_expand_builtin (exp, target, &success);
9290
a3170dc6
AH
9291 if (success)
9292 return ret;
9293 }
9294 if (TARGET_SPE)
9295 {
9296 ret = spe_expand_builtin (exp, target, &success);
9297
92898235
AH
9298 if (success)
9299 return ret;
9300 }
96038623
DE
9301 if (TARGET_PAIRED_FLOAT)
9302 {
9303 ret = paired_expand_builtin (exp, target, &success);
9304
9305 if (success)
9306 return ret;
9307 }
92898235 9308
96038623 9309 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9310
37409796
NS
9311 /* Handle simple unary operations. */
9312 d = (struct builtin_description *) bdesc_1arg;
9313 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9314 if (d->code == fcode)
5039610b 9315 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9316
37409796
NS
9317 /* Handle simple binary operations. */
9318 d = (struct builtin_description *) bdesc_2arg;
9319 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9320 if (d->code == fcode)
5039610b 9321 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9322
37409796 9323 /* Handle simple ternary operations. */
586de218 9324 d = bdesc_3arg;
37409796
NS
9325 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9326 if (d->code == fcode)
5039610b 9327 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9328
37409796 9329 gcc_unreachable ();
0ac081f6
AH
9330}
9331
7c62e993
PB
9332static tree
9333build_opaque_vector_type (tree node, int nunits)
9334{
9335 node = copy_node (node);
9336 TYPE_MAIN_VARIANT (node) = node;
0627bfb4 9337 TYPE_CANONICAL (node) = node;
7c62e993
PB
9338 return build_vector_type (node, nunits);
9339}
9340
0ac081f6 9341static void
863d938c 9342rs6000_init_builtins (void)
0ac081f6 9343{
4a5eab38
PB
9344 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9345 V2SF_type_node = build_vector_type (float_type_node, 2);
9346 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9347 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9348 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9349 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9350 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9351
9352 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9353 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9354 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9355
7c62e993
PB
9356 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9357 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9358 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9359 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9360
8bb418a3
ZL
9361 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9362 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9363 'vector unsigned short'. */
9364
8dd16ecc
NS
9365 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9366 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9367 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9368 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9369
58646b77
PB
9370 long_integer_type_internal_node = long_integer_type_node;
9371 long_unsigned_type_internal_node = long_unsigned_type_node;
9372 intQI_type_internal_node = intQI_type_node;
9373 uintQI_type_internal_node = unsigned_intQI_type_node;
9374 intHI_type_internal_node = intHI_type_node;
9375 uintHI_type_internal_node = unsigned_intHI_type_node;
9376 intSI_type_internal_node = intSI_type_node;
9377 uintSI_type_internal_node = unsigned_intSI_type_node;
9378 float_type_internal_node = float_type_node;
9379 void_type_internal_node = void_type_node;
9380
8bb418a3
ZL
9381 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9382 get_identifier ("__bool char"),
9383 bool_char_type_node));
9384 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9385 get_identifier ("__bool short"),
9386 bool_short_type_node));
9387 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9388 get_identifier ("__bool int"),
9389 bool_int_type_node));
9390 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9391 get_identifier ("__pixel"),
9392 pixel_type_node));
9393
4a5eab38
PB
9394 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9395 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9396 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9397 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9398
9399 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9400 get_identifier ("__vector unsigned char"),
9401 unsigned_V16QI_type_node));
9402 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9403 get_identifier ("__vector signed char"),
9404 V16QI_type_node));
9405 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9406 get_identifier ("__vector __bool char"),
9407 bool_V16QI_type_node));
9408
9409 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9410 get_identifier ("__vector unsigned short"),
9411 unsigned_V8HI_type_node));
9412 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9413 get_identifier ("__vector signed short"),
9414 V8HI_type_node));
9415 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9416 get_identifier ("__vector __bool short"),
9417 bool_V8HI_type_node));
9418
9419 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9420 get_identifier ("__vector unsigned int"),
9421 unsigned_V4SI_type_node));
9422 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9423 get_identifier ("__vector signed int"),
9424 V4SI_type_node));
9425 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9426 get_identifier ("__vector __bool int"),
9427 bool_V4SI_type_node));
9428
9429 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9430 get_identifier ("__vector float"),
9431 V4SF_type_node));
9432 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9433 get_identifier ("__vector __pixel"),
9434 pixel_V8HI_type_node));
9435
96038623
DE
9436 if (TARGET_PAIRED_FLOAT)
9437 paired_init_builtins ();
a3170dc6 9438 if (TARGET_SPE)
3fdaa45a 9439 spe_init_builtins ();
0ac081f6
AH
9440 if (TARGET_ALTIVEC)
9441 altivec_init_builtins ();
96038623 9442 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9443 rs6000_common_init_builtins ();
9c78b944
DE
9444 if (TARGET_PPC_GFXOPT)
9445 {
9446 tree ftype = build_function_type_list (float_type_node,
9447 float_type_node,
9448 float_type_node,
9449 NULL_TREE);
9450 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9451 RS6000_BUILTIN_RECIPF);
9452
9453 ftype = build_function_type_list (float_type_node,
9454 float_type_node,
9455 NULL_TREE);
9456 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9457 RS6000_BUILTIN_RSQRTF);
9458 }
9459 if (TARGET_POPCNTB)
9460 {
9461 tree ftype = build_function_type_list (double_type_node,
9462 double_type_node,
9463 double_type_node,
9464 NULL_TREE);
9465 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9466 RS6000_BUILTIN_RECIP);
9467
9468 }
69ca3549
DE
9469
9470#if TARGET_XCOFF
9471 /* AIX libm provides clog as __clog. */
9472 if (built_in_decls [BUILT_IN_CLOG])
9473 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9474#endif
fb220235
FXC
9475
9476#ifdef SUBTARGET_INIT_BUILTINS
9477 SUBTARGET_INIT_BUILTINS;
9478#endif
0ac081f6
AH
9479}
9480
a3170dc6
AH
9481/* Search through a set of builtins and enable the mask bits.
9482 DESC is an array of builtins.
b6d08ca1 9483 SIZE is the total number of builtins.
a3170dc6
AH
9484 START is the builtin enum at which to start.
9485 END is the builtin enum at which to end. */
0ac081f6 9486static void
a2369ed3 9487enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9488 enum rs6000_builtins start,
a2369ed3 9489 enum rs6000_builtins end)
a3170dc6
AH
9490{
9491 int i;
9492
9493 for (i = 0; i < size; ++i)
9494 if (desc[i].code == start)
9495 break;
9496
9497 if (i == size)
9498 return;
9499
9500 for (; i < size; ++i)
9501 {
9502 /* Flip all the bits on. */
9503 desc[i].mask = target_flags;
9504 if (desc[i].code == end)
9505 break;
9506 }
9507}
9508
9509static void
863d938c 9510spe_init_builtins (void)
0ac081f6 9511{
a3170dc6
AH
9512 tree endlink = void_list_node;
9513 tree puint_type_node = build_pointer_type (unsigned_type_node);
9514 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9515 struct builtin_description *d;
0ac081f6
AH
9516 size_t i;
9517
a3170dc6
AH
9518 tree v2si_ftype_4_v2si
9519 = build_function_type
3fdaa45a
AH
9520 (opaque_V2SI_type_node,
9521 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9522 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9523 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9524 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9525 endlink)))));
9526
9527 tree v2sf_ftype_4_v2sf
9528 = build_function_type
3fdaa45a
AH
9529 (opaque_V2SF_type_node,
9530 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9531 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9532 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9533 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9534 endlink)))));
9535
9536 tree int_ftype_int_v2si_v2si
9537 = build_function_type
9538 (integer_type_node,
9539 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9540 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9541 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9542 endlink))));
9543
9544 tree int_ftype_int_v2sf_v2sf
9545 = build_function_type
9546 (integer_type_node,
9547 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9548 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9549 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9550 endlink))));
9551
9552 tree void_ftype_v2si_puint_int
9553 = build_function_type (void_type_node,
3fdaa45a 9554 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9555 tree_cons (NULL_TREE, puint_type_node,
9556 tree_cons (NULL_TREE,
9557 integer_type_node,
9558 endlink))));
9559
9560 tree void_ftype_v2si_puint_char
9561 = build_function_type (void_type_node,
3fdaa45a 9562 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9563 tree_cons (NULL_TREE, puint_type_node,
9564 tree_cons (NULL_TREE,
9565 char_type_node,
9566 endlink))));
9567
9568 tree void_ftype_v2si_pv2si_int
9569 = build_function_type (void_type_node,
3fdaa45a 9570 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9571 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9572 tree_cons (NULL_TREE,
9573 integer_type_node,
9574 endlink))));
9575
9576 tree void_ftype_v2si_pv2si_char
9577 = build_function_type (void_type_node,
3fdaa45a 9578 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9579 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9580 tree_cons (NULL_TREE,
9581 char_type_node,
9582 endlink))));
9583
9584 tree void_ftype_int
9585 = build_function_type (void_type_node,
9586 tree_cons (NULL_TREE, integer_type_node, endlink));
9587
9588 tree int_ftype_void
36e8d515 9589 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9590
9591 tree v2si_ftype_pv2si_int
3fdaa45a 9592 = build_function_type (opaque_V2SI_type_node,
6035d635 9593 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9594 tree_cons (NULL_TREE, integer_type_node,
9595 endlink)));
9596
9597 tree v2si_ftype_puint_int
3fdaa45a 9598 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9599 tree_cons (NULL_TREE, puint_type_node,
9600 tree_cons (NULL_TREE, integer_type_node,
9601 endlink)));
9602
9603 tree v2si_ftype_pushort_int
3fdaa45a 9604 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9605 tree_cons (NULL_TREE, pushort_type_node,
9606 tree_cons (NULL_TREE, integer_type_node,
9607 endlink)));
9608
00332c9f
AH
9609 tree v2si_ftype_signed_char
9610 = build_function_type (opaque_V2SI_type_node,
9611 tree_cons (NULL_TREE, signed_char_type_node,
9612 endlink));
9613
a3170dc6
AH
9614 /* The initialization of the simple binary and unary builtins is
9615 done in rs6000_common_init_builtins, but we have to enable the
9616 mask bits here manually because we have run out of `target_flags'
9617 bits. We really need to redesign this mask business. */
9618
9619 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9620 ARRAY_SIZE (bdesc_2arg),
9621 SPE_BUILTIN_EVADDW,
9622 SPE_BUILTIN_EVXOR);
9623 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9624 ARRAY_SIZE (bdesc_1arg),
9625 SPE_BUILTIN_EVABS,
9626 SPE_BUILTIN_EVSUBFUSIAAW);
9627 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9628 ARRAY_SIZE (bdesc_spe_predicates),
9629 SPE_BUILTIN_EVCMPEQ,
9630 SPE_BUILTIN_EVFSTSTLT);
9631 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9632 ARRAY_SIZE (bdesc_spe_evsel),
9633 SPE_BUILTIN_EVSEL_CMPGTS,
9634 SPE_BUILTIN_EVSEL_FSTSTEQ);
9635
36252949
AH
9636 (*lang_hooks.decls.pushdecl)
9637 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9638 opaque_V2SI_type_node));
9639
a3170dc6 9640 /* Initialize irregular SPE builtins. */
f676971a 9641
a3170dc6
AH
9642 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9643 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9644 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9645 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9646 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9647 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9648 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9649 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9650 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9651 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9652 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9653 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9654 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9655 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9656 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9657 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9658 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9659 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9660
9661 /* Loads. */
9662 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9663 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9664 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9665 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9666 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9667 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9668 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9669 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9670 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9671 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9672 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9673 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9674 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9675 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9676 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9677 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9678 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9679 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9680 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9681 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9682 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9683 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9684
9685 /* Predicates. */
9686 d = (struct builtin_description *) bdesc_spe_predicates;
9687 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9688 {
9689 tree type;
9690
9691 switch (insn_data[d->icode].operand[1].mode)
9692 {
9693 case V2SImode:
9694 type = int_ftype_int_v2si_v2si;
9695 break;
9696 case V2SFmode:
9697 type = int_ftype_int_v2sf_v2sf;
9698 break;
9699 default:
37409796 9700 gcc_unreachable ();
a3170dc6
AH
9701 }
9702
9703 def_builtin (d->mask, d->name, type, d->code);
9704 }
9705
9706 /* Evsel predicates. */
9707 d = (struct builtin_description *) bdesc_spe_evsel;
9708 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9709 {
9710 tree type;
9711
9712 switch (insn_data[d->icode].operand[1].mode)
9713 {
9714 case V2SImode:
9715 type = v2si_ftype_4_v2si;
9716 break;
9717 case V2SFmode:
9718 type = v2sf_ftype_4_v2sf;
9719 break;
9720 default:
37409796 9721 gcc_unreachable ();
a3170dc6
AH
9722 }
9723
9724 def_builtin (d->mask, d->name, type, d->code);
9725 }
9726}
9727
96038623
DE
9728static void
9729paired_init_builtins (void)
9730{
23a651fc 9731 const struct builtin_description *d;
96038623
DE
9732 size_t i;
9733 tree endlink = void_list_node;
9734
9735 tree int_ftype_int_v2sf_v2sf
9736 = build_function_type
9737 (integer_type_node,
9738 tree_cons (NULL_TREE, integer_type_node,
9739 tree_cons (NULL_TREE, V2SF_type_node,
9740 tree_cons (NULL_TREE, V2SF_type_node,
9741 endlink))));
9742 tree pcfloat_type_node =
9743 build_pointer_type (build_qualified_type
9744 (float_type_node, TYPE_QUAL_CONST));
9745
9746 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9747 long_integer_type_node,
9748 pcfloat_type_node,
9749 NULL_TREE);
9750 tree void_ftype_v2sf_long_pcfloat =
9751 build_function_type_list (void_type_node,
9752 V2SF_type_node,
9753 long_integer_type_node,
9754 pcfloat_type_node,
9755 NULL_TREE);
9756
9757
9758 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9759 PAIRED_BUILTIN_LX);
9760
9761
9762 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9763 PAIRED_BUILTIN_STX);
9764
9765 /* Predicates. */
23a651fc 9766 d = bdesc_paired_preds;
96038623
DE
9767 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9768 {
9769 tree type;
9770
9771 switch (insn_data[d->icode].operand[1].mode)
9772 {
9773 case V2SFmode:
9774 type = int_ftype_int_v2sf_v2sf;
9775 break;
9776 default:
9777 gcc_unreachable ();
9778 }
9779
9780 def_builtin (d->mask, d->name, type, d->code);
9781 }
9782}
9783
a3170dc6 9784static void
863d938c 9785altivec_init_builtins (void)
a3170dc6 9786{
586de218
KG
9787 const struct builtin_description *d;
9788 const struct builtin_description_predicates *dp;
a3170dc6 9789 size_t i;
7a4eca66
DE
9790 tree ftype;
9791
a3170dc6
AH
9792 tree pfloat_type_node = build_pointer_type (float_type_node);
9793 tree pint_type_node = build_pointer_type (integer_type_node);
9794 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9795 tree pchar_type_node = build_pointer_type (char_type_node);
9796
9797 tree pvoid_type_node = build_pointer_type (void_type_node);
9798
0dbc3651
ZW
9799 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9800 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9801 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9802 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9803
9804 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9805
58646b77
PB
9806 tree int_ftype_opaque
9807 = build_function_type_list (integer_type_node,
9808 opaque_V4SI_type_node, NULL_TREE);
266b4890
AP
9809 tree opaque_ftype_opaque
9810 = build_function_type (integer_type_node,
9811 NULL_TREE);
58646b77
PB
9812 tree opaque_ftype_opaque_int
9813 = build_function_type_list (opaque_V4SI_type_node,
9814 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9815 tree opaque_ftype_opaque_opaque_int
9816 = build_function_type_list (opaque_V4SI_type_node,
9817 opaque_V4SI_type_node, opaque_V4SI_type_node,
9818 integer_type_node, NULL_TREE);
9819 tree int_ftype_int_opaque_opaque
9820 = build_function_type_list (integer_type_node,
9821 integer_type_node, opaque_V4SI_type_node,
9822 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9823 tree int_ftype_int_v4si_v4si
9824 = build_function_type_list (integer_type_node,
9825 integer_type_node, V4SI_type_node,
9826 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9827 tree v4sf_ftype_pcfloat
9828 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9829 tree void_ftype_pfloat_v4sf
b4de2f7d 9830 = build_function_type_list (void_type_node,
a3170dc6 9831 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9832 tree v4si_ftype_pcint
9833 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9834 tree void_ftype_pint_v4si
b4de2f7d
AH
9835 = build_function_type_list (void_type_node,
9836 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9837 tree v8hi_ftype_pcshort
9838 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9839 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9840 = build_function_type_list (void_type_node,
9841 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9842 tree v16qi_ftype_pcchar
9843 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9844 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9845 = build_function_type_list (void_type_node,
9846 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9847 tree void_ftype_v4si
b4de2f7d 9848 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9849 tree v8hi_ftype_void
9850 = build_function_type (V8HI_type_node, void_list_node);
9851 tree void_ftype_void
9852 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9853 tree void_ftype_int
9854 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9855
58646b77
PB
9856 tree opaque_ftype_long_pcvoid
9857 = build_function_type_list (opaque_V4SI_type_node,
9858 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9859 tree v16qi_ftype_long_pcvoid
a3170dc6 9860 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9861 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9862 tree v8hi_ftype_long_pcvoid
a3170dc6 9863 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9864 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9865 tree v4si_ftype_long_pcvoid
a3170dc6 9866 = build_function_type_list (V4SI_type_node,
b4a62fa0 9867 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9868
58646b77
PB
9869 tree void_ftype_opaque_long_pvoid
9870 = build_function_type_list (void_type_node,
9871 opaque_V4SI_type_node, long_integer_type_node,
9872 pvoid_type_node, NULL_TREE);
b4a62fa0 9873 tree void_ftype_v4si_long_pvoid
b4de2f7d 9874 = build_function_type_list (void_type_node,
b4a62fa0 9875 V4SI_type_node, long_integer_type_node,
b4de2f7d 9876 pvoid_type_node, NULL_TREE);
b4a62fa0 9877 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9878 = build_function_type_list (void_type_node,
b4a62fa0 9879 V16QI_type_node, long_integer_type_node,
b4de2f7d 9880 pvoid_type_node, NULL_TREE);
b4a62fa0 9881 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9882 = build_function_type_list (void_type_node,
b4a62fa0 9883 V8HI_type_node, long_integer_type_node,
b4de2f7d 9884 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9885 tree int_ftype_int_v8hi_v8hi
9886 = build_function_type_list (integer_type_node,
9887 integer_type_node, V8HI_type_node,
9888 V8HI_type_node, NULL_TREE);
9889 tree int_ftype_int_v16qi_v16qi
9890 = build_function_type_list (integer_type_node,
9891 integer_type_node, V16QI_type_node,
9892 V16QI_type_node, NULL_TREE);
9893 tree int_ftype_int_v4sf_v4sf
9894 = build_function_type_list (integer_type_node,
9895 integer_type_node, V4SF_type_node,
9896 V4SF_type_node, NULL_TREE);
9897 tree v4si_ftype_v4si
9898 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9899 tree v8hi_ftype_v8hi
9900 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9901 tree v16qi_ftype_v16qi
9902 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9903 tree v4sf_ftype_v4sf
9904 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9905 tree void_ftype_pcvoid_int_int
a3170dc6 9906 = build_function_type_list (void_type_node,
0dbc3651 9907 pcvoid_type_node, integer_type_node,
8bb418a3 9908 integer_type_node, NULL_TREE);
8bb418a3 9909
0dbc3651
ZW
9910 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9911 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9912 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9913 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9914 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9915 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9916 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9917 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9918 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9919 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9920 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9921 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9922 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9923 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9924 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9925 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9926 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9927 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9928 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9929 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9930 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9931 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9932 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9933 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9934 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9935 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9936 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9937 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9938 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9939 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9940 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9941 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9942 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9943 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9944 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9945 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9946 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9947 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9948 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9949 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9950 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9951 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9952 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9953 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9954 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9955 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9956
0b61703c
AP
9957 if (rs6000_cpu == PROCESSOR_CELL)
9958 {
9959 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
9961 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
9963
9964 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
9965 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
9966 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
9967 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
9968
9969 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
9971 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
9972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
9973
9974 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
9975 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
9976 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
9977 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
9978 }
58646b77 9979 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
266b4890
AP
9980 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
9981 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
58646b77
PB
9982
9983 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9984 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
266b4890
AP
9985 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
9986 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
58646b77
PB
9987 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9988 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9989 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9990 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9991 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9992 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9993 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9994 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9995
a3170dc6 9996 /* Add the DST variants. */
586de218 9997 d = bdesc_dst;
a3170dc6 9998 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9999 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
10000
10001 /* Initialize the predicates. */
586de218 10002 dp = bdesc_altivec_preds;
a3170dc6
AH
10003 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10004 {
10005 enum machine_mode mode1;
10006 tree type;
58646b77
PB
10007 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10008 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 10009
58646b77
PB
10010 if (is_overloaded)
10011 mode1 = VOIDmode;
10012 else
10013 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
10014
10015 switch (mode1)
10016 {
58646b77
PB
10017 case VOIDmode:
10018 type = int_ftype_int_opaque_opaque;
10019 break;
a3170dc6
AH
10020 case V4SImode:
10021 type = int_ftype_int_v4si_v4si;
10022 break;
10023 case V8HImode:
10024 type = int_ftype_int_v8hi_v8hi;
10025 break;
10026 case V16QImode:
10027 type = int_ftype_int_v16qi_v16qi;
10028 break;
10029 case V4SFmode:
10030 type = int_ftype_int_v4sf_v4sf;
10031 break;
10032 default:
37409796 10033 gcc_unreachable ();
a3170dc6 10034 }
f676971a 10035
a3170dc6
AH
10036 def_builtin (dp->mask, dp->name, type, dp->code);
10037 }
10038
10039 /* Initialize the abs* operators. */
586de218 10040 d = bdesc_abs;
a3170dc6
AH
10041 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10042 {
10043 enum machine_mode mode0;
10044 tree type;
10045
10046 mode0 = insn_data[d->icode].operand[0].mode;
10047
10048 switch (mode0)
10049 {
10050 case V4SImode:
10051 type = v4si_ftype_v4si;
10052 break;
10053 case V8HImode:
10054 type = v8hi_ftype_v8hi;
10055 break;
10056 case V16QImode:
10057 type = v16qi_ftype_v16qi;
10058 break;
10059 case V4SFmode:
10060 type = v4sf_ftype_v4sf;
10061 break;
10062 default:
37409796 10063 gcc_unreachable ();
a3170dc6 10064 }
f676971a 10065
a3170dc6
AH
10066 def_builtin (d->mask, d->name, type, d->code);
10067 }
7ccf35ed 10068
13c62176
DN
10069 if (TARGET_ALTIVEC)
10070 {
10071 tree decl;
10072
10073 /* Initialize target builtin that implements
10074 targetm.vectorize.builtin_mask_for_load. */
10075
c79efc4d
RÁE
10076 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10077 v16qi_ftype_long_pcvoid,
10078 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
10079 BUILT_IN_MD, NULL, NULL_TREE);
10080 TREE_READONLY (decl) = 1;
13c62176
DN
10081 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10082 altivec_builtin_mask_for_load = decl;
13c62176 10083 }
7a4eca66
DE
10084
10085 /* Access to the vec_init patterns. */
10086 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10087 integer_type_node, integer_type_node,
10088 integer_type_node, NULL_TREE);
10089 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10090 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10091
10092 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10093 short_integer_type_node,
10094 short_integer_type_node,
10095 short_integer_type_node,
10096 short_integer_type_node,
10097 short_integer_type_node,
10098 short_integer_type_node,
10099 short_integer_type_node, NULL_TREE);
10100 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10101 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10102
10103 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10104 char_type_node, char_type_node,
10105 char_type_node, char_type_node,
10106 char_type_node, char_type_node,
10107 char_type_node, char_type_node,
10108 char_type_node, char_type_node,
10109 char_type_node, char_type_node,
10110 char_type_node, char_type_node,
10111 char_type_node, NULL_TREE);
10112 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10113 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10114
10115 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10116 float_type_node, float_type_node,
10117 float_type_node, NULL_TREE);
10118 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10119 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10120
10121 /* Access to the vec_set patterns. */
10122 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10123 intSI_type_node,
10124 integer_type_node, NULL_TREE);
10125 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10126 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10127
10128 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10129 intHI_type_node,
10130 integer_type_node, NULL_TREE);
10131 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10132 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10133
10134 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
10135 intQI_type_node,
10136 integer_type_node, NULL_TREE);
10137 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10138 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10139
10140 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10141 float_type_node,
10142 integer_type_node, NULL_TREE);
10143 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10144 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10145
10146 /* Access to the vec_extract patterns. */
10147 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10148 integer_type_node, NULL_TREE);
10149 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10150 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10151
10152 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10153 integer_type_node, NULL_TREE);
10154 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10155 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10156
10157 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10158 integer_type_node, NULL_TREE);
10159 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10160 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10161
10162 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10163 integer_type_node, NULL_TREE);
10164 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10165 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
10166}
10167
10168static void
863d938c 10169rs6000_common_init_builtins (void)
a3170dc6 10170{
586de218 10171 const struct builtin_description *d;
a3170dc6
AH
10172 size_t i;
10173
96038623
DE
10174 tree v2sf_ftype_v2sf_v2sf_v2sf
10175 = build_function_type_list (V2SF_type_node,
10176 V2SF_type_node, V2SF_type_node,
10177 V2SF_type_node, NULL_TREE);
10178
a3170dc6
AH
10179 tree v4sf_ftype_v4sf_v4sf_v16qi
10180 = build_function_type_list (V4SF_type_node,
10181 V4SF_type_node, V4SF_type_node,
10182 V16QI_type_node, NULL_TREE);
10183 tree v4si_ftype_v4si_v4si_v16qi
10184 = build_function_type_list (V4SI_type_node,
10185 V4SI_type_node, V4SI_type_node,
10186 V16QI_type_node, NULL_TREE);
10187 tree v8hi_ftype_v8hi_v8hi_v16qi
10188 = build_function_type_list (V8HI_type_node,
10189 V8HI_type_node, V8HI_type_node,
10190 V16QI_type_node, NULL_TREE);
10191 tree v16qi_ftype_v16qi_v16qi_v16qi
10192 = build_function_type_list (V16QI_type_node,
10193 V16QI_type_node, V16QI_type_node,
10194 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10195 tree v4si_ftype_int
10196 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10197 tree v8hi_ftype_int
10198 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10199 tree v16qi_ftype_int
10200 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10201 tree v8hi_ftype_v16qi
10202 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10203 tree v4sf_ftype_v4sf
10204 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10205
10206 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10207 = build_function_type_list (opaque_V2SI_type_node,
10208 opaque_V2SI_type_node,
10209 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10210
96038623 10211 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10212 = build_function_type_list (opaque_V2SF_type_node,
10213 opaque_V2SF_type_node,
10214 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10215
96038623
DE
10216 tree v2sf_ftype_v2sf_v2sf
10217 = build_function_type_list (V2SF_type_node,
10218 V2SF_type_node,
10219 V2SF_type_node, NULL_TREE);
10220
10221
a3170dc6 10222 tree v2si_ftype_int_int
2abe3e28 10223 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10224 integer_type_node, integer_type_node,
10225 NULL_TREE);
10226
58646b77
PB
10227 tree opaque_ftype_opaque
10228 = build_function_type_list (opaque_V4SI_type_node,
10229 opaque_V4SI_type_node, NULL_TREE);
10230
a3170dc6 10231 tree v2si_ftype_v2si
2abe3e28
AH
10232 = build_function_type_list (opaque_V2SI_type_node,
10233 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10234
96038623 10235 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10236 = build_function_type_list (opaque_V2SF_type_node,
10237 opaque_V2SF_type_node, NULL_TREE);
f676971a 10238
96038623
DE
10239 tree v2sf_ftype_v2sf
10240 = build_function_type_list (V2SF_type_node,
10241 V2SF_type_node, NULL_TREE);
10242
a3170dc6 10243 tree v2sf_ftype_v2si
2abe3e28
AH
10244 = build_function_type_list (opaque_V2SF_type_node,
10245 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10246
10247 tree v2si_ftype_v2sf
2abe3e28
AH
10248 = build_function_type_list (opaque_V2SI_type_node,
10249 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10250
10251 tree v2si_ftype_v2si_char
2abe3e28
AH
10252 = build_function_type_list (opaque_V2SI_type_node,
10253 opaque_V2SI_type_node,
10254 char_type_node, NULL_TREE);
a3170dc6
AH
10255
10256 tree v2si_ftype_int_char
2abe3e28 10257 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10258 integer_type_node, char_type_node, NULL_TREE);
10259
10260 tree v2si_ftype_char
2abe3e28
AH
10261 = build_function_type_list (opaque_V2SI_type_node,
10262 char_type_node, NULL_TREE);
a3170dc6
AH
10263
10264 tree int_ftype_int_int
10265 = build_function_type_list (integer_type_node,
10266 integer_type_node, integer_type_node,
10267 NULL_TREE);
95385cbb 10268
58646b77
PB
10269 tree opaque_ftype_opaque_opaque
10270 = build_function_type_list (opaque_V4SI_type_node,
10271 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10272 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10273 = build_function_type_list (V4SI_type_node,
10274 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10275 tree v4sf_ftype_v4si_int
b4de2f7d 10276 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10277 V4SI_type_node, integer_type_node, NULL_TREE);
10278 tree v4si_ftype_v4sf_int
b4de2f7d 10279 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10280 V4SF_type_node, integer_type_node, NULL_TREE);
10281 tree v4si_ftype_v4si_int
b4de2f7d 10282 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10283 V4SI_type_node, integer_type_node, NULL_TREE);
10284 tree v8hi_ftype_v8hi_int
b4de2f7d 10285 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10286 V8HI_type_node, integer_type_node, NULL_TREE);
10287 tree v16qi_ftype_v16qi_int
b4de2f7d 10288 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10289 V16QI_type_node, integer_type_node, NULL_TREE);
10290 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10291 = build_function_type_list (V16QI_type_node,
10292 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10293 integer_type_node, NULL_TREE);
10294 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10295 = build_function_type_list (V8HI_type_node,
10296 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10297 integer_type_node, NULL_TREE);
10298 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10299 = build_function_type_list (V4SI_type_node,
10300 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10301 integer_type_node, NULL_TREE);
10302 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10303 = build_function_type_list (V4SF_type_node,
10304 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10305 integer_type_node, NULL_TREE);
0ac081f6 10306 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10307 = build_function_type_list (V4SF_type_node,
10308 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10309 tree opaque_ftype_opaque_opaque_opaque
10310 = build_function_type_list (opaque_V4SI_type_node,
10311 opaque_V4SI_type_node, opaque_V4SI_type_node,
10312 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10313 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10314 = build_function_type_list (V4SF_type_node,
10315 V4SF_type_node, V4SF_type_node,
10316 V4SI_type_node, NULL_TREE);
2212663f 10317 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10318 = build_function_type_list (V4SF_type_node,
10319 V4SF_type_node, V4SF_type_node,
10320 V4SF_type_node, NULL_TREE);
f676971a 10321 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10322 = build_function_type_list (V4SI_type_node,
10323 V4SI_type_node, V4SI_type_node,
10324 V4SI_type_node, NULL_TREE);
0ac081f6 10325 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10326 = build_function_type_list (V8HI_type_node,
10327 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10328 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10329 = build_function_type_list (V8HI_type_node,
10330 V8HI_type_node, V8HI_type_node,
10331 V8HI_type_node, NULL_TREE);
c4ad648e 10332 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10333 = build_function_type_list (V4SI_type_node,
10334 V8HI_type_node, V8HI_type_node,
10335 V4SI_type_node, NULL_TREE);
c4ad648e 10336 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10337 = build_function_type_list (V4SI_type_node,
10338 V16QI_type_node, V16QI_type_node,
10339 V4SI_type_node, NULL_TREE);
0ac081f6 10340 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10341 = build_function_type_list (V16QI_type_node,
10342 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10343 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10344 = build_function_type_list (V4SI_type_node,
10345 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10346 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10347 = build_function_type_list (V8HI_type_node,
10348 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10349 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10350 = build_function_type_list (V4SI_type_node,
10351 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10352 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10353 = build_function_type_list (V8HI_type_node,
10354 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10355 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10356 = build_function_type_list (V16QI_type_node,
10357 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10358 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10359 = build_function_type_list (V4SI_type_node,
10360 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10361 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10362 = build_function_type_list (V4SI_type_node,
10363 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10364 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10365 = build_function_type_list (V4SI_type_node,
10366 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10367 tree v4si_ftype_v8hi
10368 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10369 tree int_ftype_v4si_v4si
10370 = build_function_type_list (integer_type_node,
10371 V4SI_type_node, V4SI_type_node, NULL_TREE);
10372 tree int_ftype_v4sf_v4sf
10373 = build_function_type_list (integer_type_node,
10374 V4SF_type_node, V4SF_type_node, NULL_TREE);
10375 tree int_ftype_v16qi_v16qi
10376 = build_function_type_list (integer_type_node,
10377 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10378 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10379 = build_function_type_list (integer_type_node,
10380 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10381
6f317ef3 10382 /* Add the simple ternary operators. */
586de218 10383 d = bdesc_3arg;
ca7558fc 10384 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10385 {
2212663f
DB
10386 enum machine_mode mode0, mode1, mode2, mode3;
10387 tree type;
58646b77
PB
10388 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10389 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10390
58646b77
PB
10391 if (is_overloaded)
10392 {
10393 mode0 = VOIDmode;
10394 mode1 = VOIDmode;
10395 mode2 = VOIDmode;
10396 mode3 = VOIDmode;
10397 }
10398 else
10399 {
10400 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10401 continue;
f676971a 10402
58646b77
PB
10403 mode0 = insn_data[d->icode].operand[0].mode;
10404 mode1 = insn_data[d->icode].operand[1].mode;
10405 mode2 = insn_data[d->icode].operand[2].mode;
10406 mode3 = insn_data[d->icode].operand[3].mode;
10407 }
bb8df8a6 10408
2212663f
DB
10409 /* When all four are of the same mode. */
10410 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10411 {
10412 switch (mode0)
10413 {
58646b77
PB
10414 case VOIDmode:
10415 type = opaque_ftype_opaque_opaque_opaque;
10416 break;
617e0e1d
DB
10417 case V4SImode:
10418 type = v4si_ftype_v4si_v4si_v4si;
10419 break;
2212663f
DB
10420 case V4SFmode:
10421 type = v4sf_ftype_v4sf_v4sf_v4sf;
10422 break;
10423 case V8HImode:
10424 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10425 break;
2212663f
DB
10426 case V16QImode:
10427 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10428 break;
96038623
DE
10429 case V2SFmode:
10430 type = v2sf_ftype_v2sf_v2sf_v2sf;
10431 break;
2212663f 10432 default:
37409796 10433 gcc_unreachable ();
2212663f
DB
10434 }
10435 }
10436 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10437 {
2212663f
DB
10438 switch (mode0)
10439 {
10440 case V4SImode:
10441 type = v4si_ftype_v4si_v4si_v16qi;
10442 break;
10443 case V4SFmode:
10444 type = v4sf_ftype_v4sf_v4sf_v16qi;
10445 break;
10446 case V8HImode:
10447 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10448 break;
2212663f
DB
10449 case V16QImode:
10450 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10451 break;
2212663f 10452 default:
37409796 10453 gcc_unreachable ();
2212663f
DB
10454 }
10455 }
f676971a 10456 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10457 && mode3 == V4SImode)
24408032 10458 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10459 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10460 && mode3 == V4SImode)
24408032 10461 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10462 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10463 && mode3 == V4SImode)
24408032
AH
10464 type = v4sf_ftype_v4sf_v4sf_v4si;
10465
a7b376ee 10466 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10467 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10468 && mode3 == QImode)
b9e4e5d1 10469 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10470
a7b376ee 10471 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10472 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10473 && mode3 == QImode)
b9e4e5d1 10474 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10475
a7b376ee 10476 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10477 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10478 && mode3 == QImode)
b9e4e5d1 10479 type = v4si_ftype_v4si_v4si_int;
24408032 10480
a7b376ee 10481 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10482 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10483 && mode3 == QImode)
b9e4e5d1 10484 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10485
2212663f 10486 else
37409796 10487 gcc_unreachable ();
2212663f
DB
10488
10489 def_builtin (d->mask, d->name, type, d->code);
10490 }
10491
0ac081f6 10492 /* Add the simple binary operators. */
00b960c7 10493 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10494 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10495 {
10496 enum machine_mode mode0, mode1, mode2;
10497 tree type;
58646b77
PB
10498 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10499 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10500
58646b77
PB
10501 if (is_overloaded)
10502 {
10503 mode0 = VOIDmode;
10504 mode1 = VOIDmode;
10505 mode2 = VOIDmode;
10506 }
10507 else
bb8df8a6 10508 {
58646b77
PB
10509 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10510 continue;
f676971a 10511
58646b77
PB
10512 mode0 = insn_data[d->icode].operand[0].mode;
10513 mode1 = insn_data[d->icode].operand[1].mode;
10514 mode2 = insn_data[d->icode].operand[2].mode;
10515 }
0ac081f6
AH
10516
10517 /* When all three operands are of the same mode. */
10518 if (mode0 == mode1 && mode1 == mode2)
10519 {
10520 switch (mode0)
10521 {
58646b77
PB
10522 case VOIDmode:
10523 type = opaque_ftype_opaque_opaque;
10524 break;
0ac081f6
AH
10525 case V4SFmode:
10526 type = v4sf_ftype_v4sf_v4sf;
10527 break;
10528 case V4SImode:
10529 type = v4si_ftype_v4si_v4si;
10530 break;
10531 case V16QImode:
10532 type = v16qi_ftype_v16qi_v16qi;
10533 break;
10534 case V8HImode:
10535 type = v8hi_ftype_v8hi_v8hi;
10536 break;
a3170dc6
AH
10537 case V2SImode:
10538 type = v2si_ftype_v2si_v2si;
10539 break;
96038623
DE
10540 case V2SFmode:
10541 if (TARGET_PAIRED_FLOAT)
10542 type = v2sf_ftype_v2sf_v2sf;
10543 else
10544 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10545 break;
10546 case SImode:
10547 type = int_ftype_int_int;
10548 break;
0ac081f6 10549 default:
37409796 10550 gcc_unreachable ();
0ac081f6
AH
10551 }
10552 }
10553
10554 /* A few other combos we really don't want to do manually. */
10555
10556 /* vint, vfloat, vfloat. */
10557 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10558 type = v4si_ftype_v4sf_v4sf;
10559
10560 /* vshort, vchar, vchar. */
10561 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10562 type = v8hi_ftype_v16qi_v16qi;
10563
10564 /* vint, vshort, vshort. */
10565 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10566 type = v4si_ftype_v8hi_v8hi;
10567
10568 /* vshort, vint, vint. */
10569 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10570 type = v8hi_ftype_v4si_v4si;
10571
10572 /* vchar, vshort, vshort. */
10573 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10574 type = v16qi_ftype_v8hi_v8hi;
10575
10576 /* vint, vchar, vint. */
10577 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10578 type = v4si_ftype_v16qi_v4si;
10579
fa066a23
AH
10580 /* vint, vchar, vchar. */
10581 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10582 type = v4si_ftype_v16qi_v16qi;
10583
0ac081f6
AH
10584 /* vint, vshort, vint. */
10585 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10586 type = v4si_ftype_v8hi_v4si;
f676971a 10587
a7b376ee 10588 /* vint, vint, 5-bit literal. */
2212663f 10589 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10590 type = v4si_ftype_v4si_int;
f676971a 10591
a7b376ee 10592 /* vshort, vshort, 5-bit literal. */
2212663f 10593 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10594 type = v8hi_ftype_v8hi_int;
f676971a 10595
a7b376ee 10596 /* vchar, vchar, 5-bit literal. */
2212663f 10597 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10598 type = v16qi_ftype_v16qi_int;
0ac081f6 10599
a7b376ee 10600 /* vfloat, vint, 5-bit literal. */
617e0e1d 10601 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10602 type = v4sf_ftype_v4si_int;
f676971a 10603
a7b376ee 10604 /* vint, vfloat, 5-bit literal. */
617e0e1d 10605 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10606 type = v4si_ftype_v4sf_int;
617e0e1d 10607
a3170dc6
AH
10608 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10609 type = v2si_ftype_int_int;
10610
10611 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10612 type = v2si_ftype_v2si_char;
10613
10614 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10615 type = v2si_ftype_int_char;
10616
37409796 10617 else
0ac081f6 10618 {
37409796
NS
10619 /* int, x, x. */
10620 gcc_assert (mode0 == SImode);
0ac081f6
AH
10621 switch (mode1)
10622 {
10623 case V4SImode:
10624 type = int_ftype_v4si_v4si;
10625 break;
10626 case V4SFmode:
10627 type = int_ftype_v4sf_v4sf;
10628 break;
10629 case V16QImode:
10630 type = int_ftype_v16qi_v16qi;
10631 break;
10632 case V8HImode:
10633 type = int_ftype_v8hi_v8hi;
10634 break;
10635 default:
37409796 10636 gcc_unreachable ();
0ac081f6
AH
10637 }
10638 }
10639
2212663f
DB
10640 def_builtin (d->mask, d->name, type, d->code);
10641 }
24408032 10642
2212663f
DB
10643 /* Add the simple unary operators. */
10644 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10645 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10646 {
10647 enum machine_mode mode0, mode1;
10648 tree type;
58646b77
PB
10649 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10650 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10651
10652 if (is_overloaded)
10653 {
10654 mode0 = VOIDmode;
10655 mode1 = VOIDmode;
10656 }
10657 else
10658 {
10659 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10660 continue;
bb8df8a6 10661
58646b77
PB
10662 mode0 = insn_data[d->icode].operand[0].mode;
10663 mode1 = insn_data[d->icode].operand[1].mode;
10664 }
2212663f
DB
10665
10666 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10667 type = v4si_ftype_int;
2212663f 10668 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10669 type = v8hi_ftype_int;
2212663f 10670 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10671 type = v16qi_ftype_int;
58646b77
PB
10672 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10673 type = opaque_ftype_opaque;
617e0e1d
DB
10674 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10675 type = v4sf_ftype_v4sf;
20e26713
AH
10676 else if (mode0 == V8HImode && mode1 == V16QImode)
10677 type = v8hi_ftype_v16qi;
10678 else if (mode0 == V4SImode && mode1 == V8HImode)
10679 type = v4si_ftype_v8hi;
a3170dc6
AH
10680 else if (mode0 == V2SImode && mode1 == V2SImode)
10681 type = v2si_ftype_v2si;
10682 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10683 {
10684 if (TARGET_PAIRED_FLOAT)
10685 type = v2sf_ftype_v2sf;
10686 else
10687 type = v2sf_ftype_v2sf_spe;
10688 }
a3170dc6
AH
10689 else if (mode0 == V2SFmode && mode1 == V2SImode)
10690 type = v2sf_ftype_v2si;
10691 else if (mode0 == V2SImode && mode1 == V2SFmode)
10692 type = v2si_ftype_v2sf;
10693 else if (mode0 == V2SImode && mode1 == QImode)
10694 type = v2si_ftype_char;
2212663f 10695 else
37409796 10696 gcc_unreachable ();
2212663f 10697
0ac081f6
AH
10698 def_builtin (d->mask, d->name, type, d->code);
10699 }
10700}
10701
c15c90bb
ZW
10702static void
10703rs6000_init_libfuncs (void)
10704{
602ea4d3
JJ
10705 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10706 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10707 {
602ea4d3
JJ
10708 /* AIX library routines for float->int conversion. */
10709 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10710 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10711 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10712 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10713 }
c15c90bb 10714
602ea4d3 10715 if (!TARGET_IEEEQUAD)
98c41d98 10716 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10717 if (!TARGET_XL_COMPAT)
10718 {
10719 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10720 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10721 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10722 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10723
17caeff2 10724 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10725 {
10726 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10727 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10728 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10729 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10730 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10731 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10732 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10733
10734 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10735 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10736 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10737 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10738 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10739 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10740 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10741 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10742 }
b26941b4
JM
10743
10744 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10745 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10746 }
10747 else
10748 {
10749 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10750 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10751 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10752 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10753 }
c9034561 10754 else
c15c90bb 10755 {
c9034561 10756 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10757
10758 set_optab_libfunc (add_optab, TFmode, "_q_add");
10759 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10760 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10761 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10762 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10763 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10764 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10765
c9034561
ZW
10766 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10767 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10768 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10769 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10770 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10771 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10772
85363ca0
ZW
10773 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10774 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10775 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10776 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10777 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10778 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10779 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10780 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10781 }
10782}
fba73eb1
DE
10783
10784\f
10785/* Expand a block clear operation, and return 1 if successful. Return 0
10786 if we should let the compiler generate normal code.
10787
10788 operands[0] is the destination
10789 operands[1] is the length
57e84f18 10790 operands[3] is the alignment */
fba73eb1
DE
10791
10792int
10793expand_block_clear (rtx operands[])
10794{
10795 rtx orig_dest = operands[0];
10796 rtx bytes_rtx = operands[1];
57e84f18 10797 rtx align_rtx = operands[3];
5514620a
GK
10798 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10799 HOST_WIDE_INT align;
10800 HOST_WIDE_INT bytes;
fba73eb1
DE
10801 int offset;
10802 int clear_bytes;
5514620a 10803 int clear_step;
fba73eb1
DE
10804
10805 /* If this is not a fixed size move, just call memcpy */
10806 if (! constp)
10807 return 0;
10808
37409796
NS
10809 /* This must be a fixed size alignment */
10810 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10811 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10812
10813 /* Anything to clear? */
10814 bytes = INTVAL (bytes_rtx);
10815 if (bytes <= 0)
10816 return 1;
10817
5514620a
GK
10818 /* Use the builtin memset after a point, to avoid huge code bloat.
10819 When optimize_size, avoid any significant code bloat; calling
10820 memset is about 4 instructions, so allow for one instruction to
10821 load zero and three to do clearing. */
10822 if (TARGET_ALTIVEC && align >= 128)
10823 clear_step = 16;
10824 else if (TARGET_POWERPC64 && align >= 32)
10825 clear_step = 8;
21d818ff
NF
10826 else if (TARGET_SPE && align >= 64)
10827 clear_step = 8;
5514620a
GK
10828 else
10829 clear_step = 4;
fba73eb1 10830
5514620a
GK
10831 if (optimize_size && bytes > 3 * clear_step)
10832 return 0;
10833 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10834 return 0;
10835
10836 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10837 {
fba73eb1
DE
10838 enum machine_mode mode = BLKmode;
10839 rtx dest;
f676971a 10840
5514620a
GK
10841 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10842 {
10843 clear_bytes = 16;
10844 mode = V4SImode;
10845 }
21d818ff
NF
10846 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10847 {
10848 clear_bytes = 8;
10849 mode = V2SImode;
10850 }
5514620a 10851 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10852 /* 64-bit loads and stores require word-aligned
10853 displacements. */
10854 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10855 {
10856 clear_bytes = 8;
10857 mode = DImode;
fba73eb1 10858 }
5514620a 10859 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10860 { /* move 4 bytes */
10861 clear_bytes = 4;
10862 mode = SImode;
fba73eb1 10863 }
ec53fc93 10864 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10865 { /* move 2 bytes */
10866 clear_bytes = 2;
10867 mode = HImode;
fba73eb1
DE
10868 }
10869 else /* move 1 byte at a time */
10870 {
10871 clear_bytes = 1;
10872 mode = QImode;
fba73eb1 10873 }
f676971a 10874
fba73eb1 10875 dest = adjust_address (orig_dest, mode, offset);
f676971a 10876
5514620a 10877 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10878 }
10879
10880 return 1;
10881}
10882
35aff10b 10883\f
7e69e155
MM
10884/* Expand a block move operation, and return 1 if successful. Return 0
10885 if we should let the compiler generate normal code.
10886
10887 operands[0] is the destination
10888 operands[1] is the source
10889 operands[2] is the length
10890 operands[3] is the alignment */
10891
3933e0e1
MM
10892#define MAX_MOVE_REG 4
10893
7e69e155 10894int
a2369ed3 10895expand_block_move (rtx operands[])
7e69e155 10896{
b6c9286a
MM
10897 rtx orig_dest = operands[0];
10898 rtx orig_src = operands[1];
7e69e155 10899 rtx bytes_rtx = operands[2];
7e69e155 10900 rtx align_rtx = operands[3];
3933e0e1 10901 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10902 int align;
3933e0e1
MM
10903 int bytes;
10904 int offset;
7e69e155 10905 int move_bytes;
cabfd258
GK
10906 rtx stores[MAX_MOVE_REG];
10907 int num_reg = 0;
7e69e155 10908
3933e0e1 10909 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10910 if (! constp)
3933e0e1
MM
10911 return 0;
10912
37409796
NS
10913 /* This must be a fixed size alignment */
10914 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10915 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10916
7e69e155 10917 /* Anything to move? */
3933e0e1
MM
10918 bytes = INTVAL (bytes_rtx);
10919 if (bytes <= 0)
7e69e155
MM
10920 return 1;
10921
ea9982a8 10922 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10923 reg_parm_stack_space. */
ea9982a8 10924 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10925 return 0;
10926
cabfd258 10927 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10928 {
cabfd258 10929 union {
70128ad9 10930 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10931 rtx (*mov) (rtx, rtx);
cabfd258
GK
10932 } gen_func;
10933 enum machine_mode mode = BLKmode;
10934 rtx src, dest;
f676971a 10935
5514620a
GK
10936 /* Altivec first, since it will be faster than a string move
10937 when it applies, and usually not significantly larger. */
10938 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10939 {
10940 move_bytes = 16;
10941 mode = V4SImode;
10942 gen_func.mov = gen_movv4si;
10943 }
21d818ff
NF
10944 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10945 {
10946 move_bytes = 8;
10947 mode = V2SImode;
10948 gen_func.mov = gen_movv2si;
10949 }
5514620a 10950 else if (TARGET_STRING
cabfd258
GK
10951 && bytes > 24 /* move up to 32 bytes at a time */
10952 && ! fixed_regs[5]
10953 && ! fixed_regs[6]
10954 && ! fixed_regs[7]
10955 && ! fixed_regs[8]
10956 && ! fixed_regs[9]
10957 && ! fixed_regs[10]
10958 && ! fixed_regs[11]
10959 && ! fixed_regs[12])
7e69e155 10960 {
cabfd258 10961 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10962 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10963 }
10964 else if (TARGET_STRING
10965 && bytes > 16 /* move up to 24 bytes at a time */
10966 && ! fixed_regs[5]
10967 && ! fixed_regs[6]
10968 && ! fixed_regs[7]
10969 && ! fixed_regs[8]
10970 && ! fixed_regs[9]
10971 && ! fixed_regs[10])
10972 {
10973 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10974 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10975 }
10976 else if (TARGET_STRING
10977 && bytes > 8 /* move up to 16 bytes at a time */
10978 && ! fixed_regs[5]
10979 && ! fixed_regs[6]
10980 && ! fixed_regs[7]
10981 && ! fixed_regs[8])
10982 {
10983 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10984 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10985 }
10986 else if (bytes >= 8 && TARGET_POWERPC64
10987 /* 64-bit loads and stores require word-aligned
10988 displacements. */
fba73eb1 10989 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10990 {
10991 move_bytes = 8;
10992 mode = DImode;
10993 gen_func.mov = gen_movdi;
10994 }
10995 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10996 { /* move up to 8 bytes at a time */
10997 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10998 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10999 }
cd7d9ca4 11000 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
11001 { /* move 4 bytes */
11002 move_bytes = 4;
11003 mode = SImode;
11004 gen_func.mov = gen_movsi;
11005 }
ec53fc93 11006 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
11007 { /* move 2 bytes */
11008 move_bytes = 2;
11009 mode = HImode;
11010 gen_func.mov = gen_movhi;
11011 }
11012 else if (TARGET_STRING && bytes > 1)
11013 { /* move up to 4 bytes at a time */
11014 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 11015 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
11016 }
11017 else /* move 1 byte at a time */
11018 {
11019 move_bytes = 1;
11020 mode = QImode;
11021 gen_func.mov = gen_movqi;
11022 }
f676971a 11023
cabfd258
GK
11024 src = adjust_address (orig_src, mode, offset);
11025 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
11026
11027 if (mode != BLKmode)
cabfd258
GK
11028 {
11029 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 11030
cabfd258
GK
11031 emit_insn ((*gen_func.mov) (tmp_reg, src));
11032 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 11033 }
3933e0e1 11034
cabfd258
GK
11035 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11036 {
11037 int i;
11038 for (i = 0; i < num_reg; i++)
11039 emit_insn (stores[i]);
11040 num_reg = 0;
11041 }
35aff10b 11042
cabfd258 11043 if (mode == BLKmode)
7e69e155 11044 {
70128ad9 11045 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
11046 patterns require zero offset. */
11047 if (!REG_P (XEXP (src, 0)))
b6c9286a 11048 {
cabfd258
GK
11049 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11050 src = replace_equiv_address (src, src_reg);
b6c9286a 11051 }
cabfd258 11052 set_mem_size (src, GEN_INT (move_bytes));
f676971a 11053
cabfd258 11054 if (!REG_P (XEXP (dest, 0)))
3933e0e1 11055 {
cabfd258
GK
11056 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11057 dest = replace_equiv_address (dest, dest_reg);
7e69e155 11058 }
cabfd258 11059 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 11060
70128ad9 11061 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
11062 GEN_INT (move_bytes & 31),
11063 align_rtx));
7e69e155 11064 }
7e69e155
MM
11065 }
11066
11067 return 1;
11068}
11069
d62294f5 11070\f
9caa3eb2
DE
11071/* Return a string to perform a load_multiple operation.
11072 operands[0] is the vector.
11073 operands[1] is the source address.
11074 operands[2] is the first destination register. */
11075
11076const char *
a2369ed3 11077rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
11078{
11079 /* We have to handle the case where the pseudo used to contain the address
11080 is assigned to one of the output registers. */
11081 int i, j;
11082 int words = XVECLEN (operands[0], 0);
11083 rtx xop[10];
11084
11085 if (XVECLEN (operands[0], 0) == 1)
11086 return "{l|lwz} %2,0(%1)";
11087
11088 for (i = 0; i < words; i++)
11089 if (refers_to_regno_p (REGNO (operands[2]) + i,
11090 REGNO (operands[2]) + i + 1, operands[1], 0))
11091 {
11092 if (i == words-1)
11093 {
11094 xop[0] = GEN_INT (4 * (words-1));
11095 xop[1] = operands[1];
11096 xop[2] = operands[2];
11097 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11098 return "";
11099 }
11100 else if (i == 0)
11101 {
11102 xop[0] = GEN_INT (4 * (words-1));
11103 xop[1] = operands[1];
11104 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11105 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11106 return "";
11107 }
11108 else
11109 {
11110 for (j = 0; j < words; j++)
11111 if (j != i)
11112 {
11113 xop[0] = GEN_INT (j * 4);
11114 xop[1] = operands[1];
11115 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11116 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11117 }
11118 xop[0] = GEN_INT (i * 4);
11119 xop[1] = operands[1];
11120 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11121 return "";
11122 }
11123 }
11124
11125 return "{lsi|lswi} %2,%1,%N0";
11126}
11127
9878760c 11128\f
a4f6c312
SS
11129/* A validation routine: say whether CODE, a condition code, and MODE
11130 match. The other alternatives either don't make sense or should
11131 never be generated. */
39a10a29 11132
48d72335 11133void
a2369ed3 11134validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11135{
37409796
NS
11136 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11137 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11138 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11139
11140 /* These don't make sense. */
37409796
NS
11141 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11142 || mode != CCUNSmode);
39a10a29 11143
37409796
NS
11144 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11145 || mode == CCUNSmode);
39a10a29 11146
37409796
NS
11147 gcc_assert (mode == CCFPmode
11148 || (code != ORDERED && code != UNORDERED
11149 && code != UNEQ && code != LTGT
11150 && code != UNGT && code != UNLT
11151 && code != UNGE && code != UNLE));
f676971a
EC
11152
11153 /* These should never be generated except for
bc9ec0e0 11154 flag_finite_math_only. */
37409796
NS
11155 gcc_assert (mode != CCFPmode
11156 || flag_finite_math_only
11157 || (code != LE && code != GE
11158 && code != UNEQ && code != LTGT
11159 && code != UNGT && code != UNLT));
39a10a29
GK
11160
11161 /* These are invalid; the information is not there. */
37409796 11162 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11163}
11164
9878760c
RK
11165\f
11166/* Return 1 if ANDOP is a mask that has no bits on that are not in the
11167 mask required to convert the result of a rotate insn into a shift
b1765bde 11168 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
11169
11170int
a2369ed3 11171includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11172{
e2c953b6
DE
11173 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11174
11175 shift_mask <<= INTVAL (shiftop);
9878760c 11176
b1765bde 11177 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11178}
11179
11180/* Similar, but for right shift. */
11181
11182int
a2369ed3 11183includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11184{
a7653a2c 11185 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11186
11187 shift_mask >>= INTVAL (shiftop);
11188
b1765bde 11189 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11190}
11191
c5059423
AM
11192/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11193 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11194 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
11195
11196int
a2369ed3 11197includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11198{
c5059423
AM
11199 if (GET_CODE (andop) == CONST_INT)
11200 {
02071907 11201 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11202
c5059423 11203 c = INTVAL (andop);
02071907 11204 if (c == 0 || c == ~0)
c5059423 11205 return 0;
e2c953b6 11206
02071907 11207 shift_mask = ~0;
c5059423
AM
11208 shift_mask <<= INTVAL (shiftop);
11209
b6d08ca1 11210 /* Find the least significant one bit. */
c5059423
AM
11211 lsb = c & -c;
11212
11213 /* It must coincide with the LSB of the shift mask. */
11214 if (-lsb != shift_mask)
11215 return 0;
e2c953b6 11216
c5059423
AM
11217 /* Invert to look for the next transition (if any). */
11218 c = ~c;
11219
11220 /* Remove the low group of ones (originally low group of zeros). */
11221 c &= -lsb;
11222
11223 /* Again find the lsb, and check we have all 1's above. */
11224 lsb = c & -c;
11225 return c == -lsb;
11226 }
11227 else if (GET_CODE (andop) == CONST_DOUBLE
11228 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11229 {
02071907
AM
11230 HOST_WIDE_INT low, high, lsb;
11231 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11232
11233 low = CONST_DOUBLE_LOW (andop);
11234 if (HOST_BITS_PER_WIDE_INT < 64)
11235 high = CONST_DOUBLE_HIGH (andop);
11236
11237 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11238 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11239 return 0;
11240
11241 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11242 {
02071907 11243 shift_mask_high = ~0;
c5059423
AM
11244 if (INTVAL (shiftop) > 32)
11245 shift_mask_high <<= INTVAL (shiftop) - 32;
11246
11247 lsb = high & -high;
11248
11249 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11250 return 0;
11251
11252 high = ~high;
11253 high &= -lsb;
11254
11255 lsb = high & -high;
11256 return high == -lsb;
11257 }
11258
02071907 11259 shift_mask_low = ~0;
c5059423
AM
11260 shift_mask_low <<= INTVAL (shiftop);
11261
11262 lsb = low & -low;
11263
11264 if (-lsb != shift_mask_low)
11265 return 0;
11266
11267 if (HOST_BITS_PER_WIDE_INT < 64)
11268 high = ~high;
11269 low = ~low;
11270 low &= -lsb;
11271
11272 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11273 {
11274 lsb = high & -high;
11275 return high == -lsb;
11276 }
11277
11278 lsb = low & -low;
11279 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11280 }
11281 else
11282 return 0;
11283}
e2c953b6 11284
c5059423
AM
11285/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11286 to perform a left shift. It must have SHIFTOP or more least
c1207243 11287 significant 0's, with the remainder of the word 1's. */
e2c953b6 11288
c5059423 11289int
a2369ed3 11290includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11291{
e2c953b6 11292 if (GET_CODE (andop) == CONST_INT)
c5059423 11293 {
02071907 11294 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11295
02071907 11296 shift_mask = ~0;
c5059423
AM
11297 shift_mask <<= INTVAL (shiftop);
11298 c = INTVAL (andop);
11299
c1207243 11300 /* Find the least significant one bit. */
c5059423
AM
11301 lsb = c & -c;
11302
11303 /* It must be covered by the shift mask.
a4f6c312 11304 This test also rejects c == 0. */
c5059423
AM
11305 if ((lsb & shift_mask) == 0)
11306 return 0;
11307
11308 /* Check we have all 1's above the transition, and reject all 1's. */
11309 return c == -lsb && lsb != 1;
11310 }
11311 else if (GET_CODE (andop) == CONST_DOUBLE
11312 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11313 {
02071907 11314 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11315
11316 low = CONST_DOUBLE_LOW (andop);
11317
11318 if (HOST_BITS_PER_WIDE_INT < 64)
11319 {
02071907 11320 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11321
11322 high = CONST_DOUBLE_HIGH (andop);
11323
11324 if (low == 0)
11325 {
02071907 11326 shift_mask_high = ~0;
c5059423
AM
11327 if (INTVAL (shiftop) > 32)
11328 shift_mask_high <<= INTVAL (shiftop) - 32;
11329
11330 lsb = high & -high;
11331
11332 if ((lsb & shift_mask_high) == 0)
11333 return 0;
11334
11335 return high == -lsb;
11336 }
11337 if (high != ~0)
11338 return 0;
11339 }
11340
02071907 11341 shift_mask_low = ~0;
c5059423
AM
11342 shift_mask_low <<= INTVAL (shiftop);
11343
11344 lsb = low & -low;
11345
11346 if ((lsb & shift_mask_low) == 0)
11347 return 0;
11348
11349 return low == -lsb && lsb != 1;
11350 }
e2c953b6 11351 else
c5059423 11352 return 0;
9878760c 11353}
35068b43 11354
11ac38b2
DE
11355/* Return 1 if operands will generate a valid arguments to rlwimi
11356instruction for insert with right shift in 64-bit mode. The mask may
11357not start on the first bit or stop on the last bit because wrap-around
11358effects of instruction do not correspond to semantics of RTL insn. */
11359
11360int
11361insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11362{
429ec7dc
DE
11363 if (INTVAL (startop) > 32
11364 && INTVAL (startop) < 64
11365 && INTVAL (sizeop) > 1
11366 && INTVAL (sizeop) + INTVAL (startop) < 64
11367 && INTVAL (shiftop) > 0
11368 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11369 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11370 return 1;
11371
11372 return 0;
11373}
11374
35068b43 11375/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11376 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11377
11378int
a2369ed3 11379registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11380{
11381 /* We might have been passed a SUBREG. */
f676971a 11382 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11383 return 0;
f676971a 11384
90f81f99
AP
11385 /* We might have been passed non floating point registers. */
11386 if (!FP_REGNO_P (REGNO (reg1))
11387 || !FP_REGNO_P (REGNO (reg2)))
11388 return 0;
35068b43
RK
11389
11390 return (REGNO (reg1) == REGNO (reg2) - 1);
11391}
11392
a4f6c312
SS
11393/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11394 addr1 and addr2 must be in consecutive memory locations
11395 (addr2 == addr1 + 8). */
35068b43
RK
11396
11397int
90f81f99 11398mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11399{
90f81f99 11400 rtx addr1, addr2;
bb8df8a6
EC
11401 unsigned int reg1, reg2;
11402 int offset1, offset2;
35068b43 11403
90f81f99
AP
11404 /* The mems cannot be volatile. */
11405 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11406 return 0;
f676971a 11407
90f81f99
AP
11408 addr1 = XEXP (mem1, 0);
11409 addr2 = XEXP (mem2, 0);
11410
35068b43
RK
11411 /* Extract an offset (if used) from the first addr. */
11412 if (GET_CODE (addr1) == PLUS)
11413 {
11414 /* If not a REG, return zero. */
11415 if (GET_CODE (XEXP (addr1, 0)) != REG)
11416 return 0;
11417 else
11418 {
c4ad648e 11419 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11420 /* The offset must be constant! */
11421 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11422 return 0;
11423 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11424 }
11425 }
11426 else if (GET_CODE (addr1) != REG)
11427 return 0;
11428 else
11429 {
11430 reg1 = REGNO (addr1);
11431 /* This was a simple (mem (reg)) expression. Offset is 0. */
11432 offset1 = 0;
11433 }
11434
bb8df8a6
EC
11435 /* And now for the second addr. */
11436 if (GET_CODE (addr2) == PLUS)
11437 {
11438 /* If not a REG, return zero. */
11439 if (GET_CODE (XEXP (addr2, 0)) != REG)
11440 return 0;
11441 else
11442 {
11443 reg2 = REGNO (XEXP (addr2, 0));
11444 /* The offset must be constant. */
11445 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11446 return 0;
11447 offset2 = INTVAL (XEXP (addr2, 1));
11448 }
11449 }
11450 else if (GET_CODE (addr2) != REG)
35068b43 11451 return 0;
bb8df8a6
EC
11452 else
11453 {
11454 reg2 = REGNO (addr2);
11455 /* This was a simple (mem (reg)) expression. Offset is 0. */
11456 offset2 = 0;
11457 }
35068b43 11458
bb8df8a6
EC
11459 /* Both of these must have the same base register. */
11460 if (reg1 != reg2)
35068b43
RK
11461 return 0;
11462
11463 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11464 if (offset2 != offset1 + 8)
35068b43
RK
11465 return 0;
11466
11467 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11468 instructions. */
11469 return 1;
11470}
9878760c 11471\f
e41b2a33
PB
11472
11473rtx
11474rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11475{
11476 static bool eliminated = false;
11477 if (mode != SDmode)
11478 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11479 else
11480 {
11481 rtx mem = cfun->machine->sdmode_stack_slot;
11482 gcc_assert (mem != NULL_RTX);
11483
11484 if (!eliminated)
11485 {
11486 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11487 cfun->machine->sdmode_stack_slot = mem;
11488 eliminated = true;
11489 }
11490 return mem;
11491 }
11492}
11493
11494static tree
11495rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11496{
11497 /* Don't walk into types. */
11498 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11499 {
11500 *walk_subtrees = 0;
11501 return NULL_TREE;
11502 }
11503
11504 switch (TREE_CODE (*tp))
11505 {
11506 case VAR_DECL:
11507 case PARM_DECL:
11508 case FIELD_DECL:
11509 case RESULT_DECL:
11510 case REAL_CST:
fdf4f148 11511 case INDIRECT_REF:
a0f39282
JJ
11512 case ALIGN_INDIRECT_REF:
11513 case MISALIGNED_INDIRECT_REF:
fdf4f148 11514 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11515 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11516 return *tp;
11517 break;
11518 default:
11519 break;
11520 }
11521
11522 return NULL_TREE;
11523}
11524
11525
11526/* Allocate a 64-bit stack slot to be used for copying SDmode
11527 values through if this function has any SDmode references. */
11528
11529static void
11530rs6000_alloc_sdmode_stack_slot (void)
11531{
11532 tree t;
11533 basic_block bb;
726a989a 11534 gimple_stmt_iterator gsi;
e41b2a33
PB
11535
11536 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11537
11538 FOR_EACH_BB (bb)
726a989a 11539 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11540 {
726a989a 11541 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11542 if (ret)
11543 {
11544 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11545 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11546 SDmode, 0);
11547 return;
11548 }
11549 }
11550
11551 /* Check for any SDmode parameters of the function. */
11552 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11553 {
11554 if (TREE_TYPE (t) == error_mark_node)
11555 continue;
11556
11557 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11558 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11559 {
11560 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11561 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11562 SDmode, 0);
11563 return;
11564 }
11565 }
11566}
11567
11568static void
11569rs6000_instantiate_decls (void)
11570{
11571 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11572 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11573}
11574
9878760c 11575/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11576 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11577 NO_REGS is returned. */
11578
11579enum reg_class
0a2aaacc 11580rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11581 enum machine_mode mode ATTRIBUTE_UNUSED,
11582 rtx in)
9878760c 11583{
5accd822 11584 int regno;
9878760c 11585
ab82a49f
AP
11586 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11587#if TARGET_MACHO
c4ad648e 11588 && MACHOPIC_INDIRECT
ab82a49f 11589#endif
c4ad648e 11590 ))
46fad5b7
DJ
11591 {
11592 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11593 other than BASE_REGS for TARGET_ELF. So indicate that a
11594 register from BASE_REGS is needed as an intermediate
11595 register.
f676971a 11596
46fad5b7
DJ
11597 On Darwin, pic addresses require a load from memory, which
11598 needs a base register. */
0a2aaacc 11599 if (rclass != BASE_REGS
c4ad648e
AM
11600 && (GET_CODE (in) == SYMBOL_REF
11601 || GET_CODE (in) == HIGH
11602 || GET_CODE (in) == LABEL_REF
11603 || GET_CODE (in) == CONST))
11604 return BASE_REGS;
46fad5b7 11605 }
e7b7998a 11606
5accd822
DE
11607 if (GET_CODE (in) == REG)
11608 {
11609 regno = REGNO (in);
11610 if (regno >= FIRST_PSEUDO_REGISTER)
11611 {
11612 regno = true_regnum (in);
11613 if (regno >= FIRST_PSEUDO_REGISTER)
11614 regno = -1;
11615 }
11616 }
11617 else if (GET_CODE (in) == SUBREG)
11618 {
11619 regno = true_regnum (in);
11620 if (regno >= FIRST_PSEUDO_REGISTER)
11621 regno = -1;
11622 }
11623 else
11624 regno = -1;
11625
9878760c
RK
11626 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11627 into anything. */
0a2aaacc 11628 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11629 || (regno >= 0 && INT_REGNO_P (regno)))
11630 return NO_REGS;
11631
11632 /* Constants, memory, and FP registers can go into FP registers. */
11633 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11634 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11635 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11636
0ac081f6
AH
11637 /* Memory, and AltiVec registers can go into AltiVec registers. */
11638 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11639 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11640 return NO_REGS;
11641
9878760c 11642 /* We can copy among the CR registers. */
0a2aaacc 11643 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11644 && regno >= 0 && CR_REGNO_P (regno))
11645 return NO_REGS;
11646
11647 /* Otherwise, we need GENERAL_REGS. */
11648 return GENERAL_REGS;
11649}
11650\f
11651/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11652 know this is a valid comparison.
9878760c
RK
11653
11654 SCC_P is 1 if this is for an scc. That means that %D will have been
11655 used instead of %C, so the bits will be in different places.
11656
b4ac57ab 11657 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11658
11659int
a2369ed3 11660ccr_bit (rtx op, int scc_p)
9878760c
RK
11661{
11662 enum rtx_code code = GET_CODE (op);
11663 enum machine_mode cc_mode;
11664 int cc_regnum;
11665 int base_bit;
9ebbca7d 11666 rtx reg;
9878760c 11667
ec8e098d 11668 if (!COMPARISON_P (op))
9878760c
RK
11669 return -1;
11670
9ebbca7d
GK
11671 reg = XEXP (op, 0);
11672
37409796 11673 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11674
11675 cc_mode = GET_MODE (reg);
11676 cc_regnum = REGNO (reg);
11677 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11678
39a10a29 11679 validate_condition_mode (code, cc_mode);
c5defebb 11680
b7053a3f
GK
11681 /* When generating a sCOND operation, only positive conditions are
11682 allowed. */
37409796
NS
11683 gcc_assert (!scc_p
11684 || code == EQ || code == GT || code == LT || code == UNORDERED
11685 || code == GTU || code == LTU);
f676971a 11686
9878760c
RK
11687 switch (code)
11688 {
11689 case NE:
11690 return scc_p ? base_bit + 3 : base_bit + 2;
11691 case EQ:
11692 return base_bit + 2;
1c882ea4 11693 case GT: case GTU: case UNLE:
9878760c 11694 return base_bit + 1;
1c882ea4 11695 case LT: case LTU: case UNGE:
9878760c 11696 return base_bit;
1c882ea4
GK
11697 case ORDERED: case UNORDERED:
11698 return base_bit + 3;
9878760c
RK
11699
11700 case GE: case GEU:
39a10a29 11701 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11702 unordered position. So test that bit. For integer, this is ! LT
11703 unless this is an scc insn. */
39a10a29 11704 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11705
11706 case LE: case LEU:
39a10a29 11707 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11708
9878760c 11709 default:
37409796 11710 gcc_unreachable ();
9878760c
RK
11711 }
11712}
1ff7789b 11713\f
8d30c4ee 11714/* Return the GOT register. */
1ff7789b 11715
9390387d 11716rtx
a2369ed3 11717rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11718{
a4f6c312
SS
11719 /* The second flow pass currently (June 1999) can't update
11720 regs_ever_live without disturbing other parts of the compiler, so
11721 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11722 if (!can_create_pseudo_p ()
11723 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11724 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11725
e3b5732b 11726 crtl->uses_pic_offset_table = 1;
3cb999d8 11727
1ff7789b
MM
11728 return pic_offset_table_rtx;
11729}
a7df97e6 11730\f
e2500fed
GK
11731/* Function to init struct machine_function.
11732 This will be called, via a pointer variable,
11733 from push_function_context. */
a7df97e6 11734
e2500fed 11735static struct machine_function *
863d938c 11736rs6000_init_machine_status (void)
a7df97e6 11737{
5ead67f6 11738 return GGC_CNEW (machine_function);
a7df97e6 11739}
9878760c 11740\f
0ba1b2ff
AM
11741/* These macros test for integers and extract the low-order bits. */
11742#define INT_P(X) \
11743((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11744 && GET_MODE (X) == VOIDmode)
11745
11746#define INT_LOWPART(X) \
11747 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11748
11749int
a2369ed3 11750extract_MB (rtx op)
0ba1b2ff
AM
11751{
11752 int i;
11753 unsigned long val = INT_LOWPART (op);
11754
11755 /* If the high bit is zero, the value is the first 1 bit we find
11756 from the left. */
11757 if ((val & 0x80000000) == 0)
11758 {
37409796 11759 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11760
11761 i = 1;
11762 while (((val <<= 1) & 0x80000000) == 0)
11763 ++i;
11764 return i;
11765 }
11766
11767 /* If the high bit is set and the low bit is not, or the mask is all
11768 1's, the value is zero. */
11769 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11770 return 0;
11771
11772 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11773 from the right. */
11774 i = 31;
11775 while (((val >>= 1) & 1) != 0)
11776 --i;
11777
11778 return i;
11779}
11780
11781int
a2369ed3 11782extract_ME (rtx op)
0ba1b2ff
AM
11783{
11784 int i;
11785 unsigned long val = INT_LOWPART (op);
11786
11787 /* If the low bit is zero, the value is the first 1 bit we find from
11788 the right. */
11789 if ((val & 1) == 0)
11790 {
37409796 11791 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11792
11793 i = 30;
11794 while (((val >>= 1) & 1) == 0)
11795 --i;
11796
11797 return i;
11798 }
11799
11800 /* If the low bit is set and the high bit is not, or the mask is all
11801 1's, the value is 31. */
11802 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11803 return 31;
11804
11805 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11806 from the left. */
11807 i = 0;
11808 while (((val <<= 1) & 0x80000000) != 0)
11809 ++i;
11810
11811 return i;
11812}
11813
c4501e62
JJ
11814/* Locate some local-dynamic symbol still in use by this function
11815 so that we can print its name in some tls_ld pattern. */
11816
11817static const char *
863d938c 11818rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11819{
11820 rtx insn;
11821
11822 if (cfun->machine->some_ld_name)
11823 return cfun->machine->some_ld_name;
11824
11825 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11826 if (INSN_P (insn)
11827 && for_each_rtx (&PATTERN (insn),
11828 rs6000_get_some_local_dynamic_name_1, 0))
11829 return cfun->machine->some_ld_name;
11830
37409796 11831 gcc_unreachable ();
c4501e62
JJ
11832}
11833
11834/* Helper function for rs6000_get_some_local_dynamic_name. */
11835
11836static int
a2369ed3 11837rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11838{
11839 rtx x = *px;
11840
11841 if (GET_CODE (x) == SYMBOL_REF)
11842 {
11843 const char *str = XSTR (x, 0);
11844 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11845 {
11846 cfun->machine->some_ld_name = str;
11847 return 1;
11848 }
11849 }
11850
11851 return 0;
11852}
11853
85b776df
AM
11854/* Write out a function code label. */
11855
11856void
11857rs6000_output_function_entry (FILE *file, const char *fname)
11858{
11859 if (fname[0] != '.')
11860 {
11861 switch (DEFAULT_ABI)
11862 {
11863 default:
37409796 11864 gcc_unreachable ();
85b776df
AM
11865
11866 case ABI_AIX:
11867 if (DOT_SYMBOLS)
11868 putc ('.', file);
11869 else
11870 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11871 break;
11872
11873 case ABI_V4:
11874 case ABI_DARWIN:
11875 break;
11876 }
11877 }
11878 if (TARGET_AIX)
11879 RS6000_OUTPUT_BASENAME (file, fname);
11880 else
11881 assemble_name (file, fname);
11882}
11883
9878760c
RK
11884/* Print an operand. Recognize special options, documented below. */
11885
38c1f2d7 11886#if TARGET_ELF
d9407988 11887#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11888#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11889#else
11890#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11891#define SMALL_DATA_REG 0
ba5e43aa
MM
11892#endif
11893
9878760c 11894void
a2369ed3 11895print_operand (FILE *file, rtx x, int code)
9878760c
RK
11896{
11897 int i;
a260abc9 11898 HOST_WIDE_INT val;
0ba1b2ff 11899 unsigned HOST_WIDE_INT uval;
9878760c
RK
11900
11901 switch (code)
11902 {
a8b3aeda 11903 case '.':
a85d226b
RK
11904 /* Write out an instruction after the call which may be replaced
11905 with glue code by the loader. This depends on the AIX version. */
11906 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11907 return;
11908
81eace42
GK
11909 /* %a is output_address. */
11910
9854d9ed
RK
11911 case 'A':
11912 /* If X is a constant integer whose low-order 5 bits are zero,
11913 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11914 in the AIX assembler where "sri" with a zero shift count
20e26713 11915 writes a trash instruction. */
9854d9ed 11916 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11917 putc ('l', file);
9854d9ed 11918 else
76229ac8 11919 putc ('r', file);
9854d9ed
RK
11920 return;
11921
11922 case 'b':
e2c953b6
DE
11923 /* If constant, low-order 16 bits of constant, unsigned.
11924 Otherwise, write normally. */
11925 if (INT_P (x))
11926 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11927 else
11928 print_operand (file, x, 0);
cad12a8d
RK
11929 return;
11930
a260abc9
DE
11931 case 'B':
11932 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11933 for 64-bit mask direction. */
9390387d 11934 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11935 return;
a260abc9 11936
81eace42
GK
11937 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11938 output_operand. */
11939
423c1189
AH
11940 case 'c':
11941 /* X is a CR register. Print the number of the GT bit of the CR. */
11942 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11943 output_operand_lossage ("invalid %%E value");
11944 else
11945 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11946 return;
11947
11948 case 'D':
cef6b86c 11949 /* Like 'J' but get to the GT bit only. */
37409796 11950 gcc_assert (GET_CODE (x) == REG);
423c1189 11951
cef6b86c
EB
11952 /* Bit 1 is GT bit. */
11953 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11954
cef6b86c
EB
11955 /* Add one for shift count in rlinm for scc. */
11956 fprintf (file, "%d", i + 1);
423c1189
AH
11957 return;
11958
9854d9ed 11959 case 'E':
39a10a29 11960 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11961 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11962 output_operand_lossage ("invalid %%E value");
78fbdbf7 11963 else
39a10a29 11964 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11965 return;
9854d9ed
RK
11966
11967 case 'f':
11968 /* X is a CR register. Print the shift count needed to move it
11969 to the high-order four bits. */
11970 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11971 output_operand_lossage ("invalid %%f value");
11972 else
9ebbca7d 11973 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11974 return;
11975
11976 case 'F':
11977 /* Similar, but print the count for the rotate in the opposite
11978 direction. */
11979 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11980 output_operand_lossage ("invalid %%F value");
11981 else
9ebbca7d 11982 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11983 return;
11984
11985 case 'G':
11986 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11987 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11988 if (GET_CODE (x) != CONST_INT)
11989 output_operand_lossage ("invalid %%G value");
11990 else if (INTVAL (x) >= 0)
76229ac8 11991 putc ('z', file);
9854d9ed 11992 else
76229ac8 11993 putc ('m', file);
9854d9ed 11994 return;
e2c953b6 11995
9878760c 11996 case 'h':
a4f6c312
SS
11997 /* If constant, output low-order five bits. Otherwise, write
11998 normally. */
9878760c 11999 if (INT_P (x))
5f59ecb7 12000 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
12001 else
12002 print_operand (file, x, 0);
12003 return;
12004
64305719 12005 case 'H':
a4f6c312
SS
12006 /* If constant, output low-order six bits. Otherwise, write
12007 normally. */
64305719 12008 if (INT_P (x))
5f59ecb7 12009 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
12010 else
12011 print_operand (file, x, 0);
12012 return;
12013
9854d9ed
RK
12014 case 'I':
12015 /* Print `i' if this is a constant, else nothing. */
9878760c 12016 if (INT_P (x))
76229ac8 12017 putc ('i', file);
9878760c
RK
12018 return;
12019
9854d9ed
RK
12020 case 'j':
12021 /* Write the bit number in CCR for jump. */
12022 i = ccr_bit (x, 0);
12023 if (i == -1)
12024 output_operand_lossage ("invalid %%j code");
9878760c 12025 else
9854d9ed 12026 fprintf (file, "%d", i);
9878760c
RK
12027 return;
12028
9854d9ed
RK
12029 case 'J':
12030 /* Similar, but add one for shift count in rlinm for scc and pass
12031 scc flag to `ccr_bit'. */
12032 i = ccr_bit (x, 1);
12033 if (i == -1)
12034 output_operand_lossage ("invalid %%J code");
12035 else
a0466a68
RK
12036 /* If we want bit 31, write a shift count of zero, not 32. */
12037 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
12038 return;
12039
9854d9ed
RK
12040 case 'k':
12041 /* X must be a constant. Write the 1's complement of the
12042 constant. */
9878760c 12043 if (! INT_P (x))
9854d9ed 12044 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
12045 else
12046 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
12047 return;
12048
81eace42 12049 case 'K':
9ebbca7d
GK
12050 /* X must be a symbolic constant on ELF. Write an
12051 expression suitable for an 'addi' that adds in the low 16
12052 bits of the MEM. */
12053 if (GET_CODE (x) != CONST)
12054 {
12055 print_operand_address (file, x);
12056 fputs ("@l", file);
12057 }
12058 else
12059 {
12060 if (GET_CODE (XEXP (x, 0)) != PLUS
12061 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12062 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12063 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 12064 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
12065 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12066 fputs ("@l", file);
ed8d2920
MM
12067 /* For GNU as, there must be a non-alphanumeric character
12068 between 'l' and the number. The '-' is added by
12069 print_operand() already. */
12070 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12071 fputs ("+", file);
9ebbca7d
GK
12072 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12073 }
81eace42
GK
12074 return;
12075
12076 /* %l is output_asm_label. */
9ebbca7d 12077
9854d9ed
RK
12078 case 'L':
12079 /* Write second word of DImode or DFmode reference. Works on register
12080 or non-indexed memory only. */
12081 if (GET_CODE (x) == REG)
fb5c67a7 12082 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
12083 else if (GET_CODE (x) == MEM)
12084 {
12085 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 12086 we have already done it, we can just use an offset of word. */
9854d9ed
RK
12087 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12088 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
12089 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12090 UNITS_PER_WORD));
6fb5fa3c
DB
12091 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12092 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12093 UNITS_PER_WORD));
9854d9ed 12094 else
d7624dc0
RK
12095 output_address (XEXP (adjust_address_nv (x, SImode,
12096 UNITS_PER_WORD),
12097 0));
ed8908e7 12098
ba5e43aa 12099 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12100 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12101 reg_names[SMALL_DATA_REG]);
9854d9ed 12102 }
9878760c 12103 return;
f676971a 12104
9878760c
RK
12105 case 'm':
12106 /* MB value for a mask operand. */
b1765bde 12107 if (! mask_operand (x, SImode))
9878760c
RK
12108 output_operand_lossage ("invalid %%m value");
12109
0ba1b2ff 12110 fprintf (file, "%d", extract_MB (x));
9878760c
RK
12111 return;
12112
12113 case 'M':
12114 /* ME value for a mask operand. */
b1765bde 12115 if (! mask_operand (x, SImode))
a260abc9 12116 output_operand_lossage ("invalid %%M value");
9878760c 12117
0ba1b2ff 12118 fprintf (file, "%d", extract_ME (x));
9878760c
RK
12119 return;
12120
81eace42
GK
12121 /* %n outputs the negative of its operand. */
12122
9878760c
RK
12123 case 'N':
12124 /* Write the number of elements in the vector times 4. */
12125 if (GET_CODE (x) != PARALLEL)
12126 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
12127 else
12128 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
12129 return;
12130
12131 case 'O':
12132 /* Similar, but subtract 1 first. */
12133 if (GET_CODE (x) != PARALLEL)
1427100a 12134 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
12135 else
12136 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
12137 return;
12138
9854d9ed
RK
12139 case 'p':
12140 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12141 if (! INT_P (x)
2bfcf297 12142 || INT_LOWPART (x) < 0
9854d9ed
RK
12143 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12144 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
12145 else
12146 fprintf (file, "%d", i);
9854d9ed
RK
12147 return;
12148
9878760c
RK
12149 case 'P':
12150 /* The operand must be an indirect memory reference. The result
8bb418a3 12151 is the register name. */
9878760c
RK
12152 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12153 || REGNO (XEXP (x, 0)) >= 32)
12154 output_operand_lossage ("invalid %%P value");
e2c953b6 12155 else
fb5c67a7 12156 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
12157 return;
12158
dfbdccdb
GK
12159 case 'q':
12160 /* This outputs the logical code corresponding to a boolean
12161 expression. The expression may have one or both operands
39a10a29 12162 negated (if one, only the first one). For condition register
c4ad648e
AM
12163 logical operations, it will also treat the negated
12164 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12165 {
63bc1d05 12166 const char *const *t = 0;
dfbdccdb
GK
12167 const char *s;
12168 enum rtx_code code = GET_CODE (x);
12169 static const char * const tbl[3][3] = {
12170 { "and", "andc", "nor" },
12171 { "or", "orc", "nand" },
12172 { "xor", "eqv", "xor" } };
12173
12174 if (code == AND)
12175 t = tbl[0];
12176 else if (code == IOR)
12177 t = tbl[1];
12178 else if (code == XOR)
12179 t = tbl[2];
12180 else
12181 output_operand_lossage ("invalid %%q value");
12182
12183 if (GET_CODE (XEXP (x, 0)) != NOT)
12184 s = t[0];
12185 else
12186 {
12187 if (GET_CODE (XEXP (x, 1)) == NOT)
12188 s = t[2];
12189 else
12190 s = t[1];
12191 }
f676971a 12192
dfbdccdb
GK
12193 fputs (s, file);
12194 }
12195 return;
12196
2c4a9cff
DE
12197 case 'Q':
12198 if (TARGET_MFCRF)
3b6ce0af 12199 fputc (',', file);
5efb1046 12200 /* FALLTHRU */
2c4a9cff
DE
12201 else
12202 return;
12203
9854d9ed
RK
12204 case 'R':
12205 /* X is a CR register. Print the mask for `mtcrf'. */
12206 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12207 output_operand_lossage ("invalid %%R value");
12208 else
9ebbca7d 12209 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12210 return;
9854d9ed
RK
12211
12212 case 's':
12213 /* Low 5 bits of 32 - value */
12214 if (! INT_P (x))
12215 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12216 else
12217 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12218 return;
9854d9ed 12219
a260abc9 12220 case 'S':
0ba1b2ff 12221 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12222 CONST_INT 32-bit mask is considered sign-extended so any
12223 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12224 if (! mask64_operand (x, DImode))
a260abc9
DE
12225 output_operand_lossage ("invalid %%S value");
12226
0ba1b2ff 12227 uval = INT_LOWPART (x);
a260abc9 12228
0ba1b2ff 12229 if (uval & 1) /* Clear Left */
a260abc9 12230 {
f099d360
GK
12231#if HOST_BITS_PER_WIDE_INT > 64
12232 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12233#endif
0ba1b2ff 12234 i = 64;
a260abc9 12235 }
0ba1b2ff 12236 else /* Clear Right */
a260abc9 12237 {
0ba1b2ff 12238 uval = ~uval;
f099d360
GK
12239#if HOST_BITS_PER_WIDE_INT > 64
12240 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12241#endif
0ba1b2ff 12242 i = 63;
a260abc9 12243 }
0ba1b2ff
AM
12244 while (uval != 0)
12245 --i, uval >>= 1;
37409796 12246 gcc_assert (i >= 0);
0ba1b2ff
AM
12247 fprintf (file, "%d", i);
12248 return;
a260abc9 12249
a3170dc6
AH
12250 case 't':
12251 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12252 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12253
12254 /* Bit 3 is OV bit. */
12255 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12256
12257 /* If we want bit 31, write a shift count of zero, not 32. */
12258 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12259 return;
12260
cccf3bdc
DE
12261 case 'T':
12262 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12263 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12264 && REGNO (x) != CTR_REGNO))
cccf3bdc 12265 output_operand_lossage ("invalid %%T value");
1de43f85 12266 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12267 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12268 else
12269 fputs ("ctr", file);
12270 return;
12271
9854d9ed 12272 case 'u':
802a0058 12273 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12274 if (! INT_P (x))
12275 output_operand_lossage ("invalid %%u value");
e2c953b6 12276 else
f676971a 12277 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12278 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12279 return;
12280
802a0058
MM
12281 case 'v':
12282 /* High-order 16 bits of constant for use in signed operand. */
12283 if (! INT_P (x))
12284 output_operand_lossage ("invalid %%v value");
e2c953b6 12285 else
134c32f6
DE
12286 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12287 (INT_LOWPART (x) >> 16) & 0xffff);
12288 return;
802a0058 12289
9854d9ed
RK
12290 case 'U':
12291 /* Print `u' if this has an auto-increment or auto-decrement. */
12292 if (GET_CODE (x) == MEM
12293 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12294 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12295 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12296 putc ('u', file);
9854d9ed 12297 return;
9878760c 12298
e0cd0770
JC
12299 case 'V':
12300 /* Print the trap code for this operand. */
12301 switch (GET_CODE (x))
12302 {
12303 case EQ:
12304 fputs ("eq", file); /* 4 */
12305 break;
12306 case NE:
12307 fputs ("ne", file); /* 24 */
12308 break;
12309 case LT:
12310 fputs ("lt", file); /* 16 */
12311 break;
12312 case LE:
12313 fputs ("le", file); /* 20 */
12314 break;
12315 case GT:
12316 fputs ("gt", file); /* 8 */
12317 break;
12318 case GE:
12319 fputs ("ge", file); /* 12 */
12320 break;
12321 case LTU:
12322 fputs ("llt", file); /* 2 */
12323 break;
12324 case LEU:
12325 fputs ("lle", file); /* 6 */
12326 break;
12327 case GTU:
12328 fputs ("lgt", file); /* 1 */
12329 break;
12330 case GEU:
12331 fputs ("lge", file); /* 5 */
12332 break;
12333 default:
37409796 12334 gcc_unreachable ();
e0cd0770
JC
12335 }
12336 break;
12337
9854d9ed
RK
12338 case 'w':
12339 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12340 normally. */
12341 if (INT_P (x))
f676971a 12342 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12343 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12344 else
12345 print_operand (file, x, 0);
9878760c
RK
12346 return;
12347
9854d9ed 12348 case 'W':
e2c953b6 12349 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12350 val = (GET_CODE (x) == CONST_INT
12351 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12352
12353 if (val < 0)
12354 i = -1;
9854d9ed 12355 else
e2c953b6
DE
12356 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12357 if ((val <<= 1) < 0)
12358 break;
12359
12360#if HOST_BITS_PER_WIDE_INT == 32
12361 if (GET_CODE (x) == CONST_INT && i >= 0)
12362 i += 32; /* zero-extend high-part was all 0's */
12363 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12364 {
12365 val = CONST_DOUBLE_LOW (x);
12366
37409796
NS
12367 gcc_assert (val);
12368 if (val < 0)
e2c953b6
DE
12369 --i;
12370 else
12371 for ( ; i < 64; i++)
12372 if ((val <<= 1) < 0)
12373 break;
12374 }
12375#endif
12376
12377 fprintf (file, "%d", i + 1);
9854d9ed 12378 return;
9878760c 12379
9854d9ed
RK
12380 case 'X':
12381 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12382 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12383 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12384 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12385 putc ('x', file);
9854d9ed 12386 return;
9878760c 12387
9854d9ed
RK
12388 case 'Y':
12389 /* Like 'L', for third word of TImode */
12390 if (GET_CODE (x) == REG)
fb5c67a7 12391 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12392 else if (GET_CODE (x) == MEM)
9878760c 12393 {
9854d9ed
RK
12394 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12395 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12396 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12397 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12398 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12399 else
d7624dc0 12400 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12401 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12402 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12403 reg_names[SMALL_DATA_REG]);
9878760c
RK
12404 }
12405 return;
f676971a 12406
9878760c 12407 case 'z':
b4ac57ab
RS
12408 /* X is a SYMBOL_REF. Write out the name preceded by a
12409 period and without any trailing data in brackets. Used for function
4d30c363
MM
12410 names. If we are configured for System V (or the embedded ABI) on
12411 the PowerPC, do not emit the period, since those systems do not use
12412 TOCs and the like. */
37409796 12413 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12414
c4ad648e
AM
12415 /* Mark the decl as referenced so that cgraph will output the
12416 function. */
9bf6462a 12417 if (SYMBOL_REF_DECL (x))
c4ad648e 12418 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12419
85b776df 12420 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12421 if (TARGET_MACHO)
12422 {
12423 const char *name = XSTR (x, 0);
a031e781 12424#if TARGET_MACHO
3b48085e 12425 if (MACHOPIC_INDIRECT
11abc112
MM
12426 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12427 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12428#endif
12429 assemble_name (file, name);
12430 }
85b776df 12431 else if (!DOT_SYMBOLS)
9739c90c 12432 assemble_name (file, XSTR (x, 0));
85b776df
AM
12433 else
12434 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12435 return;
12436
9854d9ed
RK
12437 case 'Z':
12438 /* Like 'L', for last word of TImode. */
12439 if (GET_CODE (x) == REG)
fb5c67a7 12440 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12441 else if (GET_CODE (x) == MEM)
12442 {
12443 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12444 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12445 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12446 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12447 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12448 else
d7624dc0 12449 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12450 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12451 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12452 reg_names[SMALL_DATA_REG]);
9854d9ed 12453 }
5c23c401 12454 return;
0ac081f6 12455
a3170dc6 12456 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12457 case 'y':
12458 {
12459 rtx tmp;
12460
37409796 12461 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12462
12463 tmp = XEXP (x, 0);
12464
90d3ff1c 12465 /* Ugly hack because %y is overloaded. */
8ef65e3d 12466 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12467 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12468 || GET_MODE (x) == TFmode
12469 || GET_MODE (x) == TImode))
a3170dc6
AH
12470 {
12471 /* Handle [reg]. */
12472 if (GET_CODE (tmp) == REG)
12473 {
12474 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12475 break;
12476 }
12477 /* Handle [reg+UIMM]. */
12478 else if (GET_CODE (tmp) == PLUS &&
12479 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12480 {
12481 int x;
12482
37409796 12483 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12484
12485 x = INTVAL (XEXP (tmp, 1));
12486 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12487 break;
12488 }
12489
12490 /* Fall through. Must be [reg+reg]. */
12491 }
850e8d3d
DN
12492 if (TARGET_ALTIVEC
12493 && GET_CODE (tmp) == AND
12494 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12495 && INTVAL (XEXP (tmp, 1)) == -16)
12496 tmp = XEXP (tmp, 0);
0ac081f6 12497 if (GET_CODE (tmp) == REG)
c62f2db5 12498 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12499 else
0ac081f6 12500 {
cb8cc791
AP
12501 if (!GET_CODE (tmp) == PLUS
12502 || !REG_P (XEXP (tmp, 0))
12503 || !REG_P (XEXP (tmp, 1)))
12504 {
12505 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12506 break;
12507 }
bb8df8a6 12508
0ac081f6
AH
12509 if (REGNO (XEXP (tmp, 0)) == 0)
12510 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12511 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12512 else
12513 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12514 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12515 }
0ac081f6
AH
12516 break;
12517 }
f676971a 12518
9878760c
RK
12519 case 0:
12520 if (GET_CODE (x) == REG)
12521 fprintf (file, "%s", reg_names[REGNO (x)]);
12522 else if (GET_CODE (x) == MEM)
12523 {
12524 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12525 know the width from the mode. */
12526 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12527 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12528 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12529 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12530 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12531 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12532 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12533 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12534 else
a54d04b7 12535 output_address (XEXP (x, 0));
9878760c
RK
12536 }
12537 else
a54d04b7 12538 output_addr_const (file, x);
a85d226b 12539 return;
9878760c 12540
c4501e62
JJ
12541 case '&':
12542 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12543 return;
12544
9878760c
RK
12545 default:
12546 output_operand_lossage ("invalid %%xn code");
12547 }
12548}
12549\f
12550/* Print the address of an operand. */
12551
12552void
a2369ed3 12553print_operand_address (FILE *file, rtx x)
9878760c
RK
12554{
12555 if (GET_CODE (x) == REG)
4697a36c 12556 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12557 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12558 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12559 {
12560 output_addr_const (file, x);
ba5e43aa 12561 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12562 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12563 reg_names[SMALL_DATA_REG]);
37409796
NS
12564 else
12565 gcc_assert (!TARGET_TOC);
9878760c
RK
12566 }
12567 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12568 {
9024f4b8 12569 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12570 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12571 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12572 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12573 else
4697a36c
MM
12574 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12575 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12576 }
12577 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12578 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12579 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12580#if TARGET_ELF
12581 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12582 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12583 {
12584 output_addr_const (file, XEXP (x, 1));
12585 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12586 }
c859cda6
DJ
12587#endif
12588#if TARGET_MACHO
12589 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12590 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12591 {
12592 fprintf (file, "lo16(");
12593 output_addr_const (file, XEXP (x, 1));
12594 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12595 }
3cb999d8 12596#endif
4d588c14 12597 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12598 {
2e4316da 12599 output_addr_const (file, XEXP (x, 1));
9ebbca7d
GK
12600 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12601 }
9878760c 12602 else
37409796 12603 gcc_unreachable ();
9878760c
RK
12604}
12605\f
2e4316da
RS
12606/* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12607
12608bool
12609rs6000_output_addr_const_extra (FILE *file, rtx x)
12610{
12611 if (GET_CODE (x) == UNSPEC)
12612 switch (XINT (x, 1))
12613 {
12614 case UNSPEC_TOCREL:
12615 x = XVECEXP (x, 0, 0);
12616 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12617 output_addr_const (file, x);
12618 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12619 {
12620 putc ('-', file);
12621 assemble_name (file, toc_label_name);
12622 }
12623 else if (TARGET_ELF)
12624 fputs ("@toc", file);
12625 return true;
08a6a74b
RS
12626
12627#if TARGET_MACHO
12628 case UNSPEC_MACHOPIC_OFFSET:
12629 output_addr_const (file, XVECEXP (x, 0, 0));
12630 putc ('-', file);
12631 machopic_output_function_base_name (file);
12632 return true;
12633#endif
2e4316da
RS
12634 }
12635 return false;
12636}
12637\f
88cad84b 12638/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12639 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12640 is defined. It also needs to handle DI-mode objects on 64-bit
12641 targets. */
12642
12643static bool
a2369ed3 12644rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12645{
f4f4921e 12646#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12647 /* Special handling for SI values. */
84dcde01 12648 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12649 {
301d03af 12650 static int recurse = 0;
f676971a 12651
301d03af
RS
12652 /* For -mrelocatable, we mark all addresses that need to be fixed up
12653 in the .fixup section. */
12654 if (TARGET_RELOCATABLE
d6b5193b
RS
12655 && in_section != toc_section
12656 && in_section != text_section
4325ca90 12657 && !unlikely_text_section_p (in_section)
301d03af
RS
12658 && !recurse
12659 && GET_CODE (x) != CONST_INT
12660 && GET_CODE (x) != CONST_DOUBLE
12661 && CONSTANT_P (x))
12662 {
12663 char buf[256];
12664
12665 recurse = 1;
12666 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12667 fixuplabelno++;
12668 ASM_OUTPUT_LABEL (asm_out_file, buf);
12669 fprintf (asm_out_file, "\t.long\t(");
12670 output_addr_const (asm_out_file, x);
12671 fprintf (asm_out_file, ")@fixup\n");
12672 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12673 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12674 fprintf (asm_out_file, "\t.long\t");
12675 assemble_name (asm_out_file, buf);
12676 fprintf (asm_out_file, "\n\t.previous\n");
12677 recurse = 0;
12678 return true;
12679 }
12680 /* Remove initial .'s to turn a -mcall-aixdesc function
12681 address into the address of the descriptor, not the function
12682 itself. */
12683 else if (GET_CODE (x) == SYMBOL_REF
12684 && XSTR (x, 0)[0] == '.'
12685 && DEFAULT_ABI == ABI_AIX)
12686 {
12687 const char *name = XSTR (x, 0);
12688 while (*name == '.')
12689 name++;
12690
12691 fprintf (asm_out_file, "\t.long\t%s\n", name);
12692 return true;
12693 }
12694 }
f4f4921e 12695#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12696 return default_assemble_integer (x, size, aligned_p);
12697}
93638d7a
AM
12698
12699#ifdef HAVE_GAS_HIDDEN
12700/* Emit an assembler directive to set symbol visibility for DECL to
12701 VISIBILITY_TYPE. */
12702
5add3202 12703static void
a2369ed3 12704rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12705{
93638d7a
AM
12706 /* Functions need to have their entry point symbol visibility set as
12707 well as their descriptor symbol visibility. */
85b776df
AM
12708 if (DEFAULT_ABI == ABI_AIX
12709 && DOT_SYMBOLS
12710 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12711 {
25fdb4dc 12712 static const char * const visibility_types[] = {
c4ad648e 12713 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12714 };
12715
12716 const char *name, *type;
93638d7a
AM
12717
12718 name = ((* targetm.strip_name_encoding)
12719 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12720 type = visibility_types[vis];
93638d7a 12721
25fdb4dc
RH
12722 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12723 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12724 }
25fdb4dc
RH
12725 else
12726 default_assemble_visibility (decl, vis);
93638d7a
AM
12727}
12728#endif
301d03af 12729\f
39a10a29 12730enum rtx_code
a2369ed3 12731rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12732{
12733 /* Reversal of FP compares takes care -- an ordered compare
12734 becomes an unordered compare and vice versa. */
f676971a 12735 if (mode == CCFPmode
bc9ec0e0
GK
12736 && (!flag_finite_math_only
12737 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12738 || code == UNEQ || code == LTGT))
bab6226b 12739 return reverse_condition_maybe_unordered (code);
39a10a29 12740 else
bab6226b 12741 return reverse_condition (code);
39a10a29
GK
12742}
12743
39a10a29
GK
12744/* Generate a compare for CODE. Return a brand-new rtx that
12745 represents the result of the compare. */
a4f6c312 12746
39a10a29 12747static rtx
a2369ed3 12748rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12749{
12750 enum machine_mode comp_mode;
12751 rtx compare_result;
12752
12753 if (rs6000_compare_fp_p)
12754 comp_mode = CCFPmode;
12755 else if (code == GTU || code == LTU
c4ad648e 12756 || code == GEU || code == LEU)
39a10a29 12757 comp_mode = CCUNSmode;
60934f9c
NS
12758 else if ((code == EQ || code == NE)
12759 && GET_CODE (rs6000_compare_op0) == SUBREG
12760 && GET_CODE (rs6000_compare_op1) == SUBREG
12761 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12762 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12763 /* These are unsigned values, perhaps there will be a later
12764 ordering compare that can be shared with this one.
12765 Unfortunately we cannot detect the signedness of the operands
12766 for non-subregs. */
12767 comp_mode = CCUNSmode;
39a10a29
GK
12768 else
12769 comp_mode = CCmode;
12770
12771 /* First, the compare. */
12772 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12773
cef6b86c 12774 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12775 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12776 && rs6000_compare_fp_p)
a3170dc6 12777 {
64022b5d 12778 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12779 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12780
12781 if (op_mode == VOIDmode)
12782 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12783
cef6b86c
EB
12784 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12785 This explains the following mess. */
423c1189 12786
a3170dc6
AH
12787 switch (code)
12788 {
423c1189 12789 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12790 switch (op_mode)
12791 {
12792 case SFmode:
12793 cmp = flag_unsafe_math_optimizations
12794 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12795 rs6000_compare_op1)
12796 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12797 rs6000_compare_op1);
12798 break;
12799
12800 case DFmode:
12801 cmp = flag_unsafe_math_optimizations
12802 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12803 rs6000_compare_op1)
12804 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12805 rs6000_compare_op1);
12806 break;
12807
17caeff2
JM
12808 case TFmode:
12809 cmp = flag_unsafe_math_optimizations
12810 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12811 rs6000_compare_op1)
12812 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12813 rs6000_compare_op1);
12814 break;
12815
37409796
NS
12816 default:
12817 gcc_unreachable ();
12818 }
a3170dc6 12819 break;
bb8df8a6 12820
423c1189 12821 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12822 switch (op_mode)
12823 {
12824 case SFmode:
12825 cmp = flag_unsafe_math_optimizations
12826 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12827 rs6000_compare_op1)
12828 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12829 rs6000_compare_op1);
12830 break;
bb8df8a6 12831
37409796
NS
12832 case DFmode:
12833 cmp = flag_unsafe_math_optimizations
12834 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12835 rs6000_compare_op1)
12836 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12837 rs6000_compare_op1);
12838 break;
12839
17caeff2
JM
12840 case TFmode:
12841 cmp = flag_unsafe_math_optimizations
12842 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12843 rs6000_compare_op1)
12844 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12845 rs6000_compare_op1);
12846 break;
12847
37409796
NS
12848 default:
12849 gcc_unreachable ();
12850 }
a3170dc6 12851 break;
bb8df8a6 12852
423c1189 12853 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12854 switch (op_mode)
12855 {
12856 case SFmode:
12857 cmp = flag_unsafe_math_optimizations
12858 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12859 rs6000_compare_op1)
12860 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12861 rs6000_compare_op1);
12862 break;
bb8df8a6 12863
37409796
NS
12864 case DFmode:
12865 cmp = flag_unsafe_math_optimizations
12866 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12867 rs6000_compare_op1)
12868 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12869 rs6000_compare_op1);
12870 break;
12871
17caeff2
JM
12872 case TFmode:
12873 cmp = flag_unsafe_math_optimizations
12874 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12875 rs6000_compare_op1)
12876 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12877 rs6000_compare_op1);
12878 break;
12879
37409796
NS
12880 default:
12881 gcc_unreachable ();
12882 }
a3170dc6 12883 break;
4d4cbc0e 12884 default:
37409796 12885 gcc_unreachable ();
a3170dc6
AH
12886 }
12887
12888 /* Synthesize LE and GE from LT/GT || EQ. */
12889 if (code == LE || code == GE || code == LEU || code == GEU)
12890 {
a3170dc6
AH
12891 emit_insn (cmp);
12892
12893 switch (code)
12894 {
12895 case LE: code = LT; break;
12896 case GE: code = GT; break;
12897 case LEU: code = LT; break;
12898 case GEU: code = GT; break;
37409796 12899 default: gcc_unreachable ();
a3170dc6
AH
12900 }
12901
a3170dc6
AH
12902 compare_result2 = gen_reg_rtx (CCFPmode);
12903
12904 /* Do the EQ. */
37409796
NS
12905 switch (op_mode)
12906 {
12907 case SFmode:
12908 cmp = flag_unsafe_math_optimizations
12909 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12910 rs6000_compare_op1)
12911 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12912 rs6000_compare_op1);
12913 break;
12914
12915 case DFmode:
12916 cmp = flag_unsafe_math_optimizations
12917 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12918 rs6000_compare_op1)
12919 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12920 rs6000_compare_op1);
12921 break;
12922
17caeff2
JM
12923 case TFmode:
12924 cmp = flag_unsafe_math_optimizations
12925 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12926 rs6000_compare_op1)
12927 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12928 rs6000_compare_op1);
12929 break;
12930
37409796
NS
12931 default:
12932 gcc_unreachable ();
12933 }
a3170dc6
AH
12934 emit_insn (cmp);
12935
a3170dc6 12936 /* OR them together. */
64022b5d
AH
12937 or_result = gen_reg_rtx (CCFPmode);
12938 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12939 compare_result2);
a3170dc6
AH
12940 compare_result = or_result;
12941 code = EQ;
12942 }
12943 else
12944 {
a3170dc6 12945 if (code == NE || code == LTGT)
a3170dc6 12946 code = NE;
423c1189
AH
12947 else
12948 code = EQ;
a3170dc6
AH
12949 }
12950
12951 emit_insn (cmp);
12952 }
12953 else
de17c25f
DE
12954 {
12955 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12956 CLOBBERs to match cmptf_internal2 pattern. */
12957 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12958 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12959 && !TARGET_IEEEQUAD
de17c25f
DE
12960 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12961 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12962 gen_rtvec (9,
12963 gen_rtx_SET (VOIDmode,
12964 compare_result,
12965 gen_rtx_COMPARE (comp_mode,
12966 rs6000_compare_op0,
12967 rs6000_compare_op1)),
12968 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12969 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12970 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12971 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12972 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12973 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12974 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12975 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12976 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12977 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12978 {
12979 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12980 comp_mode = CCEQmode;
12981 compare_result = gen_reg_rtx (CCEQmode);
12982 if (TARGET_64BIT)
12983 emit_insn (gen_stack_protect_testdi (compare_result,
12984 rs6000_compare_op0, op1));
12985 else
12986 emit_insn (gen_stack_protect_testsi (compare_result,
12987 rs6000_compare_op0, op1));
12988 }
de17c25f
DE
12989 else
12990 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12991 gen_rtx_COMPARE (comp_mode,
12992 rs6000_compare_op0,
12993 rs6000_compare_op1)));
12994 }
f676971a 12995
ca5adc63 12996 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12997 under flag_finite_math_only we don't bother. */
39a10a29 12998 if (rs6000_compare_fp_p
e7108df9 12999 && !flag_finite_math_only
8ef65e3d 13000 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
13001 && (code == LE || code == GE
13002 || code == UNEQ || code == LTGT
13003 || code == UNGT || code == UNLT))
13004 {
13005 enum rtx_code or1, or2;
13006 rtx or1_rtx, or2_rtx, compare2_rtx;
13007 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 13008
39a10a29
GK
13009 switch (code)
13010 {
13011 case LE: or1 = LT; or2 = EQ; break;
13012 case GE: or1 = GT; or2 = EQ; break;
13013 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13014 case LTGT: or1 = LT; or2 = GT; break;
13015 case UNGT: or1 = UNORDERED; or2 = GT; break;
13016 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 13017 default: gcc_unreachable ();
39a10a29
GK
13018 }
13019 validate_condition_mode (or1, comp_mode);
13020 validate_condition_mode (or2, comp_mode);
1c563bed
KH
13021 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13022 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
13023 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13024 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13025 const_true_rtx);
13026 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13027
13028 compare_result = or_result;
13029 code = EQ;
13030 }
13031
13032 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 13033
1c563bed 13034 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
13035}
13036
13037
13038/* Emit the RTL for an sCOND pattern. */
13039
13040void
a2369ed3 13041rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
13042{
13043 rtx condition_rtx;
13044 enum machine_mode op_mode;
b7053a3f 13045 enum rtx_code cond_code;
39a10a29
GK
13046
13047 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
13048 cond_code = GET_CODE (condition_rtx);
13049
8ef65e3d 13050 if (rs6000_compare_fp_p
423c1189
AH
13051 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13052 {
13053 rtx t;
13054
13055 PUT_MODE (condition_rtx, SImode);
13056 t = XEXP (condition_rtx, 0);
13057
37409796 13058 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
13059
13060 if (cond_code == NE)
64022b5d 13061 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 13062
64022b5d 13063 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
13064 return;
13065 }
13066
b7053a3f
GK
13067 if (cond_code == NE
13068 || cond_code == GE || cond_code == LE
13069 || cond_code == GEU || cond_code == LEU
13070 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13071 {
13072 rtx not_result = gen_reg_rtx (CCEQmode);
13073 rtx not_op, rev_cond_rtx;
13074 enum machine_mode cc_mode;
f676971a 13075
b7053a3f
GK
13076 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13077
1c563bed 13078 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 13079 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
13080 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13081 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13082 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13083 }
39a10a29
GK
13084
13085 op_mode = GET_MODE (rs6000_compare_op0);
13086 if (op_mode == VOIDmode)
13087 op_mode = GET_MODE (rs6000_compare_op1);
13088
13089 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
13090 {
13091 PUT_MODE (condition_rtx, DImode);
13092 convert_move (result, condition_rtx, 0);
13093 }
13094 else
13095 {
13096 PUT_MODE (condition_rtx, SImode);
13097 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13098 }
13099}
13100
39a10a29
GK
13101/* Emit a branch of kind CODE to location LOC. */
13102
13103void
a2369ed3 13104rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
13105{
13106 rtx condition_rtx, loc_ref;
13107
13108 condition_rtx = rs6000_generate_compare (code);
13109 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
13110 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13111 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13112 loc_ref, pc_rtx)));
13113}
13114
12a4e8c5
GK
13115/* Return the string to output a conditional branch to LABEL, which is
13116 the operand number of the label, or -1 if the branch is really a
f676971a 13117 conditional return.
12a4e8c5
GK
13118
13119 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13120 condition code register and its mode specifies what kind of
13121 comparison we made.
13122
a0ab749a 13123 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
13124
13125 INSN is the insn. */
13126
13127char *
a2369ed3 13128output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
13129{
13130 static char string[64];
13131 enum rtx_code code = GET_CODE (op);
13132 rtx cc_reg = XEXP (op, 0);
13133 enum machine_mode mode = GET_MODE (cc_reg);
13134 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13135 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
13136 int really_reversed = reversed ^ need_longbranch;
13137 char *s = string;
13138 const char *ccode;
13139 const char *pred;
13140 rtx note;
13141
39a10a29
GK
13142 validate_condition_mode (code, mode);
13143
13144 /* Work out which way this really branches. We could use
13145 reverse_condition_maybe_unordered here always but this
13146 makes the resulting assembler clearer. */
12a4e8c5 13147 if (really_reversed)
de40e1df
DJ
13148 {
13149 /* Reversal of FP compares takes care -- an ordered compare
13150 becomes an unordered compare and vice versa. */
13151 if (mode == CCFPmode)
13152 code = reverse_condition_maybe_unordered (code);
13153 else
13154 code = reverse_condition (code);
13155 }
12a4e8c5 13156
8ef65e3d 13157 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
13158 {
13159 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13160 to the GT bit. */
37409796
NS
13161 switch (code)
13162 {
13163 case EQ:
13164 /* Opposite of GT. */
13165 code = GT;
13166 break;
13167
13168 case NE:
13169 code = UNLE;
13170 break;
13171
13172 default:
13173 gcc_unreachable ();
13174 }
a3170dc6
AH
13175 }
13176
39a10a29 13177 switch (code)
12a4e8c5
GK
13178 {
13179 /* Not all of these are actually distinct opcodes, but
13180 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
13181 case NE: case LTGT:
13182 ccode = "ne"; break;
13183 case EQ: case UNEQ:
13184 ccode = "eq"; break;
f676971a 13185 case GE: case GEU:
50a0b056 13186 ccode = "ge"; break;
f676971a 13187 case GT: case GTU: case UNGT:
50a0b056 13188 ccode = "gt"; break;
f676971a 13189 case LE: case LEU:
50a0b056 13190 ccode = "le"; break;
f676971a 13191 case LT: case LTU: case UNLT:
50a0b056 13192 ccode = "lt"; break;
12a4e8c5
GK
13193 case UNORDERED: ccode = "un"; break;
13194 case ORDERED: ccode = "nu"; break;
13195 case UNGE: ccode = "nl"; break;
13196 case UNLE: ccode = "ng"; break;
13197 default:
37409796 13198 gcc_unreachable ();
12a4e8c5 13199 }
f676971a
EC
13200
13201 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13202 The old mnemonics don't have a way to specify this information. */
f4857b9b 13203 pred = "";
12a4e8c5
GK
13204 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13205 if (note != NULL_RTX)
13206 {
13207 /* PROB is the difference from 50%. */
13208 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13209
13210 /* Only hint for highly probable/improbable branches on newer
13211 cpus as static prediction overrides processor dynamic
13212 prediction. For older cpus we may as well always hint, but
13213 assume not taken for branches that are very close to 50% as a
13214 mispredicted taken branch is more expensive than a
f676971a 13215 mispredicted not-taken branch. */
ec507f2d 13216 if (rs6000_always_hint
2c9e13f3
JH
13217 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13218 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13219 {
13220 if (abs (prob) > REG_BR_PROB_BASE / 20
13221 && ((prob > 0) ^ need_longbranch))
c4ad648e 13222 pred = "+";
f4857b9b
AM
13223 else
13224 pred = "-";
13225 }
12a4e8c5 13226 }
12a4e8c5
GK
13227
13228 if (label == NULL)
94a54f47 13229 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13230 else
94a54f47 13231 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13232
37c67319 13233 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13234 Assume they'd only be the first character.... */
37c67319
GK
13235 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13236 *s++ = '%';
94a54f47 13237 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13238
13239 if (label != NULL)
13240 {
13241 /* If the branch distance was too far, we may have to use an
13242 unconditional branch to go the distance. */
13243 if (need_longbranch)
44518ddd 13244 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13245 else
13246 s += sprintf (s, ",%s", label);
13247 }
13248
13249 return string;
13250}
50a0b056 13251
64022b5d 13252/* Return the string to flip the GT bit on a CR. */
423c1189 13253char *
64022b5d 13254output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13255{
13256 static char string[64];
13257 int a, b;
13258
37409796
NS
13259 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13260 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13261
64022b5d
AH
13262 /* GT bit. */
13263 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13264 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13265
13266 sprintf (string, "crnot %d,%d", a, b);
13267 return string;
13268}
13269
21213b4c
DP
13270/* Return insn index for the vector compare instruction for given CODE,
13271 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
13272 not available. */
13273
13274static int
94ff898d 13275get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13276 enum machine_mode dest_mode,
13277 enum machine_mode op_mode)
13278{
13279 if (!TARGET_ALTIVEC)
13280 return INSN_NOT_AVAILABLE;
13281
13282 switch (code)
13283 {
13284 case EQ:
13285 if (dest_mode == V16QImode && op_mode == V16QImode)
13286 return UNSPEC_VCMPEQUB;
13287 if (dest_mode == V8HImode && op_mode == V8HImode)
13288 return UNSPEC_VCMPEQUH;
13289 if (dest_mode == V4SImode && op_mode == V4SImode)
13290 return UNSPEC_VCMPEQUW;
13291 if (dest_mode == V4SImode && op_mode == V4SFmode)
13292 return UNSPEC_VCMPEQFP;
13293 break;
13294 case GE:
13295 if (dest_mode == V4SImode && op_mode == V4SFmode)
13296 return UNSPEC_VCMPGEFP;
13297 case GT:
13298 if (dest_mode == V16QImode && op_mode == V16QImode)
13299 return UNSPEC_VCMPGTSB;
13300 if (dest_mode == V8HImode && op_mode == V8HImode)
13301 return UNSPEC_VCMPGTSH;
13302 if (dest_mode == V4SImode && op_mode == V4SImode)
13303 return UNSPEC_VCMPGTSW;
13304 if (dest_mode == V4SImode && op_mode == V4SFmode)
13305 return UNSPEC_VCMPGTFP;
13306 break;
13307 case GTU:
13308 if (dest_mode == V16QImode && op_mode == V16QImode)
13309 return UNSPEC_VCMPGTUB;
13310 if (dest_mode == V8HImode && op_mode == V8HImode)
13311 return UNSPEC_VCMPGTUH;
13312 if (dest_mode == V4SImode && op_mode == V4SImode)
13313 return UNSPEC_VCMPGTUW;
13314 break;
13315 default:
13316 break;
13317 }
13318 return INSN_NOT_AVAILABLE;
13319}
13320
13321/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13322 DMODE is expected destination mode. This is a recursive function. */
13323
13324static rtx
13325rs6000_emit_vector_compare (enum rtx_code rcode,
13326 rtx op0, rtx op1,
13327 enum machine_mode dmode)
13328{
13329 int vec_cmp_insn;
13330 rtx mask;
13331 enum machine_mode dest_mode;
13332 enum machine_mode op_mode = GET_MODE (op1);
13333
37409796
NS
13334 gcc_assert (TARGET_ALTIVEC);
13335 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13336
13337 /* Floating point vector compare instructions uses destination V4SImode.
13338 Move destination to appropriate mode later. */
13339 if (dmode == V4SFmode)
13340 dest_mode = V4SImode;
13341 else
13342 dest_mode = dmode;
13343
13344 mask = gen_reg_rtx (dest_mode);
13345 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13346
13347 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13348 {
13349 bool swap_operands = false;
13350 bool try_again = false;
13351 switch (rcode)
13352 {
13353 case LT:
13354 rcode = GT;
13355 swap_operands = true;
13356 try_again = true;
13357 break;
13358 case LTU:
13359 rcode = GTU;
13360 swap_operands = true;
13361 try_again = true;
13362 break;
13363 case NE:
370df7db
JC
13364 case UNLE:
13365 case UNLT:
13366 case UNGE:
13367 case UNGT:
13368 /* Invert condition and try again.
13369 e.g., A != B becomes ~(A==B). */
21213b4c 13370 {
370df7db 13371 enum rtx_code rev_code;
21213b4c 13372 enum insn_code nor_code;
d1123cde 13373 rtx eq_rtx;
370df7db
JC
13374
13375 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13376 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13377 dest_mode);
94ff898d 13378
166cdb08 13379 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13380 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13381 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13382
13383 if (dmode != dest_mode)
13384 {
13385 rtx temp = gen_reg_rtx (dest_mode);
13386 convert_move (temp, mask, 0);
13387 return temp;
13388 }
13389 return mask;
13390 }
13391 break;
13392 case GE:
13393 case GEU:
13394 case LE:
13395 case LEU:
13396 /* Try GT/GTU/LT/LTU OR EQ */
13397 {
13398 rtx c_rtx, eq_rtx;
13399 enum insn_code ior_code;
13400 enum rtx_code new_code;
13401
37409796
NS
13402 switch (rcode)
13403 {
13404 case GE:
13405 new_code = GT;
13406 break;
13407
13408 case GEU:
13409 new_code = GTU;
13410 break;
13411
13412 case LE:
13413 new_code = LT;
13414 break;
13415
13416 case LEU:
13417 new_code = LTU;
13418 break;
13419
13420 default:
13421 gcc_unreachable ();
13422 }
21213b4c
DP
13423
13424 c_rtx = rs6000_emit_vector_compare (new_code,
13425 op0, op1, dest_mode);
13426 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13427 dest_mode);
13428
166cdb08 13429 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13430 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13431 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13432 if (dmode != dest_mode)
13433 {
13434 rtx temp = gen_reg_rtx (dest_mode);
13435 convert_move (temp, mask, 0);
13436 return temp;
13437 }
13438 return mask;
13439 }
13440 break;
13441 default:
37409796 13442 gcc_unreachable ();
21213b4c
DP
13443 }
13444
13445 if (try_again)
13446 {
13447 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13448 /* You only get two chances. */
13449 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13450 }
13451
13452 if (swap_operands)
13453 {
13454 rtx tmp;
13455 tmp = op0;
13456 op0 = op1;
13457 op1 = tmp;
13458 }
13459 }
13460
915167f5
GK
13461 emit_insn (gen_rtx_SET (VOIDmode, mask,
13462 gen_rtx_UNSPEC (dest_mode,
13463 gen_rtvec (2, op0, op1),
13464 vec_cmp_insn)));
21213b4c
DP
13465 if (dmode != dest_mode)
13466 {
13467 rtx temp = gen_reg_rtx (dest_mode);
13468 convert_move (temp, mask, 0);
13469 return temp;
13470 }
13471 return mask;
13472}
13473
13474/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
13475 valid insn doesn exist for given mode. */
13476
13477static int
13478get_vsel_insn (enum machine_mode mode)
13479{
13480 switch (mode)
13481 {
13482 case V4SImode:
13483 return UNSPEC_VSEL4SI;
13484 break;
13485 case V4SFmode:
13486 return UNSPEC_VSEL4SF;
13487 break;
13488 case V8HImode:
13489 return UNSPEC_VSEL8HI;
13490 break;
13491 case V16QImode:
13492 return UNSPEC_VSEL16QI;
13493 break;
13494 default:
13495 return INSN_NOT_AVAILABLE;
13496 break;
13497 }
13498 return INSN_NOT_AVAILABLE;
13499}
13500
13501/* Emit vector select insn where DEST is destination using
13502 operands OP1, OP2 and MASK. */
13503
13504static void
13505rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13506{
13507 rtx t, temp;
13508 enum machine_mode dest_mode = GET_MODE (dest);
13509 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13510
13511 temp = gen_reg_rtx (dest_mode);
94ff898d 13512
bb8df8a6 13513 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13514 select op2. */
915167f5
GK
13515 t = gen_rtx_SET (VOIDmode, temp,
13516 gen_rtx_UNSPEC (dest_mode,
13517 gen_rtvec (3, op2, op1, mask),
13518 vsel_insn_index));
21213b4c
DP
13519 emit_insn (t);
13520 emit_move_insn (dest, temp);
13521 return;
13522}
13523
94ff898d 13524/* Emit vector conditional expression.
21213b4c
DP
13525 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13526 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13527
13528int
13529rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13530 rtx cond, rtx cc_op0, rtx cc_op1)
13531{
13532 enum machine_mode dest_mode = GET_MODE (dest);
13533 enum rtx_code rcode = GET_CODE (cond);
13534 rtx mask;
13535
13536 if (!TARGET_ALTIVEC)
13537 return 0;
13538
13539 /* Get the vector mask for the given relational operations. */
13540 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13541
13542 rs6000_emit_vector_select (dest, op1, op2, mask);
13543
13544 return 1;
13545}
13546
50a0b056
GK
13547/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13548 operands of the last comparison is nonzero/true, FALSE_COND if it
13549 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13550
50a0b056 13551int
a2369ed3 13552rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13553{
13554 enum rtx_code code = GET_CODE (op);
13555 rtx op0 = rs6000_compare_op0;
13556 rtx op1 = rs6000_compare_op1;
13557 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13558 enum machine_mode compare_mode = GET_MODE (op0);
13559 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13560 rtx temp;
add2402e 13561 bool is_against_zero;
50a0b056 13562
a3c9585f 13563 /* These modes should always match. */
a3170dc6
AH
13564 if (GET_MODE (op1) != compare_mode
13565 /* In the isel case however, we can use a compare immediate, so
13566 op1 may be a small constant. */
13567 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13568 return 0;
178c3eff 13569 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13570 return 0;
178c3eff 13571 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13572 return 0;
13573
50a0b056 13574 /* First, work out if the hardware can do this at all, or
a3c9585f 13575 if it's too slow.... */
50a0b056 13576 if (! rs6000_compare_fp_p)
a3170dc6
AH
13577 {
13578 if (TARGET_ISEL)
13579 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13580 return 0;
13581 }
8ef65e3d 13582 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13583 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13584 return 0;
50a0b056 13585
add2402e 13586 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13587
add2402e
GK
13588 /* A floating-point subtract might overflow, underflow, or produce
13589 an inexact result, thus changing the floating-point flags, so it
13590 can't be generated if we care about that. It's safe if one side
13591 of the construct is zero, since then no subtract will be
13592 generated. */
ebb109ad 13593 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13594 && flag_trapping_math && ! is_against_zero)
13595 return 0;
13596
50a0b056
GK
13597 /* Eliminate half of the comparisons by switching operands, this
13598 makes the remaining code simpler. */
13599 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13600 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13601 {
13602 code = reverse_condition_maybe_unordered (code);
13603 temp = true_cond;
13604 true_cond = false_cond;
13605 false_cond = temp;
13606 }
13607
13608 /* UNEQ and LTGT take four instructions for a comparison with zero,
13609 it'll probably be faster to use a branch here too. */
bc9ec0e0 13610 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13611 return 0;
f676971a 13612
50a0b056
GK
13613 if (GET_CODE (op1) == CONST_DOUBLE)
13614 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13615
b6d08ca1 13616 /* We're going to try to implement comparisons by performing
50a0b056
GK
13617 a subtract, then comparing against zero. Unfortunately,
13618 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13619 know that the operand is finite and the comparison
50a0b056 13620 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13621 if (HONOR_INFINITIES (compare_mode)
50a0b056 13622 && code != GT && code != UNGE
045572c7 13623 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13624 /* Constructs of the form (a OP b ? a : b) are safe. */
13625 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13626 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13627 && ! rtx_equal_p (op1, true_cond))))
13628 return 0;
add2402e 13629
50a0b056
GK
13630 /* At this point we know we can use fsel. */
13631
13632 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13633 if (! is_against_zero)
13634 {
13635 temp = gen_reg_rtx (compare_mode);
13636 emit_insn (gen_rtx_SET (VOIDmode, temp,
13637 gen_rtx_MINUS (compare_mode, op0, op1)));
13638 op0 = temp;
13639 op1 = CONST0_RTX (compare_mode);
13640 }
50a0b056
GK
13641
13642 /* If we don't care about NaNs we can reduce some of the comparisons
13643 down to faster ones. */
bc9ec0e0 13644 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13645 switch (code)
13646 {
13647 case GT:
13648 code = LE;
13649 temp = true_cond;
13650 true_cond = false_cond;
13651 false_cond = temp;
13652 break;
13653 case UNGE:
13654 code = GE;
13655 break;
13656 case UNEQ:
13657 code = EQ;
13658 break;
13659 default:
13660 break;
13661 }
13662
13663 /* Now, reduce everything down to a GE. */
13664 switch (code)
13665 {
13666 case GE:
13667 break;
13668
13669 case LE:
3148ad6d
DJ
13670 temp = gen_reg_rtx (compare_mode);
13671 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13672 op0 = temp;
13673 break;
13674
13675 case ORDERED:
3148ad6d
DJ
13676 temp = gen_reg_rtx (compare_mode);
13677 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13678 op0 = temp;
13679 break;
13680
13681 case EQ:
3148ad6d 13682 temp = gen_reg_rtx (compare_mode);
f676971a 13683 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13684 gen_rtx_NEG (compare_mode,
13685 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13686 op0 = temp;
13687 break;
13688
13689 case UNGE:
bc9ec0e0 13690 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13691 temp = gen_reg_rtx (result_mode);
50a0b056 13692 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13693 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13694 gen_rtx_GE (VOIDmode,
13695 op0, op1),
13696 true_cond, false_cond)));
bc9ec0e0
GK
13697 false_cond = true_cond;
13698 true_cond = temp;
50a0b056 13699
3148ad6d
DJ
13700 temp = gen_reg_rtx (compare_mode);
13701 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13702 op0 = temp;
13703 break;
13704
13705 case GT:
bc9ec0e0 13706 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13707 temp = gen_reg_rtx (result_mode);
50a0b056 13708 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13709 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13710 gen_rtx_GE (VOIDmode,
13711 op0, op1),
13712 true_cond, false_cond)));
bc9ec0e0
GK
13713 true_cond = false_cond;
13714 false_cond = temp;
50a0b056 13715
3148ad6d
DJ
13716 temp = gen_reg_rtx (compare_mode);
13717 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13718 op0 = temp;
13719 break;
13720
13721 default:
37409796 13722 gcc_unreachable ();
50a0b056
GK
13723 }
13724
13725 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13726 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13727 gen_rtx_GE (VOIDmode,
13728 op0, op1),
13729 true_cond, false_cond)));
13730 return 1;
13731}
13732
a3170dc6
AH
13733/* Same as above, but for ints (isel). */
13734
13735static int
a2369ed3 13736rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13737{
13738 rtx condition_rtx, cr;
13739
13740 /* All isel implementations thus far are 32-bits. */
13741 if (GET_MODE (rs6000_compare_op0) != SImode)
13742 return 0;
13743
13744 /* We still have to do the compare, because isel doesn't do a
13745 compare, it just looks at the CRx bits set by a previous compare
13746 instruction. */
13747 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13748 cr = XEXP (condition_rtx, 0);
13749
13750 if (GET_MODE (cr) == CCmode)
13751 emit_insn (gen_isel_signed (dest, condition_rtx,
13752 true_cond, false_cond, cr));
13753 else
13754 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13755 true_cond, false_cond, cr));
13756
13757 return 1;
13758}
13759
13760const char *
a2369ed3 13761output_isel (rtx *operands)
a3170dc6
AH
13762{
13763 enum rtx_code code;
13764
13765 code = GET_CODE (operands[1]);
13766 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13767 {
13768 PUT_CODE (operands[1], reverse_condition (code));
13769 return "isel %0,%3,%2,%j1";
13770 }
13771 else
13772 return "isel %0,%2,%3,%j1";
13773}
13774
50a0b056 13775void
a2369ed3 13776rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13777{
13778 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13779 enum rtx_code c;
50a0b056 13780 rtx target;
5dc8d536
AH
13781
13782 if (code == SMAX || code == SMIN)
13783 c = GE;
13784 else
13785 c = GEU;
13786
50a0b056 13787 if (code == SMAX || code == UMAX)
f676971a 13788 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13789 op0, op1, mode, 0);
13790 else
f676971a 13791 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13792 op1, op0, mode, 0);
37409796 13793 gcc_assert (target);
50a0b056
GK
13794 if (target != dest)
13795 emit_move_insn (dest, target);
13796}
46c07df8 13797
915167f5
GK
13798/* Emit instructions to perform a load-reserved/store-conditional operation.
13799 The operation performed is an atomic
13800 (set M (CODE:MODE M OP))
13801 If not NULL, BEFORE is atomically set to M before the operation, and
13802 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13803 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13804 Either OP or M may be wrapped in a NOT operation. */
13805
13806void
13807rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13808 rtx m, rtx op, rtx before_param, rtx after_param,
13809 bool sync_p)
13810{
13811 enum machine_mode used_mode;
13812 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13813 rtx used_m;
13814 rtvec vec;
13815 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13816 rtx shift = NULL_RTX;
bb8df8a6 13817
915167f5 13818 if (sync_p)
a3cb4b82 13819 emit_insn (gen_lwsync ());
bb8df8a6 13820
915167f5
GK
13821 if (GET_CODE (m) == NOT)
13822 used_m = XEXP (m, 0);
13823 else
13824 used_m = m;
13825
13826 /* If this is smaller than SImode, we'll have to use SImode with
13827 adjustments. */
13828 if (mode == QImode || mode == HImode)
13829 {
13830 rtx newop, oldop;
13831
13832 if (MEM_ALIGN (used_m) >= 32)
13833 {
13834 int ishift = 0;
13835 if (BYTES_BIG_ENDIAN)
13836 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13837
915167f5 13838 shift = GEN_INT (ishift);
c75c6d11 13839 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13840 }
13841 else
13842 {
13843 rtx addrSI, aligned_addr;
a9c9d3fa 13844 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13845
c75c6d11
JJ
13846 addrSI = gen_lowpart_common (SImode,
13847 force_reg (Pmode, XEXP (used_m, 0)));
13848 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13849 shift = gen_reg_rtx (SImode);
13850
13851 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13852 GEN_INT (shift_mask)));
13853 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13854
13855 aligned_addr = expand_binop (Pmode, and_optab,
13856 XEXP (used_m, 0),
13857 GEN_INT (-4), NULL_RTX,
13858 1, OPTAB_LIB_WIDEN);
13859 used_m = change_address (used_m, SImode, aligned_addr);
13860 set_mem_align (used_m, 32);
915167f5 13861 }
c75c6d11
JJ
13862 /* It's safe to keep the old alias set of USED_M, because
13863 the operation is atomic and only affects the original
13864 USED_M. */
13865 if (GET_CODE (m) == NOT)
13866 m = gen_rtx_NOT (SImode, used_m);
13867 else
13868 m = used_m;
915167f5
GK
13869
13870 if (GET_CODE (op) == NOT)
13871 {
13872 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13873 oldop = gen_rtx_NOT (SImode, oldop);
13874 }
13875 else
13876 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13877
915167f5
GK
13878 switch (code)
13879 {
13880 case IOR:
13881 case XOR:
13882 newop = expand_binop (SImode, and_optab,
13883 oldop, GEN_INT (imask), NULL_RTX,
13884 1, OPTAB_LIB_WIDEN);
13885 emit_insn (gen_ashlsi3 (newop, newop, shift));
13886 break;
13887
13888 case AND:
13889 newop = expand_binop (SImode, ior_optab,
13890 oldop, GEN_INT (~imask), NULL_RTX,
13891 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13892 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13893 break;
13894
13895 case PLUS:
9f0076e5 13896 case MINUS:
915167f5
GK
13897 {
13898 rtx mask;
bb8df8a6 13899
915167f5
GK
13900 newop = expand_binop (SImode, and_optab,
13901 oldop, GEN_INT (imask), NULL_RTX,
13902 1, OPTAB_LIB_WIDEN);
13903 emit_insn (gen_ashlsi3 (newop, newop, shift));
13904
13905 mask = gen_reg_rtx (SImode);
13906 emit_move_insn (mask, GEN_INT (imask));
13907 emit_insn (gen_ashlsi3 (mask, mask, shift));
13908
9f0076e5
DE
13909 if (code == PLUS)
13910 newop = gen_rtx_PLUS (SImode, m, newop);
13911 else
13912 newop = gen_rtx_MINUS (SImode, m, newop);
13913 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13914 newop = gen_rtx_IOR (SImode, newop,
13915 gen_rtx_AND (SImode,
13916 gen_rtx_NOT (SImode, mask),
13917 m));
13918 break;
13919 }
13920
13921 default:
13922 gcc_unreachable ();
13923 }
13924
a9c9d3fa
GK
13925 if (GET_CODE (m) == NOT)
13926 {
13927 rtx mask, xorm;
13928
13929 mask = gen_reg_rtx (SImode);
13930 emit_move_insn (mask, GEN_INT (imask));
13931 emit_insn (gen_ashlsi3 (mask, mask, shift));
13932
13933 xorm = gen_rtx_XOR (SImode, used_m, mask);
13934 /* Depending on the value of 'op', the XOR or the operation might
13935 be able to be simplified away. */
13936 newop = simplify_gen_binary (code, SImode, xorm, newop);
13937 }
915167f5
GK
13938 op = newop;
13939 used_mode = SImode;
13940 before = gen_reg_rtx (used_mode);
13941 after = gen_reg_rtx (used_mode);
13942 }
13943 else
13944 {
13945 used_mode = mode;
13946 before = before_param;
13947 after = after_param;
13948
13949 if (before == NULL_RTX)
13950 before = gen_reg_rtx (used_mode);
13951 if (after == NULL_RTX)
13952 after = gen_reg_rtx (used_mode);
13953 }
bb8df8a6 13954
9f0076e5
DE
13955 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13956 && used_mode != mode)
915167f5
GK
13957 the_op = op; /* Computed above. */
13958 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13959 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13960 else
13961 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13962
13963 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13964 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13965 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13966 gen_rtx_UNSPEC (used_mode,
13967 gen_rtvec (1, the_op),
13968 UNSPEC_SYNC_OP));
915167f5
GK
13969 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13970
9f0076e5 13971 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13972 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13973 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13974 else
13975 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13976 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13977
13978 /* Shift and mask the return values properly. */
13979 if (used_mode != mode && before_param)
13980 {
13981 emit_insn (gen_lshrsi3 (before, before, shift));
13982 convert_move (before_param, before, 1);
13983 }
13984
13985 if (used_mode != mode && after_param)
13986 {
13987 emit_insn (gen_lshrsi3 (after, after, shift));
13988 convert_move (after_param, after, 1);
13989 }
13990
13991 /* The previous sequence will end with a branch that's dependent on
13992 the conditional store, so placing an isync will ensure that no
13993 other instructions (especially, no load or store instructions)
13994 can start before the atomic operation completes. */
13995 if (sync_p)
13996 emit_insn (gen_isync ());
13997}
13998
b52110d4
DE
13999/* A subroutine of the atomic operation splitters. Jump to LABEL if
14000 COND is true. Mark the jump as unlikely to be taken. */
14001
14002static void
14003emit_unlikely_jump (rtx cond, rtx label)
14004{
14005 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14006 rtx x;
14007
14008 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14009 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
14010 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
14011}
14012
14013/* A subroutine of the atomic operation splitters. Emit a load-locked
14014 instruction in MODE. */
14015
14016static void
14017emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14018{
14019 rtx (*fn) (rtx, rtx) = NULL;
14020 if (mode == SImode)
14021 fn = gen_load_locked_si;
14022 else if (mode == DImode)
14023 fn = gen_load_locked_di;
14024 emit_insn (fn (reg, mem));
14025}
14026
14027/* A subroutine of the atomic operation splitters. Emit a store-conditional
14028 instruction in MODE. */
14029
14030static void
14031emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14032{
14033 rtx (*fn) (rtx, rtx, rtx) = NULL;
14034 if (mode == SImode)
14035 fn = gen_store_conditional_si;
14036 else if (mode == DImode)
14037 fn = gen_store_conditional_di;
14038
9f0076e5 14039 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
14040 if (PPC405_ERRATUM77)
14041 emit_insn (gen_memory_barrier ());
14042
14043 emit_insn (fn (res, mem, val));
14044}
14045
ea2c620c 14046/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 14047 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
14048 operand of the binary operator. BEFORE and AFTER are optional locations to
14049 return the value of MEM either before of after the operation. SCRATCH is
14050 a scratch register. */
14051
14052void
14053rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14054 rtx before, rtx after, rtx scratch)
14055{
14056 enum machine_mode mode = GET_MODE (mem);
14057 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14058
a3cb4b82 14059 emit_insn (gen_lwsync ());
9f0076e5
DE
14060
14061 label = gen_label_rtx ();
14062 emit_label (label);
14063 label = gen_rtx_LABEL_REF (VOIDmode, label);
14064
14065 if (before == NULL_RTX)
14066 before = scratch;
14067 emit_load_locked (mode, before, mem);
14068
14069 if (code == NOT)
14070 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
14071 else if (code == AND)
14072 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14073 else
14074 x = gen_rtx_fmt_ee (code, mode, before, val);
14075
14076 if (after != NULL_RTX)
14077 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14078 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14079
14080 emit_store_conditional (mode, cond, mem, scratch);
14081
14082 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14083 emit_unlikely_jump (x, label);
14084
14085 emit_insn (gen_isync ());
14086}
14087
b52110d4
DE
14088/* Expand an atomic compare and swap operation. MEM is the memory on which
14089 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14090 value to be stored. SCRATCH is a scratch GPR. */
14091
14092void
14093rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14094 rtx scratch)
14095{
14096 enum machine_mode mode = GET_MODE (mem);
14097 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14098
a3cb4b82 14099 emit_insn (gen_lwsync ());
b52110d4
DE
14100
14101 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14102 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14103 emit_label (XEXP (label1, 0));
14104
14105 emit_load_locked (mode, retval, mem);
14106
14107 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14108 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14109
14110 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14111 emit_unlikely_jump (x, label2);
14112
14113 emit_move_insn (scratch, newval);
14114 emit_store_conditional (mode, cond, mem, scratch);
14115
14116 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14117 emit_unlikely_jump (x, label1);
14118
14119 emit_insn (gen_isync ());
14120 emit_label (XEXP (label2, 0));
14121}
14122
14123/* Expand an atomic test and set operation. MEM is the memory on which
14124 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14125
14126void
14127rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14128{
14129 enum machine_mode mode = GET_MODE (mem);
14130 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14131
b52110d4
DE
14132 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14133 emit_label (XEXP (label, 0));
14134
14135 emit_load_locked (mode, retval, mem);
14136 emit_move_insn (scratch, val);
14137 emit_store_conditional (mode, cond, mem, scratch);
14138
14139 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14140 emit_unlikely_jump (x, label);
14141
14142 emit_insn (gen_isync ());
14143}
14144
9fc75b97
DE
14145void
14146rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14147{
14148 enum machine_mode mode = GET_MODE (mem);
14149 rtx addrSI, align, wdst, shift, mask;
14150 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14151 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14152
14153 /* Shift amount for subword relative to aligned word. */
4b49d9ce
AP
14154 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14155 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
9fc75b97
DE
14156 shift = gen_reg_rtx (SImode);
14157 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14158 GEN_INT (shift_mask)));
14159 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14160
14161 /* Shift and mask old value into position within word. */
14162 oldval = convert_modes (SImode, mode, oldval, 1);
14163 oldval = expand_binop (SImode, and_optab,
14164 oldval, GEN_INT (imask), NULL_RTX,
14165 1, OPTAB_LIB_WIDEN);
14166 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14167
14168 /* Shift and mask new value into position within word. */
14169 newval = convert_modes (SImode, mode, newval, 1);
14170 newval = expand_binop (SImode, and_optab,
14171 newval, GEN_INT (imask), NULL_RTX,
14172 1, OPTAB_LIB_WIDEN);
14173 emit_insn (gen_ashlsi3 (newval, newval, shift));
14174
14175 /* Mask for insertion. */
14176 mask = gen_reg_rtx (SImode);
14177 emit_move_insn (mask, GEN_INT (imask));
14178 emit_insn (gen_ashlsi3 (mask, mask, shift));
14179
14180 /* Address of aligned word containing subword. */
14181 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14182 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14183 mem = change_address (mem, SImode, align);
14184 set_mem_align (mem, 32);
14185 MEM_VOLATILE_P (mem) = 1;
14186
14187 wdst = gen_reg_rtx (SImode);
14188 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14189 oldval, newval, mem));
14190
2725b75c
JJ
14191 /* Shift the result back. */
14192 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14193
9fc75b97
DE
14194 emit_move_insn (dst, gen_lowpart (mode, wdst));
14195}
14196
14197void
14198rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14199 rtx oldval, rtx newval, rtx mem,
14200 rtx scratch)
14201{
14202 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14203
a3cb4b82 14204 emit_insn (gen_lwsync ());
9fc75b97
DE
14205 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14206 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14207 emit_label (XEXP (label1, 0));
14208
14209 emit_load_locked (SImode, scratch, mem);
14210
14211 /* Mask subword within loaded value for comparison with oldval.
14212 Use UNSPEC_AND to avoid clobber.*/
14213 emit_insn (gen_rtx_SET (SImode, dest,
14214 gen_rtx_UNSPEC (SImode,
14215 gen_rtvec (2, scratch, mask),
14216 UNSPEC_AND)));
14217
14218 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14219 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14220
14221 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14222 emit_unlikely_jump (x, label2);
14223
14224 /* Clear subword within loaded value for insertion of new value. */
14225 emit_insn (gen_rtx_SET (SImode, scratch,
14226 gen_rtx_AND (SImode,
14227 gen_rtx_NOT (SImode, mask), scratch)));
14228 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14229 emit_store_conditional (SImode, cond, mem, scratch);
14230
14231 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14232 emit_unlikely_jump (x, label1);
14233
14234 emit_insn (gen_isync ());
14235 emit_label (XEXP (label2, 0));
14236}
14237
14238
b52110d4 14239 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14240 multi-register moves. It will emit at most one instruction for
14241 each register that is accessed; that is, it won't emit li/lis pairs
14242 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14243 register. */
46c07df8 14244
46c07df8 14245void
a9baceb1 14246rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14247{
a9baceb1
GK
14248 /* The register number of the first register being moved. */
14249 int reg;
14250 /* The mode that is to be moved. */
14251 enum machine_mode mode;
14252 /* The mode that the move is being done in, and its size. */
14253 enum machine_mode reg_mode;
14254 int reg_mode_size;
14255 /* The number of registers that will be moved. */
14256 int nregs;
14257
14258 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14259 mode = GET_MODE (dst);
c8b622ff 14260 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14261 if (FP_REGNO_P (reg))
696e45ba
ME
14262 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14263 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
a9baceb1
GK
14264 else if (ALTIVEC_REGNO_P (reg))
14265 reg_mode = V16QImode;
4f011e1e
JM
14266 else if (TARGET_E500_DOUBLE && mode == TFmode)
14267 reg_mode = DFmode;
a9baceb1
GK
14268 else
14269 reg_mode = word_mode;
14270 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14271
37409796 14272 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14273
a9baceb1
GK
14274 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14275 {
14276 /* Move register range backwards, if we might have destructive
14277 overlap. */
14278 int i;
14279 for (i = nregs - 1; i >= 0; i--)
f676971a 14280 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14281 simplify_gen_subreg (reg_mode, dst, mode,
14282 i * reg_mode_size),
14283 simplify_gen_subreg (reg_mode, src, mode,
14284 i * reg_mode_size)));
14285 }
46c07df8
HP
14286 else
14287 {
a9baceb1
GK
14288 int i;
14289 int j = -1;
14290 bool used_update = false;
46c07df8 14291
c1e55850 14292 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14293 {
14294 rtx breg;
3a1f863f 14295
a9baceb1
GK
14296 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14297 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14298 {
14299 rtx delta_rtx;
a9baceb1 14300 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14301 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14302 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14303 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14304 emit_insn (TARGET_32BIT
14305 ? gen_addsi3 (breg, breg, delta_rtx)
14306 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14307 src = replace_equiv_address (src, breg);
3a1f863f 14308 }
d04b6e6e 14309 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14310 {
13e2e16e 14311 rtx basereg;
c1e55850
GK
14312 basereg = gen_rtx_REG (Pmode, reg);
14313 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14314 src = replace_equiv_address (src, basereg);
c1e55850 14315 }
3a1f863f 14316
0423421f
AM
14317 breg = XEXP (src, 0);
14318 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14319 breg = XEXP (breg, 0);
14320
14321 /* If the base register we are using to address memory is
14322 also a destination reg, then change that register last. */
14323 if (REG_P (breg)
14324 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14325 && REGNO (breg) < REGNO (dst) + nregs)
14326 j = REGNO (breg) - REGNO (dst);
c4ad648e 14327 }
46c07df8 14328
a9baceb1 14329 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14330 {
14331 rtx breg;
14332
a9baceb1
GK
14333 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14334 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14335 {
14336 rtx delta_rtx;
a9baceb1 14337 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14338 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14339 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14340 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14341
14342 /* We have to update the breg before doing the store.
14343 Use store with update, if available. */
14344
14345 if (TARGET_UPDATE)
14346 {
a9baceb1 14347 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14348 emit_insn (TARGET_32BIT
14349 ? (TARGET_POWERPC64
14350 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14351 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14352 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14353 used_update = true;
3a1f863f
DE
14354 }
14355 else
a9baceb1
GK
14356 emit_insn (TARGET_32BIT
14357 ? gen_addsi3 (breg, breg, delta_rtx)
14358 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14359 dst = replace_equiv_address (dst, breg);
3a1f863f 14360 }
37409796 14361 else
d04b6e6e 14362 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14363 }
14364
46c07df8 14365 for (i = 0; i < nregs; i++)
f676971a 14366 {
3a1f863f
DE
14367 /* Calculate index to next subword. */
14368 ++j;
f676971a 14369 if (j == nregs)
3a1f863f 14370 j = 0;
46c07df8 14371
112cdef5 14372 /* If compiler already emitted move of first word by
a9baceb1 14373 store with update, no need to do anything. */
3a1f863f 14374 if (j == 0 && used_update)
a9baceb1 14375 continue;
f676971a 14376
a9baceb1
GK
14377 emit_insn (gen_rtx_SET (VOIDmode,
14378 simplify_gen_subreg (reg_mode, dst, mode,
14379 j * reg_mode_size),
14380 simplify_gen_subreg (reg_mode, src, mode,
14381 j * reg_mode_size)));
3a1f863f 14382 }
46c07df8
HP
14383 }
14384}
14385
12a4e8c5 14386\f
a4f6c312
SS
14387/* This page contains routines that are used to determine what the
14388 function prologue and epilogue code will do and write them out. */
9878760c 14389
a4f6c312
SS
14390/* Return the first fixed-point register that is required to be
14391 saved. 32 if none. */
9878760c
RK
14392
14393int
863d938c 14394first_reg_to_save (void)
9878760c
RK
14395{
14396 int first_reg;
14397
14398 /* Find lowest numbered live register. */
14399 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14400 if (df_regs_ever_live_p (first_reg)
a38d360d 14401 && (! call_used_regs[first_reg]
1db02437 14402 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14403 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14404 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14405 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14406 break;
14407
ee890fe2 14408#if TARGET_MACHO
93638d7a 14409 if (flag_pic
e3b5732b 14410 && crtl->uses_pic_offset_table
93638d7a 14411 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14412 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14413#endif
14414
9878760c
RK
14415 return first_reg;
14416}
14417
14418/* Similar, for FP regs. */
14419
14420int
863d938c 14421first_fp_reg_to_save (void)
9878760c
RK
14422{
14423 int first_reg;
14424
14425 /* Find lowest numbered live register. */
14426 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14427 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14428 break;
14429
14430 return first_reg;
14431}
00b960c7
AH
14432
14433/* Similar, for AltiVec regs. */
14434
14435static int
863d938c 14436first_altivec_reg_to_save (void)
00b960c7
AH
14437{
14438 int i;
14439
14440 /* Stack frame remains as is unless we are in AltiVec ABI. */
14441 if (! TARGET_ALTIVEC_ABI)
14442 return LAST_ALTIVEC_REGNO + 1;
14443
22fa69da 14444 /* On Darwin, the unwind routines are compiled without
982afe02 14445 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14446 altivec registers when necessary. */
e3b5732b 14447 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14448 && ! TARGET_ALTIVEC)
14449 return FIRST_ALTIVEC_REGNO + 20;
14450
00b960c7
AH
14451 /* Find lowest numbered live register. */
14452 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14453 if (df_regs_ever_live_p (i))
00b960c7
AH
14454 break;
14455
14456 return i;
14457}
14458
14459/* Return a 32-bit mask of the AltiVec registers we need to set in
14460 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14461 the 32-bit word is 0. */
14462
14463static unsigned int
863d938c 14464compute_vrsave_mask (void)
00b960c7
AH
14465{
14466 unsigned int i, mask = 0;
14467
22fa69da 14468 /* On Darwin, the unwind routines are compiled without
982afe02 14469 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14470 call-saved altivec registers when necessary. */
e3b5732b 14471 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14472 && ! TARGET_ALTIVEC)
14473 mask |= 0xFFF;
14474
00b960c7
AH
14475 /* First, find out if we use _any_ altivec registers. */
14476 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14477 if (df_regs_ever_live_p (i))
00b960c7
AH
14478 mask |= ALTIVEC_REG_BIT (i);
14479
14480 if (mask == 0)
14481 return mask;
14482
00b960c7
AH
14483 /* Next, remove the argument registers from the set. These must
14484 be in the VRSAVE mask set by the caller, so we don't need to add
14485 them in again. More importantly, the mask we compute here is
14486 used to generate CLOBBERs in the set_vrsave insn, and we do not
14487 wish the argument registers to die. */
38173d38 14488 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14489 mask &= ~ALTIVEC_REG_BIT (i);
14490
14491 /* Similarly, remove the return value from the set. */
14492 {
14493 bool yes = false;
14494 diddle_return_value (is_altivec_return_reg, &yes);
14495 if (yes)
14496 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14497 }
14498
14499 return mask;
14500}
14501
d62294f5 14502/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14503 size of prologues/epilogues by calling our own save/restore-the-world
14504 routines. */
d62294f5
FJ
14505
14506static void
f57fe068
AM
14507compute_save_world_info (rs6000_stack_t *info_ptr)
14508{
14509 info_ptr->world_save_p = 1;
14510 info_ptr->world_save_p
14511 = (WORLD_SAVE_P (info_ptr)
14512 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14513 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14514 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14515 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14516 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14517 && info_ptr->cr_save_p);
f676971a 14518
d62294f5
FJ
14519 /* This will not work in conjunction with sibcalls. Make sure there
14520 are none. (This check is expensive, but seldom executed.) */
f57fe068 14521 if (WORLD_SAVE_P (info_ptr))
f676971a 14522 {
d62294f5
FJ
14523 rtx insn;
14524 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14525 if ( GET_CODE (insn) == CALL_INSN
14526 && SIBLING_CALL_P (insn))
14527 {
14528 info_ptr->world_save_p = 0;
14529 break;
14530 }
d62294f5 14531 }
f676971a 14532
f57fe068 14533 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14534 {
14535 /* Even if we're not touching VRsave, make sure there's room on the
14536 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14537 will attempt to save it. */
d62294f5
FJ
14538 info_ptr->vrsave_size = 4;
14539
298ac1dd
AP
14540 /* If we are going to save the world, we need to save the link register too. */
14541 info_ptr->lr_save_p = 1;
14542
d62294f5
FJ
14543 /* "Save" the VRsave register too if we're saving the world. */
14544 if (info_ptr->vrsave_mask == 0)
c4ad648e 14545 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14546
14547 /* Because the Darwin register save/restore routines only handle
c4ad648e 14548 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14549 check. */
37409796
NS
14550 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14551 && (info_ptr->first_altivec_reg_save
14552 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14553 }
f676971a 14554 return;
d62294f5
FJ
14555}
14556
14557
00b960c7 14558static void
a2369ed3 14559is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14560{
14561 bool *yes = (bool *) xyes;
14562 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14563 *yes = true;
14564}
14565
4697a36c
MM
14566\f
14567/* Calculate the stack information for the current function. This is
14568 complicated by having two separate calling sequences, the AIX calling
14569 sequence and the V.4 calling sequence.
14570
592696dd 14571 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14572 32-bit 64-bit
4697a36c 14573 SP----> +---------------------------------------+
a260abc9 14574 | back chain to caller | 0 0
4697a36c 14575 +---------------------------------------+
a260abc9 14576 | saved CR | 4 8 (8-11)
4697a36c 14577 +---------------------------------------+
a260abc9 14578 | saved LR | 8 16
4697a36c 14579 +---------------------------------------+
a260abc9 14580 | reserved for compilers | 12 24
4697a36c 14581 +---------------------------------------+
a260abc9 14582 | reserved for binders | 16 32
4697a36c 14583 +---------------------------------------+
a260abc9 14584 | saved TOC pointer | 20 40
4697a36c 14585 +---------------------------------------+
a260abc9 14586 | Parameter save area (P) | 24 48
4697a36c 14587 +---------------------------------------+
a260abc9 14588 | Alloca space (A) | 24+P etc.
802a0058 14589 +---------------------------------------+
a7df97e6 14590 | Local variable space (L) | 24+P+A
4697a36c 14591 +---------------------------------------+
a7df97e6 14592 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14593 +---------------------------------------+
00b960c7
AH
14594 | Save area for AltiVec registers (W) | 24+P+A+L+X
14595 +---------------------------------------+
14596 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14597 +---------------------------------------+
14598 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14599 +---------------------------------------+
00b960c7
AH
14600 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14601 +---------------------------------------+
14602 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14603 +---------------------------------------+
14604 old SP->| back chain to caller's caller |
14605 +---------------------------------------+
14606
5376a30c
KR
14607 The required alignment for AIX configurations is two words (i.e., 8
14608 or 16 bytes).
14609
14610
4697a36c
MM
14611 V.4 stack frames look like:
14612
14613 SP----> +---------------------------------------+
14614 | back chain to caller | 0
14615 +---------------------------------------+
5eb387b8 14616 | caller's saved LR | 4
4697a36c
MM
14617 +---------------------------------------+
14618 | Parameter save area (P) | 8
14619 +---------------------------------------+
a7df97e6 14620 | Alloca space (A) | 8+P
f676971a 14621 +---------------------------------------+
a7df97e6 14622 | Varargs save area (V) | 8+P+A
f676971a 14623 +---------------------------------------+
a7df97e6 14624 | Local variable space (L) | 8+P+A+V
f676971a 14625 +---------------------------------------+
a7df97e6 14626 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14627 +---------------------------------------+
00b960c7
AH
14628 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14629 +---------------------------------------+
14630 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14631 +---------------------------------------+
14632 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14633 +---------------------------------------+
c4ad648e
AM
14634 | SPE: area for 64-bit GP registers |
14635 +---------------------------------------+
14636 | SPE alignment padding |
14637 +---------------------------------------+
00b960c7 14638 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14639 +---------------------------------------+
00b960c7 14640 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14641 +---------------------------------------+
00b960c7 14642 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14643 +---------------------------------------+
14644 old SP->| back chain to caller's caller |
14645 +---------------------------------------+
b6c9286a 14646
5376a30c
KR
14647 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14648 given. (But note below and in sysv4.h that we require only 8 and
14649 may round up the size of our stack frame anyways. The historical
14650 reason is early versions of powerpc-linux which didn't properly
14651 align the stack at program startup. A happy side-effect is that
14652 -mno-eabi libraries can be used with -meabi programs.)
14653
50d440bc 14654 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14655 the stack alignment requirements may differ. If -mno-eabi is not
14656 given, the required stack alignment is 8 bytes; if -mno-eabi is
14657 given, the required alignment is 16 bytes. (But see V.4 comment
14658 above.) */
4697a36c 14659
61b2fbe7
MM
14660#ifndef ABI_STACK_BOUNDARY
14661#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14662#endif
14663
d1d0c603 14664static rs6000_stack_t *
863d938c 14665rs6000_stack_info (void)
4697a36c 14666{
022123e6 14667 static rs6000_stack_t info;
4697a36c 14668 rs6000_stack_t *info_ptr = &info;
327e5343 14669 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14670 int ehrd_size;
64045029 14671 int save_align;
8070c91a 14672 int first_gp;
44688022 14673 HOST_WIDE_INT non_fixed_size;
4697a36c 14674
022123e6 14675 memset (&info, 0, sizeof (info));
4697a36c 14676
c19de7aa
AH
14677 if (TARGET_SPE)
14678 {
14679 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14680 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14681 cfun->machine->insn_chain_scanned_p
14682 = spe_func_has_64bit_regs_p () + 1;
14683 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14684 }
14685
a4f6c312 14686 /* Select which calling sequence. */
178274da 14687 info_ptr->abi = DEFAULT_ABI;
9878760c 14688
a4f6c312 14689 /* Calculate which registers need to be saved & save area size. */
4697a36c 14690 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14691 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14692 even if it currently looks like we won't. Reload may need it to
14693 get at a constant; if so, it will have already created a constant
14694 pool entry for it. */
2bfcf297 14695 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14696 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14697 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14698 && crtl->uses_const_pool
1db02437 14699 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14700 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14701 else
8070c91a
DJ
14702 first_gp = info_ptr->first_gp_reg_save;
14703
14704 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14705
a3170dc6
AH
14706 /* For the SPE, we have an additional upper 32-bits on each GPR.
14707 Ideally we should save the entire 64-bits only when the upper
14708 half is used in SIMD instructions. Since we only record
14709 registers live (not the size they are used in), this proves
14710 difficult because we'd have to traverse the instruction chain at
14711 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14712 so we opt to save the GPRs in 64-bits always if but one register
14713 gets used in 64-bits. Otherwise, all the registers in the frame
14714 get saved in 32-bits.
a3170dc6 14715
c19de7aa 14716 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14717 traditional GP save area will be empty. */
c19de7aa 14718 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14719 info_ptr->gp_size = 0;
14720
4697a36c
MM
14721 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14722 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14723
00b960c7
AH
14724 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14725 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14726 - info_ptr->first_altivec_reg_save);
14727
592696dd 14728 /* Does this function call anything? */
71f123ca
FS
14729 info_ptr->calls_p = (! current_function_is_leaf
14730 || cfun->machine->ra_needs_full_frame);
b6c9286a 14731
a4f6c312 14732 /* Determine if we need to save the link register. */
022123e6 14733 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14734 && crtl->profile
022123e6 14735 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14736#ifdef TARGET_RELOCATABLE
14737 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14738#endif
14739 || (info_ptr->first_fp_reg_save != 64
14740 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14741 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14742 || info_ptr->calls_p
14743 || rs6000_ra_ever_killed ())
4697a36c
MM
14744 {
14745 info_ptr->lr_save_p = 1;
1de43f85 14746 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14747 }
14748
9ebbca7d 14749 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14750 if (df_regs_ever_live_p (CR2_REGNO)
14751 || df_regs_ever_live_p (CR3_REGNO)
14752 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14753 {
14754 info_ptr->cr_save_p = 1;
178274da 14755 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14756 info_ptr->cr_size = reg_size;
14757 }
14758
83720594
RH
14759 /* If the current function calls __builtin_eh_return, then we need
14760 to allocate stack space for registers that will hold data for
14761 the exception handler. */
e3b5732b 14762 if (crtl->calls_eh_return)
83720594
RH
14763 {
14764 unsigned int i;
14765 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14766 continue;
a3170dc6
AH
14767
14768 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14769 ehrd_size = i * (TARGET_SPE_ABI
14770 && info_ptr->spe_64bit_regs_used != 0
14771 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14772 }
14773 else
14774 ehrd_size = 0;
14775
592696dd 14776 /* Determine various sizes. */
4697a36c
MM
14777 info_ptr->reg_size = reg_size;
14778 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14779 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14780 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14781 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14782 if (FRAME_GROWS_DOWNWARD)
14783 info_ptr->vars_size
5b667039
JJ
14784 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14785 + info_ptr->parm_size,
7d5175e1 14786 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14787 - (info_ptr->fixed_size + info_ptr->vars_size
14788 + info_ptr->parm_size);
00b960c7 14789
c19de7aa 14790 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14791 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14792 else
14793 info_ptr->spe_gp_size = 0;
14794
4d774ff8
HP
14795 if (TARGET_ALTIVEC_ABI)
14796 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14797 else
4d774ff8
HP
14798 info_ptr->vrsave_mask = 0;
14799
14800 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14801 info_ptr->vrsave_size = 4;
14802 else
14803 info_ptr->vrsave_size = 0;
b6c9286a 14804
d62294f5
FJ
14805 compute_save_world_info (info_ptr);
14806
592696dd 14807 /* Calculate the offsets. */
178274da 14808 switch (DEFAULT_ABI)
4697a36c 14809 {
b6c9286a 14810 case ABI_NONE:
24d304eb 14811 default:
37409796 14812 gcc_unreachable ();
b6c9286a
MM
14813
14814 case ABI_AIX:
ee890fe2 14815 case ABI_DARWIN:
b6c9286a
MM
14816 info_ptr->fp_save_offset = - info_ptr->fp_size;
14817 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14818
14819 if (TARGET_ALTIVEC_ABI)
14820 {
14821 info_ptr->vrsave_save_offset
14822 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14823
982afe02 14824 /* Align stack so vector save area is on a quadword boundary.
9278121c 14825 The padding goes above the vectors. */
00b960c7
AH
14826 if (info_ptr->altivec_size != 0)
14827 info_ptr->altivec_padding_size
9278121c 14828 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14829 else
14830 info_ptr->altivec_padding_size = 0;
14831
14832 info_ptr->altivec_save_offset
14833 = info_ptr->vrsave_save_offset
14834 - info_ptr->altivec_padding_size
14835 - info_ptr->altivec_size;
9278121c
GK
14836 gcc_assert (info_ptr->altivec_size == 0
14837 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14838
14839 /* Adjust for AltiVec case. */
14840 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14841 }
14842 else
14843 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14844 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14845 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14846 break;
14847
14848 case ABI_V4:
b6c9286a
MM
14849 info_ptr->fp_save_offset = - info_ptr->fp_size;
14850 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14851 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14852
c19de7aa 14853 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14854 {
14855 /* Align stack so SPE GPR save area is aligned on a
14856 double-word boundary. */
f78c3290 14857 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14858 info_ptr->spe_padding_size
14859 = 8 - (-info_ptr->cr_save_offset % 8);
14860 else
14861 info_ptr->spe_padding_size = 0;
14862
14863 info_ptr->spe_gp_save_offset
14864 = info_ptr->cr_save_offset
14865 - info_ptr->spe_padding_size
14866 - info_ptr->spe_gp_size;
14867
14868 /* Adjust for SPE case. */
022123e6 14869 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14870 }
a3170dc6 14871 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14872 {
14873 info_ptr->vrsave_save_offset
14874 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14875
14876 /* Align stack so vector save area is on a quadword boundary. */
14877 if (info_ptr->altivec_size != 0)
14878 info_ptr->altivec_padding_size
14879 = 16 - (-info_ptr->vrsave_save_offset % 16);
14880 else
14881 info_ptr->altivec_padding_size = 0;
14882
14883 info_ptr->altivec_save_offset
14884 = info_ptr->vrsave_save_offset
14885 - info_ptr->altivec_padding_size
14886 - info_ptr->altivec_size;
14887
14888 /* Adjust for AltiVec case. */
022123e6 14889 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14890 }
14891 else
022123e6
AM
14892 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14893 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14894 info_ptr->lr_save_offset = reg_size;
14895 break;
4697a36c
MM
14896 }
14897
64045029 14898 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14899 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14900 + info_ptr->gp_size
14901 + info_ptr->altivec_size
14902 + info_ptr->altivec_padding_size
a3170dc6
AH
14903 + info_ptr->spe_gp_size
14904 + info_ptr->spe_padding_size
00b960c7
AH
14905 + ehrd_size
14906 + info_ptr->cr_size
022123e6 14907 + info_ptr->vrsave_size,
64045029 14908 save_align);
00b960c7 14909
44688022 14910 non_fixed_size = (info_ptr->vars_size
ff381587 14911 + info_ptr->parm_size
5b667039 14912 + info_ptr->save_size);
ff381587 14913
44688022
AM
14914 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14915 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14916
14917 /* Determine if we need to allocate any stack frame:
14918
a4f6c312
SS
14919 For AIX we need to push the stack if a frame pointer is needed
14920 (because the stack might be dynamically adjusted), if we are
14921 debugging, if we make calls, or if the sum of fp_save, gp_save,
14922 and local variables are more than the space needed to save all
14923 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14924 + 18*8 = 288 (GPR13 reserved).
ff381587 14925
a4f6c312
SS
14926 For V.4 we don't have the stack cushion that AIX uses, but assume
14927 that the debugger can handle stackless frames. */
ff381587
MM
14928
14929 if (info_ptr->calls_p)
14930 info_ptr->push_p = 1;
14931
178274da 14932 else if (DEFAULT_ABI == ABI_V4)
44688022 14933 info_ptr->push_p = non_fixed_size != 0;
ff381587 14934
178274da
AM
14935 else if (frame_pointer_needed)
14936 info_ptr->push_p = 1;
14937
14938 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14939 info_ptr->push_p = 1;
14940
ff381587 14941 else
44688022 14942 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14943
a4f6c312 14944 /* Zero offsets if we're not saving those registers. */
8dda1a21 14945 if (info_ptr->fp_size == 0)
4697a36c
MM
14946 info_ptr->fp_save_offset = 0;
14947
8dda1a21 14948 if (info_ptr->gp_size == 0)
4697a36c
MM
14949 info_ptr->gp_save_offset = 0;
14950
00b960c7
AH
14951 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14952 info_ptr->altivec_save_offset = 0;
14953
14954 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14955 info_ptr->vrsave_save_offset = 0;
14956
c19de7aa
AH
14957 if (! TARGET_SPE_ABI
14958 || info_ptr->spe_64bit_regs_used == 0
14959 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14960 info_ptr->spe_gp_save_offset = 0;
14961
c81fc13e 14962 if (! info_ptr->lr_save_p)
4697a36c
MM
14963 info_ptr->lr_save_offset = 0;
14964
c81fc13e 14965 if (! info_ptr->cr_save_p)
4697a36c
MM
14966 info_ptr->cr_save_offset = 0;
14967
14968 return info_ptr;
14969}
14970
c19de7aa
AH
14971/* Return true if the current function uses any GPRs in 64-bit SIMD
14972 mode. */
14973
14974static bool
863d938c 14975spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14976{
14977 rtx insns, insn;
14978
14979 /* Functions that save and restore all the call-saved registers will
14980 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14981 if (crtl->calls_eh_return
14982 || cfun->calls_setjmp
14983 || crtl->has_nonlocal_goto)
c19de7aa
AH
14984 return true;
14985
14986 insns = get_insns ();
14987
14988 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14989 {
14990 if (INSN_P (insn))
14991 {
14992 rtx i;
14993
b5a5beb9
AH
14994 /* FIXME: This should be implemented with attributes...
14995
14996 (set_attr "spe64" "true")....then,
14997 if (get_spe64(insn)) return true;
14998
14999 It's the only reliable way to do the stuff below. */
15000
c19de7aa 15001 i = PATTERN (insn);
f82f556d
AH
15002 if (GET_CODE (i) == SET)
15003 {
15004 enum machine_mode mode = GET_MODE (SET_SRC (i));
15005
15006 if (SPE_VECTOR_MODE (mode))
15007 return true;
4f011e1e 15008 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
15009 return true;
15010 }
c19de7aa
AH
15011 }
15012 }
15013
15014 return false;
15015}
15016
d1d0c603 15017static void
a2369ed3 15018debug_stack_info (rs6000_stack_t *info)
9878760c 15019{
d330fd93 15020 const char *abi_string;
24d304eb 15021
c81fc13e 15022 if (! info)
4697a36c
MM
15023 info = rs6000_stack_info ();
15024
15025 fprintf (stderr, "\nStack information for function %s:\n",
15026 ((current_function_decl && DECL_NAME (current_function_decl))
15027 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15028 : "<unknown>"));
15029
24d304eb
RK
15030 switch (info->abi)
15031 {
b6c9286a
MM
15032 default: abi_string = "Unknown"; break;
15033 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 15034 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 15035 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 15036 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
15037 }
15038
15039 fprintf (stderr, "\tABI = %5s\n", abi_string);
15040
00b960c7
AH
15041 if (TARGET_ALTIVEC_ABI)
15042 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15043
a3170dc6
AH
15044 if (TARGET_SPE_ABI)
15045 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15046
4697a36c
MM
15047 if (info->first_gp_reg_save != 32)
15048 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15049
15050 if (info->first_fp_reg_save != 64)
15051 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 15052
00b960c7
AH
15053 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15054 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15055 info->first_altivec_reg_save);
15056
4697a36c
MM
15057 if (info->lr_save_p)
15058 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 15059
4697a36c
MM
15060 if (info->cr_save_p)
15061 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15062
00b960c7
AH
15063 if (info->vrsave_mask)
15064 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15065
4697a36c
MM
15066 if (info->push_p)
15067 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15068
15069 if (info->calls_p)
15070 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15071
4697a36c
MM
15072 if (info->gp_save_offset)
15073 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15074
15075 if (info->fp_save_offset)
15076 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15077
00b960c7
AH
15078 if (info->altivec_save_offset)
15079 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15080 info->altivec_save_offset);
15081
a3170dc6
AH
15082 if (info->spe_gp_save_offset)
15083 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15084 info->spe_gp_save_offset);
15085
00b960c7
AH
15086 if (info->vrsave_save_offset)
15087 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15088 info->vrsave_save_offset);
15089
4697a36c
MM
15090 if (info->lr_save_offset)
15091 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15092
15093 if (info->cr_save_offset)
15094 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15095
15096 if (info->varargs_save_offset)
15097 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15098
15099 if (info->total_size)
d1d0c603
JJ
15100 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15101 info->total_size);
4697a36c 15102
4697a36c 15103 if (info->vars_size)
d1d0c603
JJ
15104 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15105 info->vars_size);
4697a36c
MM
15106
15107 if (info->parm_size)
15108 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15109
15110 if (info->fixed_size)
15111 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15112
15113 if (info->gp_size)
15114 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15115
a3170dc6
AH
15116 if (info->spe_gp_size)
15117 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15118
4697a36c
MM
15119 if (info->fp_size)
15120 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15121
00b960c7
AH
15122 if (info->altivec_size)
15123 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15124
15125 if (info->vrsave_size)
15126 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15127
15128 if (info->altivec_padding_size)
15129 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15130 info->altivec_padding_size);
15131
a3170dc6
AH
15132 if (info->spe_padding_size)
15133 fprintf (stderr, "\tspe_padding_size = %5d\n",
15134 info->spe_padding_size);
15135
4697a36c
MM
15136 if (info->cr_size)
15137 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15138
15139 if (info->save_size)
15140 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15141
15142 if (info->reg_size != 4)
15143 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15144
15145 fprintf (stderr, "\n");
9878760c 15146}
71f123ca
FS
15147
15148rtx
a2369ed3 15149rs6000_return_addr (int count, rtx frame)
71f123ca 15150{
a4f6c312
SS
15151 /* Currently we don't optimize very well between prolog and body
15152 code and for PIC code the code can be actually quite bad, so
15153 don't try to be too clever here. */
f1384257 15154 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15155 {
15156 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15157
15158 return
15159 gen_rtx_MEM
15160 (Pmode,
15161 memory_address
15162 (Pmode,
15163 plus_constant (copy_to_reg
15164 (gen_rtx_MEM (Pmode,
15165 memory_address (Pmode, frame))),
15166 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15167 }
15168
8c29550d 15169 cfun->machine->ra_need_lr = 1;
1de43f85 15170 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15171}
15172
5e1bf043
DJ
15173/* Say whether a function is a candidate for sibcall handling or not.
15174 We do not allow indirect calls to be optimized into sibling calls.
15175 Also, we can't do it if there are any vector parameters; there's
15176 nowhere to put the VRsave code so it works; note that functions with
15177 vector parameters are required to have a prototype, so the argument
15178 type info must be available here. (The tail recursion case can work
15179 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15180static bool
a2369ed3 15181rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15182{
15183 tree type;
4977bab6 15184 if (decl)
5e1bf043
DJ
15185 {
15186 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15187 {
4977bab6 15188 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15189 type; type = TREE_CHAIN (type))
15190 {
c15b529f 15191 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15192 return false;
5e1bf043 15193 }
c4ad648e 15194 }
5e1bf043 15195 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15196 || ((*targetm.binds_local_p) (decl)
15197 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15198 {
4977bab6 15199 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15200
15201 if (!lookup_attribute ("longcall", attr_list)
15202 || lookup_attribute ("shortcall", attr_list))
4977bab6 15203 return true;
2bcc50d0 15204 }
5e1bf043 15205 }
4977bab6 15206 return false;
5e1bf043
DJ
15207}
15208
e7e64a25
AS
15209/* NULL if INSN insn is valid within a low-overhead loop.
15210 Otherwise return why doloop cannot be applied.
9419649c
DE
15211 PowerPC uses the COUNT register for branch on table instructions. */
15212
e7e64a25 15213static const char *
3101faab 15214rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15215{
15216 if (CALL_P (insn))
e7e64a25 15217 return "Function call in the loop.";
9419649c
DE
15218
15219 if (JUMP_P (insn)
15220 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15221 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15222 return "Computed branch in the loop.";
9419649c 15223
e7e64a25 15224 return NULL;
9419649c
DE
15225}
15226
71f123ca 15227static int
863d938c 15228rs6000_ra_ever_killed (void)
71f123ca
FS
15229{
15230 rtx top;
5e1bf043
DJ
15231 rtx reg;
15232 rtx insn;
71f123ca 15233
e3b5732b 15234 if (crtl->is_thunk)
71f123ca 15235 return 0;
eb0424da 15236
36f7e964
AH
15237 /* regs_ever_live has LR marked as used if any sibcalls are present,
15238 but this should not force saving and restoring in the
15239 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15240 clobbers LR, so that is inappropriate. */
36f7e964 15241
5e1bf043
DJ
15242 /* Also, the prologue can generate a store into LR that
15243 doesn't really count, like this:
36f7e964 15244
5e1bf043
DJ
15245 move LR->R0
15246 bcl to set PIC register
15247 move LR->R31
15248 move R0->LR
36f7e964
AH
15249
15250 When we're called from the epilogue, we need to avoid counting
15251 this as a store. */
f676971a 15252
71f123ca
FS
15253 push_topmost_sequence ();
15254 top = get_insns ();
15255 pop_topmost_sequence ();
1de43f85 15256 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15257
5e1bf043
DJ
15258 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15259 {
15260 if (INSN_P (insn))
15261 {
022123e6
AM
15262 if (CALL_P (insn))
15263 {
15264 if (!SIBLING_CALL_P (insn))
15265 return 1;
15266 }
1de43f85 15267 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15268 return 1;
36f7e964
AH
15269 else if (set_of (reg, insn) != NULL_RTX
15270 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15271 return 1;
15272 }
15273 }
15274 return 0;
71f123ca 15275}
4697a36c 15276\f
9ebbca7d 15277/* Emit instructions needed to load the TOC register.
c7ca610e 15278 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15279 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15280
15281void
a2369ed3 15282rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15283{
6fb5fa3c 15284 rtx dest;
1db02437 15285 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15286
7f970b70 15287 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15288 {
7f970b70 15289 char buf[30];
e65a3857 15290 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15291
15292 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15293 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15294 if (flag_pic == 2)
15295 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15296 else
15297 got = rs6000_got_sym ();
15298 tmp1 = tmp2 = dest;
15299 if (!fromprolog)
15300 {
15301 tmp1 = gen_reg_rtx (Pmode);
15302 tmp2 = gen_reg_rtx (Pmode);
15303 }
6fb5fa3c
DB
15304 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15305 emit_move_insn (tmp1,
1de43f85 15306 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15307 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15308 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15309 }
15310 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15311 {
6fb5fa3c 15312 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15313 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15314 }
15315 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15316 {
15317 char buf[30];
20b71b17
AM
15318 rtx temp0 = (fromprolog
15319 ? gen_rtx_REG (Pmode, 0)
15320 : gen_reg_rtx (Pmode));
20b71b17 15321
20b71b17
AM
15322 if (fromprolog)
15323 {
ccbca5e4 15324 rtx symF, symL;
38c1f2d7 15325
20b71b17
AM
15326 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15327 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15328
20b71b17
AM
15329 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15330 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15331
6fb5fa3c
DB
15332 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15333 emit_move_insn (dest,
1de43f85 15334 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15335 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15336 }
15337 else
20b71b17
AM
15338 {
15339 rtx tocsym;
20b71b17
AM
15340
15341 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15342 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15343 emit_move_insn (dest,
1de43f85 15344 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15345 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15346 }
6fb5fa3c 15347 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15348 }
20b71b17
AM
15349 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15350 {
15351 /* This is for AIX code running in non-PIC ELF32. */
15352 char buf[30];
15353 rtx realsym;
15354 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15355 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15356
6fb5fa3c
DB
15357 emit_insn (gen_elf_high (dest, realsym));
15358 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15359 }
37409796 15360 else
9ebbca7d 15361 {
37409796 15362 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15363
9ebbca7d 15364 if (TARGET_32BIT)
6fb5fa3c 15365 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15366 else
6fb5fa3c 15367 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15368 }
15369}
15370
d1d0c603
JJ
15371/* Emit instructions to restore the link register after determining where
15372 its value has been stored. */
15373
15374void
15375rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15376{
15377 rs6000_stack_t *info = rs6000_stack_info ();
15378 rtx operands[2];
15379
15380 operands[0] = source;
15381 operands[1] = scratch;
15382
15383 if (info->lr_save_p)
15384 {
15385 rtx frame_rtx = stack_pointer_rtx;
15386 HOST_WIDE_INT sp_offset = 0;
15387 rtx tmp;
15388
15389 if (frame_pointer_needed
e3b5732b 15390 || cfun->calls_alloca
d1d0c603
JJ
15391 || info->total_size > 32767)
15392 {
0be76840 15393 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15394 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15395 frame_rtx = operands[1];
15396 }
15397 else if (info->push_p)
15398 sp_offset = info->total_size;
15399
15400 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15401 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15402 emit_move_insn (tmp, operands[0]);
15403 }
15404 else
1de43f85 15405 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15406}
15407
4862826d 15408static GTY(()) alias_set_type set = -1;
f103e34d 15409
4862826d 15410alias_set_type
863d938c 15411get_TOC_alias_set (void)
9ebbca7d 15412{
f103e34d
GK
15413 if (set == -1)
15414 set = new_alias_set ();
15415 return set;
f676971a 15416}
9ebbca7d 15417
c1207243 15418/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15419 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15420 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15421#if TARGET_ELF
3c9eb5f4 15422static int
f676971a 15423uses_TOC (void)
9ebbca7d 15424{
c4501e62 15425 rtx insn;
38c1f2d7 15426
c4501e62
JJ
15427 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15428 if (INSN_P (insn))
15429 {
15430 rtx pat = PATTERN (insn);
15431 int i;
9ebbca7d 15432
f676971a 15433 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15434 for (i = 0; i < XVECLEN (pat, 0); i++)
15435 {
15436 rtx sub = XVECEXP (pat, 0, i);
15437 if (GET_CODE (sub) == USE)
15438 {
15439 sub = XEXP (sub, 0);
15440 if (GET_CODE (sub) == UNSPEC
15441 && XINT (sub, 1) == UNSPEC_TOC)
15442 return 1;
15443 }
15444 }
15445 }
15446 return 0;
9ebbca7d 15447}
c954844a 15448#endif
38c1f2d7 15449
9ebbca7d 15450rtx
f676971a 15451create_TOC_reference (rtx symbol)
9ebbca7d 15452{
b3a13419 15453 if (!can_create_pseudo_p ())
6fb5fa3c 15454 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15455 return gen_rtx_PLUS (Pmode,
a8a05998 15456 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a 15457 gen_rtx_CONST (Pmode,
2e4316da 15458 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
9ebbca7d 15459}
38c1f2d7 15460
fc4767bb
JJ
15461/* If _Unwind_* has been called from within the same module,
15462 toc register is not guaranteed to be saved to 40(1) on function
15463 entry. Save it there in that case. */
c7ca610e 15464
9ebbca7d 15465void
863d938c 15466rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15467{
15468 rtx mem;
15469 rtx stack_top = gen_reg_rtx (Pmode);
15470 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15471 rtx opcode = gen_reg_rtx (SImode);
15472 rtx tocompare = gen_reg_rtx (SImode);
15473 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15474
8308679f 15475 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15476 emit_move_insn (stack_top, mem);
15477
8308679f
DE
15478 mem = gen_frame_mem (Pmode,
15479 gen_rtx_PLUS (Pmode, stack_top,
15480 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15481 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15482 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15483 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15484 : 0xE8410028, SImode));
9ebbca7d 15485
fc4767bb 15486 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15487 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15488 no_toc_save_needed);
9ebbca7d 15489
8308679f
DE
15490 mem = gen_frame_mem (Pmode,
15491 gen_rtx_PLUS (Pmode, stack_top,
15492 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15493 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15494 emit_label (no_toc_save_needed);
9ebbca7d 15495}
38c1f2d7 15496\f
0be76840
DE
15497/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15498 and the change to the stack pointer. */
ba4828e0 15499
9ebbca7d 15500static void
863d938c 15501rs6000_emit_stack_tie (void)
9ebbca7d 15502{
0be76840
DE
15503 rtx mem = gen_frame_mem (BLKmode,
15504 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15505
9ebbca7d
GK
15506 emit_insn (gen_stack_tie (mem));
15507}
38c1f2d7 15508
9ebbca7d
GK
15509/* Emit the correct code for allocating stack space, as insns.
15510 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15511 If COPY_R11, make sure a copy of the old frame is left in r11,
15512 in preference to r12 if COPY_R12.
9ebbca7d
GK
15513 The generated code may use hard register 0 as a temporary. */
15514
15515static void
f78c3290 15516rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15517{
9ebbca7d
GK
15518 rtx insn;
15519 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15520 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15521 rtx todec = gen_int_mode (-size, Pmode);
15522
15523 if (INTVAL (todec) != -size)
15524 {
d4ee4d25 15525 warning (0, "stack frame too large");
61168ff1
RS
15526 emit_insn (gen_trap ());
15527 return;
15528 }
a157febd 15529
e3b5732b 15530 if (crtl->limit_stack)
a157febd
GK
15531 {
15532 if (REG_P (stack_limit_rtx)
f676971a 15533 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15534 && REGNO (stack_limit_rtx) <= 31)
15535 {
5b71a4e7 15536 emit_insn (TARGET_32BIT
9ebbca7d
GK
15537 ? gen_addsi3 (tmp_reg,
15538 stack_limit_rtx,
15539 GEN_INT (size))
15540 : gen_adddi3 (tmp_reg,
15541 stack_limit_rtx,
15542 GEN_INT (size)));
5b71a4e7 15543
9ebbca7d
GK
15544 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15545 const0_rtx));
a157febd
GK
15546 }
15547 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15548 && TARGET_32BIT
f607bc57 15549 && DEFAULT_ABI == ABI_V4)
a157febd 15550 {
9ebbca7d 15551 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15552 gen_rtx_PLUS (Pmode,
15553 stack_limit_rtx,
9ebbca7d 15554 GEN_INT (size)));
5b71a4e7 15555
9ebbca7d
GK
15556 emit_insn (gen_elf_high (tmp_reg, toload));
15557 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15558 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15559 const0_rtx));
a157febd
GK
15560 }
15561 else
d4ee4d25 15562 warning (0, "stack limit expression is not supported");
a157febd
GK
15563 }
15564
f78c3290
NF
15565 if (copy_r12 || copy_r11 || ! TARGET_UPDATE)
15566 emit_move_insn (copy_r11
15567 ? gen_rtx_REG (Pmode, 11)
15568 : gen_rtx_REG (Pmode, 12),
15569 stack_reg);
9ebbca7d 15570
38c1f2d7
MM
15571 if (TARGET_UPDATE)
15572 {
16044a80
PH
15573 rtx par, set, mem;
15574
9ebbca7d 15575 if (size > 32767)
38c1f2d7 15576 {
9ebbca7d 15577 /* Need a note here so that try_split doesn't get confused. */
9390387d 15578 if (get_last_insn () == NULL_RTX)
2e040219 15579 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15580 insn = emit_move_insn (tmp_reg, todec);
15581 try_split (PATTERN (insn), insn, 0);
15582 todec = tmp_reg;
38c1f2d7 15583 }
5b71a4e7
DE
15584
15585 insn = emit_insn (TARGET_32BIT
15586 ? gen_movsi_update (stack_reg, stack_reg,
15587 todec, stack_reg)
c4ad648e 15588 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15589 todec, stack_reg));
16044a80
PH
15590 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15591 it now and set the alias set/attributes. The above gen_*_update
15592 calls will generate a PARALLEL with the MEM set being the first
15593 operation. */
15594 par = PATTERN (insn);
15595 gcc_assert (GET_CODE (par) == PARALLEL);
15596 set = XVECEXP (par, 0, 0);
15597 gcc_assert (GET_CODE (set) == SET);
15598 mem = SET_DEST (set);
15599 gcc_assert (MEM_P (mem));
15600 MEM_NOTRAP_P (mem) = 1;
15601 set_mem_alias_set (mem, get_frame_alias_set ());
38c1f2d7
MM
15602 }
15603 else
15604 {
5b71a4e7
DE
15605 insn = emit_insn (TARGET_32BIT
15606 ? gen_addsi3 (stack_reg, stack_reg, todec)
15607 : gen_adddi3 (stack_reg, stack_reg, todec));
16044a80 15608 emit_move_insn (gen_frame_mem (Pmode, stack_reg),
f78c3290
NF
15609 copy_r11
15610 ? gen_rtx_REG (Pmode, 11)
15611 : gen_rtx_REG (Pmode, 12));
9ebbca7d 15612 }
f676971a 15613
9ebbca7d 15614 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15615 REG_NOTES (insn) =
9ebbca7d 15616 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15617 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15618 gen_rtx_PLUS (Pmode, stack_reg,
15619 GEN_INT (-size))),
15620 REG_NOTES (insn));
15621}
15622
a4f6c312
SS
15623/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15624 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15625 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15626 deduce these equivalences by itself so it wasn't necessary to hold
15627 its hand so much. */
9ebbca7d
GK
15628
15629static void
f676971a 15630rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15631 rtx reg2, rtx rreg)
9ebbca7d
GK
15632{
15633 rtx real, temp;
15634
e56c4463
JL
15635 /* copy_rtx will not make unique copies of registers, so we need to
15636 ensure we don't have unwanted sharing here. */
15637 if (reg == reg2)
15638 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15639
15640 if (reg == rreg)
15641 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15642
9ebbca7d
GK
15643 real = copy_rtx (PATTERN (insn));
15644
89e7058f
AH
15645 if (reg2 != NULL_RTX)
15646 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15647
15648 real = replace_rtx (real, reg,
9ebbca7d
GK
15649 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15650 STACK_POINTER_REGNUM),
15651 GEN_INT (val)));
f676971a 15652
9ebbca7d
GK
15653 /* We expect that 'real' is either a SET or a PARALLEL containing
15654 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15655 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15656
15657 if (GET_CODE (real) == SET)
15658 {
15659 rtx set = real;
f676971a 15660
9ebbca7d
GK
15661 temp = simplify_rtx (SET_SRC (set));
15662 if (temp)
15663 SET_SRC (set) = temp;
15664 temp = simplify_rtx (SET_DEST (set));
15665 if (temp)
15666 SET_DEST (set) = temp;
15667 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15668 {
9ebbca7d
GK
15669 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15670 if (temp)
15671 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15672 }
38c1f2d7 15673 }
37409796 15674 else
9ebbca7d
GK
15675 {
15676 int i;
37409796
NS
15677
15678 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15679 for (i = 0; i < XVECLEN (real, 0); i++)
15680 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15681 {
15682 rtx set = XVECEXP (real, 0, i);
f676971a 15683
9ebbca7d
GK
15684 temp = simplify_rtx (SET_SRC (set));
15685 if (temp)
15686 SET_SRC (set) = temp;
15687 temp = simplify_rtx (SET_DEST (set));
15688 if (temp)
15689 SET_DEST (set) = temp;
15690 if (GET_CODE (SET_DEST (set)) == MEM)
15691 {
15692 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15693 if (temp)
15694 XEXP (SET_DEST (set), 0) = temp;
15695 }
15696 RTX_FRAME_RELATED_P (set) = 1;
15697 }
15698 }
c19de7aa 15699
9ebbca7d
GK
15700 RTX_FRAME_RELATED_P (insn) = 1;
15701 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15702 real,
15703 REG_NOTES (insn));
38c1f2d7
MM
15704}
15705
00b960c7
AH
15706/* Returns an insn that has a vrsave set operation with the
15707 appropriate CLOBBERs. */
15708
15709static rtx
a2369ed3 15710generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15711{
15712 int nclobs, i;
15713 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15714 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15715
a004eb82
AH
15716 clobs[0]
15717 = gen_rtx_SET (VOIDmode,
15718 vrsave,
15719 gen_rtx_UNSPEC_VOLATILE (SImode,
15720 gen_rtvec (2, reg, vrsave),
3aca4bff 15721 UNSPECV_SET_VRSAVE));
00b960c7
AH
15722
15723 nclobs = 1;
15724
9aa86737
AH
15725 /* We need to clobber the registers in the mask so the scheduler
15726 does not move sets to VRSAVE before sets of AltiVec registers.
15727
15728 However, if the function receives nonlocal gotos, reload will set
15729 all call saved registers live. We will end up with:
15730
15731 (set (reg 999) (mem))
15732 (parallel [ (set (reg vrsave) (unspec blah))
15733 (clobber (reg 999))])
15734
15735 The clobber will cause the store into reg 999 to be dead, and
15736 flow will attempt to delete an epilogue insn. In this case, we
15737 need an unspec use/set of the register. */
00b960c7
AH
15738
15739 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15740 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15741 {
15742 if (!epiloguep || call_used_regs [i])
15743 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15744 gen_rtx_REG (V4SImode, i));
15745 else
15746 {
15747 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15748
15749 clobs[nclobs++]
a004eb82
AH
15750 = gen_rtx_SET (VOIDmode,
15751 reg,
15752 gen_rtx_UNSPEC (V4SImode,
15753 gen_rtvec (1, reg), 27));
9aa86737
AH
15754 }
15755 }
00b960c7
AH
15756
15757 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15758
15759 for (i = 0; i < nclobs; ++i)
15760 XVECEXP (insn, 0, i) = clobs[i];
15761
15762 return insn;
15763}
15764
89e7058f
AH
15765/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15766 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15767
15768static void
f676971a 15769emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15770 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15771{
15772 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15773 rtx replacea, replaceb;
15774
15775 int_rtx = GEN_INT (offset);
15776
15777 /* Some cases that need register indexed addressing. */
15778 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15779 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15780 || (TARGET_SPE_ABI
15781 && SPE_VECTOR_MODE (mode)
15782 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15783 {
15784 /* Whomever calls us must make sure r11 is available in the
c4ad648e 15785 flow path of instructions in the prologue. */
89e7058f
AH
15786 offset_rtx = gen_rtx_REG (Pmode, 11);
15787 emit_move_insn (offset_rtx, int_rtx);
15788
15789 replacea = offset_rtx;
15790 replaceb = int_rtx;
15791 }
15792 else
15793 {
15794 offset_rtx = int_rtx;
15795 replacea = NULL_RTX;
15796 replaceb = NULL_RTX;
15797 }
15798
15799 reg = gen_rtx_REG (mode, regno);
15800 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15801 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15802
15803 insn = emit_move_insn (mem, reg);
15804
15805 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15806}
15807
a3170dc6
AH
15808/* Emit an offset memory reference suitable for a frame store, while
15809 converting to a valid addressing mode. */
15810
15811static rtx
a2369ed3 15812gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15813{
15814 rtx int_rtx, offset_rtx;
15815
15816 int_rtx = GEN_INT (offset);
15817
4d4cbc0e 15818 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15819 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15820 {
15821 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15822 emit_move_insn (offset_rtx, int_rtx);
15823 }
15824 else
15825 offset_rtx = int_rtx;
15826
0be76840 15827 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15828}
15829
6d0a8091
DJ
15830/* Look for user-defined global regs. We should not save and restore these,
15831 and cannot use stmw/lmw if there are any in its range. */
15832
15833static bool
f78c3290 15834no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15835{
15836 int i;
f78c3290
NF
15837 for (i = first; i < gpr ? 32 : 64 ; i++)
15838 if (global_regs[i])
6d0a8091
DJ
15839 return false;
15840 return true;
15841}
15842
699c914a
MS
15843#ifndef TARGET_FIX_AND_CONTINUE
15844#define TARGET_FIX_AND_CONTINUE 0
15845#endif
15846
f78c3290
NF
15847/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15848#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15849#define LAST_SAVRES_REGISTER 31
15850#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15851
15852static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15853
15854/* Return the symbol for an out-of-line register save/restore routine.
15855 We are saving/restoring GPRs if GPR is true. */
15856
15857static rtx
15858rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15859{
15860 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15861 rtx sym;
15862 int select = ((savep ? 1 : 0) << 2
15863 | (gpr
15864 /* On the SPE, we never have any FPRs, but we do have
15865 32/64-bit versions of the routines. */
15866 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15867 : 0) << 1
15868 | (exitp ? 1: 0));
15869
15870 /* Don't generate bogus routine names. */
15871 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15872
15873 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15874
15875 if (sym == NULL)
15876 {
15877 char name[30];
15878 const char *action;
15879 const char *regkind;
15880 const char *exit_suffix;
15881
15882 action = savep ? "save" : "rest";
15883
15884 /* SPE has slightly different names for its routines depending on
15885 whether we are saving 32-bit or 64-bit registers. */
15886 if (TARGET_SPE_ABI)
15887 {
15888 /* No floating point saves on the SPE. */
15889 gcc_assert (gpr);
15890
15891 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15892 }
15893 else
15894 regkind = gpr ? "gpr" : "fpr";
15895
15896 exit_suffix = exitp ? "_x" : "";
15897
15898 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15899
15900 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15901 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15902 }
15903
15904 return sym;
15905}
15906
15907/* Emit a sequence of insns, including a stack tie if needed, for
15908 resetting the stack pointer. If SAVRES is true, then don't reset the
15909 stack pointer, but move the base of the frame into r11 for use by
15910 out-of-line register restore routines. */
15911
15912static void
15913rs6000_emit_stack_reset (rs6000_stack_t *info,
15914 rtx sp_reg_rtx, rtx frame_reg_rtx,
15915 int sp_offset, bool savres)
15916{
15917 /* This blockage is needed so that sched doesn't decide to move
15918 the sp change before the register restores. */
15919 if (frame_reg_rtx != sp_reg_rtx
15920 || (TARGET_SPE_ABI
15921 && info->spe_64bit_regs_used != 0
15922 && info->first_gp_reg_save != 32))
15923 rs6000_emit_stack_tie ();
15924
15925 if (frame_reg_rtx != sp_reg_rtx)
15926 {
15927 rs6000_emit_stack_tie ();
15928 if (sp_offset != 0)
15929 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15930 GEN_INT (sp_offset)));
15931 else if (!savres)
15932 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15933 }
15934 else if (sp_offset != 0)
15935 {
15936 /* If we are restoring registers out-of-line, we will be using the
15937 "exit" variants of the restore routines, which will reset the
15938 stack for us. But we do need to point r11 into the right place
15939 for those routines. */
15940 rtx dest_reg = (savres
15941 ? gen_rtx_REG (Pmode, 11)
15942 : sp_reg_rtx);
15943
15944 emit_insn (TARGET_32BIT
15945 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15946 GEN_INT (sp_offset))
15947 : gen_adddi3 (dest_reg, sp_reg_rtx,
15948 GEN_INT (sp_offset)));
15949 }
15950}
15951
15952/* Construct a parallel rtx describing the effect of a call to an
15953 out-of-line register save/restore routine. */
15954
15955static rtx
15956rs6000_make_savres_rtx (rs6000_stack_t *info,
15957 rtx frame_reg_rtx, int save_area_offset,
15958 enum machine_mode reg_mode,
15959 bool savep, bool gpr, bool exitp)
15960{
15961 int i;
15962 int offset, start_reg, end_reg, n_regs;
15963 int reg_size = GET_MODE_SIZE (reg_mode);
15964 rtx sym;
15965 rtvec p;
15966
15967 offset = 0;
15968 start_reg = (gpr
15969 ? info->first_gp_reg_save
15970 : info->first_fp_reg_save);
15971 end_reg = gpr ? 32 : 64;
15972 n_regs = end_reg - start_reg;
15973 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15974
15975 /* If we're saving registers, then we should never say we're exiting. */
15976 gcc_assert ((savep && !exitp) || !savep);
15977
15978 if (exitp)
15979 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15980
15981 RTVEC_ELT (p, offset++)
15982 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15983
15984 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15985 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15986 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15987
15988 for (i = 0; i < end_reg - start_reg; i++)
15989 {
15990 rtx addr, reg, mem;
15991 reg = gen_rtx_REG (reg_mode, start_reg + i);
15992 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15993 GEN_INT (save_area_offset + reg_size*i));
15994 mem = gen_frame_mem (reg_mode, addr);
15995
15996 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15997 savep ? mem : reg,
15998 savep ? reg : mem);
15999 }
16000
16001 return gen_rtx_PARALLEL (VOIDmode, p);
16002}
16003
52ff33d0
NF
16004/* Determine whether the gp REG is really used. */
16005
16006static bool
16007rs6000_reg_live_or_pic_offset_p (int reg)
16008{
6fb5fa3c 16009 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
16010 && (!call_used_regs[reg]
16011 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16012 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16013 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16014 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16015 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16016}
16017
f78c3290
NF
16018enum {
16019 SAVRES_MULTIPLE = 0x1,
16020 SAVRES_INLINE_FPRS = 0x2,
16021 SAVRES_INLINE_GPRS = 0x4
16022};
16023
16024/* Determine the strategy for savings/restoring registers. */
16025
16026static int
16027rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16028 int using_static_chain_p, int sibcall)
16029{
16030 bool using_multiple_p;
16031 bool common;
16032 bool savres_fprs_inline;
16033 bool savres_gprs_inline;
16034 bool noclobber_global_gprs
16035 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16036
16037 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16038 && (!TARGET_SPE_ABI
16039 || info->spe_64bit_regs_used == 0)
16040 && info->first_gp_reg_save < 31
16041 && noclobber_global_gprs);
16042 /* Don't bother to try to save things out-of-line if r11 is occupied
16043 by the static chain. It would require too much fiddling and the
16044 static chain is rarely used anyway. */
16045 common = (using_static_chain_p
16046 || sibcall
16047 || crtl->calls_eh_return
16048 || !info->lr_save_p
16049 || cfun->machine->ra_need_lr
16050 || info->total_size > 32767);
16051 savres_fprs_inline = (common
16052 || info->first_fp_reg_save == 64
16053 || !no_global_regs_above (info->first_fp_reg_save,
16054 /*gpr=*/false)
16055 || FP_SAVE_INLINE (info->first_fp_reg_save));
16056 savres_gprs_inline = (common
16057 /* Saving CR interferes with the exit routines
16058 used on the SPE, so just punt here. */
16059 || (!savep
16060 && TARGET_SPE_ABI
16061 && info->spe_64bit_regs_used != 0
16062 && info->cr_save_p != 0)
16063 || info->first_gp_reg_save == 32
16064 || !noclobber_global_gprs
16065 || GP_SAVE_INLINE (info->first_gp_reg_save));
16066
16067 if (savep)
16068 /* If we are going to use store multiple, then don't even bother
16069 with the out-of-line routines, since the store-multiple instruction
16070 will always be smaller. */
16071 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16072 else
16073 {
16074 /* The situation is more complicated with load multiple. We'd
16075 prefer to use the out-of-line routines for restores, since the
16076 "exit" out-of-line routines can handle the restore of LR and
16077 the frame teardown. But we can only use the out-of-line
16078 routines if we know that we've used store multiple or
16079 out-of-line routines in the prologue, i.e. if we've saved all
16080 the registers from first_gp_reg_save. Otherwise, we risk
16081 loading garbage from the stack. Furthermore, we can only use
16082 the "exit" out-of-line gpr restore if we haven't saved any
16083 fprs. */
16084 bool saved_all = !savres_gprs_inline || using_multiple_p;
16085
16086 if (saved_all && info->first_fp_reg_save != 64)
16087 /* We can't use the exit routine; use load multiple if it's
16088 available. */
16089 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16090 }
16091
16092 return (using_multiple_p
16093 | (savres_fprs_inline << 1)
16094 | (savres_gprs_inline << 2));
16095}
16096
9ebbca7d
GK
16097/* Emit function prologue as insns. */
16098
9878760c 16099void
863d938c 16100rs6000_emit_prologue (void)
9878760c 16101{
4697a36c 16102 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 16103 enum machine_mode reg_mode = Pmode;
327e5343 16104 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16105 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16106 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16107 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 16108 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 16109 rtx insn;
f78c3290 16110 int strategy;
9ebbca7d 16111 int saving_FPRs_inline;
f78c3290 16112 int saving_GPRs_inline;
9ebbca7d 16113 int using_store_multiple;
f78c3290
NF
16114 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16115 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
16116 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 16117 HOST_WIDE_INT sp_offset = 0;
f676971a 16118
699c914a
MS
16119 if (TARGET_FIX_AND_CONTINUE)
16120 {
16121 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 16122 address by modifying the first 5 instructions of the function
699c914a
MS
16123 to branch to the overriding function. This is necessary to
16124 permit function pointers that point to the old function to
16125 actually forward to the new function. */
16126 emit_insn (gen_nop ());
16127 emit_insn (gen_nop ());
de2ab0ca 16128 emit_insn (gen_nop ());
699c914a
MS
16129 emit_insn (gen_nop ());
16130 emit_insn (gen_nop ());
16131 }
16132
16133 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16134 {
16135 reg_mode = V2SImode;
16136 reg_size = 8;
16137 }
a3170dc6 16138
f78c3290
NF
16139 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16140 /*static_chain_p=*/using_static_chain_p,
16141 /*sibcall=*/0);
16142 using_store_multiple = strategy & SAVRES_MULTIPLE;
16143 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16144 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16145
16146 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16147 if (! WORLD_SAVE_P (info)
16148 && info->push_p
acd0b319 16149 && (DEFAULT_ABI == ABI_V4
e3b5732b 16150 || crtl->calls_eh_return))
9ebbca7d 16151 {
f78c3290
NF
16152 bool need_r11 = (TARGET_SPE
16153 ? (!saving_GPRs_inline
16154 && info->spe_64bit_regs_used == 0)
16155 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16156 if (info->total_size < 32767)
16157 sp_offset = info->total_size;
16158 else
f78c3290
NF
16159 frame_reg_rtx = (need_r11
16160 ? gen_rtx_REG (Pmode, 11)
16161 : frame_ptr_rtx);
f676971a 16162 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16163 (frame_reg_rtx != sp_reg_rtx
16164 && (info->cr_save_p
16165 || info->lr_save_p
16166 || info->first_fp_reg_save < 64
16167 || info->first_gp_reg_save < 32
f78c3290
NF
16168 )),
16169 need_r11);
9ebbca7d
GK
16170 if (frame_reg_rtx != sp_reg_rtx)
16171 rs6000_emit_stack_tie ();
16172 }
16173
d62294f5 16174 /* Handle world saves specially here. */
f57fe068 16175 if (WORLD_SAVE_P (info))
d62294f5
FJ
16176 {
16177 int i, j, sz;
16178 rtx treg;
16179 rtvec p;
22fa69da 16180 rtx reg0;
d62294f5
FJ
16181
16182 /* save_world expects lr in r0. */
22fa69da 16183 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16184 if (info->lr_save_p)
c4ad648e 16185 {
22fa69da 16186 insn = emit_move_insn (reg0,
1de43f85 16187 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16188 RTX_FRAME_RELATED_P (insn) = 1;
16189 }
d62294f5
FJ
16190
16191 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16192 assumptions about the offsets of various bits of the stack
992d08b1 16193 frame. */
37409796
NS
16194 gcc_assert (info->gp_save_offset == -220
16195 && info->fp_save_offset == -144
16196 && info->lr_save_offset == 8
16197 && info->cr_save_offset == 4
16198 && info->push_p
16199 && info->lr_save_p
e3b5732b 16200 && (!crtl->calls_eh_return
37409796
NS
16201 || info->ehrd_offset == -432)
16202 && info->vrsave_save_offset == -224
22fa69da 16203 && info->altivec_save_offset == -416);
d62294f5
FJ
16204
16205 treg = gen_rtx_REG (SImode, 11);
16206 emit_move_insn (treg, GEN_INT (-info->total_size));
16207
16208 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16209 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16210
16211 /* Preserve CR2 for save_world prologues */
22fa69da 16212 sz = 5;
d62294f5
FJ
16213 sz += 32 - info->first_gp_reg_save;
16214 sz += 64 - info->first_fp_reg_save;
16215 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16216 p = rtvec_alloc (sz);
16217 j = 0;
16218 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16219 gen_rtx_REG (SImode,
1de43f85 16220 LR_REGNO));
d62294f5 16221 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16222 gen_rtx_SYMBOL_REF (Pmode,
16223 "*save_world"));
d62294f5 16224 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16225 properly. */
16226 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16227 {
696e45ba
ME
16228 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16229 ? DFmode : SFmode),
16230 info->first_fp_reg_save + i);
c4ad648e
AM
16231 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16232 GEN_INT (info->fp_save_offset
16233 + sp_offset + 8 * i));
696e45ba
ME
16234 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16235 ? DFmode : SFmode), addr);
c4ad648e
AM
16236
16237 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16238 }
d62294f5 16239 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16240 {
16241 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16242 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16243 GEN_INT (info->altivec_save_offset
16244 + sp_offset + 16 * i));
0be76840 16245 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16246
16247 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16248 }
d62294f5 16249 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16250 {
16251 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16252 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16253 GEN_INT (info->gp_save_offset
16254 + sp_offset + reg_size * i));
0be76840 16255 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16256
16257 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16258 }
16259
16260 {
16261 /* CR register traditionally saved as CR2. */
16262 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16263 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16264 GEN_INT (info->cr_save_offset
16265 + sp_offset));
0be76840 16266 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16267
16268 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16269 }
22fa69da
GK
16270 /* Explain about use of R0. */
16271 if (info->lr_save_p)
16272 {
16273 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16274 GEN_INT (info->lr_save_offset
16275 + sp_offset));
16276 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16277
22fa69da
GK
16278 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16279 }
16280 /* Explain what happens to the stack pointer. */
16281 {
16282 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16283 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16284 }
d62294f5
FJ
16285
16286 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16287 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16288 treg, GEN_INT (-info->total_size));
16289 sp_offset = info->total_size;
d62294f5
FJ
16290 }
16291
9ebbca7d 16292 /* If we use the link register, get it into r0. */
f57fe068 16293 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16294 {
52ff33d0
NF
16295 rtx addr, reg, mem;
16296
f8a57be8 16297 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16298 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16299 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16300
16301 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16302 GEN_INT (info->lr_save_offset + sp_offset));
16303 reg = gen_rtx_REG (Pmode, 0);
16304 mem = gen_rtx_MEM (Pmode, addr);
16305 /* This should not be of rs6000_sr_alias_set, because of
16306 __builtin_return_address. */
16307
16308 insn = emit_move_insn (mem, reg);
16309 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16310 NULL_RTX, NULL_RTX);
f8a57be8 16311 }
9ebbca7d
GK
16312
16313 /* If we need to save CR, put it into r12. */
f57fe068 16314 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16315 {
f8a57be8 16316 rtx set;
f676971a 16317
9ebbca7d 16318 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16319 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16320 RTX_FRAME_RELATED_P (insn) = 1;
16321 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16322 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16323 But that's OK. All we have to do is specify that _one_ condition
16324 code register is saved in this stack slot. The thrower's epilogue
16325 will then restore all the call-saved registers.
16326 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16327 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16328 gen_rtx_REG (SImode, CR2_REGNO));
16329 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16330 set,
16331 REG_NOTES (insn));
9ebbca7d
GK
16332 }
16333
a4f6c312
SS
16334 /* Do any required saving of fpr's. If only one or two to save, do
16335 it ourselves. Otherwise, call function. */
f57fe068 16336 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16337 {
16338 int i;
16339 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16340 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16341 && ! call_used_regs[info->first_fp_reg_save+i]))
696e45ba
ME
16342 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16343 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16344 ? DFmode : SFmode,
89e7058f
AH
16345 info->first_fp_reg_save + i,
16346 info->fp_save_offset + sp_offset + 8 * i,
16347 info->total_size);
9ebbca7d 16348 }
f57fe068 16349 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16350 {
16351 rtx par;
16352
16353 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16354 info->fp_save_offset + sp_offset,
16355 DFmode,
16356 /*savep=*/true, /*gpr=*/false,
16357 /*exitp=*/false);
16358 insn = emit_insn (par);
16359 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16360 NULL_RTX, NULL_RTX);
16361 }
16362
16363 /* Save GPRs. This is done as a PARALLEL if we are using
16364 the store-multiple instructions. */
16365 if (!WORLD_SAVE_P (info)
16366 && TARGET_SPE_ABI
16367 && info->spe_64bit_regs_used != 0
16368 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16369 {
16370 int i;
f78c3290
NF
16371 rtx spe_save_area_ptr;
16372
16373 /* Determine whether we can address all of the registers that need
16374 to be saved with an offset from the stack pointer that fits in
16375 the small const field for SPE memory instructions. */
16376 int spe_regs_addressable_via_sp
16377 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16378 + (32 - info->first_gp_reg_save - 1) * reg_size)
16379 && saving_GPRs_inline);
16380 int spe_offset;
16381
16382 if (spe_regs_addressable_via_sp)
16383 {
16384 spe_save_area_ptr = frame_reg_rtx;
16385 spe_offset = info->spe_gp_save_offset + sp_offset;
16386 }
16387 else
16388 {
16389 /* Make r11 point to the start of the SPE save area. We need
16390 to be careful here if r11 is holding the static chain. If
16391 it is, then temporarily save it in r0. We would use r0 as
16392 our base register here, but using r0 as a base register in
16393 loads and stores means something different from what we
16394 would like. */
16395 int ool_adjust = (saving_GPRs_inline
16396 ? 0
16397 : (info->first_gp_reg_save
16398 - (FIRST_SAVRES_REGISTER+1))*8);
16399 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16400 + sp_offset - ool_adjust);
16401
16402 if (using_static_chain_p)
16403 {
16404 rtx r0 = gen_rtx_REG (Pmode, 0);
16405 gcc_assert (info->first_gp_reg_save > 11);
16406
16407 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16408 }
16409
16410 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16411 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16412 frame_reg_rtx,
16413 GEN_INT (offset)));
16414 /* We need to make sure the move to r11 gets noted for
16415 properly outputting unwind information. */
16416 if (!saving_GPRs_inline)
16417 rs6000_frame_related (insn, frame_reg_rtx, offset,
16418 NULL_RTX, NULL_RTX);
16419 spe_offset = 0;
16420 }
16421
16422 if (saving_GPRs_inline)
16423 {
16424 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16425 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16426 {
16427 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16428 rtx offset, addr, mem;
f676971a 16429
f78c3290
NF
16430 /* We're doing all this to ensure that the offset fits into
16431 the immediate offset of 'evstdd'. */
16432 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16433
16434 offset = GEN_INT (reg_size * i + spe_offset);
16435 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16436 mem = gen_rtx_MEM (V2SImode, addr);
16437
16438 insn = emit_move_insn (mem, reg);
16439
16440 rs6000_frame_related (insn, spe_save_area_ptr,
16441 info->spe_gp_save_offset
16442 + sp_offset + reg_size * i,
16443 offset, const0_rtx);
16444 }
16445 }
16446 else
9ebbca7d 16447 {
f78c3290 16448 rtx par;
9ebbca7d 16449
f78c3290
NF
16450 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16451 0, reg_mode,
16452 /*savep=*/true, /*gpr=*/true,
16453 /*exitp=*/false);
16454 insn = emit_insn (par);
16455 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16456 NULL_RTX, NULL_RTX);
9ebbca7d 16457 }
f78c3290
NF
16458
16459
16460 /* Move the static chain pointer back. */
16461 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16462 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16463 }
16464 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16465 {
16466 rtx par;
16467
16468 /* Need to adjust r11 if we saved any FPRs. */
16469 if (info->first_fp_reg_save != 64)
16470 {
16471 rtx r11 = gen_rtx_REG (reg_mode, 11);
16472 rtx offset = GEN_INT (info->total_size
16473 + (-8 * (64-info->first_fp_reg_save)));
16474 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16475 ? sp_reg_rtx : r11);
16476
16477 emit_insn (TARGET_32BIT
16478 ? gen_addsi3 (r11, ptr_reg, offset)
16479 : gen_adddi3 (r11, ptr_reg, offset));
16480 }
16481
16482 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16483 info->gp_save_offset + sp_offset,
16484 reg_mode,
16485 /*savep=*/true, /*gpr=*/true,
16486 /*exitp=*/false);
16487 insn = emit_insn (par);
f676971a 16488 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16489 NULL_RTX, NULL_RTX);
16490 }
f78c3290 16491 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16492 {
308c142a 16493 rtvec p;
9ebbca7d
GK
16494 int i;
16495 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16496 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16497 {
16498 rtx addr, reg, mem;
16499 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16500 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16501 GEN_INT (info->gp_save_offset
16502 + sp_offset
9ebbca7d 16503 + reg_size * i));
0be76840 16504 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16505
16506 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16507 }
16508 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16509 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16510 NULL_RTX, NULL_RTX);
b6c9286a 16511 }
f57fe068 16512 else if (!WORLD_SAVE_P (info))
b6c9286a 16513 {
9ebbca7d
GK
16514 int i;
16515 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16516 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16517 {
16518 rtx addr, reg, mem;
16519 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16520
52ff33d0
NF
16521 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16522 GEN_INT (info->gp_save_offset
16523 + sp_offset
16524 + reg_size * i));
16525 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16526
52ff33d0
NF
16527 insn = emit_move_insn (mem, reg);
16528 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16529 NULL_RTX, NULL_RTX);
16530 }
9ebbca7d
GK
16531 }
16532
83720594
RH
16533 /* ??? There's no need to emit actual instructions here, but it's the
16534 easiest way to get the frame unwind information emitted. */
e3b5732b 16535 if (crtl->calls_eh_return)
83720594 16536 {
78e1b90d
DE
16537 unsigned int i, regno;
16538
fc4767bb
JJ
16539 /* In AIX ABI we need to pretend we save r2 here. */
16540 if (TARGET_AIX)
16541 {
16542 rtx addr, reg, mem;
16543
16544 reg = gen_rtx_REG (reg_mode, 2);
16545 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16546 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16547 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16548
16549 insn = emit_move_insn (mem, reg);
f676971a 16550 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16551 NULL_RTX, NULL_RTX);
16552 PATTERN (insn) = gen_blockage ();
16553 }
16554
83720594
RH
16555 for (i = 0; ; ++i)
16556 {
83720594
RH
16557 regno = EH_RETURN_DATA_REGNO (i);
16558 if (regno == INVALID_REGNUM)
16559 break;
16560
89e7058f
AH
16561 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16562 info->ehrd_offset + sp_offset
16563 + reg_size * (int) i,
16564 info->total_size);
83720594
RH
16565 }
16566 }
16567
9ebbca7d 16568 /* Save CR if we use any that must be preserved. */
f57fe068 16569 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16570 {
16571 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16572 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16573 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16574 /* See the large comment above about why CR2_REGNO is used. */
16575 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16576
9ebbca7d
GK
16577 /* If r12 was used to hold the original sp, copy cr into r0 now
16578 that it's free. */
16579 if (REGNO (frame_reg_rtx) == 12)
16580 {
f8a57be8
GK
16581 rtx set;
16582
9ebbca7d 16583 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16584 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16585 RTX_FRAME_RELATED_P (insn) = 1;
16586 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16587 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16588 set,
16589 REG_NOTES (insn));
f676971a 16590
9ebbca7d
GK
16591 }
16592 insn = emit_move_insn (mem, cr_save_rtx);
16593
f676971a 16594 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16595 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16596 }
16597
f676971a 16598 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16599 for which it was done previously. */
f57fe068 16600 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16601 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16602 {
bcb2d701 16603 if (info->total_size < 32767)
2b2c2fe5 16604 sp_offset = info->total_size;
bcb2d701
EC
16605 else
16606 frame_reg_rtx = frame_ptr_rtx;
16607 rs6000_emit_allocate_stack (info->total_size,
16608 (frame_reg_rtx != sp_reg_rtx
16609 && ((info->altivec_size != 0)
16610 || (info->vrsave_mask != 0)
f78c3290
NF
16611 )),
16612 FALSE);
bcb2d701
EC
16613 if (frame_reg_rtx != sp_reg_rtx)
16614 rs6000_emit_stack_tie ();
2b2c2fe5 16615 }
9ebbca7d
GK
16616
16617 /* Set frame pointer, if needed. */
16618 if (frame_pointer_needed)
16619 {
7d5175e1 16620 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16621 sp_reg_rtx);
16622 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16623 }
9878760c 16624
2b2c2fe5
EC
16625 /* Save AltiVec registers if needed. Save here because the red zone does
16626 not include AltiVec registers. */
16627 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16628 {
16629 int i;
16630
16631 /* There should be a non inline version of this, for when we
16632 are saving lots of vector registers. */
16633 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16634 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16635 {
16636 rtx areg, savereg, mem;
16637 int offset;
16638
16639 offset = info->altivec_save_offset + sp_offset
16640 + 16 * (i - info->first_altivec_reg_save);
16641
16642 savereg = gen_rtx_REG (V4SImode, i);
16643
16644 areg = gen_rtx_REG (Pmode, 0);
16645 emit_move_insn (areg, GEN_INT (offset));
16646
16647 /* AltiVec addressing mode is [reg+reg]. */
16648 mem = gen_frame_mem (V4SImode,
16649 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16650
16651 insn = emit_move_insn (mem, savereg);
16652
16653 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16654 areg, GEN_INT (offset));
16655 }
16656 }
16657
16658 /* VRSAVE is a bit vector representing which AltiVec registers
16659 are used. The OS uses this to determine which vector
16660 registers to save on a context switch. We need to save
16661 VRSAVE on the stack frame, add whatever AltiVec registers we
16662 used in this function, and do the corresponding magic in the
16663 epilogue. */
16664
16665 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16666 && info->vrsave_mask != 0)
16667 {
16668 rtx reg, mem, vrsave;
16669 int offset;
16670
16671 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16672 as frame_reg_rtx and r11 as the static chain pointer for
16673 nested functions. */
16674 reg = gen_rtx_REG (SImode, 0);
16675 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16676 if (TARGET_MACHO)
16677 emit_insn (gen_get_vrsave_internal (reg));
16678 else
16679 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16680
16681 if (!WORLD_SAVE_P (info))
16682 {
16683 /* Save VRSAVE. */
16684 offset = info->vrsave_save_offset + sp_offset;
16685 mem = gen_frame_mem (SImode,
16686 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16687 GEN_INT (offset)));
16688 insn = emit_move_insn (mem, reg);
16689 }
16690
16691 /* Include the registers in the mask. */
16692 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16693
16694 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16695 }
16696
1db02437 16697 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16698 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16699 || (DEFAULT_ABI == ABI_V4
16700 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16701 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16702 {
16703 /* If emit_load_toc_table will use the link register, we need to save
16704 it. We use R12 for this purpose because emit_load_toc_table
16705 can use register 0. This allows us to use a plain 'blr' to return
16706 from the procedure more often. */
16707 int save_LR_around_toc_setup = (TARGET_ELF
16708 && DEFAULT_ABI != ABI_AIX
16709 && flag_pic
16710 && ! info->lr_save_p
16711 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16712 if (save_LR_around_toc_setup)
16713 {
1de43f85 16714 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16715
c4ad648e 16716 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16717 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16718
c4ad648e 16719 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16720
c4ad648e 16721 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16722 RTX_FRAME_RELATED_P (insn) = 1;
16723 }
16724 else
16725 rs6000_emit_load_toc_table (TRUE);
16726 }
ee890fe2 16727
fcce224d 16728#if TARGET_MACHO
ee890fe2 16729 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16730 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16731 {
1de43f85 16732 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
08a6a74b 16733 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
ee890fe2 16734
6d0a8091
DJ
16735 /* Save and restore LR locally around this call (in R0). */
16736 if (!info->lr_save_p)
6fb5fa3c 16737 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16738
6fb5fa3c 16739 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16740
6fb5fa3c
DB
16741 emit_move_insn (gen_rtx_REG (Pmode,
16742 RS6000_PIC_OFFSET_TABLE_REGNUM),
16743 lr);
6d0a8091
DJ
16744
16745 if (!info->lr_save_p)
6fb5fa3c 16746 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16747 }
fcce224d 16748#endif
9ebbca7d
GK
16749}
16750
9ebbca7d 16751/* Write function prologue. */
a4f6c312 16752
08c148a8 16753static void
f676971a 16754rs6000_output_function_prologue (FILE *file,
a2369ed3 16755 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16756{
16757 rs6000_stack_t *info = rs6000_stack_info ();
16758
4697a36c
MM
16759 if (TARGET_DEBUG_STACK)
16760 debug_stack_info (info);
9878760c 16761
a4f6c312
SS
16762 /* Write .extern for any function we will call to save and restore
16763 fp values. */
16764 if (info->first_fp_reg_save < 64
16765 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16766 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16767 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16768 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16769
c764f757
RK
16770 /* Write .extern for AIX common mode routines, if needed. */
16771 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16772 {
f6709c70
JW
16773 fputs ("\t.extern __mulh\n", file);
16774 fputs ("\t.extern __mull\n", file);
16775 fputs ("\t.extern __divss\n", file);
16776 fputs ("\t.extern __divus\n", file);
16777 fputs ("\t.extern __quoss\n", file);
16778 fputs ("\t.extern __quous\n", file);
c764f757
RK
16779 common_mode_defined = 1;
16780 }
9878760c 16781
9ebbca7d 16782 if (! HAVE_prologue)
979721f8 16783 {
9ebbca7d 16784 start_sequence ();
9dda4cc8 16785
a4f6c312
SS
16786 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16787 the "toplevel" insn chain. */
2e040219 16788 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16789 rs6000_emit_prologue ();
2e040219 16790 emit_note (NOTE_INSN_DELETED);
178c3eff 16791
a3c9585f 16792 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16793 {
16794 rtx insn;
16795 unsigned addr = 0;
16796 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16797 {
16798 INSN_ADDRESSES_NEW (insn, addr);
16799 addr += 4;
16800 }
16801 }
9dda4cc8 16802
9ebbca7d 16803 if (TARGET_DEBUG_STACK)
a4f6c312 16804 debug_rtx_list (get_insns (), 100);
c9d691e9 16805 final (get_insns (), file, FALSE);
9ebbca7d 16806 end_sequence ();
979721f8
MM
16807 }
16808
9ebbca7d
GK
16809 rs6000_pic_labelno++;
16810}
f676971a 16811
1c9c5e43
AM
16812/* Non-zero if vmx regs are restored before the frame pop, zero if
16813 we restore after the pop when possible. */
16814#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16815
f78c3290
NF
16816/* Reload CR from REG. */
16817
16818static void
16819rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16820{
16821 int count = 0;
16822 int i;
16823
16824 if (using_mfcr_multiple)
16825 {
16826 for (i = 0; i < 8; i++)
16827 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16828 count++;
16829 gcc_assert (count);
16830 }
16831
16832 if (using_mfcr_multiple && count > 1)
16833 {
16834 rtvec p;
16835 int ndx;
16836
16837 p = rtvec_alloc (count);
16838
16839 ndx = 0;
16840 for (i = 0; i < 8; i++)
16841 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16842 {
16843 rtvec r = rtvec_alloc (2);
16844 RTVEC_ELT (r, 0) = reg;
16845 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16846 RTVEC_ELT (p, ndx) =
16847 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16848 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16849 ndx++;
16850 }
16851 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16852 gcc_assert (ndx == count);
16853 }
16854 else
16855 for (i = 0; i < 8; i++)
16856 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16857 {
16858 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16859 CR0_REGNO+i),
16860 reg));
16861 }
16862}
16863
9ebbca7d 16864/* Emit function epilogue as insns.
9878760c 16865
9ebbca7d
GK
16866 At present, dwarf2out_frame_debug_expr doesn't understand
16867 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16868 anywhere in the epilogue. Most of the insns below would in any case
16869 need special notes to explain where r11 is in relation to the stack. */
9878760c 16870
9ebbca7d 16871void
a2369ed3 16872rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16873{
16874 rs6000_stack_t *info;
f78c3290 16875 int restoring_GPRs_inline;
9ebbca7d
GK
16876 int restoring_FPRs_inline;
16877 int using_load_multiple;
d296e02e 16878 int using_mtcr_multiple;
9ebbca7d 16879 int use_backchain_to_restore_sp;
f78c3290
NF
16880 int restore_lr;
16881 int strategy;
9ebbca7d
GK
16882 int sp_offset = 0;
16883 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16884 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16885 enum machine_mode reg_mode = Pmode;
327e5343 16886 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16887 int i;
16888
c19de7aa
AH
16889 info = rs6000_stack_info ();
16890
16891 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16892 {
16893 reg_mode = V2SImode;
16894 reg_size = 8;
16895 }
16896
f78c3290
NF
16897 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16898 /*static_chain_p=*/0, sibcall);
16899 using_load_multiple = strategy & SAVRES_MULTIPLE;
16900 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16901 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16902 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16903 || rs6000_cpu == PROCESSOR_PPC603
16904 || rs6000_cpu == PROCESSOR_PPC750
16905 || optimize_size);
1c9c5e43
AM
16906 /* Restore via the backchain when we have a large frame, since this
16907 is more efficient than an addis, addi pair. The second condition
16908 here will not trigger at the moment; We don't actually need a
16909 frame pointer for alloca, but the generic parts of the compiler
16910 give us one anyway. */
16911 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16912 || info->total_size
16913 + (info->lr_save_p ? info->lr_save_offset : 0)
16914 > 32767
1c9c5e43
AM
16915 || (cfun->calls_alloca
16916 && !frame_pointer_needed));
f78c3290
NF
16917 restore_lr = (info->lr_save_p
16918 && restoring_GPRs_inline
16919 && restoring_FPRs_inline);
9ebbca7d 16920
f57fe068 16921 if (WORLD_SAVE_P (info))
d62294f5
FJ
16922 {
16923 int i, j;
16924 char rname[30];
16925 const char *alloc_rname;
16926 rtvec p;
16927
16928 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16929 stack slot (which is not likely to be our caller.)
16930 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16931 rest_world is similar, except any R10 parameter is ignored.
16932 The exception-handling stuff that was here in 2.95 is no
16933 longer necessary. */
d62294f5
FJ
16934
16935 p = rtvec_alloc (9
16936 + 1
f676971a 16937 + 32 - info->first_gp_reg_save
c4ad648e
AM
16938 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16939 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16940
e3b5732b 16941 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16942 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16943 alloc_rname = ggc_strdup (rname);
16944
16945 j = 0;
16946 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16947 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16948 gen_rtx_REG (Pmode,
1de43f85 16949 LR_REGNO));
d62294f5 16950 RTVEC_ELT (p, j++)
c4ad648e 16951 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16952 /* The instruction pattern requires a clobber here;
c4ad648e 16953 it is shared with the restVEC helper. */
d62294f5 16954 RTVEC_ELT (p, j++)
c4ad648e 16955 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16956
16957 {
c4ad648e
AM
16958 /* CR register traditionally saved as CR2. */
16959 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16960 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16961 GEN_INT (info->cr_save_offset));
0be76840 16962 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16963
16964 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16965 }
16966
16967 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16968 {
16969 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16970 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16971 GEN_INT (info->gp_save_offset
16972 + reg_size * i));
0be76840 16973 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16974
16975 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16976 }
d62294f5 16977 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16978 {
16979 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16980 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16981 GEN_INT (info->altivec_save_offset
16982 + 16 * i));
0be76840 16983 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16984
16985 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16986 }
d62294f5 16987 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e 16988 {
696e45ba
ME
16989 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16990 ? DFmode : SFmode),
16991 info->first_fp_reg_save + i);
c4ad648e
AM
16992 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16993 GEN_INT (info->fp_save_offset
16994 + 8 * i));
696e45ba
ME
16995 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16996 ? DFmode : SFmode), addr);
c4ad648e
AM
16997
16998 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16999 }
d62294f5 17000 RTVEC_ELT (p, j++)
c4ad648e 17001 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 17002 RTVEC_ELT (p, j++)
c4ad648e 17003 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 17004 RTVEC_ELT (p, j++)
c4ad648e 17005 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 17006 RTVEC_ELT (p, j++)
c4ad648e 17007 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 17008 RTVEC_ELT (p, j++)
c4ad648e 17009 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
17010 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17011
17012 return;
17013 }
17014
45b194f8
AM
17015 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17016 if (info->push_p)
2b2c2fe5 17017 sp_offset = info->total_size;
f676971a 17018
e6477eaa
AM
17019 /* Restore AltiVec registers if we must do so before adjusting the
17020 stack. */
17021 if (TARGET_ALTIVEC_ABI
17022 && info->altivec_size != 0
1c9c5e43
AM
17023 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17024 || (DEFAULT_ABI != ABI_V4
17025 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
9aa86737
AH
17026 {
17027 int i;
17028
e6477eaa
AM
17029 if (use_backchain_to_restore_sp)
17030 {
17031 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17032 emit_move_insn (frame_reg_rtx,
17033 gen_rtx_MEM (Pmode, sp_reg_rtx));
17034 sp_offset = 0;
17035 }
1c9c5e43
AM
17036 else if (frame_pointer_needed)
17037 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 17038
9aa86737
AH
17039 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17040 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17041 {
17042 rtx addr, areg, mem;
17043
17044 areg = gen_rtx_REG (Pmode, 0);
17045 emit_move_insn
17046 (areg, GEN_INT (info->altivec_save_offset
17047 + sp_offset
17048 + 16 * (i - info->first_altivec_reg_save)));
17049
17050 /* AltiVec addressing mode is [reg+reg]. */
17051 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 17052 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
17053
17054 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17055 }
17056 }
17057
e6477eaa
AM
17058 /* Restore VRSAVE if we must do so before adjusting the stack. */
17059 if (TARGET_ALTIVEC
17060 && TARGET_ALTIVEC_VRSAVE
17061 && info->vrsave_mask != 0
1c9c5e43
AM
17062 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17063 || (DEFAULT_ABI != ABI_V4
17064 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
e6477eaa
AM
17065 {
17066 rtx addr, mem, reg;
17067
1c9c5e43 17068 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 17069 {
1c9c5e43
AM
17070 if (use_backchain_to_restore_sp)
17071 {
17072 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17073 emit_move_insn (frame_reg_rtx,
17074 gen_rtx_MEM (Pmode, sp_reg_rtx));
17075 sp_offset = 0;
17076 }
17077 else if (frame_pointer_needed)
17078 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
17079 }
17080
17081 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17082 GEN_INT (info->vrsave_save_offset + sp_offset));
17083 mem = gen_frame_mem (SImode, addr);
17084 reg = gen_rtx_REG (SImode, 12);
17085 emit_move_insn (reg, mem);
17086
17087 emit_insn (generate_set_vrsave (reg, info, 1));
17088 }
17089
1c9c5e43
AM
17090 /* If we have a large stack frame, restore the old stack pointer
17091 using the backchain. */
2b2c2fe5
EC
17092 if (use_backchain_to_restore_sp)
17093 {
1c9c5e43 17094 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
17095 {
17096 /* Under V.4, don't reset the stack pointer until after we're done
17097 loading the saved registers. */
17098 if (DEFAULT_ABI == ABI_V4)
17099 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17100
17101 emit_move_insn (frame_reg_rtx,
17102 gen_rtx_MEM (Pmode, sp_reg_rtx));
17103 sp_offset = 0;
17104 }
1c9c5e43
AM
17105 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17106 && DEFAULT_ABI == ABI_V4)
17107 /* frame_reg_rtx has been set up by the altivec restore. */
17108 ;
17109 else
17110 {
17111 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
17112 frame_reg_rtx = sp_reg_rtx;
17113 }
17114 }
17115 /* If we have a frame pointer, we can restore the old stack pointer
17116 from it. */
17117 else if (frame_pointer_needed)
17118 {
17119 frame_reg_rtx = sp_reg_rtx;
17120 if (DEFAULT_ABI == ABI_V4)
17121 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17122
17123 emit_insn (TARGET_32BIT
17124 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17125 GEN_INT (info->total_size))
17126 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17127 GEN_INT (info->total_size)));
17128 sp_offset = 0;
2b2c2fe5 17129 }
45b194f8
AM
17130 else if (info->push_p
17131 && DEFAULT_ABI != ABI_V4
e3b5732b 17132 && !crtl->calls_eh_return)
2b2c2fe5 17133 {
45b194f8
AM
17134 emit_insn (TARGET_32BIT
17135 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
17136 GEN_INT (info->total_size))
17137 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
17138 GEN_INT (info->total_size)));
17139 sp_offset = 0;
2b2c2fe5
EC
17140 }
17141
e6477eaa 17142 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
17143 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17144 && TARGET_ALTIVEC_ABI
e6477eaa
AM
17145 && info->altivec_size != 0
17146 && (DEFAULT_ABI == ABI_V4
17147 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
17148 {
17149 int i;
17150
17151 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17152 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17153 {
17154 rtx addr, areg, mem;
17155
17156 areg = gen_rtx_REG (Pmode, 0);
17157 emit_move_insn
17158 (areg, GEN_INT (info->altivec_save_offset
17159 + sp_offset
17160 + 16 * (i - info->first_altivec_reg_save)));
17161
17162 /* AltiVec addressing mode is [reg+reg]. */
17163 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17164 mem = gen_frame_mem (V4SImode, addr);
17165
17166 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17167 }
17168 }
17169
17170 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
17171 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17172 && TARGET_ALTIVEC
e6477eaa
AM
17173 && TARGET_ALTIVEC_VRSAVE
17174 && info->vrsave_mask != 0
17175 && (DEFAULT_ABI == ABI_V4
17176 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
554c2941
AM
17177 {
17178 rtx addr, mem, reg;
17179
17180 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17181 GEN_INT (info->vrsave_save_offset + sp_offset));
17182 mem = gen_frame_mem (SImode, addr);
17183 reg = gen_rtx_REG (SImode, 12);
17184 emit_move_insn (reg, mem);
17185
17186 emit_insn (generate_set_vrsave (reg, info, 1));
17187 }
17188
f78c3290
NF
17189 /* Get the old lr if we saved it. If we are restoring registers
17190 out-of-line, then the out-of-line routines can do this for us. */
17191 if (restore_lr)
b6c9286a 17192 {
a3170dc6
AH
17193 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17194 info->lr_save_offset + sp_offset);
ba4828e0 17195
9ebbca7d 17196 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17197 }
f676971a 17198
9ebbca7d
GK
17199 /* Get the old cr if we saved it. */
17200 if (info->cr_save_p)
17201 {
17202 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17203 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17204 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17205
9ebbca7d
GK
17206 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17207 }
f676971a 17208
9ebbca7d 17209 /* Set LR here to try to overlap restores below. */
f78c3290 17210 if (restore_lr)
1de43f85 17211 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17212 gen_rtx_REG (Pmode, 0));
f676971a 17213
83720594 17214 /* Load exception handler data registers, if needed. */
e3b5732b 17215 if (crtl->calls_eh_return)
83720594 17216 {
78e1b90d
DE
17217 unsigned int i, regno;
17218
fc4767bb
JJ
17219 if (TARGET_AIX)
17220 {
17221 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17222 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17223 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17224
17225 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17226 }
17227
83720594
RH
17228 for (i = 0; ; ++i)
17229 {
a3170dc6 17230 rtx mem;
83720594
RH
17231
17232 regno = EH_RETURN_DATA_REGNO (i);
17233 if (regno == INVALID_REGNUM)
17234 break;
17235
a3170dc6
AH
17236 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17237 info->ehrd_offset + sp_offset
17238 + reg_size * (int) i);
83720594
RH
17239
17240 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17241 }
17242 }
f676971a 17243
9ebbca7d
GK
17244 /* Restore GPRs. This is done as a PARALLEL if we are using
17245 the load-multiple instructions. */
f78c3290
NF
17246 if (TARGET_SPE_ABI
17247 && info->spe_64bit_regs_used != 0
17248 && info->first_gp_reg_save != 32)
52ff33d0 17249 {
52ff33d0
NF
17250 /* Determine whether we can address all of the registers that need
17251 to be saved with an offset from the stack pointer that fits in
17252 the small const field for SPE memory instructions. */
17253 int spe_regs_addressable_via_sp
f78c3290
NF
17254 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17255 + (32 - info->first_gp_reg_save - 1) * reg_size)
17256 && restoring_GPRs_inline);
52ff33d0
NF
17257 int spe_offset;
17258
17259 if (spe_regs_addressable_via_sp)
45b194f8 17260 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17261 else
17262 {
45b194f8 17263 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17264 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17265 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17266 There's no need to worry here because the static chain is passed
17267 anew to every function. */
f78c3290
NF
17268 int ool_adjust = (restoring_GPRs_inline
17269 ? 0
17270 : (info->first_gp_reg_save
17271 - (FIRST_SAVRES_REGISTER+1))*8);
17272
45b194f8
AM
17273 if (frame_reg_rtx == sp_reg_rtx)
17274 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17275 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17276 GEN_INT (info->spe_gp_save_offset
17277 + sp_offset
17278 - ool_adjust)));
45b194f8
AM
17279 /* Keep the invariant that frame_reg_rtx + sp_offset points
17280 at the top of the stack frame. */
17281 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17282
17283 spe_offset = 0;
17284 }
17285
f78c3290
NF
17286 if (restoring_GPRs_inline)
17287 {
17288 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17289 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17290 {
17291 rtx offset, addr, mem;
52ff33d0 17292
f78c3290
NF
17293 /* We're doing all this to ensure that the immediate offset
17294 fits into the immediate field of 'evldd'. */
17295 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17296
f78c3290
NF
17297 offset = GEN_INT (spe_offset + reg_size * i);
17298 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17299 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17300
f78c3290
NF
17301 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17302 mem);
17303 }
17304 }
17305 else
17306 {
17307 rtx par;
17308
17309 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17310 0, reg_mode,
17311 /*savep=*/false, /*gpr=*/true,
17312 /*exitp=*/true);
17313 emit_jump_insn (par);
17314
17315 /* We don't want anybody else emitting things after we jumped
17316 back. */
17317 return;
17318 }
52ff33d0 17319 }
f78c3290
NF
17320 else if (!restoring_GPRs_inline)
17321 {
17322 /* We are jumping to an out-of-line function. */
17323 bool can_use_exit = info->first_fp_reg_save == 64;
17324 rtx par;
17325
17326 /* Emit stack reset code if we need it. */
17327 if (can_use_exit)
17328 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17329 sp_offset, can_use_exit);
17330 else
17331 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17332 sp_reg_rtx,
17333 GEN_INT (sp_offset - info->fp_size)));
17334
17335 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17336 info->gp_save_offset, reg_mode,
17337 /*savep=*/false, /*gpr=*/true,
17338 /*exitp=*/can_use_exit);
17339
17340 if (can_use_exit)
17341 {
17342 if (info->cr_save_p)
17343 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17344 using_mtcr_multiple);
17345
17346 emit_jump_insn (par);
17347
17348 /* We don't want anybody else emitting things after we jumped
17349 back. */
17350 return;
17351 }
17352 else
17353 emit_insn (par);
17354 }
17355 else if (using_load_multiple)
17356 {
17357 rtvec p;
17358 p = rtvec_alloc (32 - info->first_gp_reg_save);
17359 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17360 {
f676971a
EC
17361 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17362 GEN_INT (info->gp_save_offset
17363 + sp_offset
9ebbca7d 17364 + reg_size * i));
0be76840 17365 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17366
f78c3290
NF
17367 RTVEC_ELT (p, i) =
17368 gen_rtx_SET (VOIDmode,
17369 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17370 mem);
9ebbca7d 17371 }
f78c3290
NF
17372 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17373 }
17374 else
17375 {
17376 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17377 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17378 {
17379 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17380 GEN_INT (info->gp_save_offset
17381 + sp_offset
17382 + reg_size * i));
17383 rtx mem = gen_frame_mem (reg_mode, addr);
17384
17385 emit_move_insn (gen_rtx_REG (reg_mode,
17386 info->first_gp_reg_save + i), mem);
17387 }
17388 }
9878760c 17389
9ebbca7d
GK
17390 /* Restore fpr's if we need to do it without calling a function. */
17391 if (restoring_FPRs_inline)
17392 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17393 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17394 && ! call_used_regs[info->first_fp_reg_save+i]))
17395 {
17396 rtx addr, mem;
17397 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17398 GEN_INT (info->fp_save_offset
17399 + sp_offset
a4f6c312 17400 + 8 * i));
696e45ba
ME
17401 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17402 ? DFmode : SFmode), addr);
9ebbca7d 17403
696e45ba
ME
17404 emit_move_insn (gen_rtx_REG (((TARGET_HARD_FLOAT
17405 && TARGET_DOUBLE_FLOAT)
17406 ? DFmode : SFmode),
9ebbca7d
GK
17407 info->first_fp_reg_save + i),
17408 mem);
17409 }
8d30c4ee 17410
9ebbca7d
GK
17411 /* If we saved cr, restore it here. Just those that were used. */
17412 if (info->cr_save_p)
f78c3290 17413 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17414
9ebbca7d 17415 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17416 have been done. */
f78c3290
NF
17417 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17418 sp_offset, !restoring_FPRs_inline);
b6c9286a 17419
e3b5732b 17420 if (crtl->calls_eh_return)
83720594
RH
17421 {
17422 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17423 emit_insn (TARGET_32BIT
83720594
RH
17424 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17425 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17426 }
17427
9ebbca7d
GK
17428 if (!sibcall)
17429 {
17430 rtvec p;
17431 if (! restoring_FPRs_inline)
f78c3290 17432 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17433 else
17434 p = rtvec_alloc (2);
b6c9286a 17435
e35b9579 17436 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17437 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17438 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17439 : gen_rtx_CLOBBER (VOIDmode,
17440 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17441
17442 /* If we have to restore more than two FP registers, branch to the
17443 restore function. It will return to our caller. */
17444 if (! restoring_FPRs_inline)
17445 {
17446 int i;
f78c3290
NF
17447 rtx sym;
17448
17449 sym = rs6000_savres_routine_sym (info,
17450 /*savep=*/false,
17451 /*gpr=*/false,
17452 /*exitp=*/true);
17453 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17454 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17455 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17456 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17457 {
17458 rtx addr, mem;
17459 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17460 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17461 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17462
f78c3290 17463 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17464 gen_rtx_SET (VOIDmode,
17465 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17466 mem);
b6c9286a
MM
17467 }
17468 }
f676971a 17469
9ebbca7d 17470 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17471 }
9878760c
RK
17472}
17473
17474/* Write function epilogue. */
17475
08c148a8 17476static void
f676971a 17477rs6000_output_function_epilogue (FILE *file,
a2369ed3 17478 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17479{
9ebbca7d 17480 if (! HAVE_epilogue)
9878760c 17481 {
9ebbca7d
GK
17482 rtx insn = get_last_insn ();
17483 /* If the last insn was a BARRIER, we don't have to write anything except
17484 the trace table. */
17485 if (GET_CODE (insn) == NOTE)
17486 insn = prev_nonnote_insn (insn);
17487 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17488 {
9ebbca7d
GK
17489 /* This is slightly ugly, but at least we don't have two
17490 copies of the epilogue-emitting code. */
17491 start_sequence ();
17492
17493 /* A NOTE_INSN_DELETED is supposed to be at the start
17494 and end of the "toplevel" insn chain. */
2e040219 17495 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17496 rs6000_emit_epilogue (FALSE);
2e040219 17497 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17498
a3c9585f 17499 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17500 {
17501 rtx insn;
17502 unsigned addr = 0;
17503 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17504 {
17505 INSN_ADDRESSES_NEW (insn, addr);
17506 addr += 4;
17507 }
17508 }
17509
9ebbca7d 17510 if (TARGET_DEBUG_STACK)
a4f6c312 17511 debug_rtx_list (get_insns (), 100);
c9d691e9 17512 final (get_insns (), file, FALSE);
9ebbca7d 17513 end_sequence ();
4697a36c 17514 }
9878760c 17515 }
b4ac57ab 17516
efdba735
SH
17517#if TARGET_MACHO
17518 macho_branch_islands ();
0e5da0be
GK
17519 /* Mach-O doesn't support labels at the end of objects, so if
17520 it looks like we might want one, insert a NOP. */
17521 {
17522 rtx insn = get_last_insn ();
17523 while (insn
17524 && NOTE_P (insn)
a38e7aa5 17525 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17526 insn = PREV_INSN (insn);
f676971a
EC
17527 if (insn
17528 && (LABEL_P (insn)
0e5da0be 17529 || (NOTE_P (insn)
a38e7aa5 17530 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17531 fputs ("\tnop\n", file);
17532 }
17533#endif
17534
9b30bae2 17535 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17536 on its format.
17537
17538 We don't output a traceback table if -finhibit-size-directive was
17539 used. The documentation for -finhibit-size-directive reads
17540 ``don't output a @code{.size} assembler directive, or anything
17541 else that would cause trouble if the function is split in the
17542 middle, and the two halves are placed at locations far apart in
17543 memory.'' The traceback table has this property, since it
17544 includes the offset from the start of the function to the
4d30c363
MM
17545 traceback table itself.
17546
17547 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 17548 different traceback table. */
57ac7be9 17549 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
e3b5732b 17550 && rs6000_traceback != traceback_none && !crtl->is_thunk)
9b30bae2 17551 {
69c75916 17552 const char *fname = NULL;
3ac88239 17553 const char *language_string = lang_hooks.name;
6041bf2f 17554 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17555 int i;
57ac7be9 17556 int optional_tbtab;
8097c268 17557 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17558
17559 if (rs6000_traceback == traceback_full)
17560 optional_tbtab = 1;
17561 else if (rs6000_traceback == traceback_part)
17562 optional_tbtab = 0;
17563 else
17564 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17565
69c75916
AM
17566 if (optional_tbtab)
17567 {
17568 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17569 while (*fname == '.') /* V.4 encodes . in the name */
17570 fname++;
17571
17572 /* Need label immediately before tbtab, so we can compute
17573 its offset from the function start. */
17574 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17575 ASM_OUTPUT_LABEL (file, fname);
17576 }
314fc5a9
ILT
17577
17578 /* The .tbtab pseudo-op can only be used for the first eight
17579 expressions, since it can't handle the possibly variable
17580 length fields that follow. However, if you omit the optional
17581 fields, the assembler outputs zeros for all optional fields
17582 anyways, giving each variable length field is minimum length
17583 (as defined in sys/debug.h). Thus we can not use the .tbtab
17584 pseudo-op at all. */
17585
17586 /* An all-zero word flags the start of the tbtab, for debuggers
17587 that have to find it by searching forward from the entry
17588 point or from the current pc. */
19d2d16f 17589 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17590
17591 /* Tbtab format type. Use format type 0. */
19d2d16f 17592 fputs ("\t.byte 0,", file);
314fc5a9 17593
5fc921c1
DE
17594 /* Language type. Unfortunately, there does not seem to be any
17595 official way to discover the language being compiled, so we
17596 use language_string.
17597 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17598 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17599 a number, so for now use 9. */
5fc921c1 17600 if (! strcmp (language_string, "GNU C"))
314fc5a9 17601 i = 0;
6de9cd9a 17602 else if (! strcmp (language_string, "GNU F77")
7f62878c 17603 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17604 i = 1;
8b83775b 17605 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17606 i = 2;
5fc921c1
DE
17607 else if (! strcmp (language_string, "GNU Ada"))
17608 i = 3;
56438901
AM
17609 else if (! strcmp (language_string, "GNU C++")
17610 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17611 i = 9;
9517ead8
AG
17612 else if (! strcmp (language_string, "GNU Java"))
17613 i = 13;
5fc921c1
DE
17614 else if (! strcmp (language_string, "GNU Objective-C"))
17615 i = 14;
314fc5a9 17616 else
37409796 17617 gcc_unreachable ();
314fc5a9
ILT
17618 fprintf (file, "%d,", i);
17619
17620 /* 8 single bit fields: global linkage (not set for C extern linkage,
17621 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17622 from start of procedure stored in tbtab, internal function, function
17623 has controlled storage, function has no toc, function uses fp,
17624 function logs/aborts fp operations. */
17625 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17626 fprintf (file, "%d,",
17627 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
17628
17629 /* 6 bitfields: function is interrupt handler, name present in
17630 proc table, function calls alloca, on condition directives
17631 (controls stack walks, 3 bits), saves condition reg, saves
17632 link reg. */
17633 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17634 set up as a frame pointer, even when there is no alloca call. */
17635 fprintf (file, "%d,",
6041bf2f
DE
17636 ((optional_tbtab << 6)
17637 | ((optional_tbtab & frame_pointer_needed) << 5)
17638 | (info->cr_save_p << 1)
17639 | (info->lr_save_p)));
314fc5a9 17640
6041bf2f 17641 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17642 (6 bits). */
17643 fprintf (file, "%d,",
4697a36c 17644 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17645
17646 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17647 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17648
6041bf2f
DE
17649 if (optional_tbtab)
17650 {
17651 /* Compute the parameter info from the function decl argument
17652 list. */
17653 tree decl;
17654 int next_parm_info_bit = 31;
314fc5a9 17655
6041bf2f
DE
17656 for (decl = DECL_ARGUMENTS (current_function_decl);
17657 decl; decl = TREE_CHAIN (decl))
17658 {
17659 rtx parameter = DECL_INCOMING_RTL (decl);
17660 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17661
6041bf2f
DE
17662 if (GET_CODE (parameter) == REG)
17663 {
ebb109ad 17664 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17665 {
17666 int bits;
17667
17668 float_parms++;
17669
37409796
NS
17670 switch (mode)
17671 {
17672 case SFmode:
e41b2a33 17673 case SDmode:
37409796
NS
17674 bits = 0x2;
17675 break;
17676
17677 case DFmode:
7393f7f8 17678 case DDmode:
37409796 17679 case TFmode:
7393f7f8 17680 case TDmode:
37409796
NS
17681 bits = 0x3;
17682 break;
17683
17684 default:
17685 gcc_unreachable ();
17686 }
6041bf2f
DE
17687
17688 /* If only one bit will fit, don't or in this entry. */
17689 if (next_parm_info_bit > 0)
17690 parm_info |= (bits << (next_parm_info_bit - 1));
17691 next_parm_info_bit -= 2;
17692 }
17693 else
17694 {
17695 fixed_parms += ((GET_MODE_SIZE (mode)
17696 + (UNITS_PER_WORD - 1))
17697 / UNITS_PER_WORD);
17698 next_parm_info_bit -= 1;
17699 }
17700 }
17701 }
17702 }
314fc5a9
ILT
17703
17704 /* Number of fixed point parameters. */
17705 /* This is actually the number of words of fixed point parameters; thus
17706 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17707 fprintf (file, "%d,", fixed_parms);
17708
17709 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17710 all on stack. */
17711 /* This is actually the number of fp registers that hold parameters;
17712 and thus the maximum value is 13. */
17713 /* Set parameters on stack bit if parameters are not in their original
17714 registers, regardless of whether they are on the stack? Xlc
17715 seems to set the bit when not optimizing. */
17716 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17717
6041bf2f
DE
17718 if (! optional_tbtab)
17719 return;
17720
314fc5a9
ILT
17721 /* Optional fields follow. Some are variable length. */
17722
17723 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17724 11 double float. */
17725 /* There is an entry for each parameter in a register, in the order that
17726 they occur in the parameter list. Any intervening arguments on the
17727 stack are ignored. If the list overflows a long (max possible length
17728 34 bits) then completely leave off all elements that don't fit. */
17729 /* Only emit this long if there was at least one parameter. */
17730 if (fixed_parms || float_parms)
17731 fprintf (file, "\t.long %d\n", parm_info);
17732
17733 /* Offset from start of code to tb table. */
19d2d16f 17734 fputs ("\t.long ", file);
314fc5a9 17735 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17736 if (TARGET_AIX)
17737 RS6000_OUTPUT_BASENAME (file, fname);
17738 else
17739 assemble_name (file, fname);
17740 putc ('-', file);
17741 rs6000_output_function_entry (file, fname);
19d2d16f 17742 putc ('\n', file);
314fc5a9
ILT
17743
17744 /* Interrupt handler mask. */
17745 /* Omit this long, since we never set the interrupt handler bit
17746 above. */
17747
17748 /* Number of CTL (controlled storage) anchors. */
17749 /* Omit this long, since the has_ctl bit is never set above. */
17750
17751 /* Displacement into stack of each CTL anchor. */
17752 /* Omit this list of longs, because there are no CTL anchors. */
17753
17754 /* Length of function name. */
69c75916
AM
17755 if (*fname == '*')
17756 ++fname;
296b8152 17757 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17758
17759 /* Function name. */
17760 assemble_string (fname, strlen (fname));
17761
17762 /* Register for alloca automatic storage; this is always reg 31.
17763 Only emit this if the alloca bit was set above. */
17764 if (frame_pointer_needed)
19d2d16f 17765 fputs ("\t.byte 31\n", file);
b1765bde
DE
17766
17767 fputs ("\t.align 2\n", file);
9b30bae2 17768 }
9878760c 17769}
17167fd8 17770\f
a4f6c312
SS
17771/* A C compound statement that outputs the assembler code for a thunk
17772 function, used to implement C++ virtual function calls with
17773 multiple inheritance. The thunk acts as a wrapper around a virtual
17774 function, adjusting the implicit object parameter before handing
17775 control off to the real function.
17776
17777 First, emit code to add the integer DELTA to the location that
17778 contains the incoming first argument. Assume that this argument
17779 contains a pointer, and is the one used to pass the `this' pointer
17780 in C++. This is the incoming argument *before* the function
17781 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17782 values of all other incoming arguments.
17167fd8
MM
17783
17784 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17785 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17786 not touch the return address. Hence returning from FUNCTION will
17787 return to whoever called the current `thunk'.
17167fd8 17788
a4f6c312
SS
17789 The effect must be as if FUNCTION had been called directly with the
17790 adjusted first argument. This macro is responsible for emitting
17791 all of the code for a thunk function; output_function_prologue()
17792 and output_function_epilogue() are not invoked.
17167fd8 17793
a4f6c312
SS
17794 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17795 been extracted from it.) It might possibly be useful on some
17796 targets, but probably not.
17167fd8 17797
a4f6c312
SS
17798 If you do not define this macro, the target-independent code in the
17799 C++ frontend will generate a less efficient heavyweight thunk that
17800 calls FUNCTION instead of jumping to it. The generic approach does
17801 not support varargs. */
17167fd8 17802
3961e8fe 17803static void
f676971a
EC
17804rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17805 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17806 tree function)
17167fd8 17807{
0a2aaacc 17808 rtx this_rtx, insn, funexp;
17167fd8 17809
5b71a4e7 17810 reload_completed = 1;
fe3ad572 17811 epilogue_completed = 1;
56a7189a 17812
5b71a4e7 17813 /* Mark the end of the (empty) prologue. */
2e040219 17814 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17815
5b71a4e7
DE
17816 /* Find the "this" pointer. If the function returns a structure,
17817 the structure return pointer is in r3. */
61f71b34 17818 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17819 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17820 else
0a2aaacc 17821 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17822
5b71a4e7
DE
17823 /* Apply the constant offset, if required. */
17824 if (delta)
17825 {
17826 rtx delta_rtx = GEN_INT (delta);
17827 emit_insn (TARGET_32BIT
0a2aaacc
KG
17828 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17829 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
17830 }
17831
5b71a4e7
DE
17832 /* Apply the offset from the vtable, if required. */
17833 if (vcall_offset)
17167fd8 17834 {
5b71a4e7
DE
17835 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17836 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17837
0a2aaacc 17838 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
eeff9307
JJ
17839 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17840 {
17841 emit_insn (TARGET_32BIT
17842 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17843 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17844 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17845 }
17846 else
17847 {
17848 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17849
17850 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17851 }
5b71a4e7 17852 emit_insn (TARGET_32BIT
0a2aaacc
KG
17853 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17854 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
17855 }
17856
5b71a4e7
DE
17857 /* Generate a tail call to the target function. */
17858 if (!TREE_USED (function))
17859 {
17860 assemble_external (function);
17861 TREE_USED (function) = 1;
17862 }
17863 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17864 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17865
17866#if TARGET_MACHO
ab82a49f 17867 if (MACHOPIC_INDIRECT)
5b71a4e7 17868 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17869#endif
5b71a4e7
DE
17870
17871 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17872 generate sibcall RTL explicitly. */
5b71a4e7
DE
17873 insn = emit_call_insn (
17874 gen_rtx_PARALLEL (VOIDmode,
17875 gen_rtvec (4,
17876 gen_rtx_CALL (VOIDmode,
17877 funexp, const0_rtx),
17878 gen_rtx_USE (VOIDmode, const0_rtx),
17879 gen_rtx_USE (VOIDmode,
17880 gen_rtx_REG (SImode,
1de43f85 17881 LR_REGNO)),
5b71a4e7
DE
17882 gen_rtx_RETURN (VOIDmode))));
17883 SIBLING_CALL_P (insn) = 1;
17884 emit_barrier ();
17885
17886 /* Run just enough of rest_of_compilation to get the insns emitted.
17887 There's not really enough bulk here to make other passes such as
17888 instruction scheduling worth while. Note that use_thunk calls
17889 assemble_start_function and assemble_end_function. */
17890 insn = get_insns ();
55e092c4 17891 insn_locators_alloc ();
5b71a4e7
DE
17892 shorten_branches (insn);
17893 final_start_function (insn, file, 1);
c9d691e9 17894 final (insn, file, 1);
5b71a4e7 17895 final_end_function ();
d7087dd2 17896 free_after_compilation (cfun);
5b71a4e7
DE
17897
17898 reload_completed = 0;
fe3ad572 17899 epilogue_completed = 0;
9ebbca7d 17900}
9ebbca7d
GK
17901\f
17902/* A quick summary of the various types of 'constant-pool tables'
17903 under PowerPC:
17904
f676971a 17905 Target Flags Name One table per
9ebbca7d
GK
17906 AIX (none) AIX TOC object file
17907 AIX -mfull-toc AIX TOC object file
17908 AIX -mminimal-toc AIX minimal TOC translation unit
17909 SVR4/EABI (none) SVR4 SDATA object file
17910 SVR4/EABI -fpic SVR4 pic object file
17911 SVR4/EABI -fPIC SVR4 PIC translation unit
17912 SVR4/EABI -mrelocatable EABI TOC function
17913 SVR4/EABI -maix AIX TOC object file
f676971a 17914 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17915 AIX minimal TOC translation unit
17916
17917 Name Reg. Set by entries contains:
17918 made by addrs? fp? sum?
17919
17920 AIX TOC 2 crt0 as Y option option
17921 AIX minimal TOC 30 prolog gcc Y Y option
17922 SVR4 SDATA 13 crt0 gcc N Y N
17923 SVR4 pic 30 prolog ld Y not yet N
17924 SVR4 PIC 30 prolog gcc Y option option
17925 EABI TOC 30 prolog gcc Y option option
17926
17927*/
17928
9ebbca7d
GK
17929/* Hash functions for the hash table. */
17930
17931static unsigned
a2369ed3 17932rs6000_hash_constant (rtx k)
9ebbca7d 17933{
46b33600
RH
17934 enum rtx_code code = GET_CODE (k);
17935 enum machine_mode mode = GET_MODE (k);
17936 unsigned result = (code << 3) ^ mode;
17937 const char *format;
17938 int flen, fidx;
f676971a 17939
46b33600
RH
17940 format = GET_RTX_FORMAT (code);
17941 flen = strlen (format);
17942 fidx = 0;
9ebbca7d 17943
46b33600
RH
17944 switch (code)
17945 {
17946 case LABEL_REF:
17947 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17948
17949 case CONST_DOUBLE:
17950 if (mode != VOIDmode)
17951 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17952 flen = 2;
17953 break;
17954
17955 case CODE_LABEL:
17956 fidx = 3;
17957 break;
17958
17959 default:
17960 break;
17961 }
9ebbca7d
GK
17962
17963 for (; fidx < flen; fidx++)
17964 switch (format[fidx])
17965 {
17966 case 's':
17967 {
17968 unsigned i, len;
17969 const char *str = XSTR (k, fidx);
17970 len = strlen (str);
17971 result = result * 613 + len;
17972 for (i = 0; i < len; i++)
17973 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17974 break;
17975 }
9ebbca7d
GK
17976 case 'u':
17977 case 'e':
17978 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17979 break;
17980 case 'i':
17981 case 'n':
17982 result = result * 613 + (unsigned) XINT (k, fidx);
17983 break;
17984 case 'w':
17985 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17986 result = result * 613 + (unsigned) XWINT (k, fidx);
17987 else
17988 {
17989 size_t i;
9390387d 17990 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17991 result = result * 613 + (unsigned) (XWINT (k, fidx)
17992 >> CHAR_BIT * i);
17993 }
17994 break;
09501938
DE
17995 case '0':
17996 break;
9ebbca7d 17997 default:
37409796 17998 gcc_unreachable ();
9ebbca7d 17999 }
46b33600 18000
9ebbca7d
GK
18001 return result;
18002}
18003
18004static unsigned
a2369ed3 18005toc_hash_function (const void *hash_entry)
9ebbca7d 18006{
f676971a 18007 const struct toc_hash_struct *thc =
a9098fd0
GK
18008 (const struct toc_hash_struct *) hash_entry;
18009 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
18010}
18011
18012/* Compare H1 and H2 for equivalence. */
18013
18014static int
a2369ed3 18015toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
18016{
18017 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18018 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18019
a9098fd0
GK
18020 if (((const struct toc_hash_struct *) h1)->key_mode
18021 != ((const struct toc_hash_struct *) h2)->key_mode)
18022 return 0;
18023
5692c7bc 18024 return rtx_equal_p (r1, r2);
9ebbca7d
GK
18025}
18026
28e510bd
MM
18027/* These are the names given by the C++ front-end to vtables, and
18028 vtable-like objects. Ideally, this logic should not be here;
18029 instead, there should be some programmatic way of inquiring as
18030 to whether or not an object is a vtable. */
18031
18032#define VTABLE_NAME_P(NAME) \
9390387d 18033 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
18034 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
18035 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 18036 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 18037 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
18038
18039void
a2369ed3 18040rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
18041{
18042 /* Currently C++ toc references to vtables can be emitted before it
18043 is decided whether the vtable is public or private. If this is
18044 the case, then the linker will eventually complain that there is
f676971a 18045 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
18046 we emit the TOC reference to reference the symbol and not the
18047 section. */
18048 const char *name = XSTR (x, 0);
54ee9799 18049
f676971a 18050 if (VTABLE_NAME_P (name))
54ee9799
DE
18051 {
18052 RS6000_OUTPUT_BASENAME (file, name);
18053 }
18054 else
18055 assemble_name (file, name);
28e510bd
MM
18056}
18057
a4f6c312
SS
18058/* Output a TOC entry. We derive the entry name from what is being
18059 written. */
9878760c
RK
18060
18061void
a2369ed3 18062output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
18063{
18064 char buf[256];
3cce094d 18065 const char *name = buf;
ec940faa 18066 const char *real_name;
9878760c 18067 rtx base = x;
16fdeb48 18068 HOST_WIDE_INT offset = 0;
9878760c 18069
37409796 18070 gcc_assert (!TARGET_NO_TOC);
4697a36c 18071
9ebbca7d
GK
18072 /* When the linker won't eliminate them, don't output duplicate
18073 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
18074 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18075 CODE_LABELs. */
18076 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
18077 {
18078 struct toc_hash_struct *h;
18079 void * * found;
f676971a 18080
17211ab5 18081 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 18082 time because GGC is not initialized at that point. */
17211ab5 18083 if (toc_hash_table == NULL)
f676971a 18084 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
18085 toc_hash_eq, NULL);
18086
5ead67f6 18087 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 18088 h->key = x;
a9098fd0 18089 h->key_mode = mode;
9ebbca7d 18090 h->labelno = labelno;
f676971a 18091
9ebbca7d
GK
18092 found = htab_find_slot (toc_hash_table, h, 1);
18093 if (*found == NULL)
18094 *found = h;
f676971a 18095 else /* This is indeed a duplicate.
9ebbca7d
GK
18096 Set this label equal to that label. */
18097 {
18098 fputs ("\t.set ", file);
18099 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18100 fprintf (file, "%d,", labelno);
18101 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 18102 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
18103 found)->labelno));
18104 return;
18105 }
18106 }
18107
18108 /* If we're going to put a double constant in the TOC, make sure it's
18109 aligned properly when strict alignment is on. */
ff1720ed
RK
18110 if (GET_CODE (x) == CONST_DOUBLE
18111 && STRICT_ALIGNMENT
a9098fd0 18112 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
18113 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
18114 ASM_OUTPUT_ALIGN (file, 3);
18115 }
18116
4977bab6 18117 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 18118
37c37a57
RK
18119 /* Handle FP constants specially. Note that if we have a minimal
18120 TOC, things we put here aren't actually in the TOC, so we can allow
18121 FP constants. */
00b79d54
BE
18122 if (GET_CODE (x) == CONST_DOUBLE &&
18123 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
18124 {
18125 REAL_VALUE_TYPE rv;
18126 long k[4];
18127
18128 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18129 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18130 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18131 else
18132 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
18133
18134 if (TARGET_64BIT)
18135 {
18136 if (TARGET_MINIMAL_TOC)
18137 fputs (DOUBLE_INT_ASM_OP, file);
18138 else
18139 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18140 k[0] & 0xffffffff, k[1] & 0xffffffff,
18141 k[2] & 0xffffffff, k[3] & 0xffffffff);
18142 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18143 k[0] & 0xffffffff, k[1] & 0xffffffff,
18144 k[2] & 0xffffffff, k[3] & 0xffffffff);
18145 return;
18146 }
18147 else
18148 {
18149 if (TARGET_MINIMAL_TOC)
18150 fputs ("\t.long ", file);
18151 else
18152 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18153 k[0] & 0xffffffff, k[1] & 0xffffffff,
18154 k[2] & 0xffffffff, k[3] & 0xffffffff);
18155 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18156 k[0] & 0xffffffff, k[1] & 0xffffffff,
18157 k[2] & 0xffffffff, k[3] & 0xffffffff);
18158 return;
18159 }
18160 }
00b79d54
BE
18161 else if (GET_CODE (x) == CONST_DOUBLE &&
18162 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18163 {
042259f2
DE
18164 REAL_VALUE_TYPE rv;
18165 long k[2];
0adc764e 18166
042259f2 18167 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18168
18169 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18170 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18171 else
18172 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18173
13ded975
DE
18174 if (TARGET_64BIT)
18175 {
18176 if (TARGET_MINIMAL_TOC)
2bfcf297 18177 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18178 else
2f0552b6
AM
18179 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18180 k[0] & 0xffffffff, k[1] & 0xffffffff);
18181 fprintf (file, "0x%lx%08lx\n",
18182 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18183 return;
18184 }
1875cc88 18185 else
13ded975
DE
18186 {
18187 if (TARGET_MINIMAL_TOC)
2bfcf297 18188 fputs ("\t.long ", file);
13ded975 18189 else
2f0552b6
AM
18190 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18191 k[0] & 0xffffffff, k[1] & 0xffffffff);
18192 fprintf (file, "0x%lx,0x%lx\n",
18193 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18194 return;
18195 }
9878760c 18196 }
00b79d54
BE
18197 else if (GET_CODE (x) == CONST_DOUBLE &&
18198 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18199 {
042259f2
DE
18200 REAL_VALUE_TYPE rv;
18201 long l;
9878760c 18202
042259f2 18203 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18204 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18205 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18206 else
18207 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18208
31bfaa0b
DE
18209 if (TARGET_64BIT)
18210 {
18211 if (TARGET_MINIMAL_TOC)
2bfcf297 18212 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18213 else
2f0552b6
AM
18214 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18215 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18216 return;
18217 }
042259f2 18218 else
31bfaa0b
DE
18219 {
18220 if (TARGET_MINIMAL_TOC)
2bfcf297 18221 fputs ("\t.long ", file);
31bfaa0b 18222 else
2f0552b6
AM
18223 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18224 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18225 return;
18226 }
042259f2 18227 }
f176e826 18228 else if (GET_MODE (x) == VOIDmode
a9098fd0 18229 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18230 {
e2c953b6 18231 unsigned HOST_WIDE_INT low;
042259f2
DE
18232 HOST_WIDE_INT high;
18233
18234 if (GET_CODE (x) == CONST_DOUBLE)
18235 {
18236 low = CONST_DOUBLE_LOW (x);
18237 high = CONST_DOUBLE_HIGH (x);
18238 }
18239 else
18240#if HOST_BITS_PER_WIDE_INT == 32
18241 {
18242 low = INTVAL (x);
0858c623 18243 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18244 }
18245#else
18246 {
c4ad648e
AM
18247 low = INTVAL (x) & 0xffffffff;
18248 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18249 }
18250#endif
9878760c 18251
a9098fd0
GK
18252 /* TOC entries are always Pmode-sized, but since this
18253 is a bigendian machine then if we're putting smaller
18254 integer constants in the TOC we have to pad them.
18255 (This is still a win over putting the constants in
18256 a separate constant pool, because then we'd have
02a4ec28
FS
18257 to have both a TOC entry _and_ the actual constant.)
18258
18259 For a 32-bit target, CONST_INT values are loaded and shifted
18260 entirely within `low' and can be stored in one TOC entry. */
18261
37409796
NS
18262 /* It would be easy to make this work, but it doesn't now. */
18263 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18264
18265 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18266 {
18267#if HOST_BITS_PER_WIDE_INT == 32
18268 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18269 POINTER_SIZE, &low, &high, 0);
18270#else
18271 low |= high << 32;
18272 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18273 high = (HOST_WIDE_INT) low >> 32;
18274 low &= 0xffffffff;
18275#endif
18276 }
a9098fd0 18277
13ded975
DE
18278 if (TARGET_64BIT)
18279 {
18280 if (TARGET_MINIMAL_TOC)
2bfcf297 18281 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18282 else
2f0552b6
AM
18283 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18284 (long) high & 0xffffffff, (long) low & 0xffffffff);
18285 fprintf (file, "0x%lx%08lx\n",
18286 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18287 return;
18288 }
1875cc88 18289 else
13ded975 18290 {
02a4ec28
FS
18291 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18292 {
18293 if (TARGET_MINIMAL_TOC)
2bfcf297 18294 fputs ("\t.long ", file);
02a4ec28 18295 else
2bfcf297 18296 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18297 (long) high & 0xffffffff, (long) low & 0xffffffff);
18298 fprintf (file, "0x%lx,0x%lx\n",
18299 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18300 }
13ded975 18301 else
02a4ec28
FS
18302 {
18303 if (TARGET_MINIMAL_TOC)
2bfcf297 18304 fputs ("\t.long ", file);
02a4ec28 18305 else
2f0552b6
AM
18306 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18307 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18308 }
13ded975
DE
18309 return;
18310 }
9878760c
RK
18311 }
18312
18313 if (GET_CODE (x) == CONST)
18314 {
37409796 18315 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18316
9878760c
RK
18317 base = XEXP (XEXP (x, 0), 0);
18318 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18319 }
f676971a 18320
37409796
NS
18321 switch (GET_CODE (base))
18322 {
18323 case SYMBOL_REF:
18324 name = XSTR (base, 0);
18325 break;
18326
18327 case LABEL_REF:
18328 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18329 CODE_LABEL_NUMBER (XEXP (base, 0)));
18330 break;
18331
18332 case CODE_LABEL:
18333 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18334 break;
18335
18336 default:
18337 gcc_unreachable ();
18338 }
9878760c 18339
772c5265 18340 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 18341 if (TARGET_MINIMAL_TOC)
2bfcf297 18342 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18343 else
18344 {
b6c9286a 18345 fprintf (file, "\t.tc %s", real_name);
9878760c 18346
1875cc88 18347 if (offset < 0)
16fdeb48 18348 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18349 else if (offset)
16fdeb48 18350 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18351
19d2d16f 18352 fputs ("[TC],", file);
1875cc88 18353 }
581bc4de
MM
18354
18355 /* Currently C++ toc references to vtables can be emitted before it
18356 is decided whether the vtable is public or private. If this is
18357 the case, then the linker will eventually complain that there is
18358 a TOC reference to an unknown section. Thus, for vtables only,
18359 we emit the TOC reference to reference the symbol and not the
18360 section. */
28e510bd 18361 if (VTABLE_NAME_P (name))
581bc4de 18362 {
54ee9799 18363 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18364 if (offset < 0)
16fdeb48 18365 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18366 else if (offset > 0)
16fdeb48 18367 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18368 }
18369 else
18370 output_addr_const (file, x);
19d2d16f 18371 putc ('\n', file);
9878760c
RK
18372}
18373\f
18374/* Output an assembler pseudo-op to write an ASCII string of N characters
18375 starting at P to FILE.
18376
18377 On the RS/6000, we have to do this using the .byte operation and
18378 write out special characters outside the quoted string.
18379 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18380 so we must artificially break them up early. */
9878760c
RK
18381
18382void
a2369ed3 18383output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18384{
18385 char c;
18386 int i, count_string;
d330fd93
KG
18387 const char *for_string = "\t.byte \"";
18388 const char *for_decimal = "\t.byte ";
18389 const char *to_close = NULL;
9878760c
RK
18390
18391 count_string = 0;
18392 for (i = 0; i < n; i++)
18393 {
18394 c = *p++;
18395 if (c >= ' ' && c < 0177)
18396 {
18397 if (for_string)
18398 fputs (for_string, file);
18399 putc (c, file);
18400
18401 /* Write two quotes to get one. */
18402 if (c == '"')
18403 {
18404 putc (c, file);
18405 ++count_string;
18406 }
18407
18408 for_string = NULL;
18409 for_decimal = "\"\n\t.byte ";
18410 to_close = "\"\n";
18411 ++count_string;
18412
18413 if (count_string >= 512)
18414 {
18415 fputs (to_close, file);
18416
18417 for_string = "\t.byte \"";
18418 for_decimal = "\t.byte ";
18419 to_close = NULL;
18420 count_string = 0;
18421 }
18422 }
18423 else
18424 {
18425 if (for_decimal)
18426 fputs (for_decimal, file);
18427 fprintf (file, "%d", c);
18428
18429 for_string = "\n\t.byte \"";
18430 for_decimal = ", ";
18431 to_close = "\n";
18432 count_string = 0;
18433 }
18434 }
18435
18436 /* Now close the string if we have written one. Then end the line. */
18437 if (to_close)
9ebbca7d 18438 fputs (to_close, file);
9878760c
RK
18439}
18440\f
18441/* Generate a unique section name for FILENAME for a section type
18442 represented by SECTION_DESC. Output goes into BUF.
18443
18444 SECTION_DESC can be any string, as long as it is different for each
18445 possible section type.
18446
18447 We name the section in the same manner as xlc. The name begins with an
18448 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18449 names) with the last period replaced by the string SECTION_DESC. If
18450 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18451 the name. */
9878760c
RK
18452
18453void
f676971a 18454rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18455 const char *section_desc)
9878760c 18456{
9ebbca7d 18457 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18458 char *p;
18459 int len;
9878760c
RK
18460
18461 after_last_slash = filename;
18462 for (q = filename; *q; q++)
11e5fe42
RK
18463 {
18464 if (*q == '/')
18465 after_last_slash = q + 1;
18466 else if (*q == '.')
18467 last_period = q;
18468 }
9878760c 18469
11e5fe42 18470 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18471 *buf = (char *) xmalloc (len);
9878760c
RK
18472
18473 p = *buf;
18474 *p++ = '_';
18475
18476 for (q = after_last_slash; *q; q++)
18477 {
11e5fe42 18478 if (q == last_period)
c4ad648e 18479 {
9878760c
RK
18480 strcpy (p, section_desc);
18481 p += strlen (section_desc);
e3981aab 18482 break;
c4ad648e 18483 }
9878760c 18484
e9a780ec 18485 else if (ISALNUM (*q))
c4ad648e 18486 *p++ = *q;
9878760c
RK
18487 }
18488
11e5fe42 18489 if (last_period == 0)
9878760c
RK
18490 strcpy (p, section_desc);
18491 else
18492 *p = '\0';
18493}
e165f3f0 18494\f
a4f6c312 18495/* Emit profile function. */
411707f4 18496
411707f4 18497void
a2369ed3 18498output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18499{
858081ad
AH
18500 /* Non-standard profiling for kernels, which just saves LR then calls
18501 _mcount without worrying about arg saves. The idea is to change
18502 the function prologue as little as possible as it isn't easy to
18503 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18504 if (TARGET_PROFILE_KERNEL)
18505 return;
18506
8480e480
CC
18507 if (DEFAULT_ABI == ABI_AIX)
18508 {
9739c90c
JJ
18509#ifndef NO_PROFILE_COUNTERS
18510# define NO_PROFILE_COUNTERS 0
18511#endif
f676971a 18512 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
18513 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18514 else
18515 {
18516 char buf[30];
18517 const char *label_name;
18518 rtx fun;
411707f4 18519
9739c90c
JJ
18520 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18521 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18522 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18523
9739c90c
JJ
18524 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18525 fun, Pmode);
18526 }
8480e480 18527 }
ee890fe2
SS
18528 else if (DEFAULT_ABI == ABI_DARWIN)
18529 {
d5fa86ba 18530 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18531 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18532
18533 /* Be conservative and always set this, at least for now. */
e3b5732b 18534 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18535
18536#if TARGET_MACHO
18537 /* For PIC code, set up a stub and collect the caller's address
18538 from r0, which is where the prologue puts it. */
11abc112 18539 if (MACHOPIC_INDIRECT
e3b5732b 18540 && crtl->uses_pic_offset_table)
11abc112 18541 caller_addr_regno = 0;
ee890fe2
SS
18542#endif
18543 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18544 0, VOIDmode, 1,
18545 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18546 }
411707f4
CC
18547}
18548
a4f6c312 18549/* Write function profiler code. */
e165f3f0
RK
18550
18551void
a2369ed3 18552output_function_profiler (FILE *file, int labelno)
e165f3f0 18553{
3daf36a4 18554 char buf[100];
e165f3f0 18555
38c1f2d7 18556 switch (DEFAULT_ABI)
3daf36a4 18557 {
38c1f2d7 18558 default:
37409796 18559 gcc_unreachable ();
38c1f2d7
MM
18560
18561 case ABI_V4:
09eeeacb
AM
18562 if (!TARGET_32BIT)
18563 {
d4ee4d25 18564 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18565 return;
18566 }
ffcfcb5f 18567 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18568 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18569 if (NO_PROFILE_COUNTERS)
18570 {
18571 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18572 reg_names[0], reg_names[1]);
18573 }
18574 else if (TARGET_SECURE_PLT && flag_pic)
18575 {
18576 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18577 reg_names[0], reg_names[1]);
18578 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18579 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18580 reg_names[12], reg_names[12]);
18581 assemble_name (file, buf);
18582 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18583 assemble_name (file, buf);
18584 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18585 }
18586 else if (flag_pic == 1)
38c1f2d7 18587 {
dfdfa60f 18588 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18589 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18590 reg_names[0], reg_names[1]);
17167fd8 18591 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18592 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18593 assemble_name (file, buf);
17167fd8 18594 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18595 }
9ebbca7d 18596 else if (flag_pic > 1)
38c1f2d7 18597 {
71625f3d
AM
18598 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18599 reg_names[0], reg_names[1]);
9ebbca7d 18600 /* Now, we need to get the address of the label. */
71625f3d 18601 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18602 assemble_name (file, buf);
9ebbca7d
GK
18603 fputs ("-.\n1:", file);
18604 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18605 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18606 reg_names[0], reg_names[11]);
18607 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18608 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18609 }
38c1f2d7
MM
18610 else
18611 {
17167fd8 18612 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18613 assemble_name (file, buf);
dfdfa60f 18614 fputs ("@ha\n", file);
71625f3d
AM
18615 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18616 reg_names[0], reg_names[1]);
a260abc9 18617 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18618 assemble_name (file, buf);
17167fd8 18619 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18620 }
18621
50d440bc 18622 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18623 fprintf (file, "\tbl %s%s\n",
18624 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18625 break;
18626
18627 case ABI_AIX:
ee890fe2 18628 case ABI_DARWIN:
ffcfcb5f
AM
18629 if (!TARGET_PROFILE_KERNEL)
18630 {
a3c9585f 18631 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18632 }
18633 else
18634 {
37409796 18635 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18636
18637 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18638 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18639
6de9cd9a 18640 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18641 {
18642 asm_fprintf (file, "\tstd %s,24(%s)\n",
18643 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18644 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18645 asm_fprintf (file, "\tld %s,24(%s)\n",
18646 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18647 }
18648 else
18649 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18650 }
38c1f2d7
MM
18651 break;
18652 }
e165f3f0 18653}
a251ffd0 18654
b54cf83a 18655\f
44cd321e
PS
18656
18657/* The following variable value is the last issued insn. */
18658
18659static rtx last_scheduled_insn;
18660
18661/* The following variable helps to balance issuing of load and
18662 store instructions */
18663
18664static int load_store_pendulum;
18665
b54cf83a
DE
18666/* Power4 load update and store update instructions are cracked into a
18667 load or store and an integer insn which are executed in the same cycle.
18668 Branches have their own dispatch slot which does not count against the
18669 GCC issue rate, but it changes the program flow so there are no other
18670 instructions to issue in this cycle. */
18671
18672static int
f676971a
EC
18673rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18674 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18675 rtx insn, int more)
b54cf83a 18676{
44cd321e 18677 last_scheduled_insn = insn;
b54cf83a
DE
18678 if (GET_CODE (PATTERN (insn)) == USE
18679 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18680 {
18681 cached_can_issue_more = more;
18682 return cached_can_issue_more;
18683 }
18684
18685 if (insn_terminates_group_p (insn, current_group))
18686 {
18687 cached_can_issue_more = 0;
18688 return cached_can_issue_more;
18689 }
b54cf83a 18690
d296e02e
AP
18691 /* If no reservation, but reach here */
18692 if (recog_memoized (insn) < 0)
18693 return more;
18694
ec507f2d 18695 if (rs6000_sched_groups)
b54cf83a 18696 {
cbe26ab8 18697 if (is_microcoded_insn (insn))
44cd321e 18698 cached_can_issue_more = 0;
cbe26ab8 18699 else if (is_cracked_insn (insn))
44cd321e
PS
18700 cached_can_issue_more = more > 2 ? more - 2 : 0;
18701 else
18702 cached_can_issue_more = more - 1;
18703
18704 return cached_can_issue_more;
b54cf83a 18705 }
165b263e 18706
d296e02e
AP
18707 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18708 return 0;
18709
44cd321e
PS
18710 cached_can_issue_more = more - 1;
18711 return cached_can_issue_more;
b54cf83a
DE
18712}
18713
a251ffd0
TG
18714/* Adjust the cost of a scheduling dependency. Return the new cost of
18715 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18716
c237e94a 18717static int
0a4f0294 18718rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18719{
44cd321e 18720 enum attr_type attr_type;
a251ffd0 18721
44cd321e 18722 if (! recog_memoized (insn))
a251ffd0
TG
18723 return 0;
18724
44cd321e 18725 switch (REG_NOTE_KIND (link))
a251ffd0 18726 {
44cd321e
PS
18727 case REG_DEP_TRUE:
18728 {
18729 /* Data dependency; DEP_INSN writes a register that INSN reads
18730 some cycles later. */
18731
18732 /* Separate a load from a narrower, dependent store. */
18733 if (rs6000_sched_groups
18734 && GET_CODE (PATTERN (insn)) == SET
18735 && GET_CODE (PATTERN (dep_insn)) == SET
18736 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18737 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18738 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18739 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18740 return cost + 14;
18741
18742 attr_type = get_attr_type (insn);
18743
18744 switch (attr_type)
18745 {
18746 case TYPE_JMPREG:
18747 /* Tell the first scheduling pass about the latency between
18748 a mtctr and bctr (and mtlr and br/blr). The first
18749 scheduling pass will not know about this latency since
18750 the mtctr instruction, which has the latency associated
18751 to it, will be generated by reload. */
18752 return TARGET_POWER ? 5 : 4;
18753 case TYPE_BRANCH:
18754 /* Leave some extra cycles between a compare and its
18755 dependent branch, to inhibit expensive mispredicts. */
18756 if ((rs6000_cpu_attr == CPU_PPC603
18757 || rs6000_cpu_attr == CPU_PPC604
18758 || rs6000_cpu_attr == CPU_PPC604E
18759 || rs6000_cpu_attr == CPU_PPC620
18760 || rs6000_cpu_attr == CPU_PPC630
18761 || rs6000_cpu_attr == CPU_PPC750
18762 || rs6000_cpu_attr == CPU_PPC7400
18763 || rs6000_cpu_attr == CPU_PPC7450
18764 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18765 || rs6000_cpu_attr == CPU_POWER5
18766 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18767 && recog_memoized (dep_insn)
18768 && (INSN_CODE (dep_insn) >= 0))
982afe02 18769
44cd321e
PS
18770 switch (get_attr_type (dep_insn))
18771 {
18772 case TYPE_CMP:
18773 case TYPE_COMPARE:
18774 case TYPE_DELAYED_COMPARE:
18775 case TYPE_IMUL_COMPARE:
18776 case TYPE_LMUL_COMPARE:
18777 case TYPE_FPCOMPARE:
18778 case TYPE_CR_LOGICAL:
18779 case TYPE_DELAYED_CR:
18780 return cost + 2;
18781 default:
18782 break;
18783 }
18784 break;
18785
18786 case TYPE_STORE:
18787 case TYPE_STORE_U:
18788 case TYPE_STORE_UX:
18789 case TYPE_FPSTORE:
18790 case TYPE_FPSTORE_U:
18791 case TYPE_FPSTORE_UX:
18792 if ((rs6000_cpu == PROCESSOR_POWER6)
18793 && recog_memoized (dep_insn)
18794 && (INSN_CODE (dep_insn) >= 0))
18795 {
18796
18797 if (GET_CODE (PATTERN (insn)) != SET)
18798 /* If this happens, we have to extend this to schedule
18799 optimally. Return default for now. */
18800 return cost;
18801
18802 /* Adjust the cost for the case where the value written
18803 by a fixed point operation is used as the address
18804 gen value on a store. */
18805 switch (get_attr_type (dep_insn))
18806 {
18807 case TYPE_LOAD:
18808 case TYPE_LOAD_U:
18809 case TYPE_LOAD_UX:
18810 case TYPE_CNTLZ:
18811 {
18812 if (! store_data_bypass_p (dep_insn, insn))
18813 return 4;
18814 break;
18815 }
18816 case TYPE_LOAD_EXT:
18817 case TYPE_LOAD_EXT_U:
18818 case TYPE_LOAD_EXT_UX:
18819 case TYPE_VAR_SHIFT_ROTATE:
18820 case TYPE_VAR_DELAYED_COMPARE:
18821 {
18822 if (! store_data_bypass_p (dep_insn, insn))
18823 return 6;
18824 break;
18825 }
18826 case TYPE_INTEGER:
18827 case TYPE_COMPARE:
18828 case TYPE_FAST_COMPARE:
18829 case TYPE_EXTS:
18830 case TYPE_SHIFT:
18831 case TYPE_INSERT_WORD:
18832 case TYPE_INSERT_DWORD:
18833 case TYPE_FPLOAD_U:
18834 case TYPE_FPLOAD_UX:
18835 case TYPE_STORE_U:
18836 case TYPE_STORE_UX:
18837 case TYPE_FPSTORE_U:
18838 case TYPE_FPSTORE_UX:
18839 {
18840 if (! store_data_bypass_p (dep_insn, insn))
18841 return 3;
18842 break;
18843 }
18844 case TYPE_IMUL:
18845 case TYPE_IMUL2:
18846 case TYPE_IMUL3:
18847 case TYPE_LMUL:
18848 case TYPE_IMUL_COMPARE:
18849 case TYPE_LMUL_COMPARE:
18850 {
18851 if (! store_data_bypass_p (dep_insn, insn))
18852 return 17;
18853 break;
18854 }
18855 case TYPE_IDIV:
18856 {
18857 if (! store_data_bypass_p (dep_insn, insn))
18858 return 45;
18859 break;
18860 }
18861 case TYPE_LDIV:
18862 {
18863 if (! store_data_bypass_p (dep_insn, insn))
18864 return 57;
18865 break;
18866 }
18867 default:
18868 break;
18869 }
18870 }
18871 break;
18872
18873 case TYPE_LOAD:
18874 case TYPE_LOAD_U:
18875 case TYPE_LOAD_UX:
18876 case TYPE_LOAD_EXT:
18877 case TYPE_LOAD_EXT_U:
18878 case TYPE_LOAD_EXT_UX:
18879 if ((rs6000_cpu == PROCESSOR_POWER6)
18880 && recog_memoized (dep_insn)
18881 && (INSN_CODE (dep_insn) >= 0))
18882 {
18883
18884 /* Adjust the cost for the case where the value written
18885 by a fixed point instruction is used within the address
18886 gen portion of a subsequent load(u)(x) */
18887 switch (get_attr_type (dep_insn))
18888 {
18889 case TYPE_LOAD:
18890 case TYPE_LOAD_U:
18891 case TYPE_LOAD_UX:
18892 case TYPE_CNTLZ:
18893 {
18894 if (set_to_load_agen (dep_insn, insn))
18895 return 4;
18896 break;
18897 }
18898 case TYPE_LOAD_EXT:
18899 case TYPE_LOAD_EXT_U:
18900 case TYPE_LOAD_EXT_UX:
18901 case TYPE_VAR_SHIFT_ROTATE:
18902 case TYPE_VAR_DELAYED_COMPARE:
18903 {
18904 if (set_to_load_agen (dep_insn, insn))
18905 return 6;
18906 break;
18907 }
18908 case TYPE_INTEGER:
18909 case TYPE_COMPARE:
18910 case TYPE_FAST_COMPARE:
18911 case TYPE_EXTS:
18912 case TYPE_SHIFT:
18913 case TYPE_INSERT_WORD:
18914 case TYPE_INSERT_DWORD:
18915 case TYPE_FPLOAD_U:
18916 case TYPE_FPLOAD_UX:
18917 case TYPE_STORE_U:
18918 case TYPE_STORE_UX:
18919 case TYPE_FPSTORE_U:
18920 case TYPE_FPSTORE_UX:
18921 {
18922 if (set_to_load_agen (dep_insn, insn))
18923 return 3;
18924 break;
18925 }
18926 case TYPE_IMUL:
18927 case TYPE_IMUL2:
18928 case TYPE_IMUL3:
18929 case TYPE_LMUL:
18930 case TYPE_IMUL_COMPARE:
18931 case TYPE_LMUL_COMPARE:
18932 {
18933 if (set_to_load_agen (dep_insn, insn))
18934 return 17;
18935 break;
18936 }
18937 case TYPE_IDIV:
18938 {
18939 if (set_to_load_agen (dep_insn, insn))
18940 return 45;
18941 break;
18942 }
18943 case TYPE_LDIV:
18944 {
18945 if (set_to_load_agen (dep_insn, insn))
18946 return 57;
18947 break;
18948 }
18949 default:
18950 break;
18951 }
18952 }
18953 break;
18954
18955 case TYPE_FPLOAD:
18956 if ((rs6000_cpu == PROCESSOR_POWER6)
18957 && recog_memoized (dep_insn)
18958 && (INSN_CODE (dep_insn) >= 0)
18959 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18960 return 2;
18961
18962 default:
18963 break;
18964 }
c9dbf840 18965
a251ffd0 18966 /* Fall out to return default cost. */
44cd321e
PS
18967 }
18968 break;
18969
18970 case REG_DEP_OUTPUT:
18971 /* Output dependency; DEP_INSN writes a register that INSN writes some
18972 cycles later. */
18973 if ((rs6000_cpu == PROCESSOR_POWER6)
18974 && recog_memoized (dep_insn)
18975 && (INSN_CODE (dep_insn) >= 0))
18976 {
18977 attr_type = get_attr_type (insn);
18978
18979 switch (attr_type)
18980 {
18981 case TYPE_FP:
18982 if (get_attr_type (dep_insn) == TYPE_FP)
18983 return 1;
18984 break;
18985 case TYPE_FPLOAD:
18986 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18987 return 2;
18988 break;
18989 default:
18990 break;
18991 }
18992 }
18993 case REG_DEP_ANTI:
18994 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18995 cycles later. */
18996 return 0;
18997
18998 default:
18999 gcc_unreachable ();
a251ffd0
TG
19000 }
19001
19002 return cost;
19003}
b6c9286a 19004
cbe26ab8 19005/* The function returns a true if INSN is microcoded.
839a4992 19006 Return false otherwise. */
cbe26ab8
DN
19007
19008static bool
19009is_microcoded_insn (rtx insn)
19010{
19011 if (!insn || !INSN_P (insn)
19012 || GET_CODE (PATTERN (insn)) == USE
19013 || GET_CODE (PATTERN (insn)) == CLOBBER)
19014 return false;
19015
d296e02e
AP
19016 if (rs6000_cpu_attr == CPU_CELL)
19017 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19018
ec507f2d 19019 if (rs6000_sched_groups)
cbe26ab8
DN
19020 {
19021 enum attr_type type = get_attr_type (insn);
19022 if (type == TYPE_LOAD_EXT_U
19023 || type == TYPE_LOAD_EXT_UX
19024 || type == TYPE_LOAD_UX
19025 || type == TYPE_STORE_UX
19026 || type == TYPE_MFCR)
c4ad648e 19027 return true;
cbe26ab8
DN
19028 }
19029
19030 return false;
19031}
19032
cbe26ab8
DN
19033/* The function returns true if INSN is cracked into 2 instructions
19034 by the processor (and therefore occupies 2 issue slots). */
19035
19036static bool
19037is_cracked_insn (rtx insn)
19038{
19039 if (!insn || !INSN_P (insn)
19040 || GET_CODE (PATTERN (insn)) == USE
19041 || GET_CODE (PATTERN (insn)) == CLOBBER)
19042 return false;
19043
ec507f2d 19044 if (rs6000_sched_groups)
cbe26ab8
DN
19045 {
19046 enum attr_type type = get_attr_type (insn);
19047 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
19048 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19049 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19050 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19051 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19052 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19053 || type == TYPE_IDIV || type == TYPE_LDIV
19054 || type == TYPE_INSERT_WORD)
19055 return true;
cbe26ab8
DN
19056 }
19057
19058 return false;
19059}
19060
19061/* The function returns true if INSN can be issued only from
a3c9585f 19062 the branch slot. */
cbe26ab8
DN
19063
19064static bool
19065is_branch_slot_insn (rtx insn)
19066{
19067 if (!insn || !INSN_P (insn)
19068 || GET_CODE (PATTERN (insn)) == USE
19069 || GET_CODE (PATTERN (insn)) == CLOBBER)
19070 return false;
19071
ec507f2d 19072 if (rs6000_sched_groups)
cbe26ab8
DN
19073 {
19074 enum attr_type type = get_attr_type (insn);
19075 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 19076 return true;
cbe26ab8
DN
19077 return false;
19078 }
19079
19080 return false;
19081}
79ae11c4 19082
44cd321e
PS
19083/* The function returns true if out_inst sets a value that is
19084 used in the address generation computation of in_insn */
19085static bool
19086set_to_load_agen (rtx out_insn, rtx in_insn)
19087{
19088 rtx out_set, in_set;
19089
19090 /* For performance reasons, only handle the simple case where
19091 both loads are a single_set. */
19092 out_set = single_set (out_insn);
19093 if (out_set)
19094 {
19095 in_set = single_set (in_insn);
19096 if (in_set)
19097 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19098 }
19099
19100 return false;
19101}
19102
19103/* The function returns true if the target storage location of
19104 out_insn is adjacent to the target storage location of in_insn */
19105/* Return 1 if memory locations are adjacent. */
19106
19107static bool
19108adjacent_mem_locations (rtx insn1, rtx insn2)
19109{
19110
e3a0e200
PB
19111 rtx a = get_store_dest (PATTERN (insn1));
19112 rtx b = get_store_dest (PATTERN (insn2));
19113
44cd321e
PS
19114 if ((GET_CODE (XEXP (a, 0)) == REG
19115 || (GET_CODE (XEXP (a, 0)) == PLUS
19116 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19117 && (GET_CODE (XEXP (b, 0)) == REG
19118 || (GET_CODE (XEXP (b, 0)) == PLUS
19119 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19120 {
f98e8938 19121 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 19122 rtx reg0, reg1;
44cd321e
PS
19123
19124 if (GET_CODE (XEXP (a, 0)) == PLUS)
19125 {
19126 reg0 = XEXP (XEXP (a, 0), 0);
19127 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19128 }
19129 else
19130 reg0 = XEXP (a, 0);
19131
19132 if (GET_CODE (XEXP (b, 0)) == PLUS)
19133 {
19134 reg1 = XEXP (XEXP (b, 0), 0);
19135 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19136 }
19137 else
19138 reg1 = XEXP (b, 0);
19139
19140 val_diff = val1 - val0;
19141
19142 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
19143 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19144 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
19145 }
19146
19147 return false;
19148}
19149
a4f6c312 19150/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
19151 priority INSN_PRIORITY (INSN). Increase the priority to execute the
19152 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
19153 define this macro if you do not need to adjust the scheduling
19154 priorities of insns. */
bef84347 19155
c237e94a 19156static int
a2369ed3 19157rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19158{
a4f6c312
SS
19159 /* On machines (like the 750) which have asymmetric integer units,
19160 where one integer unit can do multiply and divides and the other
19161 can't, reduce the priority of multiply/divide so it is scheduled
19162 before other integer operations. */
bef84347
VM
19163
19164#if 0
2c3c49de 19165 if (! INSN_P (insn))
bef84347
VM
19166 return priority;
19167
19168 if (GET_CODE (PATTERN (insn)) == USE)
19169 return priority;
19170
19171 switch (rs6000_cpu_attr) {
19172 case CPU_PPC750:
19173 switch (get_attr_type (insn))
19174 {
19175 default:
19176 break;
19177
19178 case TYPE_IMUL:
19179 case TYPE_IDIV:
3cb999d8
DE
19180 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19181 priority, priority);
bef84347
VM
19182 if (priority >= 0 && priority < 0x01000000)
19183 priority >>= 3;
19184 break;
19185 }
19186 }
19187#endif
19188
44cd321e 19189 if (insn_must_be_first_in_group (insn)
79ae11c4 19190 && reload_completed
f676971a 19191 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19192 && rs6000_sched_restricted_insns_priority)
19193 {
19194
c4ad648e
AM
19195 /* Prioritize insns that can be dispatched only in the first
19196 dispatch slot. */
79ae11c4 19197 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19198 /* Attach highest priority to insn. This means that in
19199 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19200 precede 'priority' (critical path) considerations. */
f676971a 19201 return current_sched_info->sched_max_insns_priority;
79ae11c4 19202 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19203 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19204 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19205 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19206 return (priority + 1);
19207 }
79ae11c4 19208
44cd321e
PS
19209 if (rs6000_cpu == PROCESSOR_POWER6
19210 && ((load_store_pendulum == -2 && is_load_insn (insn))
19211 || (load_store_pendulum == 2 && is_store_insn (insn))))
19212 /* Attach highest priority to insn if the scheduler has just issued two
19213 stores and this instruction is a load, or two loads and this instruction
19214 is a store. Power6 wants loads and stores scheduled alternately
19215 when possible */
19216 return current_sched_info->sched_max_insns_priority;
19217
bef84347
VM
19218 return priority;
19219}
19220
d296e02e
AP
19221/* Return true if the instruction is nonpipelined on the Cell. */
19222static bool
19223is_nonpipeline_insn (rtx insn)
19224{
19225 enum attr_type type;
19226 if (!insn || !INSN_P (insn)
19227 || GET_CODE (PATTERN (insn)) == USE
19228 || GET_CODE (PATTERN (insn)) == CLOBBER)
19229 return false;
19230
19231 type = get_attr_type (insn);
19232 if (type == TYPE_IMUL
19233 || type == TYPE_IMUL2
19234 || type == TYPE_IMUL3
19235 || type == TYPE_LMUL
19236 || type == TYPE_IDIV
19237 || type == TYPE_LDIV
19238 || type == TYPE_SDIV
19239 || type == TYPE_DDIV
19240 || type == TYPE_SSQRT
19241 || type == TYPE_DSQRT
19242 || type == TYPE_MFCR
19243 || type == TYPE_MFCRF
19244 || type == TYPE_MFJMPR)
19245 {
19246 return true;
19247 }
19248 return false;
19249}
19250
19251
a4f6c312
SS
19252/* Return how many instructions the machine can issue per cycle. */
19253
c237e94a 19254static int
863d938c 19255rs6000_issue_rate (void)
b6c9286a 19256{
3317bab1
DE
19257 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19258 if (!reload_completed)
19259 return 1;
19260
b6c9286a 19261 switch (rs6000_cpu_attr) {
3cb999d8
DE
19262 case CPU_RIOS1: /* ? */
19263 case CPU_RS64A:
19264 case CPU_PPC601: /* ? */
ed947a96 19265 case CPU_PPC7450:
3cb999d8 19266 return 3;
b54cf83a 19267 case CPU_PPC440:
b6c9286a 19268 case CPU_PPC603:
bef84347 19269 case CPU_PPC750:
ed947a96 19270 case CPU_PPC7400:
be12c2b0 19271 case CPU_PPC8540:
d296e02e 19272 case CPU_CELL:
fa41c305
EW
19273 case CPU_PPCE300C2:
19274 case CPU_PPCE300C3:
edae5fe3 19275 case CPU_PPCE500MC:
f676971a 19276 return 2;
3cb999d8 19277 case CPU_RIOS2:
b6c9286a 19278 case CPU_PPC604:
19684119 19279 case CPU_PPC604E:
b6c9286a 19280 case CPU_PPC620:
3cb999d8 19281 case CPU_PPC630:
b6c9286a 19282 return 4;
cbe26ab8 19283 case CPU_POWER4:
ec507f2d 19284 case CPU_POWER5:
44cd321e 19285 case CPU_POWER6:
cbe26ab8 19286 return 5;
b6c9286a
MM
19287 default:
19288 return 1;
19289 }
19290}
19291
be12c2b0
VM
19292/* Return how many instructions to look ahead for better insn
19293 scheduling. */
19294
19295static int
863d938c 19296rs6000_use_sched_lookahead (void)
be12c2b0
VM
19297{
19298 if (rs6000_cpu_attr == CPU_PPC8540)
19299 return 4;
d296e02e
AP
19300 if (rs6000_cpu_attr == CPU_CELL)
19301 return (reload_completed ? 8 : 0);
be12c2b0
VM
19302 return 0;
19303}
19304
d296e02e
AP
19305/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19306static int
19307rs6000_use_sched_lookahead_guard (rtx insn)
19308{
19309 if (rs6000_cpu_attr != CPU_CELL)
19310 return 1;
19311
19312 if (insn == NULL_RTX || !INSN_P (insn))
19313 abort ();
982afe02 19314
d296e02e
AP
19315 if (!reload_completed
19316 || is_nonpipeline_insn (insn)
19317 || is_microcoded_insn (insn))
19318 return 0;
19319
19320 return 1;
19321}
19322
569fa502
DN
19323/* Determine is PAT refers to memory. */
19324
19325static bool
19326is_mem_ref (rtx pat)
19327{
19328 const char * fmt;
19329 int i, j;
19330 bool ret = false;
19331
1de59bbd
DE
19332 /* stack_tie does not produce any real memory traffic. */
19333 if (GET_CODE (pat) == UNSPEC
19334 && XINT (pat, 1) == UNSPEC_TIE)
19335 return false;
19336
569fa502
DN
19337 if (GET_CODE (pat) == MEM)
19338 return true;
19339
19340 /* Recursively process the pattern. */
19341 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19342
19343 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19344 {
19345 if (fmt[i] == 'e')
19346 ret |= is_mem_ref (XEXP (pat, i));
19347 else if (fmt[i] == 'E')
19348 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19349 ret |= is_mem_ref (XVECEXP (pat, i, j));
19350 }
19351
19352 return ret;
19353}
19354
19355/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19356
569fa502
DN
19357static bool
19358is_load_insn1 (rtx pat)
19359{
19360 if (!pat || pat == NULL_RTX)
19361 return false;
19362
19363 if (GET_CODE (pat) == SET)
19364 return is_mem_ref (SET_SRC (pat));
19365
19366 if (GET_CODE (pat) == PARALLEL)
19367 {
19368 int i;
19369
19370 for (i = 0; i < XVECLEN (pat, 0); i++)
19371 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19372 return true;
19373 }
19374
19375 return false;
19376}
19377
19378/* Determine if INSN loads from memory. */
19379
19380static bool
19381is_load_insn (rtx insn)
19382{
19383 if (!insn || !INSN_P (insn))
19384 return false;
19385
19386 if (GET_CODE (insn) == CALL_INSN)
19387 return false;
19388
19389 return is_load_insn1 (PATTERN (insn));
19390}
19391
19392/* Determine if PAT is a PATTERN of a store insn. */
19393
19394static bool
19395is_store_insn1 (rtx pat)
19396{
19397 if (!pat || pat == NULL_RTX)
19398 return false;
19399
19400 if (GET_CODE (pat) == SET)
19401 return is_mem_ref (SET_DEST (pat));
19402
19403 if (GET_CODE (pat) == PARALLEL)
19404 {
19405 int i;
19406
19407 for (i = 0; i < XVECLEN (pat, 0); i++)
19408 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19409 return true;
19410 }
19411
19412 return false;
19413}
19414
19415/* Determine if INSN stores to memory. */
19416
19417static bool
19418is_store_insn (rtx insn)
19419{
19420 if (!insn || !INSN_P (insn))
19421 return false;
19422
19423 return is_store_insn1 (PATTERN (insn));
19424}
19425
e3a0e200
PB
19426/* Return the dest of a store insn. */
19427
19428static rtx
19429get_store_dest (rtx pat)
19430{
19431 gcc_assert (is_store_insn1 (pat));
19432
19433 if (GET_CODE (pat) == SET)
19434 return SET_DEST (pat);
19435 else if (GET_CODE (pat) == PARALLEL)
19436 {
19437 int i;
19438
19439 for (i = 0; i < XVECLEN (pat, 0); i++)
19440 {
19441 rtx inner_pat = XVECEXP (pat, 0, i);
19442 if (GET_CODE (inner_pat) == SET
19443 && is_mem_ref (SET_DEST (inner_pat)))
19444 return inner_pat;
19445 }
19446 }
19447 /* We shouldn't get here, because we should have either a simple
19448 store insn or a store with update which are covered above. */
19449 gcc_unreachable();
19450}
19451
569fa502
DN
19452/* Returns whether the dependence between INSN and NEXT is considered
19453 costly by the given target. */
19454
19455static bool
b198261f 19456rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19457{
b198261f
MK
19458 rtx insn;
19459 rtx next;
19460
aabcd309 19461 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19462 allow all dependent insns in the same group.
569fa502
DN
19463 This is the most aggressive option. */
19464 if (rs6000_sched_costly_dep == no_dep_costly)
19465 return false;
19466
f676971a 19467 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19468 do not allow dependent instructions in the same group.
19469 This is the most conservative option. */
19470 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19471 return true;
569fa502 19472
b198261f
MK
19473 insn = DEP_PRO (dep);
19474 next = DEP_CON (dep);
19475
f676971a
EC
19476 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19477 && is_load_insn (next)
569fa502
DN
19478 && is_store_insn (insn))
19479 /* Prevent load after store in the same group. */
19480 return true;
19481
19482 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19483 && is_load_insn (next)
569fa502 19484 && is_store_insn (insn)
e2f6ff94 19485 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19486 /* Prevent load after store in the same group if it is a true
19487 dependence. */
569fa502 19488 return true;
f676971a
EC
19489
19490 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19491 and will not be scheduled in the same group. */
19492 if (rs6000_sched_costly_dep <= max_dep_latency
19493 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19494 return true;
19495
19496 return false;
19497}
19498
f676971a 19499/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19500 skipping any "non-active" insns - insns that will not actually occupy
19501 an issue slot. Return NULL_RTX if such an insn is not found. */
19502
19503static rtx
19504get_next_active_insn (rtx insn, rtx tail)
19505{
f489aff8 19506 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19507 return NULL_RTX;
19508
f489aff8 19509 while (1)
cbe26ab8 19510 {
f489aff8
AM
19511 insn = NEXT_INSN (insn);
19512 if (insn == NULL_RTX || insn == tail)
19513 return NULL_RTX;
cbe26ab8 19514
f489aff8
AM
19515 if (CALL_P (insn)
19516 || JUMP_P (insn)
19517 || (NONJUMP_INSN_P (insn)
19518 && GET_CODE (PATTERN (insn)) != USE
19519 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19520 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19521 break;
19522 }
19523 return insn;
cbe26ab8
DN
19524}
19525
44cd321e
PS
19526/* We are about to begin issuing insns for this clock cycle. */
19527
19528static int
19529rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19530 rtx *ready ATTRIBUTE_UNUSED,
19531 int *pn_ready ATTRIBUTE_UNUSED,
19532 int clock_var ATTRIBUTE_UNUSED)
19533{
d296e02e
AP
19534 int n_ready = *pn_ready;
19535
44cd321e
PS
19536 if (sched_verbose)
19537 fprintf (dump, "// rs6000_sched_reorder :\n");
19538
d296e02e
AP
19539 /* Reorder the ready list, if the second to last ready insn
19540 is a nonepipeline insn. */
19541 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19542 {
19543 if (is_nonpipeline_insn (ready[n_ready - 1])
19544 && (recog_memoized (ready[n_ready - 2]) > 0))
19545 /* Simply swap first two insns. */
19546 {
19547 rtx tmp = ready[n_ready - 1];
19548 ready[n_ready - 1] = ready[n_ready - 2];
19549 ready[n_ready - 2] = tmp;
19550 }
19551 }
19552
44cd321e
PS
19553 if (rs6000_cpu == PROCESSOR_POWER6)
19554 load_store_pendulum = 0;
19555
19556 return rs6000_issue_rate ();
19557}
19558
19559/* Like rs6000_sched_reorder, but called after issuing each insn. */
19560
19561static int
19562rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19563 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19564{
19565 if (sched_verbose)
19566 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19567
19568 /* For Power6, we need to handle some special cases to try and keep the
19569 store queue from overflowing and triggering expensive flushes.
19570
19571 This code monitors how load and store instructions are being issued
19572 and skews the ready list one way or the other to increase the likelihood
19573 that a desired instruction is issued at the proper time.
19574
19575 A couple of things are done. First, we maintain a "load_store_pendulum"
19576 to track the current state of load/store issue.
19577
19578 - If the pendulum is at zero, then no loads or stores have been
19579 issued in the current cycle so we do nothing.
19580
19581 - If the pendulum is 1, then a single load has been issued in this
19582 cycle and we attempt to locate another load in the ready list to
19583 issue with it.
19584
2f8e468b 19585 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19586 issued in this cycle, so we increase the priority of the first load
19587 in the ready list to increase it's likelihood of being chosen first
19588 in the next cycle.
19589
19590 - If the pendulum is -1, then a single store has been issued in this
19591 cycle and we attempt to locate another store in the ready list to
19592 issue with it, preferring a store to an adjacent memory location to
19593 facilitate store pairing in the store queue.
19594
19595 - If the pendulum is 2, then two loads have already been
19596 issued in this cycle, so we increase the priority of the first store
19597 in the ready list to increase it's likelihood of being chosen first
19598 in the next cycle.
19599
19600 - If the pendulum < -2 or > 2, then do nothing.
19601
19602 Note: This code covers the most common scenarios. There exist non
19603 load/store instructions which make use of the LSU and which
19604 would need to be accounted for to strictly model the behavior
19605 of the machine. Those instructions are currently unaccounted
19606 for to help minimize compile time overhead of this code.
19607 */
19608 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19609 {
19610 int pos;
19611 int i;
19612 rtx tmp;
19613
19614 if (is_store_insn (last_scheduled_insn))
19615 /* Issuing a store, swing the load_store_pendulum to the left */
19616 load_store_pendulum--;
19617 else if (is_load_insn (last_scheduled_insn))
19618 /* Issuing a load, swing the load_store_pendulum to the right */
19619 load_store_pendulum++;
19620 else
19621 return cached_can_issue_more;
19622
19623 /* If the pendulum is balanced, or there is only one instruction on
19624 the ready list, then all is well, so return. */
19625 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19626 return cached_can_issue_more;
19627
19628 if (load_store_pendulum == 1)
19629 {
19630 /* A load has been issued in this cycle. Scan the ready list
19631 for another load to issue with it */
19632 pos = *pn_ready-1;
19633
19634 while (pos >= 0)
19635 {
19636 if (is_load_insn (ready[pos]))
19637 {
19638 /* Found a load. Move it to the head of the ready list,
19639 and adjust it's priority so that it is more likely to
19640 stay there */
19641 tmp = ready[pos];
19642 for (i=pos; i<*pn_ready-1; i++)
19643 ready[i] = ready[i + 1];
19644 ready[*pn_ready-1] = tmp;
e855c69d
AB
19645
19646 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19647 INSN_PRIORITY (tmp)++;
19648 break;
19649 }
19650 pos--;
19651 }
19652 }
19653 else if (load_store_pendulum == -2)
19654 {
19655 /* Two stores have been issued in this cycle. Increase the
19656 priority of the first load in the ready list to favor it for
19657 issuing in the next cycle. */
19658 pos = *pn_ready-1;
19659
19660 while (pos >= 0)
19661 {
19662 if (is_load_insn (ready[pos])
e855c69d
AB
19663 && !sel_sched_p ()
19664 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19665 {
19666 INSN_PRIORITY (ready[pos])++;
19667
19668 /* Adjust the pendulum to account for the fact that a load
19669 was found and increased in priority. This is to prevent
19670 increasing the priority of multiple loads */
19671 load_store_pendulum--;
19672
19673 break;
19674 }
19675 pos--;
19676 }
19677 }
19678 else if (load_store_pendulum == -1)
19679 {
19680 /* A store has been issued in this cycle. Scan the ready list for
19681 another store to issue with it, preferring a store to an adjacent
19682 memory location */
19683 int first_store_pos = -1;
19684
19685 pos = *pn_ready-1;
19686
19687 while (pos >= 0)
19688 {
19689 if (is_store_insn (ready[pos]))
19690 {
19691 /* Maintain the index of the first store found on the
19692 list */
19693 if (first_store_pos == -1)
19694 first_store_pos = pos;
19695
19696 if (is_store_insn (last_scheduled_insn)
19697 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19698 {
19699 /* Found an adjacent store. Move it to the head of the
19700 ready list, and adjust it's priority so that it is
19701 more likely to stay there */
19702 tmp = ready[pos];
19703 for (i=pos; i<*pn_ready-1; i++)
19704 ready[i] = ready[i + 1];
19705 ready[*pn_ready-1] = tmp;
e855c69d
AB
19706
19707 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19708 INSN_PRIORITY (tmp)++;
e855c69d 19709
44cd321e
PS
19710 first_store_pos = -1;
19711
19712 break;
19713 };
19714 }
19715 pos--;
19716 }
19717
19718 if (first_store_pos >= 0)
19719 {
19720 /* An adjacent store wasn't found, but a non-adjacent store was,
19721 so move the non-adjacent store to the front of the ready
19722 list, and adjust its priority so that it is more likely to
19723 stay there. */
19724 tmp = ready[first_store_pos];
19725 for (i=first_store_pos; i<*pn_ready-1; i++)
19726 ready[i] = ready[i + 1];
19727 ready[*pn_ready-1] = tmp;
e855c69d 19728 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19729 INSN_PRIORITY (tmp)++;
19730 }
19731 }
19732 else if (load_store_pendulum == 2)
19733 {
19734 /* Two loads have been issued in this cycle. Increase the priority
19735 of the first store in the ready list to favor it for issuing in
19736 the next cycle. */
19737 pos = *pn_ready-1;
19738
19739 while (pos >= 0)
19740 {
19741 if (is_store_insn (ready[pos])
e855c69d
AB
19742 && !sel_sched_p ()
19743 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19744 {
19745 INSN_PRIORITY (ready[pos])++;
19746
19747 /* Adjust the pendulum to account for the fact that a store
19748 was found and increased in priority. This is to prevent
19749 increasing the priority of multiple stores */
19750 load_store_pendulum++;
19751
19752 break;
19753 }
19754 pos--;
19755 }
19756 }
19757 }
19758
19759 return cached_can_issue_more;
19760}
19761
839a4992 19762/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19763 of group WHICH_GROUP.
19764
19765 If WHICH_GROUP == current_group, this function will return true if INSN
19766 causes the termination of the current group (i.e, the dispatch group to
19767 which INSN belongs). This means that INSN will be the last insn in the
19768 group it belongs to.
19769
19770 If WHICH_GROUP == previous_group, this function will return true if INSN
19771 causes the termination of the previous group (i.e, the dispatch group that
19772 precedes the group to which INSN belongs). This means that INSN will be
19773 the first insn in the group it belongs to). */
19774
19775static bool
19776insn_terminates_group_p (rtx insn, enum group_termination which_group)
19777{
44cd321e 19778 bool first, last;
cbe26ab8
DN
19779
19780 if (! insn)
19781 return false;
569fa502 19782
44cd321e
PS
19783 first = insn_must_be_first_in_group (insn);
19784 last = insn_must_be_last_in_group (insn);
cbe26ab8 19785
44cd321e 19786 if (first && last)
cbe26ab8
DN
19787 return true;
19788
19789 if (which_group == current_group)
44cd321e 19790 return last;
cbe26ab8 19791 else if (which_group == previous_group)
44cd321e
PS
19792 return first;
19793
19794 return false;
19795}
19796
19797
19798static bool
19799insn_must_be_first_in_group (rtx insn)
19800{
19801 enum attr_type type;
19802
19803 if (!insn
19804 || insn == NULL_RTX
19805 || GET_CODE (insn) == NOTE
19806 || GET_CODE (PATTERN (insn)) == USE
19807 || GET_CODE (PATTERN (insn)) == CLOBBER)
19808 return false;
19809
19810 switch (rs6000_cpu)
cbe26ab8 19811 {
44cd321e
PS
19812 case PROCESSOR_POWER5:
19813 if (is_cracked_insn (insn))
19814 return true;
19815 case PROCESSOR_POWER4:
19816 if (is_microcoded_insn (insn))
19817 return true;
19818
19819 if (!rs6000_sched_groups)
19820 return false;
19821
19822 type = get_attr_type (insn);
19823
19824 switch (type)
19825 {
19826 case TYPE_MFCR:
19827 case TYPE_MFCRF:
19828 case TYPE_MTCR:
19829 case TYPE_DELAYED_CR:
19830 case TYPE_CR_LOGICAL:
19831 case TYPE_MTJMPR:
19832 case TYPE_MFJMPR:
19833 case TYPE_IDIV:
19834 case TYPE_LDIV:
19835 case TYPE_LOAD_L:
19836 case TYPE_STORE_C:
19837 case TYPE_ISYNC:
19838 case TYPE_SYNC:
19839 return true;
19840 default:
19841 break;
19842 }
19843 break;
19844 case PROCESSOR_POWER6:
19845 type = get_attr_type (insn);
19846
19847 switch (type)
19848 {
19849 case TYPE_INSERT_DWORD:
19850 case TYPE_EXTS:
19851 case TYPE_CNTLZ:
19852 case TYPE_SHIFT:
19853 case TYPE_VAR_SHIFT_ROTATE:
19854 case TYPE_TRAP:
19855 case TYPE_IMUL:
19856 case TYPE_IMUL2:
19857 case TYPE_IMUL3:
19858 case TYPE_LMUL:
19859 case TYPE_IDIV:
19860 case TYPE_INSERT_WORD:
19861 case TYPE_DELAYED_COMPARE:
19862 case TYPE_IMUL_COMPARE:
19863 case TYPE_LMUL_COMPARE:
19864 case TYPE_FPCOMPARE:
19865 case TYPE_MFCR:
19866 case TYPE_MTCR:
19867 case TYPE_MFJMPR:
19868 case TYPE_MTJMPR:
19869 case TYPE_ISYNC:
19870 case TYPE_SYNC:
19871 case TYPE_LOAD_L:
19872 case TYPE_STORE_C:
19873 case TYPE_LOAD_U:
19874 case TYPE_LOAD_UX:
19875 case TYPE_LOAD_EXT_UX:
19876 case TYPE_STORE_U:
19877 case TYPE_STORE_UX:
19878 case TYPE_FPLOAD_U:
19879 case TYPE_FPLOAD_UX:
19880 case TYPE_FPSTORE_U:
19881 case TYPE_FPSTORE_UX:
19882 return true;
19883 default:
19884 break;
19885 }
19886 break;
19887 default:
19888 break;
19889 }
19890
19891 return false;
19892}
19893
19894static bool
19895insn_must_be_last_in_group (rtx insn)
19896{
19897 enum attr_type type;
19898
19899 if (!insn
19900 || insn == NULL_RTX
19901 || GET_CODE (insn) == NOTE
19902 || GET_CODE (PATTERN (insn)) == USE
19903 || GET_CODE (PATTERN (insn)) == CLOBBER)
19904 return false;
19905
19906 switch (rs6000_cpu) {
19907 case PROCESSOR_POWER4:
19908 case PROCESSOR_POWER5:
19909 if (is_microcoded_insn (insn))
19910 return true;
19911
19912 if (is_branch_slot_insn (insn))
19913 return true;
19914
19915 break;
19916 case PROCESSOR_POWER6:
19917 type = get_attr_type (insn);
19918
19919 switch (type)
19920 {
19921 case TYPE_EXTS:
19922 case TYPE_CNTLZ:
19923 case TYPE_SHIFT:
19924 case TYPE_VAR_SHIFT_ROTATE:
19925 case TYPE_TRAP:
19926 case TYPE_IMUL:
19927 case TYPE_IMUL2:
19928 case TYPE_IMUL3:
19929 case TYPE_LMUL:
19930 case TYPE_IDIV:
19931 case TYPE_DELAYED_COMPARE:
19932 case TYPE_IMUL_COMPARE:
19933 case TYPE_LMUL_COMPARE:
19934 case TYPE_FPCOMPARE:
19935 case TYPE_MFCR:
19936 case TYPE_MTCR:
19937 case TYPE_MFJMPR:
19938 case TYPE_MTJMPR:
19939 case TYPE_ISYNC:
19940 case TYPE_SYNC:
19941 case TYPE_LOAD_L:
19942 case TYPE_STORE_C:
19943 return true;
19944 default:
19945 break;
cbe26ab8 19946 }
44cd321e
PS
19947 break;
19948 default:
19949 break;
19950 }
cbe26ab8
DN
19951
19952 return false;
19953}
19954
839a4992 19955/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19956 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19957
19958static bool
19959is_costly_group (rtx *group_insns, rtx next_insn)
19960{
19961 int i;
cbe26ab8
DN
19962 int issue_rate = rs6000_issue_rate ();
19963
19964 for (i = 0; i < issue_rate; i++)
19965 {
e2f6ff94
MK
19966 sd_iterator_def sd_it;
19967 dep_t dep;
cbe26ab8 19968 rtx insn = group_insns[i];
b198261f 19969
cbe26ab8 19970 if (!insn)
c4ad648e 19971 continue;
b198261f 19972
e2f6ff94 19973 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19974 {
b198261f
MK
19975 rtx next = DEP_CON (dep);
19976
19977 if (next == next_insn
19978 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19979 return true;
c4ad648e 19980 }
cbe26ab8
DN
19981 }
19982
19983 return false;
19984}
19985
f676971a 19986/* Utility of the function redefine_groups.
cbe26ab8
DN
19987 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19988 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19989 to keep it "far" (in a separate group) from GROUP_INSNS, following
19990 one of the following schemes, depending on the value of the flag
19991 -minsert_sched_nops = X:
19992 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19993 in order to force NEXT_INSN into a separate group.
f676971a
EC
19994 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19995 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19996 insertion (has a group just ended, how many vacant issue slots remain in the
19997 last group, and how many dispatch groups were encountered so far). */
19998
f676971a 19999static int
c4ad648e
AM
20000force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20001 rtx next_insn, bool *group_end, int can_issue_more,
20002 int *group_count)
cbe26ab8
DN
20003{
20004 rtx nop;
20005 bool force;
20006 int issue_rate = rs6000_issue_rate ();
20007 bool end = *group_end;
20008 int i;
20009
20010 if (next_insn == NULL_RTX)
20011 return can_issue_more;
20012
20013 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20014 return can_issue_more;
20015
20016 force = is_costly_group (group_insns, next_insn);
20017 if (!force)
20018 return can_issue_more;
20019
20020 if (sched_verbose > 6)
20021 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 20022 *group_count ,can_issue_more);
cbe26ab8
DN
20023
20024 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20025 {
20026 if (*group_end)
c4ad648e 20027 can_issue_more = 0;
cbe26ab8
DN
20028
20029 /* Since only a branch can be issued in the last issue_slot, it is
20030 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20031 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
20032 in this case the last nop will start a new group and the branch
20033 will be forced to the new group. */
cbe26ab8 20034 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 20035 can_issue_more--;
cbe26ab8
DN
20036
20037 while (can_issue_more > 0)
c4ad648e 20038 {
9390387d 20039 nop = gen_nop ();
c4ad648e
AM
20040 emit_insn_before (nop, next_insn);
20041 can_issue_more--;
20042 }
cbe26ab8
DN
20043
20044 *group_end = true;
20045 return 0;
f676971a 20046 }
cbe26ab8
DN
20047
20048 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20049 {
20050 int n_nops = rs6000_sched_insert_nops;
20051
f676971a 20052 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 20053 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 20054 if (can_issue_more == 0)
c4ad648e 20055 can_issue_more = issue_rate;
cbe26ab8
DN
20056 can_issue_more--;
20057 if (can_issue_more == 0)
c4ad648e
AM
20058 {
20059 can_issue_more = issue_rate - 1;
20060 (*group_count)++;
20061 end = true;
20062 for (i = 0; i < issue_rate; i++)
20063 {
20064 group_insns[i] = 0;
20065 }
20066 }
cbe26ab8
DN
20067
20068 while (n_nops > 0)
c4ad648e
AM
20069 {
20070 nop = gen_nop ();
20071 emit_insn_before (nop, next_insn);
20072 if (can_issue_more == issue_rate - 1) /* new group begins */
20073 end = false;
20074 can_issue_more--;
20075 if (can_issue_more == 0)
20076 {
20077 can_issue_more = issue_rate - 1;
20078 (*group_count)++;
20079 end = true;
20080 for (i = 0; i < issue_rate; i++)
20081 {
20082 group_insns[i] = 0;
20083 }
20084 }
20085 n_nops--;
20086 }
cbe26ab8
DN
20087
20088 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 20089 can_issue_more++;
cbe26ab8 20090
c4ad648e
AM
20091 /* Is next_insn going to start a new group? */
20092 *group_end
20093 = (end
cbe26ab8
DN
20094 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20095 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20096 || (can_issue_more < issue_rate &&
c4ad648e 20097 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20098 if (*group_end && end)
c4ad648e 20099 (*group_count)--;
cbe26ab8
DN
20100
20101 if (sched_verbose > 6)
c4ad648e
AM
20102 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20103 *group_count, can_issue_more);
f676971a
EC
20104 return can_issue_more;
20105 }
cbe26ab8
DN
20106
20107 return can_issue_more;
20108}
20109
20110/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 20111 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
20112 form in practice. It tries to achieve this synchronization by forcing the
20113 estimated processor grouping on the compiler (as opposed to the function
20114 'pad_goups' which tries to force the scheduler's grouping on the processor).
20115
20116 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20117 examines the (estimated) dispatch groups that will be formed by the processor
20118 dispatcher. It marks these group boundaries to reflect the estimated
20119 processor grouping, overriding the grouping that the scheduler had marked.
20120 Depending on the value of the flag '-minsert-sched-nops' this function can
20121 force certain insns into separate groups or force a certain distance between
20122 them by inserting nops, for example, if there exists a "costly dependence"
20123 between the insns.
20124
20125 The function estimates the group boundaries that the processor will form as
0fa2e4df 20126 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
20127 each insn. A subsequent insn will start a new group if one of the following
20128 4 cases applies:
20129 - no more vacant issue slots remain in the current dispatch group.
20130 - only the last issue slot, which is the branch slot, is vacant, but the next
20131 insn is not a branch.
20132 - only the last 2 or less issue slots, including the branch slot, are vacant,
20133 which means that a cracked insn (which occupies two issue slots) can't be
20134 issued in this group.
f676971a 20135 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
20136 start a new group. */
20137
20138static int
20139redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20140{
20141 rtx insn, next_insn;
20142 int issue_rate;
20143 int can_issue_more;
20144 int slot, i;
20145 bool group_end;
20146 int group_count = 0;
20147 rtx *group_insns;
20148
20149 /* Initialize. */
20150 issue_rate = rs6000_issue_rate ();
5ead67f6 20151 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 20152 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20153 {
20154 group_insns[i] = 0;
20155 }
20156 can_issue_more = issue_rate;
20157 slot = 0;
20158 insn = get_next_active_insn (prev_head_insn, tail);
20159 group_end = false;
20160
20161 while (insn != NULL_RTX)
20162 {
20163 slot = (issue_rate - can_issue_more);
20164 group_insns[slot] = insn;
20165 can_issue_more =
c4ad648e 20166 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20167 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20168 can_issue_more = 0;
cbe26ab8
DN
20169
20170 next_insn = get_next_active_insn (insn, tail);
20171 if (next_insn == NULL_RTX)
c4ad648e 20172 return group_count + 1;
cbe26ab8 20173
c4ad648e
AM
20174 /* Is next_insn going to start a new group? */
20175 group_end
20176 = (can_issue_more == 0
20177 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20178 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20179 || (can_issue_more < issue_rate &&
20180 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20181
f676971a 20182 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20183 next_insn, &group_end, can_issue_more,
20184 &group_count);
cbe26ab8
DN
20185
20186 if (group_end)
c4ad648e
AM
20187 {
20188 group_count++;
20189 can_issue_more = 0;
20190 for (i = 0; i < issue_rate; i++)
20191 {
20192 group_insns[i] = 0;
20193 }
20194 }
cbe26ab8
DN
20195
20196 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20197 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20198 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20199 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
20200
20201 insn = next_insn;
20202 if (can_issue_more == 0)
c4ad648e
AM
20203 can_issue_more = issue_rate;
20204 } /* while */
cbe26ab8
DN
20205
20206 return group_count;
20207}
20208
20209/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20210 dispatch group boundaries that the scheduler had marked. Pad with nops
20211 any dispatch groups which have vacant issue slots, in order to force the
20212 scheduler's grouping on the processor dispatcher. The function
20213 returns the number of dispatch groups found. */
20214
20215static int
20216pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20217{
20218 rtx insn, next_insn;
20219 rtx nop;
20220 int issue_rate;
20221 int can_issue_more;
20222 int group_end;
20223 int group_count = 0;
20224
20225 /* Initialize issue_rate. */
20226 issue_rate = rs6000_issue_rate ();
20227 can_issue_more = issue_rate;
20228
20229 insn = get_next_active_insn (prev_head_insn, tail);
20230 next_insn = get_next_active_insn (insn, tail);
20231
20232 while (insn != NULL_RTX)
20233 {
20234 can_issue_more =
20235 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20236
20237 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20238
20239 if (next_insn == NULL_RTX)
c4ad648e 20240 break;
cbe26ab8
DN
20241
20242 if (group_end)
c4ad648e
AM
20243 {
20244 /* If the scheduler had marked group termination at this location
e855c69d 20245 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20246 force group termination, pad the group with nops to force group
20247 termination. */
20248 if (can_issue_more
20249 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20250 && !insn_terminates_group_p (insn, current_group)
20251 && !insn_terminates_group_p (next_insn, previous_group))
20252 {
9390387d 20253 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20254 can_issue_more--;
20255
20256 while (can_issue_more)
20257 {
20258 nop = gen_nop ();
20259 emit_insn_before (nop, next_insn);
20260 can_issue_more--;
20261 }
20262 }
20263
20264 can_issue_more = issue_rate;
20265 group_count++;
20266 }
cbe26ab8
DN
20267
20268 insn = next_insn;
20269 next_insn = get_next_active_insn (insn, tail);
20270 }
20271
20272 return group_count;
20273}
20274
44cd321e
PS
20275/* We're beginning a new block. Initialize data structures as necessary. */
20276
20277static void
20278rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20279 int sched_verbose ATTRIBUTE_UNUSED,
20280 int max_ready ATTRIBUTE_UNUSED)
982afe02 20281{
44cd321e
PS
20282 last_scheduled_insn = NULL_RTX;
20283 load_store_pendulum = 0;
20284}
20285
cbe26ab8
DN
20286/* The following function is called at the end of scheduling BB.
20287 After reload, it inserts nops at insn group bundling. */
20288
20289static void
38f391a5 20290rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20291{
20292 int n_groups;
20293
20294 if (sched_verbose)
20295 fprintf (dump, "=== Finishing schedule.\n");
20296
ec507f2d 20297 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20298 {
e855c69d
AB
20299 /* Do not run sched_finish hook when selective scheduling enabled. */
20300 if (sel_sched_p ())
20301 return;
20302
cbe26ab8 20303 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20304 return;
cbe26ab8
DN
20305
20306 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20307 n_groups = pad_groups (dump, sched_verbose,
20308 current_sched_info->prev_head,
20309 current_sched_info->next_tail);
cbe26ab8 20310 else
c4ad648e
AM
20311 n_groups = redefine_groups (dump, sched_verbose,
20312 current_sched_info->prev_head,
20313 current_sched_info->next_tail);
cbe26ab8
DN
20314
20315 if (sched_verbose >= 6)
20316 {
20317 fprintf (dump, "ngroups = %d\n", n_groups);
20318 print_rtl (dump, current_sched_info->prev_head);
20319 fprintf (dump, "Done finish_sched\n");
20320 }
20321 }
20322}
e855c69d
AB
20323
20324struct _rs6000_sched_context
20325{
20326 short cached_can_issue_more;
20327 rtx last_scheduled_insn;
20328 int load_store_pendulum;
20329};
20330
20331typedef struct _rs6000_sched_context rs6000_sched_context_def;
20332typedef rs6000_sched_context_def *rs6000_sched_context_t;
20333
20334/* Allocate store for new scheduling context. */
20335static void *
20336rs6000_alloc_sched_context (void)
20337{
20338 return xmalloc (sizeof (rs6000_sched_context_def));
20339}
20340
20341/* If CLEAN_P is true then initializes _SC with clean data,
20342 and from the global context otherwise. */
20343static void
20344rs6000_init_sched_context (void *_sc, bool clean_p)
20345{
20346 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20347
20348 if (clean_p)
20349 {
20350 sc->cached_can_issue_more = 0;
20351 sc->last_scheduled_insn = NULL_RTX;
20352 sc->load_store_pendulum = 0;
20353 }
20354 else
20355 {
20356 sc->cached_can_issue_more = cached_can_issue_more;
20357 sc->last_scheduled_insn = last_scheduled_insn;
20358 sc->load_store_pendulum = load_store_pendulum;
20359 }
20360}
20361
20362/* Sets the global scheduling context to the one pointed to by _SC. */
20363static void
20364rs6000_set_sched_context (void *_sc)
20365{
20366 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20367
20368 gcc_assert (sc != NULL);
20369
20370 cached_can_issue_more = sc->cached_can_issue_more;
20371 last_scheduled_insn = sc->last_scheduled_insn;
20372 load_store_pendulum = sc->load_store_pendulum;
20373}
20374
20375/* Free _SC. */
20376static void
20377rs6000_free_sched_context (void *_sc)
20378{
20379 gcc_assert (_sc != NULL);
20380
20381 free (_sc);
20382}
20383
b6c9286a 20384\f
b6c9286a
MM
20385/* Length in units of the trampoline for entering a nested function. */
20386
20387int
863d938c 20388rs6000_trampoline_size (void)
b6c9286a
MM
20389{
20390 int ret = 0;
20391
20392 switch (DEFAULT_ABI)
20393 {
20394 default:
37409796 20395 gcc_unreachable ();
b6c9286a
MM
20396
20397 case ABI_AIX:
8f802bfb 20398 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20399 break;
20400
4dabc42d 20401 case ABI_DARWIN:
b6c9286a 20402 case ABI_V4:
03a7e1a5 20403 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20404 break;
b6c9286a
MM
20405 }
20406
20407 return ret;
20408}
20409
20410/* Emit RTL insns to initialize the variable parts of a trampoline.
20411 FNADDR is an RTX for the address of the function's pure code.
20412 CXT is an RTX for the static chain value for the function. */
20413
20414void
a2369ed3 20415rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20416{
8bd04c56 20417 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20418 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20419
20420 switch (DEFAULT_ABI)
20421 {
20422 default:
37409796 20423 gcc_unreachable ();
b6c9286a 20424
8bd04c56 20425/* Macros to shorten the code expansions below. */
9613eaff 20426#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20427#define MEM_PLUS(addr,offset) \
9613eaff 20428 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20429
b6c9286a
MM
20430 /* Under AIX, just build the 3 word function descriptor */
20431 case ABI_AIX:
8bd04c56 20432 {
9613eaff
SH
20433 rtx fn_reg = gen_reg_rtx (Pmode);
20434 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20435 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20436 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20437 emit_move_insn (MEM_DEREF (addr), fn_reg);
20438 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20439 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20440 }
b6c9286a
MM
20441 break;
20442
4dabc42d
TC
20443 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20444 case ABI_DARWIN:
b6c9286a 20445 case ABI_V4:
9613eaff 20446 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 20447 FALSE, VOIDmode, 4,
9613eaff 20448 addr, Pmode,
eaf1bcf1 20449 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20450 fnaddr, Pmode,
20451 ctx_reg, Pmode);
b6c9286a 20452 break;
b6c9286a
MM
20453 }
20454
20455 return;
20456}
7509c759
MM
20457
20458\f
91d231cb 20459/* Table of valid machine attributes. */
a4f6c312 20460
91d231cb 20461const struct attribute_spec rs6000_attribute_table[] =
7509c759 20462{
91d231cb 20463 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20464 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20465 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20466 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20467 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20468 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20469#ifdef SUBTARGET_ATTRIBUTE_TABLE
20470 SUBTARGET_ATTRIBUTE_TABLE,
20471#endif
a5c76ee6 20472 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20473};
7509c759 20474
8bb418a3
ZL
20475/* Handle the "altivec" attribute. The attribute may have
20476 arguments as follows:
f676971a 20477
8bb418a3
ZL
20478 __attribute__((altivec(vector__)))
20479 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20480 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20481
20482 and may appear more than once (e.g., 'vector bool char') in a
20483 given declaration. */
20484
20485static tree
f90ac3f0
UP
20486rs6000_handle_altivec_attribute (tree *node,
20487 tree name ATTRIBUTE_UNUSED,
20488 tree args,
8bb418a3
ZL
20489 int flags ATTRIBUTE_UNUSED,
20490 bool *no_add_attrs)
20491{
20492 tree type = *node, result = NULL_TREE;
20493 enum machine_mode mode;
20494 int unsigned_p;
20495 char altivec_type
20496 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20497 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20498 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20499 : '?');
8bb418a3
ZL
20500
20501 while (POINTER_TYPE_P (type)
20502 || TREE_CODE (type) == FUNCTION_TYPE
20503 || TREE_CODE (type) == METHOD_TYPE
20504 || TREE_CODE (type) == ARRAY_TYPE)
20505 type = TREE_TYPE (type);
20506
20507 mode = TYPE_MODE (type);
20508
f90ac3f0
UP
20509 /* Check for invalid AltiVec type qualifiers. */
20510 if (type == long_unsigned_type_node || type == long_integer_type_node)
20511 {
20512 if (TARGET_64BIT)
20513 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20514 else if (rs6000_warn_altivec_long)
d4ee4d25 20515 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20516 }
20517 else if (type == long_long_unsigned_type_node
20518 || type == long_long_integer_type_node)
20519 error ("use of %<long long%> in AltiVec types is invalid");
20520 else if (type == double_type_node)
20521 error ("use of %<double%> in AltiVec types is invalid");
20522 else if (type == long_double_type_node)
20523 error ("use of %<long double%> in AltiVec types is invalid");
20524 else if (type == boolean_type_node)
20525 error ("use of boolean types in AltiVec types is invalid");
20526 else if (TREE_CODE (type) == COMPLEX_TYPE)
20527 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20528 else if (DECIMAL_FLOAT_MODE_P (mode))
20529 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20530
20531 switch (altivec_type)
20532 {
20533 case 'v':
8df83eae 20534 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20535 switch (mode)
20536 {
c4ad648e
AM
20537 case SImode:
20538 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20539 break;
20540 case HImode:
20541 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20542 break;
20543 case QImode:
20544 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20545 break;
20546 case SFmode: result = V4SF_type_node; break;
20547 /* If the user says 'vector int bool', we may be handed the 'bool'
20548 attribute _before_ the 'vector' attribute, and so select the
20549 proper type in the 'b' case below. */
20550 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20551 result = type;
20552 default: break;
8bb418a3
ZL
20553 }
20554 break;
20555 case 'b':
20556 switch (mode)
20557 {
c4ad648e
AM
20558 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20559 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20560 case QImode: case V16QImode: result = bool_V16QI_type_node;
20561 default: break;
8bb418a3
ZL
20562 }
20563 break;
20564 case 'p':
20565 switch (mode)
20566 {
c4ad648e
AM
20567 case V8HImode: result = pixel_V8HI_type_node;
20568 default: break;
8bb418a3
ZL
20569 }
20570 default: break;
20571 }
20572
4f538d42
UW
20573 /* Propagate qualifiers attached to the element type
20574 onto the vector type. */
20575 if (result && result != type && TYPE_QUALS (type))
20576 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20577
8bb418a3
ZL
20578 *no_add_attrs = true; /* No need to hang on to the attribute. */
20579
f90ac3f0 20580 if (result)
5dc11954 20581 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20582
20583 return NULL_TREE;
20584}
20585
f18eca82
ZL
20586/* AltiVec defines four built-in scalar types that serve as vector
20587 elements; we must teach the compiler how to mangle them. */
20588
20589static const char *
3101faab 20590rs6000_mangle_type (const_tree type)
f18eca82 20591{
608063c3
JB
20592 type = TYPE_MAIN_VARIANT (type);
20593
20594 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20595 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20596 return NULL;
20597
f18eca82
ZL
20598 if (type == bool_char_type_node) return "U6__boolc";
20599 if (type == bool_short_type_node) return "U6__bools";
20600 if (type == pixel_type_node) return "u7__pixel";
20601 if (type == bool_int_type_node) return "U6__booli";
20602
337bde91
DE
20603 /* Mangle IBM extended float long double as `g' (__float128) on
20604 powerpc*-linux where long-double-64 previously was the default. */
20605 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20606 && TARGET_ELF
20607 && TARGET_LONG_DOUBLE_128
20608 && !TARGET_IEEEQUAD)
20609 return "g";
20610
f18eca82
ZL
20611 /* For all other types, use normal C++ mangling. */
20612 return NULL;
20613}
20614
a5c76ee6
ZW
20615/* Handle a "longcall" or "shortcall" attribute; arguments as in
20616 struct attribute_spec.handler. */
a4f6c312 20617
91d231cb 20618static tree
f676971a
EC
20619rs6000_handle_longcall_attribute (tree *node, tree name,
20620 tree args ATTRIBUTE_UNUSED,
20621 int flags ATTRIBUTE_UNUSED,
a2369ed3 20622 bool *no_add_attrs)
91d231cb
JM
20623{
20624 if (TREE_CODE (*node) != FUNCTION_TYPE
20625 && TREE_CODE (*node) != FIELD_DECL
20626 && TREE_CODE (*node) != TYPE_DECL)
20627 {
5c498b10 20628 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20629 IDENTIFIER_POINTER (name));
20630 *no_add_attrs = true;
20631 }
6a4cee5f 20632
91d231cb 20633 return NULL_TREE;
7509c759
MM
20634}
20635
a5c76ee6
ZW
20636/* Set longcall attributes on all functions declared when
20637 rs6000_default_long_calls is true. */
20638static void
a2369ed3 20639rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20640{
20641 if (rs6000_default_long_calls
20642 && (TREE_CODE (type) == FUNCTION_TYPE
20643 || TREE_CODE (type) == METHOD_TYPE))
20644 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20645 NULL_TREE,
20646 TYPE_ATTRIBUTES (type));
16d6f994
EC
20647
20648#if TARGET_MACHO
20649 darwin_set_default_type_attributes (type);
20650#endif
a5c76ee6
ZW
20651}
20652
3cb999d8
DE
20653/* Return a reference suitable for calling a function with the
20654 longcall attribute. */
a4f6c312 20655
9390387d 20656rtx
a2369ed3 20657rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20658{
d330fd93 20659 const char *call_name;
6a4cee5f
MM
20660 tree node;
20661
20662 if (GET_CODE (call_ref) != SYMBOL_REF)
20663 return call_ref;
20664
20665 /* System V adds '.' to the internal name, so skip them. */
20666 call_name = XSTR (call_ref, 0);
20667 if (*call_name == '.')
20668 {
20669 while (*call_name == '.')
20670 call_name++;
20671
20672 node = get_identifier (call_name);
39403d82 20673 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20674 }
20675
20676 return force_reg (Pmode, call_ref);
20677}
7509c759 20678\f
77ccdfed
EC
20679#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20680#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20681#endif
20682
20683/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20684 struct attribute_spec.handler. */
20685static tree
20686rs6000_handle_struct_attribute (tree *node, tree name,
20687 tree args ATTRIBUTE_UNUSED,
20688 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20689{
20690 tree *type = NULL;
20691 if (DECL_P (*node))
20692 {
20693 if (TREE_CODE (*node) == TYPE_DECL)
20694 type = &TREE_TYPE (*node);
20695 }
20696 else
20697 type = node;
20698
20699 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20700 || TREE_CODE (*type) == UNION_TYPE)))
20701 {
20702 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20703 *no_add_attrs = true;
20704 }
20705
20706 else if ((is_attribute_p ("ms_struct", name)
20707 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20708 || ((is_attribute_p ("gcc_struct", name)
20709 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20710 {
20711 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20712 IDENTIFIER_POINTER (name));
20713 *no_add_attrs = true;
20714 }
20715
20716 return NULL_TREE;
20717}
20718
20719static bool
3101faab 20720rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20721{
20722 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20723 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20724 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20725}
20726\f
b64a1b53
RH
20727#ifdef USING_ELFOS_H
20728
d6b5193b 20729/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20730
d6b5193b
RS
20731static void
20732rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20733{
20734 if (DEFAULT_ABI == ABI_AIX
20735 && TARGET_MINIMAL_TOC
20736 && !TARGET_RELOCATABLE)
20737 {
20738 if (!toc_initialized)
20739 {
20740 toc_initialized = 1;
20741 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20742 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20743 fprintf (asm_out_file, "\t.tc ");
20744 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20745 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20746 fprintf (asm_out_file, "\n");
20747
20748 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20749 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20750 fprintf (asm_out_file, " = .+32768\n");
20751 }
20752 else
20753 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20754 }
20755 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20756 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20757 else
20758 {
20759 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20760 if (!toc_initialized)
20761 {
20762 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20763 fprintf (asm_out_file, " = .+32768\n");
20764 toc_initialized = 1;
20765 }
20766 }
20767}
20768
20769/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20770
b64a1b53 20771static void
d6b5193b
RS
20772rs6000_elf_asm_init_sections (void)
20773{
20774 toc_section
20775 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20776
20777 sdata2_section
20778 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20779 SDATA2_SECTION_ASM_OP);
20780}
20781
20782/* Implement TARGET_SELECT_RTX_SECTION. */
20783
20784static section *
f676971a 20785rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20786 unsigned HOST_WIDE_INT align)
7509c759 20787{
a9098fd0 20788 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20789 return toc_section;
7509c759 20790 else
d6b5193b 20791 return default_elf_select_rtx_section (mode, x, align);
7509c759 20792}
d9407988 20793\f
d1908feb
JJ
20794/* For a SYMBOL_REF, set generic flags and then perform some
20795 target-specific processing.
20796
d1908feb
JJ
20797 When the AIX ABI is requested on a non-AIX system, replace the
20798 function name with the real name (with a leading .) rather than the
20799 function descriptor name. This saves a lot of overriding code to
20800 read the prefixes. */
d9407988 20801
fb49053f 20802static void
a2369ed3 20803rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20804{
d1908feb 20805 default_encode_section_info (decl, rtl, first);
b2003250 20806
d1908feb
JJ
20807 if (first
20808 && TREE_CODE (decl) == FUNCTION_DECL
20809 && !TARGET_AIX
20810 && DEFAULT_ABI == ABI_AIX)
d9407988 20811 {
c6a2438a 20812 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20813 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20814 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20815 str[0] = '.';
20816 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20817 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20818 }
d9407988
MM
20819}
20820
21d9bb3f 20821static inline bool
0a2aaacc 20822compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20823{
20824 int len;
20825
0a2aaacc
KG
20826 len = strlen (templ);
20827 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20828 && (section[len] == 0 || section[len] == '.'));
20829}
20830
c1b7d95a 20831bool
3101faab 20832rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20833{
20834 if (rs6000_sdata == SDATA_NONE)
20835 return false;
20836
7482ad25
AF
20837 /* We want to merge strings, so we never consider them small data. */
20838 if (TREE_CODE (decl) == STRING_CST)
20839 return false;
20840
20841 /* Functions are never in the small data area. */
20842 if (TREE_CODE (decl) == FUNCTION_DECL)
20843 return false;
20844
0e5dbd9b
DE
20845 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20846 {
20847 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20848 if (compare_section_name (section, ".sdata")
20849 || compare_section_name (section, ".sdata2")
20850 || compare_section_name (section, ".gnu.linkonce.s")
20851 || compare_section_name (section, ".sbss")
20852 || compare_section_name (section, ".sbss2")
20853 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20854 || strcmp (section, ".PPC.EMB.sdata0") == 0
20855 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20856 return true;
20857 }
20858 else
20859 {
20860 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20861
20862 if (size > 0
307b599c 20863 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20864 /* If it's not public, and we're not going to reference it there,
20865 there's no need to put it in the small data section. */
0e5dbd9b
DE
20866 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20867 return true;
20868 }
20869
20870 return false;
20871}
20872
b91da81f 20873#endif /* USING_ELFOS_H */
aacd3885
RS
20874\f
20875/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20876
aacd3885 20877static bool
3101faab 20878rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20879{
20880 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20881}
a6c2a102 20882\f
000034eb 20883/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20884 ADDR can be effectively incremented by incrementing REG.
20885
20886 r0 is special and we must not select it as an address
20887 register by this routine since our caller will try to
20888 increment the returned register via an "la" instruction. */
000034eb 20889
9390387d 20890rtx
a2369ed3 20891find_addr_reg (rtx addr)
000034eb
DE
20892{
20893 while (GET_CODE (addr) == PLUS)
20894 {
02441cd6
JL
20895 if (GET_CODE (XEXP (addr, 0)) == REG
20896 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20897 addr = XEXP (addr, 0);
02441cd6
JL
20898 else if (GET_CODE (XEXP (addr, 1)) == REG
20899 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20900 addr = XEXP (addr, 1);
20901 else if (CONSTANT_P (XEXP (addr, 0)))
20902 addr = XEXP (addr, 1);
20903 else if (CONSTANT_P (XEXP (addr, 1)))
20904 addr = XEXP (addr, 0);
20905 else
37409796 20906 gcc_unreachable ();
000034eb 20907 }
37409796
NS
20908 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20909 return addr;
000034eb
DE
20910}
20911
a6c2a102 20912void
a2369ed3 20913rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20914{
20915 fatal_insn ("bad address", op);
20916}
c8023011 20917
ee890fe2
SS
20918#if TARGET_MACHO
20919
efdba735 20920static tree branch_island_list = 0;
ee890fe2 20921
efdba735
SH
20922/* Remember to generate a branch island for far calls to the given
20923 function. */
ee890fe2 20924
f676971a 20925static void
c4ad648e
AM
20926add_compiler_branch_island (tree label_name, tree function_name,
20927 int line_number)
ee890fe2 20928{
efdba735 20929 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20930 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20931 TREE_CHAIN (branch_island) = branch_island_list;
20932 branch_island_list = branch_island;
ee890fe2
SS
20933}
20934
efdba735
SH
20935#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20936#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20937#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20938 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20939
efdba735
SH
20940/* Generate far-jump branch islands for everything on the
20941 branch_island_list. Invoked immediately after the last instruction
20942 of the epilogue has been emitted; the branch-islands must be
20943 appended to, and contiguous with, the function body. Mach-O stubs
20944 are generated in machopic_output_stub(). */
ee890fe2 20945
efdba735
SH
20946static void
20947macho_branch_islands (void)
20948{
20949 char tmp_buf[512];
20950 tree branch_island;
20951
20952 for (branch_island = branch_island_list;
20953 branch_island;
20954 branch_island = TREE_CHAIN (branch_island))
20955 {
20956 const char *label =
20957 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20958 const char *name =
11abc112 20959 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20960 char name_buf[512];
20961 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20962 if (name[0] == '*' || name[0] == '&')
20963 strcpy (name_buf, name+1);
20964 else
20965 {
20966 name_buf[0] = '_';
20967 strcpy (name_buf+1, name);
20968 }
20969 strcpy (tmp_buf, "\n");
20970 strcat (tmp_buf, label);
ee890fe2 20971#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20972 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20973 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20974#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20975 if (flag_pic)
20976 {
20977 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20978 strcat (tmp_buf, label);
20979 strcat (tmp_buf, "_pic\n");
20980 strcat (tmp_buf, label);
20981 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20982
efdba735
SH
20983 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20984 strcat (tmp_buf, name_buf);
20985 strcat (tmp_buf, " - ");
20986 strcat (tmp_buf, label);
20987 strcat (tmp_buf, "_pic)\n");
f676971a 20988
efdba735 20989 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20990
efdba735
SH
20991 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20992 strcat (tmp_buf, name_buf);
20993 strcat (tmp_buf, " - ");
20994 strcat (tmp_buf, label);
20995 strcat (tmp_buf, "_pic)\n");
f676971a 20996
efdba735
SH
20997 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20998 }
20999 else
21000 {
21001 strcat (tmp_buf, ":\nlis r12,hi16(");
21002 strcat (tmp_buf, name_buf);
21003 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21004 strcat (tmp_buf, name_buf);
21005 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21006 }
21007 output_asm_insn (tmp_buf, 0);
ee890fe2 21008#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 21009 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 21010 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 21011#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 21012 }
ee890fe2 21013
efdba735 21014 branch_island_list = 0;
ee890fe2
SS
21015}
21016
21017/* NO_PREVIOUS_DEF checks in the link list whether the function name is
21018 already there or not. */
21019
efdba735 21020static int
a2369ed3 21021no_previous_def (tree function_name)
ee890fe2 21022{
efdba735
SH
21023 tree branch_island;
21024 for (branch_island = branch_island_list;
21025 branch_island;
21026 branch_island = TREE_CHAIN (branch_island))
21027 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
21028 return 0;
21029 return 1;
21030}
21031
21032/* GET_PREV_LABEL gets the label name from the previous definition of
21033 the function. */
21034
efdba735 21035static tree
a2369ed3 21036get_prev_label (tree function_name)
ee890fe2 21037{
efdba735
SH
21038 tree branch_island;
21039 for (branch_island = branch_island_list;
21040 branch_island;
21041 branch_island = TREE_CHAIN (branch_island))
21042 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21043 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
21044 return 0;
21045}
21046
75b1b789
MS
21047#ifndef DARWIN_LINKER_GENERATES_ISLANDS
21048#define DARWIN_LINKER_GENERATES_ISLANDS 0
21049#endif
21050
21051/* KEXTs still need branch islands. */
21052#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21053 || flag_mkernel || flag_apple_kext)
21054
ee890fe2 21055/* INSN is either a function call or a millicode call. It may have an
f676971a 21056 unconditional jump in its delay slot.
ee890fe2
SS
21057
21058 CALL_DEST is the routine we are calling. */
21059
21060char *
c4ad648e
AM
21061output_call (rtx insn, rtx *operands, int dest_operand_number,
21062 int cookie_operand_number)
ee890fe2
SS
21063{
21064 static char buf[256];
75b1b789
MS
21065 if (DARWIN_GENERATE_ISLANDS
21066 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 21067 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
21068 {
21069 tree labelname;
efdba735 21070 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 21071
ee890fe2
SS
21072 if (no_previous_def (funname))
21073 {
ee890fe2
SS
21074 rtx label_rtx = gen_label_rtx ();
21075 char *label_buf, temp_buf[256];
21076 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21077 CODE_LABEL_NUMBER (label_rtx));
21078 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21079 labelname = get_identifier (label_buf);
a38e7aa5 21080 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
21081 }
21082 else
21083 labelname = get_prev_label (funname);
21084
efdba735
SH
21085 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21086 instruction will reach 'foo', otherwise link as 'bl L42'".
21087 "L42" should be a 'branch island', that will do a far jump to
21088 'foo'. Branch islands are generated in
21089 macho_branch_islands(). */
ee890fe2 21090 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 21091 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
21092 }
21093 else
efdba735
SH
21094 sprintf (buf, "bl %%z%d", dest_operand_number);
21095 return buf;
ee890fe2
SS
21096}
21097
ee890fe2
SS
21098/* Generate PIC and indirect symbol stubs. */
21099
21100void
a2369ed3 21101machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
21102{
21103 unsigned int length;
a4f6c312
SS
21104 char *symbol_name, *lazy_ptr_name;
21105 char *local_label_0;
ee890fe2
SS
21106 static int label = 0;
21107
df56a27f 21108 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 21109 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 21110
ee890fe2 21111
ee890fe2 21112 length = strlen (symb);
5ead67f6 21113 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21114 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21115
5ead67f6 21116 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
21117 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21118
ee890fe2 21119 if (flag_pic == 2)
56c779bc 21120 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 21121 else
56c779bc 21122 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
21123
21124 if (flag_pic == 2)
21125 {
d974312d
DJ
21126 fprintf (file, "\t.align 5\n");
21127
21128 fprintf (file, "%s:\n", stub);
21129 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21130
876455fa 21131 label++;
5ead67f6 21132 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 21133 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 21134
ee890fe2
SS
21135 fprintf (file, "\tmflr r0\n");
21136 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21137 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21138 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21139 lazy_ptr_name, local_label_0);
21140 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
21141 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21142 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
21143 lazy_ptr_name, local_label_0);
21144 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
21145 fprintf (file, "\tbctr\n");
21146 }
21147 else
d974312d
DJ
21148 {
21149 fprintf (file, "\t.align 4\n");
21150
21151 fprintf (file, "%s:\n", stub);
21152 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21153
21154 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21155 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21156 (TARGET_64BIT ? "ldu" : "lwzu"),
21157 lazy_ptr_name);
d974312d
DJ
21158 fprintf (file, "\tmtctr r12\n");
21159 fprintf (file, "\tbctr\n");
21160 }
f676971a 21161
56c779bc 21162 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21163 fprintf (file, "%s:\n", lazy_ptr_name);
21164 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21165 fprintf (file, "%sdyld_stub_binding_helper\n",
21166 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21167}
21168
21169/* Legitimize PIC addresses. If the address is already
21170 position-independent, we return ORIG. Newly generated
21171 position-independent addresses go into a reg. This is REG if non
21172 zero, otherwise we allocate register(s) as necessary. */
21173
4fbbe694 21174#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21175
21176rtx
f676971a 21177rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21178 rtx reg)
ee890fe2
SS
21179{
21180 rtx base, offset;
21181
21182 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21183 reg = gen_reg_rtx (Pmode);
21184
21185 if (GET_CODE (orig) == CONST)
21186 {
37409796
NS
21187 rtx reg_temp;
21188
ee890fe2
SS
21189 if (GET_CODE (XEXP (orig, 0)) == PLUS
21190 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21191 return orig;
21192
37409796 21193 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21194
37409796
NS
21195 /* Use a different reg for the intermediate value, as
21196 it will be marked UNCHANGING. */
b3a13419 21197 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21198 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21199 Pmode, reg_temp);
21200 offset =
21201 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21202 Pmode, reg);
bb8df8a6 21203
ee890fe2
SS
21204 if (GET_CODE (offset) == CONST_INT)
21205 {
21206 if (SMALL_INT (offset))
ed8908e7 21207 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21208 else if (! reload_in_progress && ! reload_completed)
21209 offset = force_reg (Pmode, offset);
21210 else
c859cda6
DJ
21211 {
21212 rtx mem = force_const_mem (Pmode, orig);
21213 return machopic_legitimize_pic_address (mem, Pmode, reg);
21214 }
ee890fe2 21215 }
f1c25d3b 21216 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21217 }
21218
21219 /* Fall back on generic machopic code. */
21220 return machopic_legitimize_pic_address (orig, mode, reg);
21221}
21222
c4e18b1c
GK
21223/* Output a .machine directive for the Darwin assembler, and call
21224 the generic start_file routine. */
21225
21226static void
21227rs6000_darwin_file_start (void)
21228{
94ff898d 21229 static const struct
c4e18b1c
GK
21230 {
21231 const char *arg;
21232 const char *name;
21233 int if_set;
21234 } mapping[] = {
55dbfb48 21235 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21236 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21237 { "power4", "ppc970", 0 },
21238 { "G5", "ppc970", 0 },
21239 { "7450", "ppc7450", 0 },
21240 { "7400", "ppc7400", MASK_ALTIVEC },
21241 { "G4", "ppc7400", 0 },
21242 { "750", "ppc750", 0 },
21243 { "740", "ppc750", 0 },
21244 { "G3", "ppc750", 0 },
21245 { "604e", "ppc604e", 0 },
21246 { "604", "ppc604", 0 },
21247 { "603e", "ppc603", 0 },
21248 { "603", "ppc603", 0 },
21249 { "601", "ppc601", 0 },
21250 { NULL, "ppc", 0 } };
21251 const char *cpu_id = "";
21252 size_t i;
94ff898d 21253
9390387d 21254 rs6000_file_start ();
192d0f89 21255 darwin_file_start ();
c4e18b1c
GK
21256
21257 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21258 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21259 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21260 && rs6000_select[i].string[0] != '\0')
21261 cpu_id = rs6000_select[i].string;
21262
21263 /* Look through the mapping array. Pick the first name that either
21264 matches the argument, has a bit set in IF_SET that is also set
21265 in the target flags, or has a NULL name. */
21266
21267 i = 0;
21268 while (mapping[i].arg != NULL
21269 && strcmp (mapping[i].arg, cpu_id) != 0
21270 && (mapping[i].if_set & target_flags) == 0)
21271 i++;
21272
21273 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21274}
21275
ee890fe2 21276#endif /* TARGET_MACHO */
7c262518
RH
21277
21278#if TARGET_ELF
9b580a0b
RH
21279static int
21280rs6000_elf_reloc_rw_mask (void)
7c262518 21281{
9b580a0b
RH
21282 if (flag_pic)
21283 return 3;
21284 else if (DEFAULT_ABI == ABI_AIX)
21285 return 2;
21286 else
21287 return 0;
7c262518 21288}
d9f6800d
RH
21289
21290/* Record an element in the table of global constructors. SYMBOL is
21291 a SYMBOL_REF of the function to be called; PRIORITY is a number
21292 between 0 and MAX_INIT_PRIORITY.
21293
21294 This differs from default_named_section_asm_out_constructor in
21295 that we have special handling for -mrelocatable. */
21296
21297static void
a2369ed3 21298rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21299{
21300 const char *section = ".ctors";
21301 char buf[16];
21302
21303 if (priority != DEFAULT_INIT_PRIORITY)
21304 {
21305 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21306 /* Invert the numbering so the linker puts us in the proper
21307 order; constructors are run from right to left, and the
21308 linker sorts in increasing order. */
21309 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21310 section = buf;
21311 }
21312
d6b5193b 21313 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21314 assemble_align (POINTER_SIZE);
d9f6800d
RH
21315
21316 if (TARGET_RELOCATABLE)
21317 {
21318 fputs ("\t.long (", asm_out_file);
21319 output_addr_const (asm_out_file, symbol);
21320 fputs (")@fixup\n", asm_out_file);
21321 }
21322 else
c8af3574 21323 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21324}
21325
21326static void
a2369ed3 21327rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21328{
21329 const char *section = ".dtors";
21330 char buf[16];
21331
21332 if (priority != DEFAULT_INIT_PRIORITY)
21333 {
21334 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21335 /* Invert the numbering so the linker puts us in the proper
21336 order; constructors are run from right to left, and the
21337 linker sorts in increasing order. */
21338 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21339 section = buf;
21340 }
21341
d6b5193b 21342 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21343 assemble_align (POINTER_SIZE);
d9f6800d
RH
21344
21345 if (TARGET_RELOCATABLE)
21346 {
21347 fputs ("\t.long (", asm_out_file);
21348 output_addr_const (asm_out_file, symbol);
21349 fputs (")@fixup\n", asm_out_file);
21350 }
21351 else
c8af3574 21352 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21353}
9739c90c
JJ
21354
21355void
a2369ed3 21356rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21357{
21358 if (TARGET_64BIT)
21359 {
21360 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21361 ASM_OUTPUT_LABEL (file, name);
21362 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21363 rs6000_output_function_entry (file, name);
21364 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21365 if (DOT_SYMBOLS)
9739c90c 21366 {
85b776df 21367 fputs ("\t.size\t", file);
9739c90c 21368 assemble_name (file, name);
85b776df
AM
21369 fputs (",24\n\t.type\t.", file);
21370 assemble_name (file, name);
21371 fputs (",@function\n", file);
21372 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21373 {
21374 fputs ("\t.globl\t.", file);
21375 assemble_name (file, name);
21376 putc ('\n', file);
21377 }
9739c90c 21378 }
85b776df
AM
21379 else
21380 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21381 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21382 rs6000_output_function_entry (file, name);
21383 fputs (":\n", file);
9739c90c
JJ
21384 return;
21385 }
21386
21387 if (TARGET_RELOCATABLE
7f970b70 21388 && !TARGET_SECURE_PLT
e3b5732b 21389 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21390 && uses_TOC ())
9739c90c
JJ
21391 {
21392 char buf[256];
21393
21394 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21395
21396 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21397 fprintf (file, "\t.long ");
21398 assemble_name (file, buf);
21399 putc ('-', file);
21400 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21401 assemble_name (file, buf);
21402 putc ('\n', file);
21403 }
21404
21405 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21406 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21407
21408 if (DEFAULT_ABI == ABI_AIX)
21409 {
21410 const char *desc_name, *orig_name;
21411
21412 orig_name = (*targetm.strip_name_encoding) (name);
21413 desc_name = orig_name;
21414 while (*desc_name == '.')
21415 desc_name++;
21416
21417 if (TREE_PUBLIC (decl))
21418 fprintf (file, "\t.globl %s\n", desc_name);
21419
21420 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21421 fprintf (file, "%s:\n", desc_name);
21422 fprintf (file, "\t.long %s\n", orig_name);
21423 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21424 if (DEFAULT_ABI == ABI_AIX)
21425 fputs ("\t.long 0\n", file);
21426 fprintf (file, "\t.previous\n");
21427 }
21428 ASM_OUTPUT_LABEL (file, name);
21429}
1334b570
AM
21430
21431static void
21432rs6000_elf_end_indicate_exec_stack (void)
21433{
21434 if (TARGET_32BIT)
21435 file_end_indicate_exec_stack ();
21436}
7c262518
RH
21437#endif
21438
cbaaba19 21439#if TARGET_XCOFF
0d5817b2
DE
21440static void
21441rs6000_xcoff_asm_output_anchor (rtx symbol)
21442{
21443 char buffer[100];
21444
21445 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21446 SYMBOL_REF_BLOCK_OFFSET (symbol));
21447 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21448}
21449
7c262518 21450static void
a2369ed3 21451rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21452{
21453 fputs (GLOBAL_ASM_OP, stream);
21454 RS6000_OUTPUT_BASENAME (stream, name);
21455 putc ('\n', stream);
21456}
21457
d6b5193b
RS
21458/* A get_unnamed_decl callback, used for read-only sections. PTR
21459 points to the section string variable. */
21460
21461static void
21462rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21463{
890f9edf
OH
21464 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21465 *(const char *const *) directive,
21466 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21467}
21468
21469/* Likewise for read-write sections. */
21470
21471static void
21472rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21473{
890f9edf
OH
21474 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21475 *(const char *const *) directive,
21476 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21477}
21478
21479/* A get_unnamed_section callback, used for switching to toc_section. */
21480
21481static void
21482rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21483{
21484 if (TARGET_MINIMAL_TOC)
21485 {
21486 /* toc_section is always selected at least once from
21487 rs6000_xcoff_file_start, so this is guaranteed to
21488 always be defined once and only once in each file. */
21489 if (!toc_initialized)
21490 {
21491 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21492 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21493 toc_initialized = 1;
21494 }
21495 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21496 (TARGET_32BIT ? "" : ",3"));
21497 }
21498 else
21499 fputs ("\t.toc\n", asm_out_file);
21500}
21501
21502/* Implement TARGET_ASM_INIT_SECTIONS. */
21503
21504static void
21505rs6000_xcoff_asm_init_sections (void)
21506{
21507 read_only_data_section
21508 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21509 &xcoff_read_only_section_name);
21510
21511 private_data_section
21512 = get_unnamed_section (SECTION_WRITE,
21513 rs6000_xcoff_output_readwrite_section_asm_op,
21514 &xcoff_private_data_section_name);
21515
21516 read_only_private_data_section
21517 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21518 &xcoff_private_data_section_name);
21519
21520 toc_section
21521 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21522
21523 readonly_data_section = read_only_data_section;
21524 exception_section = data_section;
21525}
21526
9b580a0b
RH
21527static int
21528rs6000_xcoff_reloc_rw_mask (void)
21529{
21530 return 3;
21531}
21532
b275d088 21533static void
c18a5b6c
MM
21534rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21535 tree decl ATTRIBUTE_UNUSED)
7c262518 21536{
0e5dbd9b
DE
21537 int smclass;
21538 static const char * const suffix[3] = { "PR", "RO", "RW" };
21539
21540 if (flags & SECTION_CODE)
21541 smclass = 0;
21542 else if (flags & SECTION_WRITE)
21543 smclass = 2;
21544 else
21545 smclass = 1;
21546
5b5198f7 21547 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21548 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21549 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21550}
ae46c4e0 21551
d6b5193b 21552static section *
f676971a 21553rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21554 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21555{
9b580a0b 21556 if (decl_readonly_section (decl, reloc))
ae46c4e0 21557 {
0e5dbd9b 21558 if (TREE_PUBLIC (decl))
d6b5193b 21559 return read_only_data_section;
ae46c4e0 21560 else
d6b5193b 21561 return read_only_private_data_section;
ae46c4e0
RH
21562 }
21563 else
21564 {
0e5dbd9b 21565 if (TREE_PUBLIC (decl))
d6b5193b 21566 return data_section;
ae46c4e0 21567 else
d6b5193b 21568 return private_data_section;
ae46c4e0
RH
21569 }
21570}
21571
21572static void
a2369ed3 21573rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21574{
21575 const char *name;
ae46c4e0 21576
5b5198f7
DE
21577 /* Use select_section for private and uninitialized data. */
21578 if (!TREE_PUBLIC (decl)
21579 || DECL_COMMON (decl)
0e5dbd9b
DE
21580 || DECL_INITIAL (decl) == NULL_TREE
21581 || DECL_INITIAL (decl) == error_mark_node
21582 || (flag_zero_initialized_in_bss
21583 && initializer_zerop (DECL_INITIAL (decl))))
21584 return;
21585
21586 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21587 name = (*targetm.strip_name_encoding) (name);
21588 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21589}
b64a1b53 21590
fb49053f
RH
21591/* Select section for constant in constant pool.
21592
21593 On RS/6000, all constants are in the private read-only data area.
21594 However, if this is being placed in the TOC it must be output as a
21595 toc entry. */
21596
d6b5193b 21597static section *
f676971a 21598rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21599 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21600{
21601 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21602 return toc_section;
b64a1b53 21603 else
d6b5193b 21604 return read_only_private_data_section;
b64a1b53 21605}
772c5265
RH
21606
21607/* Remove any trailing [DS] or the like from the symbol name. */
21608
21609static const char *
a2369ed3 21610rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21611{
21612 size_t len;
21613 if (*name == '*')
21614 name++;
21615 len = strlen (name);
21616 if (name[len - 1] == ']')
21617 return ggc_alloc_string (name, len - 4);
21618 else
21619 return name;
21620}
21621
5add3202
DE
21622/* Section attributes. AIX is always PIC. */
21623
21624static unsigned int
a2369ed3 21625rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21626{
5b5198f7 21627 unsigned int align;
9b580a0b 21628 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21629
21630 /* Align to at least UNIT size. */
21631 if (flags & SECTION_CODE)
21632 align = MIN_UNITS_PER_WORD;
21633 else
21634 /* Increase alignment of large objects if not already stricter. */
21635 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21636 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21637 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21638
21639 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21640}
a5fe455b 21641
1bc7c5b6
ZW
21642/* Output at beginning of assembler file.
21643
21644 Initialize the section names for the RS/6000 at this point.
21645
21646 Specify filename, including full path, to assembler.
21647
21648 We want to go into the TOC section so at least one .toc will be emitted.
21649 Also, in order to output proper .bs/.es pairs, we need at least one static
21650 [RW] section emitted.
21651
21652 Finally, declare mcount when profiling to make the assembler happy. */
21653
21654static void
863d938c 21655rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21656{
21657 rs6000_gen_section_name (&xcoff_bss_section_name,
21658 main_input_filename, ".bss_");
21659 rs6000_gen_section_name (&xcoff_private_data_section_name,
21660 main_input_filename, ".rw_");
21661 rs6000_gen_section_name (&xcoff_read_only_section_name,
21662 main_input_filename, ".ro_");
21663
21664 fputs ("\t.file\t", asm_out_file);
21665 output_quoted_string (asm_out_file, main_input_filename);
21666 fputc ('\n', asm_out_file);
1bc7c5b6 21667 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21668 switch_to_section (private_data_section);
21669 switch_to_section (text_section);
1bc7c5b6
ZW
21670 if (profile_flag)
21671 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21672 rs6000_file_start ();
21673}
21674
a5fe455b
ZW
21675/* Output at end of assembler file.
21676 On the RS/6000, referencing data should automatically pull in text. */
21677
21678static void
863d938c 21679rs6000_xcoff_file_end (void)
a5fe455b 21680{
d6b5193b 21681 switch_to_section (text_section);
a5fe455b 21682 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21683 switch_to_section (data_section);
a5fe455b
ZW
21684 fputs (TARGET_32BIT
21685 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21686 asm_out_file);
21687}
f1384257 21688#endif /* TARGET_XCOFF */
0e5dbd9b 21689
3c50106f
RH
21690/* Compute a (partial) cost for rtx X. Return true if the complete
21691 cost has been computed, and false if subexpressions should be
21692 scanned. In either case, *TOTAL contains the cost result. */
21693
21694static bool
f40751dd
JH
21695rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21696 bool speed)
3c50106f 21697{
f0517163
RS
21698 enum machine_mode mode = GET_MODE (x);
21699
3c50106f
RH
21700 switch (code)
21701 {
30a555d9 21702 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21703 case CONST_INT:
066cd967
DE
21704 if (((outer_code == SET
21705 || outer_code == PLUS
21706 || outer_code == MINUS)
279bb624
DE
21707 && (satisfies_constraint_I (x)
21708 || satisfies_constraint_L (x)))
066cd967 21709 || (outer_code == AND
279bb624
DE
21710 && (satisfies_constraint_K (x)
21711 || (mode == SImode
21712 ? satisfies_constraint_L (x)
21713 : satisfies_constraint_J (x))
1990cd79
AM
21714 || mask_operand (x, mode)
21715 || (mode == DImode
21716 && mask64_operand (x, DImode))))
22e54023 21717 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21718 && (satisfies_constraint_K (x)
21719 || (mode == SImode
21720 ? satisfies_constraint_L (x)
21721 : satisfies_constraint_J (x))))
066cd967
DE
21722 || outer_code == ASHIFT
21723 || outer_code == ASHIFTRT
21724 || outer_code == LSHIFTRT
21725 || outer_code == ROTATE
21726 || outer_code == ROTATERT
d5861a7a 21727 || outer_code == ZERO_EXTRACT
066cd967 21728 || (outer_code == MULT
279bb624 21729 && satisfies_constraint_I (x))
22e54023
DE
21730 || ((outer_code == DIV || outer_code == UDIV
21731 || outer_code == MOD || outer_code == UMOD)
21732 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21733 || (outer_code == COMPARE
279bb624
DE
21734 && (satisfies_constraint_I (x)
21735 || satisfies_constraint_K (x)))
22e54023 21736 || (outer_code == EQ
279bb624
DE
21737 && (satisfies_constraint_I (x)
21738 || satisfies_constraint_K (x)
21739 || (mode == SImode
21740 ? satisfies_constraint_L (x)
21741 : satisfies_constraint_J (x))))
22e54023 21742 || (outer_code == GTU
279bb624 21743 && satisfies_constraint_I (x))
22e54023 21744 || (outer_code == LTU
279bb624 21745 && satisfies_constraint_P (x)))
066cd967
DE
21746 {
21747 *total = 0;
21748 return true;
21749 }
21750 else if ((outer_code == PLUS
4ae234b0 21751 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21752 || (outer_code == MINUS
4ae234b0 21753 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21754 || ((outer_code == SET
21755 || outer_code == IOR
21756 || outer_code == XOR)
21757 && (INTVAL (x)
21758 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21759 {
21760 *total = COSTS_N_INSNS (1);
21761 return true;
21762 }
21763 /* FALLTHRU */
21764
21765 case CONST_DOUBLE:
f6fe3a22 21766 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21767 {
f6fe3a22
DE
21768 if ((outer_code == IOR || outer_code == XOR)
21769 && CONST_DOUBLE_HIGH (x) == 0
21770 && (CONST_DOUBLE_LOW (x)
21771 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21772 {
21773 *total = 0;
21774 return true;
21775 }
21776 else if ((outer_code == AND && and64_2_operand (x, DImode))
21777 || ((outer_code == SET
21778 || outer_code == IOR
21779 || outer_code == XOR)
21780 && CONST_DOUBLE_HIGH (x) == 0))
21781 {
21782 *total = COSTS_N_INSNS (1);
21783 return true;
21784 }
066cd967
DE
21785 }
21786 /* FALLTHRU */
21787
3c50106f 21788 case CONST:
066cd967 21789 case HIGH:
3c50106f 21790 case SYMBOL_REF:
066cd967
DE
21791 case MEM:
21792 /* When optimizing for size, MEM should be slightly more expensive
21793 than generating address, e.g., (plus (reg) (const)).
c112cf2b 21794 L1 cache latency is about two instructions. */
f40751dd 21795 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21796 return true;
21797
30a555d9
DE
21798 case LABEL_REF:
21799 *total = 0;
21800 return true;
21801
3c50106f 21802 case PLUS:
f0517163 21803 if (mode == DFmode)
066cd967
DE
21804 {
21805 if (GET_CODE (XEXP (x, 0)) == MULT)
21806 {
21807 /* FNMA accounted in outer NEG. */
21808 if (outer_code == NEG)
21809 *total = rs6000_cost->dmul - rs6000_cost->fp;
21810 else
21811 *total = rs6000_cost->dmul;
21812 }
21813 else
21814 *total = rs6000_cost->fp;
21815 }
f0517163 21816 else if (mode == SFmode)
066cd967
DE
21817 {
21818 /* FNMA accounted in outer NEG. */
21819 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21820 *total = 0;
21821 else
21822 *total = rs6000_cost->fp;
21823 }
f0517163 21824 else
066cd967
DE
21825 *total = COSTS_N_INSNS (1);
21826 return false;
3c50106f 21827
52190329 21828 case MINUS:
f0517163 21829 if (mode == DFmode)
066cd967 21830 {
762c919f
JM
21831 if (GET_CODE (XEXP (x, 0)) == MULT
21832 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21833 {
21834 /* FNMA accounted in outer NEG. */
21835 if (outer_code == NEG)
762c919f 21836 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21837 else
21838 *total = rs6000_cost->dmul;
21839 }
21840 else
21841 *total = rs6000_cost->fp;
21842 }
f0517163 21843 else if (mode == SFmode)
066cd967
DE
21844 {
21845 /* FNMA accounted in outer NEG. */
21846 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21847 *total = 0;
21848 else
21849 *total = rs6000_cost->fp;
21850 }
f0517163 21851 else
c4ad648e 21852 *total = COSTS_N_INSNS (1);
066cd967 21853 return false;
3c50106f
RH
21854
21855 case MULT:
c9dbf840 21856 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21857 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21858 {
8b897cfa
RS
21859 if (INTVAL (XEXP (x, 1)) >= -256
21860 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21861 *total = rs6000_cost->mulsi_const9;
8b897cfa 21862 else
06a67bdd 21863 *total = rs6000_cost->mulsi_const;
3c50106f 21864 }
066cd967
DE
21865 /* FMA accounted in outer PLUS/MINUS. */
21866 else if ((mode == DFmode || mode == SFmode)
21867 && (outer_code == PLUS || outer_code == MINUS))
21868 *total = 0;
f0517163 21869 else if (mode == DFmode)
06a67bdd 21870 *total = rs6000_cost->dmul;
f0517163 21871 else if (mode == SFmode)
06a67bdd 21872 *total = rs6000_cost->fp;
f0517163 21873 else if (mode == DImode)
06a67bdd 21874 *total = rs6000_cost->muldi;
8b897cfa 21875 else
06a67bdd 21876 *total = rs6000_cost->mulsi;
066cd967 21877 return false;
3c50106f
RH
21878
21879 case DIV:
21880 case MOD:
f0517163
RS
21881 if (FLOAT_MODE_P (mode))
21882 {
06a67bdd
RS
21883 *total = mode == DFmode ? rs6000_cost->ddiv
21884 : rs6000_cost->sdiv;
066cd967 21885 return false;
f0517163 21886 }
5efb1046 21887 /* FALLTHRU */
3c50106f
RH
21888
21889 case UDIV:
21890 case UMOD:
627b6fe2
DJ
21891 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21892 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21893 {
21894 if (code == DIV || code == MOD)
21895 /* Shift, addze */
21896 *total = COSTS_N_INSNS (2);
21897 else
21898 /* Shift */
21899 *total = COSTS_N_INSNS (1);
21900 }
c4ad648e 21901 else
627b6fe2
DJ
21902 {
21903 if (GET_MODE (XEXP (x, 1)) == DImode)
21904 *total = rs6000_cost->divdi;
21905 else
21906 *total = rs6000_cost->divsi;
21907 }
21908 /* Add in shift and subtract for MOD. */
21909 if (code == MOD || code == UMOD)
21910 *total += COSTS_N_INSNS (2);
066cd967 21911 return false;
3c50106f 21912
32f56aad 21913 case CTZ:
3c50106f
RH
21914 case FFS:
21915 *total = COSTS_N_INSNS (4);
066cd967 21916 return false;
3c50106f 21917
32f56aad
DE
21918 case POPCOUNT:
21919 *total = COSTS_N_INSNS (6);
21920 return false;
21921
06a67bdd 21922 case NOT:
066cd967
DE
21923 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21924 {
21925 *total = 0;
21926 return false;
21927 }
21928 /* FALLTHRU */
21929
21930 case AND:
32f56aad 21931 case CLZ:
066cd967
DE
21932 case IOR:
21933 case XOR:
d5861a7a
DE
21934 case ZERO_EXTRACT:
21935 *total = COSTS_N_INSNS (1);
21936 return false;
21937
066cd967
DE
21938 case ASHIFT:
21939 case ASHIFTRT:
21940 case LSHIFTRT:
21941 case ROTATE:
21942 case ROTATERT:
d5861a7a 21943 /* Handle mul_highpart. */
066cd967
DE
21944 if (outer_code == TRUNCATE
21945 && GET_CODE (XEXP (x, 0)) == MULT)
21946 {
21947 if (mode == DImode)
21948 *total = rs6000_cost->muldi;
21949 else
21950 *total = rs6000_cost->mulsi;
21951 return true;
21952 }
d5861a7a
DE
21953 else if (outer_code == AND)
21954 *total = 0;
21955 else
21956 *total = COSTS_N_INSNS (1);
21957 return false;
21958
21959 case SIGN_EXTEND:
21960 case ZERO_EXTEND:
21961 if (GET_CODE (XEXP (x, 0)) == MEM)
21962 *total = 0;
21963 else
21964 *total = COSTS_N_INSNS (1);
066cd967 21965 return false;
06a67bdd 21966
066cd967
DE
21967 case COMPARE:
21968 case NEG:
21969 case ABS:
21970 if (!FLOAT_MODE_P (mode))
21971 {
21972 *total = COSTS_N_INSNS (1);
21973 return false;
21974 }
21975 /* FALLTHRU */
21976
21977 case FLOAT:
21978 case UNSIGNED_FLOAT:
21979 case FIX:
21980 case UNSIGNED_FIX:
06a67bdd
RS
21981 case FLOAT_TRUNCATE:
21982 *total = rs6000_cost->fp;
066cd967 21983 return false;
06a67bdd 21984
a2af5043
DJ
21985 case FLOAT_EXTEND:
21986 if (mode == DFmode)
21987 *total = 0;
21988 else
21989 *total = rs6000_cost->fp;
21990 return false;
21991
06a67bdd
RS
21992 case UNSPEC:
21993 switch (XINT (x, 1))
21994 {
21995 case UNSPEC_FRSP:
21996 *total = rs6000_cost->fp;
21997 return true;
21998
21999 default:
22000 break;
22001 }
22002 break;
22003
22004 case CALL:
22005 case IF_THEN_ELSE:
f40751dd 22006 if (!speed)
06a67bdd
RS
22007 {
22008 *total = COSTS_N_INSNS (1);
22009 return true;
22010 }
066cd967
DE
22011 else if (FLOAT_MODE_P (mode)
22012 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22013 {
22014 *total = rs6000_cost->fp;
22015 return false;
22016 }
06a67bdd
RS
22017 break;
22018
c0600ecd
DE
22019 case EQ:
22020 case GTU:
22021 case LTU:
22e54023
DE
22022 /* Carry bit requires mode == Pmode.
22023 NEG or PLUS already counted so only add one. */
22024 if (mode == Pmode
22025 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 22026 {
22e54023
DE
22027 *total = COSTS_N_INSNS (1);
22028 return true;
22029 }
22030 if (outer_code == SET)
22031 {
22032 if (XEXP (x, 1) == const0_rtx)
c0600ecd 22033 {
22e54023 22034 *total = COSTS_N_INSNS (2);
c0600ecd 22035 return true;
c0600ecd 22036 }
22e54023
DE
22037 else if (mode == Pmode)
22038 {
22039 *total = COSTS_N_INSNS (3);
22040 return false;
22041 }
22042 }
22043 /* FALLTHRU */
22044
22045 case GT:
22046 case LT:
22047 case UNORDERED:
22048 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22049 {
22050 *total = COSTS_N_INSNS (2);
22051 return true;
c0600ecd 22052 }
22e54023
DE
22053 /* CC COMPARE. */
22054 if (outer_code == COMPARE)
22055 {
22056 *total = 0;
22057 return true;
22058 }
22059 break;
c0600ecd 22060
3c50106f 22061 default:
06a67bdd 22062 break;
3c50106f 22063 }
06a67bdd
RS
22064
22065 return false;
3c50106f
RH
22066}
22067
34bb030a
DE
22068/* A C expression returning the cost of moving data from a register of class
22069 CLASS1 to one of CLASS2. */
22070
22071int
f676971a 22072rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 22073 enum reg_class from, enum reg_class to)
34bb030a
DE
22074{
22075 /* Moves from/to GENERAL_REGS. */
22076 if (reg_classes_intersect_p (to, GENERAL_REGS)
22077 || reg_classes_intersect_p (from, GENERAL_REGS))
22078 {
22079 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22080 from = to;
22081
22082 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22083 return (rs6000_memory_move_cost (mode, from, 0)
22084 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22085
c4ad648e
AM
22086 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22087 shift. */
34bb030a
DE
22088 else if (from == CR_REGS)
22089 return 4;
22090
aafc759a
PH
22091 /* Power6 has slower LR/CTR moves so make them more expensive than
22092 memory in order to bias spills to memory .*/
22093 else if (rs6000_cpu == PROCESSOR_POWER6
22094 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22095 return 6 * hard_regno_nregs[0][mode];
22096
34bb030a 22097 else
c4ad648e 22098 /* A move will cost one instruction per GPR moved. */
c8b622ff 22099 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
22100 }
22101
c4ad648e 22102 /* Moving between two similar registers is just one instruction. */
34bb030a 22103 else if (reg_classes_intersect_p (to, from))
7393f7f8 22104 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 22105
c4ad648e 22106 /* Everything else has to go through GENERAL_REGS. */
34bb030a 22107 else
f676971a 22108 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
22109 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
22110}
22111
22112/* A C expressions returning the cost of moving data of MODE from a register to
22113 or from memory. */
22114
22115int
0a2aaacc 22116rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 22117 int in ATTRIBUTE_UNUSED)
34bb030a 22118{
0a2aaacc 22119 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 22120 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 22121 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 22122 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 22123 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 22124 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 22125 else
0a2aaacc 22126 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
22127}
22128
9c78b944
DE
22129/* Returns a code for a target-specific builtin that implements
22130 reciprocal of the function, or NULL_TREE if not available. */
22131
22132static tree
22133rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22134 bool sqrt ATTRIBUTE_UNUSED)
22135{
22136 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22137 && flag_finite_math_only && !flag_trapping_math
22138 && flag_unsafe_math_optimizations))
22139 return NULL_TREE;
22140
22141 if (md_fn)
22142 return NULL_TREE;
22143 else
22144 switch (fn)
22145 {
22146 case BUILT_IN_SQRTF:
22147 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22148
22149 default:
22150 return NULL_TREE;
22151 }
22152}
22153
ef765ea9
DE
22154/* Newton-Raphson approximation of single-precision floating point divide n/d.
22155 Assumes no trapping math and finite arguments. */
22156
22157void
9c78b944 22158rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22159{
22160 rtx x0, e0, e1, y1, u0, v0, one;
22161
22162 x0 = gen_reg_rtx (SFmode);
22163 e0 = gen_reg_rtx (SFmode);
22164 e1 = gen_reg_rtx (SFmode);
22165 y1 = gen_reg_rtx (SFmode);
22166 u0 = gen_reg_rtx (SFmode);
22167 v0 = gen_reg_rtx (SFmode);
22168 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22169
22170 /* x0 = 1./d estimate */
22171 emit_insn (gen_rtx_SET (VOIDmode, x0,
22172 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22173 UNSPEC_FRES)));
22174 /* e0 = 1. - d * x0 */
22175 emit_insn (gen_rtx_SET (VOIDmode, e0,
22176 gen_rtx_MINUS (SFmode, one,
22177 gen_rtx_MULT (SFmode, d, x0))));
22178 /* e1 = e0 + e0 * e0 */
22179 emit_insn (gen_rtx_SET (VOIDmode, e1,
22180 gen_rtx_PLUS (SFmode,
22181 gen_rtx_MULT (SFmode, e0, e0), e0)));
22182 /* y1 = x0 + e1 * x0 */
22183 emit_insn (gen_rtx_SET (VOIDmode, y1,
22184 gen_rtx_PLUS (SFmode,
22185 gen_rtx_MULT (SFmode, e1, x0), x0)));
22186 /* u0 = n * y1 */
22187 emit_insn (gen_rtx_SET (VOIDmode, u0,
22188 gen_rtx_MULT (SFmode, n, y1)));
22189 /* v0 = n - d * u0 */
22190 emit_insn (gen_rtx_SET (VOIDmode, v0,
22191 gen_rtx_MINUS (SFmode, n,
22192 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22193 /* dst = u0 + v0 * y1 */
22194 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22195 gen_rtx_PLUS (SFmode,
22196 gen_rtx_MULT (SFmode, v0, y1), u0)));
22197}
22198
22199/* Newton-Raphson approximation of double-precision floating point divide n/d.
22200 Assumes no trapping math and finite arguments. */
22201
22202void
9c78b944 22203rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22204{
22205 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22206
22207 x0 = gen_reg_rtx (DFmode);
22208 e0 = gen_reg_rtx (DFmode);
22209 e1 = gen_reg_rtx (DFmode);
22210 e2 = gen_reg_rtx (DFmode);
22211 y1 = gen_reg_rtx (DFmode);
22212 y2 = gen_reg_rtx (DFmode);
22213 y3 = gen_reg_rtx (DFmode);
22214 u0 = gen_reg_rtx (DFmode);
22215 v0 = gen_reg_rtx (DFmode);
22216 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22217
22218 /* x0 = 1./d estimate */
22219 emit_insn (gen_rtx_SET (VOIDmode, x0,
22220 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22221 UNSPEC_FRES)));
22222 /* e0 = 1. - d * x0 */
22223 emit_insn (gen_rtx_SET (VOIDmode, e0,
22224 gen_rtx_MINUS (DFmode, one,
22225 gen_rtx_MULT (SFmode, d, x0))));
22226 /* y1 = x0 + e0 * x0 */
22227 emit_insn (gen_rtx_SET (VOIDmode, y1,
22228 gen_rtx_PLUS (DFmode,
22229 gen_rtx_MULT (DFmode, e0, x0), x0)));
22230 /* e1 = e0 * e0 */
22231 emit_insn (gen_rtx_SET (VOIDmode, e1,
22232 gen_rtx_MULT (DFmode, e0, e0)));
22233 /* y2 = y1 + e1 * y1 */
22234 emit_insn (gen_rtx_SET (VOIDmode, y2,
22235 gen_rtx_PLUS (DFmode,
22236 gen_rtx_MULT (DFmode, e1, y1), y1)));
22237 /* e2 = e1 * e1 */
22238 emit_insn (gen_rtx_SET (VOIDmode, e2,
22239 gen_rtx_MULT (DFmode, e1, e1)));
22240 /* y3 = y2 + e2 * y2 */
22241 emit_insn (gen_rtx_SET (VOIDmode, y3,
22242 gen_rtx_PLUS (DFmode,
22243 gen_rtx_MULT (DFmode, e2, y2), y2)));
22244 /* u0 = n * y3 */
22245 emit_insn (gen_rtx_SET (VOIDmode, u0,
22246 gen_rtx_MULT (DFmode, n, y3)));
22247 /* v0 = n - d * u0 */
22248 emit_insn (gen_rtx_SET (VOIDmode, v0,
22249 gen_rtx_MINUS (DFmode, n,
22250 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22251 /* dst = u0 + v0 * y3 */
22252 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22253 gen_rtx_PLUS (DFmode,
22254 gen_rtx_MULT (DFmode, v0, y3), u0)));
22255}
22256
565ef4ba 22257
9c78b944
DE
22258/* Newton-Raphson approximation of single-precision floating point rsqrt.
22259 Assumes no trapping math and finite arguments. */
22260
22261void
22262rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22263{
22264 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22265 half, one, halfthree, c1, cond, label;
22266
22267 x0 = gen_reg_rtx (SFmode);
22268 x1 = gen_reg_rtx (SFmode);
22269 x2 = gen_reg_rtx (SFmode);
22270 y1 = gen_reg_rtx (SFmode);
22271 u0 = gen_reg_rtx (SFmode);
22272 u1 = gen_reg_rtx (SFmode);
22273 u2 = gen_reg_rtx (SFmode);
22274 v0 = gen_reg_rtx (SFmode);
22275 v1 = gen_reg_rtx (SFmode);
22276 v2 = gen_reg_rtx (SFmode);
22277 t0 = gen_reg_rtx (SFmode);
22278 halfthree = gen_reg_rtx (SFmode);
22279 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22280 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22281
22282 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
22283 emit_insn (gen_rtx_SET (VOIDmode, t0,
22284 gen_rtx_MULT (SFmode, src, src)));
22285
22286 emit_insn (gen_rtx_SET (VOIDmode, cond,
22287 gen_rtx_COMPARE (CCFPmode, t0, src)));
22288 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22289 emit_unlikely_jump (c1, label);
22290
22291 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22292 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22293
22294 /* halfthree = 1.5 = 1.0 + 0.5 */
22295 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22296 gen_rtx_PLUS (SFmode, one, half)));
22297
22298 /* x0 = rsqrt estimate */
22299 emit_insn (gen_rtx_SET (VOIDmode, x0,
22300 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22301 UNSPEC_RSQRT)));
22302
22303 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22304 emit_insn (gen_rtx_SET (VOIDmode, y1,
22305 gen_rtx_MINUS (SFmode,
22306 gen_rtx_MULT (SFmode, src, halfthree),
22307 src)));
22308
22309 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22310 emit_insn (gen_rtx_SET (VOIDmode, u0,
22311 gen_rtx_MULT (SFmode, x0, x0)));
22312 emit_insn (gen_rtx_SET (VOIDmode, v0,
22313 gen_rtx_MINUS (SFmode,
22314 halfthree,
22315 gen_rtx_MULT (SFmode, y1, u0))));
22316 emit_insn (gen_rtx_SET (VOIDmode, x1,
22317 gen_rtx_MULT (SFmode, x0, v0)));
22318
22319 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22320 emit_insn (gen_rtx_SET (VOIDmode, u1,
22321 gen_rtx_MULT (SFmode, x1, x1)));
22322 emit_insn (gen_rtx_SET (VOIDmode, v1,
22323 gen_rtx_MINUS (SFmode,
22324 halfthree,
22325 gen_rtx_MULT (SFmode, y1, u1))));
22326 emit_insn (gen_rtx_SET (VOIDmode, x2,
22327 gen_rtx_MULT (SFmode, x1, v1)));
22328
22329 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22330 emit_insn (gen_rtx_SET (VOIDmode, u2,
22331 gen_rtx_MULT (SFmode, x2, x2)));
22332 emit_insn (gen_rtx_SET (VOIDmode, v2,
22333 gen_rtx_MINUS (SFmode,
22334 halfthree,
22335 gen_rtx_MULT (SFmode, y1, u2))));
22336 emit_insn (gen_rtx_SET (VOIDmode, dst,
22337 gen_rtx_MULT (SFmode, x2, v2)));
22338
22339 emit_label (XEXP (label, 0));
22340}
22341
565ef4ba
RS
22342/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22343 target, and SRC is the argument operand. */
22344
22345void
22346rs6000_emit_popcount (rtx dst, rtx src)
22347{
22348 enum machine_mode mode = GET_MODE (dst);
22349 rtx tmp1, tmp2;
22350
22351 tmp1 = gen_reg_rtx (mode);
22352
22353 if (mode == SImode)
22354 {
22355 emit_insn (gen_popcntbsi2 (tmp1, src));
22356 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22357 NULL_RTX, 0);
22358 tmp2 = force_reg (SImode, tmp2);
22359 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22360 }
22361 else
22362 {
22363 emit_insn (gen_popcntbdi2 (tmp1, src));
22364 tmp2 = expand_mult (DImode, tmp1,
22365 GEN_INT ((HOST_WIDE_INT)
22366 0x01010101 << 32 | 0x01010101),
22367 NULL_RTX, 0);
22368 tmp2 = force_reg (DImode, tmp2);
22369 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22370 }
22371}
22372
22373
22374/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22375 target, and SRC is the argument operand. */
22376
22377void
22378rs6000_emit_parity (rtx dst, rtx src)
22379{
22380 enum machine_mode mode = GET_MODE (dst);
22381 rtx tmp;
22382
22383 tmp = gen_reg_rtx (mode);
22384 if (mode == SImode)
22385 {
22386 /* Is mult+shift >= shift+xor+shift+xor? */
22387 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22388 {
22389 rtx tmp1, tmp2, tmp3, tmp4;
22390
22391 tmp1 = gen_reg_rtx (SImode);
22392 emit_insn (gen_popcntbsi2 (tmp1, src));
22393
22394 tmp2 = gen_reg_rtx (SImode);
22395 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22396 tmp3 = gen_reg_rtx (SImode);
22397 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22398
22399 tmp4 = gen_reg_rtx (SImode);
22400 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22401 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22402 }
22403 else
22404 rs6000_emit_popcount (tmp, src);
22405 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22406 }
22407 else
22408 {
22409 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22410 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22411 {
22412 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22413
22414 tmp1 = gen_reg_rtx (DImode);
22415 emit_insn (gen_popcntbdi2 (tmp1, src));
22416
22417 tmp2 = gen_reg_rtx (DImode);
22418 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22419 tmp3 = gen_reg_rtx (DImode);
22420 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22421
22422 tmp4 = gen_reg_rtx (DImode);
22423 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22424 tmp5 = gen_reg_rtx (DImode);
22425 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22426
22427 tmp6 = gen_reg_rtx (DImode);
22428 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22429 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22430 }
22431 else
22432 rs6000_emit_popcount (tmp, src);
22433 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22434 }
22435}
22436
ded9bf77
AH
22437/* Return an RTX representing where to find the function value of a
22438 function returning MODE. */
22439static rtx
22440rs6000_complex_function_value (enum machine_mode mode)
22441{
22442 unsigned int regno;
22443 rtx r1, r2;
22444 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22445 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22446
18f63bfa
AH
22447 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22448 regno = FP_ARG_RETURN;
354ed18f
AH
22449 else
22450 {
18f63bfa 22451 regno = GP_ARG_RETURN;
ded9bf77 22452
18f63bfa
AH
22453 /* 32-bit is OK since it'll go in r3/r4. */
22454 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22455 return gen_rtx_REG (mode, regno);
22456 }
22457
18f63bfa
AH
22458 if (inner_bytes >= 8)
22459 return gen_rtx_REG (mode, regno);
22460
ded9bf77
AH
22461 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22462 const0_rtx);
22463 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22464 GEN_INT (inner_bytes));
ded9bf77
AH
22465 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22466}
22467
a6ebc39a
AH
22468/* Define how to find the value returned by a function.
22469 VALTYPE is the data type of the value (as a tree).
22470 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22471 otherwise, FUNC is 0.
22472
22473 On the SPE, both FPs and vectors are returned in r3.
22474
22475 On RS/6000 an integer value is in r3 and a floating-point value is in
22476 fp1, unless -msoft-float. */
22477
22478rtx
586de218 22479rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22480{
22481 enum machine_mode mode;
2a8fa26c 22482 unsigned int regno;
a6ebc39a 22483
594a51fe
SS
22484 /* Special handling for structs in darwin64. */
22485 if (rs6000_darwin64_abi
22486 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22487 && TREE_CODE (valtype) == RECORD_TYPE
22488 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22489 {
22490 CUMULATIVE_ARGS valcum;
22491 rtx valret;
22492
0b5383eb 22493 valcum.words = 0;
594a51fe
SS
22494 valcum.fregno = FP_ARG_MIN_REG;
22495 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22496 /* Do a trial code generation as if this were going to be passed as
22497 an argument; if any part goes in memory, we return NULL. */
22498 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22499 if (valret)
22500 return valret;
22501 /* Otherwise fall through to standard ABI rules. */
22502 }
22503
0e67400a
FJ
22504 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22505 {
22506 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
22507 return gen_rtx_PARALLEL (DImode,
22508 gen_rtvec (2,
22509 gen_rtx_EXPR_LIST (VOIDmode,
22510 gen_rtx_REG (SImode, GP_ARG_RETURN),
22511 const0_rtx),
22512 gen_rtx_EXPR_LIST (VOIDmode,
22513 gen_rtx_REG (SImode,
22514 GP_ARG_RETURN + 1),
22515 GEN_INT (4))));
22516 }
0f086e42
FJ
22517 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22518 {
22519 return gen_rtx_PARALLEL (DCmode,
22520 gen_rtvec (4,
22521 gen_rtx_EXPR_LIST (VOIDmode,
22522 gen_rtx_REG (SImode, GP_ARG_RETURN),
22523 const0_rtx),
22524 gen_rtx_EXPR_LIST (VOIDmode,
22525 gen_rtx_REG (SImode,
22526 GP_ARG_RETURN + 1),
22527 GEN_INT (4)),
22528 gen_rtx_EXPR_LIST (VOIDmode,
22529 gen_rtx_REG (SImode,
22530 GP_ARG_RETURN + 2),
22531 GEN_INT (8)),
22532 gen_rtx_EXPR_LIST (VOIDmode,
22533 gen_rtx_REG (SImode,
22534 GP_ARG_RETURN + 3),
22535 GEN_INT (12))));
22536 }
602ea4d3 22537
7348aa7f
FXC
22538 mode = TYPE_MODE (valtype);
22539 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22540 || POINTER_TYPE_P (valtype))
b78d48dd 22541 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22542
e41b2a33
PB
22543 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22544 /* _Decimal128 must use an even/odd register pair. */
22545 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
56f4cc04 22546 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 22547 regno = FP_ARG_RETURN;
ded9bf77 22548 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22549 && targetm.calls.split_complex_arg)
ded9bf77 22550 return rs6000_complex_function_value (mode);
44688022 22551 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22552 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22553 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22554 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22555 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22556 && (mode == DFmode || mode == DCmode
22557 || mode == TFmode || mode == TCmode))
18f63bfa 22558 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22559 else
22560 regno = GP_ARG_RETURN;
22561
22562 return gen_rtx_REG (mode, regno);
22563}
22564
ded9bf77
AH
22565/* Define how to find the value returned by a library function
22566 assuming the value has mode MODE. */
22567rtx
22568rs6000_libcall_value (enum machine_mode mode)
22569{
22570 unsigned int regno;
22571
2e6c9641
FJ
22572 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22573 {
22574 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
22575 return gen_rtx_PARALLEL (DImode,
22576 gen_rtvec (2,
22577 gen_rtx_EXPR_LIST (VOIDmode,
22578 gen_rtx_REG (SImode, GP_ARG_RETURN),
22579 const0_rtx),
22580 gen_rtx_EXPR_LIST (VOIDmode,
22581 gen_rtx_REG (SImode,
22582 GP_ARG_RETURN + 1),
22583 GEN_INT (4))));
22584 }
22585
e41b2a33
PB
22586 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22587 /* _Decimal128 must use an even/odd register pair. */
22588 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22589 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
22590 && TARGET_HARD_FLOAT && TARGET_FPRS)
22591 regno = FP_ARG_RETURN;
44688022
AM
22592 else if (ALTIVEC_VECTOR_MODE (mode)
22593 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22594 regno = ALTIVEC_ARG_RETURN;
42ba5130 22595 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22596 return rs6000_complex_function_value (mode);
18f63bfa 22597 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22598 && (mode == DFmode || mode == DCmode
22599 || mode == TFmode || mode == TCmode))
18f63bfa 22600 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22601 else
22602 regno = GP_ARG_RETURN;
22603
22604 return gen_rtx_REG (mode, regno);
22605}
22606
d1d0c603
JJ
22607/* Define the offset between two registers, FROM to be eliminated and its
22608 replacement TO, at the start of a routine. */
22609HOST_WIDE_INT
22610rs6000_initial_elimination_offset (int from, int to)
22611{
22612 rs6000_stack_t *info = rs6000_stack_info ();
22613 HOST_WIDE_INT offset;
22614
7d5175e1 22615 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22616 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22617 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22618 {
22619 offset = info->push_p ? 0 : -info->total_size;
22620 if (FRAME_GROWS_DOWNWARD)
5b667039 22621 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22622 }
22623 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22624 offset = FRAME_GROWS_DOWNWARD
5b667039 22625 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22626 : 0;
22627 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22628 offset = info->total_size;
22629 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22630 offset = info->push_p ? info->total_size : 0;
22631 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22632 offset = 0;
22633 else
37409796 22634 gcc_unreachable ();
d1d0c603
JJ
22635
22636 return offset;
22637}
22638
58646b77 22639/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 22640
c8e4f0e9 22641static bool
3101faab 22642rs6000_is_opaque_type (const_tree type)
62e1dfcf 22643{
58646b77 22644 return (type == opaque_V2SI_type_node
2abe3e28 22645 || type == opaque_V2SF_type_node
58646b77 22646 || type == opaque_V4SI_type_node);
62e1dfcf
NC
22647}
22648
96714395 22649static rtx
a2369ed3 22650rs6000_dwarf_register_span (rtx reg)
96714395
AH
22651{
22652 unsigned regno;
22653
4d4cbc0e
AH
22654 if (TARGET_SPE
22655 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
22656 || (TARGET_E500_DOUBLE
22657 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
22658 ;
22659 else
96714395
AH
22660 return NULL_RTX;
22661
22662 regno = REGNO (reg);
22663
22664 /* The duality of the SPE register size wreaks all kinds of havoc.
22665 This is a way of distinguishing r0 in 32-bits from r0 in
22666 64-bits. */
22667 return
22668 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
22669 BYTES_BIG_ENDIAN
22670 ? gen_rtvec (2,
22671 gen_rtx_REG (SImode, regno + 1200),
22672 gen_rtx_REG (SImode, regno))
22673 : gen_rtvec (2,
22674 gen_rtx_REG (SImode, regno),
22675 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
22676}
22677
37ea0b7e
JM
22678/* Fill in sizes for SPE register high parts in table used by unwinder. */
22679
22680static void
22681rs6000_init_dwarf_reg_sizes_extra (tree address)
22682{
22683 if (TARGET_SPE)
22684 {
22685 int i;
22686 enum machine_mode mode = TYPE_MODE (char_type_node);
22687 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
22688 rtx mem = gen_rtx_MEM (BLKmode, addr);
22689 rtx value = gen_int_mode (4, mode);
22690
22691 for (i = 1201; i < 1232; i++)
22692 {
22693 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22694 HOST_WIDE_INT offset
22695 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22696
22697 emit_move_insn (adjust_address (mem, mode, offset), value);
22698 }
22699 }
22700}
22701
93c9d1ba
AM
22702/* Map internal gcc register numbers to DWARF2 register numbers. */
22703
22704unsigned int
22705rs6000_dbx_register_number (unsigned int regno)
22706{
22707 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22708 return regno;
22709 if (regno == MQ_REGNO)
22710 return 100;
1de43f85 22711 if (regno == LR_REGNO)
93c9d1ba 22712 return 108;
1de43f85 22713 if (regno == CTR_REGNO)
93c9d1ba
AM
22714 return 109;
22715 if (CR_REGNO_P (regno))
22716 return regno - CR0_REGNO + 86;
22717 if (regno == XER_REGNO)
22718 return 101;
22719 if (ALTIVEC_REGNO_P (regno))
22720 return regno - FIRST_ALTIVEC_REGNO + 1124;
22721 if (regno == VRSAVE_REGNO)
22722 return 356;
22723 if (regno == VSCR_REGNO)
22724 return 67;
22725 if (regno == SPE_ACC_REGNO)
22726 return 99;
22727 if (regno == SPEFSCR_REGNO)
22728 return 612;
22729 /* SPE high reg number. We get these values of regno from
22730 rs6000_dwarf_register_span. */
37409796
NS
22731 gcc_assert (regno >= 1200 && regno < 1232);
22732 return regno;
93c9d1ba
AM
22733}
22734
93f90be6 22735/* target hook eh_return_filter_mode */
f676971a 22736static enum machine_mode
93f90be6
FJ
22737rs6000_eh_return_filter_mode (void)
22738{
22739 return TARGET_32BIT ? SImode : word_mode;
22740}
22741
00b79d54
BE
22742/* Target hook for scalar_mode_supported_p. */
22743static bool
22744rs6000_scalar_mode_supported_p (enum machine_mode mode)
22745{
22746 if (DECIMAL_FLOAT_MODE_P (mode))
22747 return true;
22748 else
22749 return default_scalar_mode_supported_p (mode);
22750}
22751
f676971a
EC
22752/* Target hook for vector_mode_supported_p. */
22753static bool
22754rs6000_vector_mode_supported_p (enum machine_mode mode)
22755{
22756
96038623
DE
22757 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22758 return true;
22759
f676971a
EC
22760 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22761 return true;
22762
22763 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22764 return true;
22765
22766 else
22767 return false;
22768}
22769
bb8df8a6
EC
22770/* Target hook for invalid_arg_for_unprototyped_fn. */
22771static const char *
3101faab 22772invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22773{
22774 return (!rs6000_darwin64_abi
22775 && typelist == 0
22776 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22777 && (funcdecl == NULL_TREE
22778 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22779 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22780 ? N_("AltiVec argument passed to unprototyped function")
22781 : NULL;
22782}
22783
3aebbe5f
JJ
22784/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22785 setup by using __stack_chk_fail_local hidden function instead of
22786 calling __stack_chk_fail directly. Otherwise it is better to call
22787 __stack_chk_fail directly. */
22788
22789static tree
22790rs6000_stack_protect_fail (void)
22791{
22792 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22793 ? default_hidden_stack_protect_fail ()
22794 : default_external_stack_protect_fail ();
22795}
22796
17211ab5 22797#include "gt-rs6000.h"