]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rs6000/rs6000.c
collect2.c (scan_prog_file, [...]): Use CONST_CAST instead of bare conversion to...
[thirdparty/gcc.git] / gcc / config / rs6000 / rs6000.c
CommitLineData
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
1c9c5e43 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
2f83c7d6
NC
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
cd3ce9b4 55#include "tree-gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
130 64-bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
94f4765c
NF
177/* Nonzero if we want SPE SIMD instructions. */
178int rs6000_spe;
179
a3170dc6
AH
180/* Nonzero if we want SPE ABI extensions. */
181int rs6000_spe_abi;
182
94f4765c
NF
183/* Nonzero to use isel instructions. */
184int rs6000_isel;
185
5da702b1
AH
186/* Nonzero if floating point operations are done in the GPRs. */
187int rs6000_float_gprs = 0;
188
594a51fe
SS
189/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
190int rs6000_darwin64_abi;
191
a0ab749a 192/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 193static GTY(()) int common_mode_defined;
c81bebd7 194
9878760c
RK
195/* Save information from a "cmpxx" operation until the branch or scc is
196 emitted. */
9878760c
RK
197rtx rs6000_compare_op0, rs6000_compare_op1;
198int rs6000_compare_fp_p;
874a0744 199
874a0744
MM
200/* Label number of label created for -mrelocatable, to call to so we can
201 get the address of the GOT section */
202int rs6000_pic_labelno;
c81bebd7 203
b91da81f 204#ifdef USING_ELFOS_H
c81bebd7 205/* Which abi to adhere to */
9739c90c 206const char *rs6000_abi_name;
d9407988
MM
207
208/* Semantics of the small data area */
209enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
210
211/* Which small data model to use */
815cdc52 212const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
213
214/* Counter for labels which are to be placed in .fixup. */
215int fixuplabelno = 0;
874a0744 216#endif
4697a36c 217
c4501e62
JJ
218/* Bit size of immediate TLS offsets and string from which it is decoded. */
219int rs6000_tls_size = 32;
220const char *rs6000_tls_size_string;
221
b6c9286a
MM
222/* ABI enumeration available for subtarget to use. */
223enum rs6000_abi rs6000_current_abi;
224
85b776df
AM
225/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
226int dot_symbols;
227
38c1f2d7 228/* Debug flags */
815cdc52 229const char *rs6000_debug_name;
38c1f2d7
MM
230int rs6000_debug_stack; /* debug stack applications */
231int rs6000_debug_arg; /* debug argument handling */
232
aabcd309 233/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
234bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
235
58646b77
PB
236/* Built in types. */
237
238tree rs6000_builtin_types[RS6000_BTI_MAX];
239tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 240
57ac7be9
AM
241const char *rs6000_traceback_name;
242static enum {
243 traceback_default = 0,
244 traceback_none,
245 traceback_part,
246 traceback_full
247} rs6000_traceback;
248
38c1f2d7
MM
249/* Flag to say the TOC is initialized */
250int toc_initialized;
9ebbca7d 251char toc_label_name[10];
38c1f2d7 252
44cd321e
PS
253/* Cached value of rs6000_variable_issue. This is cached in
254 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
255static short cached_can_issue_more;
256
d6b5193b
RS
257static GTY(()) section *read_only_data_section;
258static GTY(()) section *private_data_section;
259static GTY(()) section *read_only_private_data_section;
260static GTY(()) section *sdata2_section;
261static GTY(()) section *toc_section;
262
a3c9585f
KH
263/* Control alignment for fields within structures. */
264/* String from -malign-XXXXX. */
025d9908
KH
265int rs6000_alignment_flags;
266
78f5898b
AH
267/* True for any options that were explicitly set. */
268struct {
df01da37 269 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 270 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
271 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
272 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
273 bool spe; /* True if -mspe= was used. */
274 bool float_gprs; /* True if -mfloat-gprs= was used. */
275 bool isel; /* True if -misel was used. */
276 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 277 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 278 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
279} rs6000_explicit_options;
280
a3170dc6
AH
281struct builtin_description
282{
283 /* mask is not const because we're going to alter it below. This
284 nonsense will go away when we rewrite the -march infrastructure
285 to give us more target flag bits. */
286 unsigned int mask;
287 const enum insn_code icode;
288 const char *const name;
289 const enum rs6000_builtins code;
290};
8b897cfa
RS
291\f
292/* Target cpu costs. */
293
294struct processor_costs {
c4ad648e 295 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
296 const int mulsi_const; /* cost of SImode multiplication by constant. */
297 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
298 const int muldi; /* cost of DImode multiplication. */
299 const int divsi; /* cost of SImode division. */
300 const int divdi; /* cost of DImode division. */
301 const int fp; /* cost of simple SFmode and DFmode insns. */
302 const int dmul; /* cost of DFmode multiplication (and fmadd). */
303 const int sdiv; /* cost of SFmode division (fdivs). */
304 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
305 const int cache_line_size; /* cache line size in bytes. */
306 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
307 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
308 const int simultaneous_prefetches; /* number of parallel prefetch
309 operations. */
8b897cfa
RS
310};
311
312const struct processor_costs *rs6000_cost;
313
314/* Processor costs (relative to an add) */
315
316/* Instruction size costs on 32bit processors. */
317static const
318struct processor_costs size32_cost = {
06a67bdd
RS
319 COSTS_N_INSNS (1), /* mulsi */
320 COSTS_N_INSNS (1), /* mulsi_const */
321 COSTS_N_INSNS (1), /* mulsi_const9 */
322 COSTS_N_INSNS (1), /* muldi */
323 COSTS_N_INSNS (1), /* divsi */
324 COSTS_N_INSNS (1), /* divdi */
325 COSTS_N_INSNS (1), /* fp */
326 COSTS_N_INSNS (1), /* dmul */
327 COSTS_N_INSNS (1), /* sdiv */
328 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
329 32,
330 0,
331 0,
5f732aba 332 0,
8b897cfa
RS
333};
334
335/* Instruction size costs on 64bit processors. */
336static const
337struct processor_costs size64_cost = {
06a67bdd
RS
338 COSTS_N_INSNS (1), /* mulsi */
339 COSTS_N_INSNS (1), /* mulsi_const */
340 COSTS_N_INSNS (1), /* mulsi_const9 */
341 COSTS_N_INSNS (1), /* muldi */
342 COSTS_N_INSNS (1), /* divsi */
343 COSTS_N_INSNS (1), /* divdi */
344 COSTS_N_INSNS (1), /* fp */
345 COSTS_N_INSNS (1), /* dmul */
346 COSTS_N_INSNS (1), /* sdiv */
347 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
348 128,
349 0,
350 0,
5f732aba 351 0,
8b897cfa
RS
352};
353
354/* Instruction costs on RIOS1 processors. */
355static const
356struct processor_costs rios1_cost = {
06a67bdd
RS
357 COSTS_N_INSNS (5), /* mulsi */
358 COSTS_N_INSNS (4), /* mulsi_const */
359 COSTS_N_INSNS (3), /* mulsi_const9 */
360 COSTS_N_INSNS (5), /* muldi */
361 COSTS_N_INSNS (19), /* divsi */
362 COSTS_N_INSNS (19), /* divdi */
363 COSTS_N_INSNS (2), /* fp */
364 COSTS_N_INSNS (2), /* dmul */
365 COSTS_N_INSNS (19), /* sdiv */
366 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 367 128, /* cache line size */
5f732aba
DE
368 64, /* l1 cache */
369 512, /* l2 cache */
0b11da67 370 0, /* streams */
8b897cfa
RS
371};
372
373/* Instruction costs on RIOS2 processors. */
374static const
375struct processor_costs rios2_cost = {
06a67bdd
RS
376 COSTS_N_INSNS (2), /* mulsi */
377 COSTS_N_INSNS (2), /* mulsi_const */
378 COSTS_N_INSNS (2), /* mulsi_const9 */
379 COSTS_N_INSNS (2), /* muldi */
380 COSTS_N_INSNS (13), /* divsi */
381 COSTS_N_INSNS (13), /* divdi */
382 COSTS_N_INSNS (2), /* fp */
383 COSTS_N_INSNS (2), /* dmul */
384 COSTS_N_INSNS (17), /* sdiv */
385 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 386 256, /* cache line size */
5f732aba
DE
387 256, /* l1 cache */
388 1024, /* l2 cache */
0b11da67 389 0, /* streams */
8b897cfa
RS
390};
391
392/* Instruction costs on RS64A processors. */
393static const
394struct processor_costs rs64a_cost = {
06a67bdd
RS
395 COSTS_N_INSNS (20), /* mulsi */
396 COSTS_N_INSNS (12), /* mulsi_const */
397 COSTS_N_INSNS (8), /* mulsi_const9 */
398 COSTS_N_INSNS (34), /* muldi */
399 COSTS_N_INSNS (65), /* divsi */
400 COSTS_N_INSNS (67), /* divdi */
401 COSTS_N_INSNS (4), /* fp */
402 COSTS_N_INSNS (4), /* dmul */
403 COSTS_N_INSNS (31), /* sdiv */
404 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 405 128, /* cache line size */
5f732aba
DE
406 128, /* l1 cache */
407 2048, /* l2 cache */
0b11da67 408 1, /* streams */
8b897cfa
RS
409};
410
411/* Instruction costs on MPCCORE processors. */
412static const
413struct processor_costs mpccore_cost = {
06a67bdd
RS
414 COSTS_N_INSNS (2), /* mulsi */
415 COSTS_N_INSNS (2), /* mulsi_const */
416 COSTS_N_INSNS (2), /* mulsi_const9 */
417 COSTS_N_INSNS (2), /* muldi */
418 COSTS_N_INSNS (6), /* divsi */
419 COSTS_N_INSNS (6), /* divdi */
420 COSTS_N_INSNS (4), /* fp */
421 COSTS_N_INSNS (5), /* dmul */
422 COSTS_N_INSNS (10), /* sdiv */
423 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 424 32, /* cache line size */
5f732aba
DE
425 4, /* l1 cache */
426 16, /* l2 cache */
0b11da67 427 1, /* streams */
8b897cfa
RS
428};
429
430/* Instruction costs on PPC403 processors. */
431static const
432struct processor_costs ppc403_cost = {
06a67bdd
RS
433 COSTS_N_INSNS (4), /* mulsi */
434 COSTS_N_INSNS (4), /* mulsi_const */
435 COSTS_N_INSNS (4), /* mulsi_const9 */
436 COSTS_N_INSNS (4), /* muldi */
437 COSTS_N_INSNS (33), /* divsi */
438 COSTS_N_INSNS (33), /* divdi */
439 COSTS_N_INSNS (11), /* fp */
440 COSTS_N_INSNS (11), /* dmul */
441 COSTS_N_INSNS (11), /* sdiv */
442 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 443 32, /* cache line size */
5f732aba
DE
444 4, /* l1 cache */
445 16, /* l2 cache */
0b11da67 446 1, /* streams */
8b897cfa
RS
447};
448
449/* Instruction costs on PPC405 processors. */
450static const
451struct processor_costs ppc405_cost = {
06a67bdd
RS
452 COSTS_N_INSNS (5), /* mulsi */
453 COSTS_N_INSNS (4), /* mulsi_const */
454 COSTS_N_INSNS (3), /* mulsi_const9 */
455 COSTS_N_INSNS (5), /* muldi */
456 COSTS_N_INSNS (35), /* divsi */
457 COSTS_N_INSNS (35), /* divdi */
458 COSTS_N_INSNS (11), /* fp */
459 COSTS_N_INSNS (11), /* dmul */
460 COSTS_N_INSNS (11), /* sdiv */
461 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 462 32, /* cache line size */
5f732aba
DE
463 16, /* l1 cache */
464 128, /* l2 cache */
0b11da67 465 1, /* streams */
8b897cfa
RS
466};
467
468/* Instruction costs on PPC440 processors. */
469static const
470struct processor_costs ppc440_cost = {
06a67bdd
RS
471 COSTS_N_INSNS (3), /* mulsi */
472 COSTS_N_INSNS (2), /* mulsi_const */
473 COSTS_N_INSNS (2), /* mulsi_const9 */
474 COSTS_N_INSNS (3), /* muldi */
475 COSTS_N_INSNS (34), /* divsi */
476 COSTS_N_INSNS (34), /* divdi */
477 COSTS_N_INSNS (5), /* fp */
478 COSTS_N_INSNS (5), /* dmul */
479 COSTS_N_INSNS (19), /* sdiv */
480 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 481 32, /* cache line size */
5f732aba
DE
482 32, /* l1 cache */
483 256, /* l2 cache */
0b11da67 484 1, /* streams */
8b897cfa
RS
485};
486
487/* Instruction costs on PPC601 processors. */
488static const
489struct processor_costs ppc601_cost = {
06a67bdd
RS
490 COSTS_N_INSNS (5), /* mulsi */
491 COSTS_N_INSNS (5), /* mulsi_const */
492 COSTS_N_INSNS (5), /* mulsi_const9 */
493 COSTS_N_INSNS (5), /* muldi */
494 COSTS_N_INSNS (36), /* divsi */
495 COSTS_N_INSNS (36), /* divdi */
496 COSTS_N_INSNS (4), /* fp */
497 COSTS_N_INSNS (5), /* dmul */
498 COSTS_N_INSNS (17), /* sdiv */
499 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 500 32, /* cache line size */
5f732aba
DE
501 32, /* l1 cache */
502 256, /* l2 cache */
0b11da67 503 1, /* streams */
8b897cfa
RS
504};
505
506/* Instruction costs on PPC603 processors. */
507static const
508struct processor_costs ppc603_cost = {
06a67bdd
RS
509 COSTS_N_INSNS (5), /* mulsi */
510 COSTS_N_INSNS (3), /* mulsi_const */
511 COSTS_N_INSNS (2), /* mulsi_const9 */
512 COSTS_N_INSNS (5), /* muldi */
513 COSTS_N_INSNS (37), /* divsi */
514 COSTS_N_INSNS (37), /* divdi */
515 COSTS_N_INSNS (3), /* fp */
516 COSTS_N_INSNS (4), /* dmul */
517 COSTS_N_INSNS (18), /* sdiv */
518 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 519 32, /* cache line size */
5f732aba
DE
520 8, /* l1 cache */
521 64, /* l2 cache */
0b11da67 522 1, /* streams */
8b897cfa
RS
523};
524
525/* Instruction costs on PPC604 processors. */
526static const
527struct processor_costs ppc604_cost = {
06a67bdd
RS
528 COSTS_N_INSNS (4), /* mulsi */
529 COSTS_N_INSNS (4), /* mulsi_const */
530 COSTS_N_INSNS (4), /* mulsi_const9 */
531 COSTS_N_INSNS (4), /* muldi */
532 COSTS_N_INSNS (20), /* divsi */
533 COSTS_N_INSNS (20), /* divdi */
534 COSTS_N_INSNS (3), /* fp */
535 COSTS_N_INSNS (3), /* dmul */
536 COSTS_N_INSNS (18), /* sdiv */
537 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 538 32, /* cache line size */
5f732aba
DE
539 16, /* l1 cache */
540 512, /* l2 cache */
0b11da67 541 1, /* streams */
8b897cfa
RS
542};
543
544/* Instruction costs on PPC604e processors. */
545static const
546struct processor_costs ppc604e_cost = {
06a67bdd
RS
547 COSTS_N_INSNS (2), /* mulsi */
548 COSTS_N_INSNS (2), /* mulsi_const */
549 COSTS_N_INSNS (2), /* mulsi_const9 */
550 COSTS_N_INSNS (2), /* muldi */
551 COSTS_N_INSNS (20), /* divsi */
552 COSTS_N_INSNS (20), /* divdi */
553 COSTS_N_INSNS (3), /* fp */
554 COSTS_N_INSNS (3), /* dmul */
555 COSTS_N_INSNS (18), /* sdiv */
556 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 557 32, /* cache line size */
5f732aba
DE
558 32, /* l1 cache */
559 1024, /* l2 cache */
0b11da67 560 1, /* streams */
8b897cfa
RS
561};
562
f0517163 563/* Instruction costs on PPC620 processors. */
8b897cfa
RS
564static const
565struct processor_costs ppc620_cost = {
06a67bdd
RS
566 COSTS_N_INSNS (5), /* mulsi */
567 COSTS_N_INSNS (4), /* mulsi_const */
568 COSTS_N_INSNS (3), /* mulsi_const9 */
569 COSTS_N_INSNS (7), /* muldi */
570 COSTS_N_INSNS (21), /* divsi */
571 COSTS_N_INSNS (37), /* divdi */
572 COSTS_N_INSNS (3), /* fp */
573 COSTS_N_INSNS (3), /* dmul */
574 COSTS_N_INSNS (18), /* sdiv */
575 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 576 128, /* cache line size */
5f732aba
DE
577 32, /* l1 cache */
578 1024, /* l2 cache */
0b11da67 579 1, /* streams */
f0517163
RS
580};
581
582/* Instruction costs on PPC630 processors. */
583static const
584struct processor_costs ppc630_cost = {
06a67bdd
RS
585 COSTS_N_INSNS (5), /* mulsi */
586 COSTS_N_INSNS (4), /* mulsi_const */
587 COSTS_N_INSNS (3), /* mulsi_const9 */
588 COSTS_N_INSNS (7), /* muldi */
589 COSTS_N_INSNS (21), /* divsi */
590 COSTS_N_INSNS (37), /* divdi */
591 COSTS_N_INSNS (3), /* fp */
592 COSTS_N_INSNS (3), /* dmul */
593 COSTS_N_INSNS (17), /* sdiv */
594 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 595 128, /* cache line size */
5f732aba
DE
596 64, /* l1 cache */
597 1024, /* l2 cache */
0b11da67 598 1, /* streams */
8b897cfa
RS
599};
600
d296e02e
AP
601/* Instruction costs on Cell processor. */
602/* COSTS_N_INSNS (1) ~ one add. */
603static const
604struct processor_costs ppccell_cost = {
605 COSTS_N_INSNS (9/2)+2, /* mulsi */
606 COSTS_N_INSNS (6/2), /* mulsi_const */
607 COSTS_N_INSNS (6/2), /* mulsi_const9 */
608 COSTS_N_INSNS (15/2)+2, /* muldi */
609 COSTS_N_INSNS (38/2), /* divsi */
610 COSTS_N_INSNS (70/2), /* divdi */
611 COSTS_N_INSNS (10/2), /* fp */
612 COSTS_N_INSNS (10/2), /* dmul */
613 COSTS_N_INSNS (74/2), /* sdiv */
614 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 615 128, /* cache line size */
5f732aba
DE
616 32, /* l1 cache */
617 512, /* l2 cache */
618 6, /* streams */
d296e02e
AP
619};
620
8b897cfa
RS
621/* Instruction costs on PPC750 and PPC7400 processors. */
622static const
623struct processor_costs ppc750_cost = {
06a67bdd
RS
624 COSTS_N_INSNS (5), /* mulsi */
625 COSTS_N_INSNS (3), /* mulsi_const */
626 COSTS_N_INSNS (2), /* mulsi_const9 */
627 COSTS_N_INSNS (5), /* muldi */
628 COSTS_N_INSNS (17), /* divsi */
629 COSTS_N_INSNS (17), /* divdi */
630 COSTS_N_INSNS (3), /* fp */
631 COSTS_N_INSNS (3), /* dmul */
632 COSTS_N_INSNS (17), /* sdiv */
633 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 634 32, /* cache line size */
5f732aba
DE
635 32, /* l1 cache */
636 512, /* l2 cache */
0b11da67 637 1, /* streams */
8b897cfa
RS
638};
639
640/* Instruction costs on PPC7450 processors. */
641static const
642struct processor_costs ppc7450_cost = {
06a67bdd
RS
643 COSTS_N_INSNS (4), /* mulsi */
644 COSTS_N_INSNS (3), /* mulsi_const */
645 COSTS_N_INSNS (3), /* mulsi_const9 */
646 COSTS_N_INSNS (4), /* muldi */
647 COSTS_N_INSNS (23), /* divsi */
648 COSTS_N_INSNS (23), /* divdi */
649 COSTS_N_INSNS (5), /* fp */
650 COSTS_N_INSNS (5), /* dmul */
651 COSTS_N_INSNS (21), /* sdiv */
652 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 653 32, /* cache line size */
5f732aba
DE
654 32, /* l1 cache */
655 1024, /* l2 cache */
0b11da67 656 1, /* streams */
8b897cfa 657};
a3170dc6 658
8b897cfa
RS
659/* Instruction costs on PPC8540 processors. */
660static const
661struct processor_costs ppc8540_cost = {
06a67bdd
RS
662 COSTS_N_INSNS (4), /* mulsi */
663 COSTS_N_INSNS (4), /* mulsi_const */
664 COSTS_N_INSNS (4), /* mulsi_const9 */
665 COSTS_N_INSNS (4), /* muldi */
666 COSTS_N_INSNS (19), /* divsi */
667 COSTS_N_INSNS (19), /* divdi */
668 COSTS_N_INSNS (4), /* fp */
669 COSTS_N_INSNS (4), /* dmul */
670 COSTS_N_INSNS (29), /* sdiv */
671 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 672 32, /* cache line size */
5f732aba
DE
673 32, /* l1 cache */
674 256, /* l2 cache */
0b11da67 675 1, /* prefetch streams /*/
8b897cfa
RS
676};
677
fa41c305
EW
678/* Instruction costs on E300C2 and E300C3 cores. */
679static const
680struct processor_costs ppce300c2c3_cost = {
681 COSTS_N_INSNS (4), /* mulsi */
682 COSTS_N_INSNS (4), /* mulsi_const */
683 COSTS_N_INSNS (4), /* mulsi_const9 */
684 COSTS_N_INSNS (4), /* muldi */
685 COSTS_N_INSNS (19), /* divsi */
686 COSTS_N_INSNS (19), /* divdi */
687 COSTS_N_INSNS (3), /* fp */
688 COSTS_N_INSNS (4), /* dmul */
689 COSTS_N_INSNS (18), /* sdiv */
690 COSTS_N_INSNS (33), /* ddiv */
642639ce 691 32,
a19b7d46
EW
692 16, /* l1 cache */
693 16, /* l2 cache */
642639ce 694 1, /* prefetch streams /*/
fa41c305
EW
695};
696
edae5fe3
DE
697/* Instruction costs on PPCE500MC processors. */
698static const
699struct processor_costs ppce500mc_cost = {
700 COSTS_N_INSNS (4), /* mulsi */
701 COSTS_N_INSNS (4), /* mulsi_const */
702 COSTS_N_INSNS (4), /* mulsi_const9 */
703 COSTS_N_INSNS (4), /* muldi */
704 COSTS_N_INSNS (14), /* divsi */
705 COSTS_N_INSNS (14), /* divdi */
706 COSTS_N_INSNS (8), /* fp */
707 COSTS_N_INSNS (10), /* dmul */
708 COSTS_N_INSNS (36), /* sdiv */
709 COSTS_N_INSNS (66), /* ddiv */
710 64, /* cache line size */
711 32, /* l1 cache */
712 128, /* l2 cache */
713 1, /* prefetch streams /*/
714};
715
8b897cfa
RS
716/* Instruction costs on POWER4 and POWER5 processors. */
717static const
718struct processor_costs power4_cost = {
06a67bdd
RS
719 COSTS_N_INSNS (3), /* mulsi */
720 COSTS_N_INSNS (2), /* mulsi_const */
721 COSTS_N_INSNS (2), /* mulsi_const9 */
722 COSTS_N_INSNS (4), /* muldi */
723 COSTS_N_INSNS (18), /* divsi */
724 COSTS_N_INSNS (34), /* divdi */
725 COSTS_N_INSNS (3), /* fp */
726 COSTS_N_INSNS (3), /* dmul */
727 COSTS_N_INSNS (17), /* sdiv */
728 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 729 128, /* cache line size */
5f732aba
DE
730 32, /* l1 cache */
731 1024, /* l2 cache */
0b11da67 732 8, /* prefetch streams /*/
8b897cfa
RS
733};
734
44cd321e
PS
735/* Instruction costs on POWER6 processors. */
736static const
737struct processor_costs power6_cost = {
738 COSTS_N_INSNS (8), /* mulsi */
739 COSTS_N_INSNS (8), /* mulsi_const */
740 COSTS_N_INSNS (8), /* mulsi_const9 */
741 COSTS_N_INSNS (8), /* muldi */
742 COSTS_N_INSNS (22), /* divsi */
743 COSTS_N_INSNS (28), /* divdi */
744 COSTS_N_INSNS (3), /* fp */
745 COSTS_N_INSNS (3), /* dmul */
746 COSTS_N_INSNS (13), /* sdiv */
747 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 748 128, /* cache line size */
5f732aba
DE
749 64, /* l1 cache */
750 2048, /* l2 cache */
0b11da67 751 16, /* prefetch streams */
44cd321e
PS
752};
753
8b897cfa 754\f
a2369ed3 755static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 756static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 757static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
758static void rs6000_emit_stack_tie (void);
759static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 760static bool spe_func_has_64bit_regs_p (void);
b20a9cca 761static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 762 int, HOST_WIDE_INT);
a2369ed3 763static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
f78c3290 764static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
a2369ed3
DJ
765static unsigned rs6000_hash_constant (rtx);
766static unsigned toc_hash_function (const void *);
767static int toc_hash_eq (const void *, const void *);
768static int constant_pool_expr_1 (rtx, int *, int *);
769static bool constant_pool_expr_p (rtx);
d04b6e6e 770static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
771static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
772static struct machine_function * rs6000_init_machine_status (void);
773static bool rs6000_assemble_integer (rtx, unsigned int, int);
f78c3290 774static bool no_global_regs_above (int, bool);
5add3202 775#ifdef HAVE_GAS_HIDDEN
a2369ed3 776static void rs6000_assemble_visibility (tree, int);
5add3202 777#endif
a2369ed3
DJ
778static int rs6000_ra_ever_killed (void);
779static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 780static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 781static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 782static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 783static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 784static const char *rs6000_mangle_type (const_tree);
b86fe7b4 785extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 786static void rs6000_set_default_type_attributes (tree);
f78c3290
NF
787static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
788static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
789static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
790 enum machine_mode, bool, bool, bool);
52ff33d0 791static bool rs6000_reg_live_or_pic_offset_p (int);
f78c3290
NF
792static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
793static void rs6000_restore_saved_cr (rtx, int);
a2369ed3
DJ
794static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
795static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
796static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
797 tree);
a2369ed3 798static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 799static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 800static void rs6000_file_start (void);
7c262518 801#if TARGET_ELF
9b580a0b 802static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
803static void rs6000_elf_asm_out_constructor (rtx, int);
804static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 805static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 806static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
807static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
808 unsigned HOST_WIDE_INT);
a56d7372 809static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 810 ATTRIBUTE_UNUSED;
7c262518 811#endif
3101faab 812static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
813static void rs6000_alloc_sdmode_stack_slot (void);
814static void rs6000_instantiate_decls (void);
cbaaba19 815#if TARGET_XCOFF
0d5817b2 816static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 817static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 818static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 819static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 820static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 821static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 822 unsigned HOST_WIDE_INT);
d6b5193b
RS
823static void rs6000_xcoff_unique_section (tree, int);
824static section *rs6000_xcoff_select_rtx_section
825 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
826static const char * rs6000_xcoff_strip_name_encoding (const char *);
827static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
828static void rs6000_xcoff_file_start (void);
829static void rs6000_xcoff_file_end (void);
f1384257 830#endif
a2369ed3
DJ
831static int rs6000_variable_issue (FILE *, int, rtx, int);
832static bool rs6000_rtx_costs (rtx, int, int, int *);
833static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 834static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 835static bool is_microcoded_insn (rtx);
d296e02e 836static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
837static bool is_cracked_insn (rtx);
838static bool is_branch_slot_insn (rtx);
44cd321e 839static bool is_load_insn (rtx);
e3a0e200 840static rtx get_store_dest (rtx pat);
44cd321e
PS
841static bool is_store_insn (rtx);
842static bool set_to_load_agen (rtx,rtx);
982afe02 843static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
844static int rs6000_adjust_priority (rtx, int);
845static int rs6000_issue_rate (void);
b198261f 846static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
847static rtx get_next_active_insn (rtx, rtx);
848static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
849static bool insn_must_be_first_in_group (rtx);
850static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
851static bool is_costly_group (rtx *, rtx);
852static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
853static int redefine_groups (FILE *, int, rtx, rtx);
854static int pad_groups (FILE *, int, rtx, rtx);
855static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
856static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
857static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 858static int rs6000_use_sched_lookahead (void);
d296e02e 859static int rs6000_use_sched_lookahead_guard (rtx);
9c78b944 860static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 861static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
862static tree rs6000_builtin_mul_widen_even (tree);
863static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 864static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 865
58646b77 866static void def_builtin (int, const char *, tree, int);
3101faab 867static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
868static void rs6000_init_builtins (void);
869static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
870static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
871static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
872static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
873static void altivec_init_builtins (void);
874static void rs6000_common_init_builtins (void);
c15c90bb 875static void rs6000_init_libfuncs (void);
a2369ed3 876
96038623
DE
877static void paired_init_builtins (void);
878static rtx paired_expand_builtin (tree, rtx, bool *);
879static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
880static rtx paired_expand_stv_builtin (enum insn_code, tree);
881static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
882
b20a9cca
AM
883static void enable_mask_for_builtins (struct builtin_description *, int,
884 enum rs6000_builtins,
885 enum rs6000_builtins);
7c62e993 886static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
887static void spe_init_builtins (void);
888static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 889static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
890static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
891static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
892static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
893static rs6000_stack_t *rs6000_stack_info (void);
894static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
895
896static rtx altivec_expand_builtin (tree, rtx, bool *);
897static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
898static rtx altivec_expand_st_builtin (tree, rtx, bool *);
899static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
900static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 901static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 902 const char *, tree, rtx);
b4a62fa0 903static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 904static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
905static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
906static rtx altivec_expand_vec_set_builtin (tree);
907static rtx altivec_expand_vec_ext_builtin (tree, rtx);
908static int get_element_number (tree, tree);
78f5898b 909static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 910static void rs6000_parse_tls_size_option (void);
5da702b1 911static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
912static int first_altivec_reg_to_save (void);
913static unsigned int compute_vrsave_mask (void);
9390387d 914static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
915static void is_altivec_return_reg (rtx, void *);
916static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
917int easy_vector_constant (rtx, enum machine_mode);
3101faab 918static bool rs6000_is_opaque_type (const_tree);
a2369ed3 919static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 920static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 921static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 922static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
923static rtx rs6000_tls_get_addr (void);
924static rtx rs6000_got_sym (void);
9390387d 925static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
926static const char *rs6000_get_some_local_dynamic_name (void);
927static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 928static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 929static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 930 enum machine_mode, tree);
0b5383eb
DJ
931static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
932 HOST_WIDE_INT);
933static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
934 tree, HOST_WIDE_INT);
935static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
936 HOST_WIDE_INT,
937 rtx[], int *);
938static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
586de218
KG
939 const_tree, HOST_WIDE_INT,
940 rtx[], int *);
941static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 942static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 943static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
944static void setup_incoming_varargs (CUMULATIVE_ARGS *,
945 enum machine_mode, tree,
946 int *, int);
8cd5a4e0 947static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 948 const_tree, bool);
78a52f11
RH
949static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
950 tree, bool);
3101faab 951static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
952#if TARGET_MACHO
953static void macho_branch_islands (void);
efdba735
SH
954static int no_previous_def (tree function_name);
955static tree get_prev_label (tree function_name);
c4e18b1c 956static void rs6000_darwin_file_start (void);
efdba735
SH
957#endif
958
c35d187f 959static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 960static void rs6000_va_start (tree, rtx);
23a60a04 961static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
586de218 962static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 963static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 964static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 965static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 966 enum machine_mode);
94ff898d 967static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
968 enum machine_mode);
969static int get_vsel_insn (enum machine_mode);
970static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 971static tree rs6000_stack_protect_fail (void);
21213b4c
DP
972
973const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
974static enum machine_mode rs6000_eh_return_filter_mode (void);
975
17211ab5
GK
976/* Hash table stuff for keeping track of TOC entries. */
977
978struct toc_hash_struct GTY(())
979{
980 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
981 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
982 rtx key;
983 enum machine_mode key_mode;
984 int labelno;
985};
986
987static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
988\f
989/* Default register names. */
990char rs6000_reg_names[][8] =
991{
802a0058
MM
992 "0", "1", "2", "3", "4", "5", "6", "7",
993 "8", "9", "10", "11", "12", "13", "14", "15",
994 "16", "17", "18", "19", "20", "21", "22", "23",
995 "24", "25", "26", "27", "28", "29", "30", "31",
996 "0", "1", "2", "3", "4", "5", "6", "7",
997 "8", "9", "10", "11", "12", "13", "14", "15",
998 "16", "17", "18", "19", "20", "21", "22", "23",
999 "24", "25", "26", "27", "28", "29", "30", "31",
1000 "mq", "lr", "ctr","ap",
1001 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
1002 "xer",
1003 /* AltiVec registers. */
0cd5e3a1
AH
1004 "0", "1", "2", "3", "4", "5", "6", "7",
1005 "8", "9", "10", "11", "12", "13", "14", "15",
1006 "16", "17", "18", "19", "20", "21", "22", "23",
1007 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
1008 "vrsave", "vscr",
1009 /* SPE registers. */
7d5175e1
JJ
1010 "spe_acc", "spefscr",
1011 /* Soft frame pointer. */
1012 "sfp"
c81bebd7
MM
1013};
1014
1015#ifdef TARGET_REGNAMES
8b60264b 1016static const char alt_reg_names[][8] =
c81bebd7 1017{
802a0058
MM
1018 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
1019 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
1020 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
1021 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
1022 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
1023 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
1024 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
1025 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
1026 "mq", "lr", "ctr", "ap",
1027 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 1028 "xer",
59a4c851 1029 /* AltiVec registers. */
0ac081f6 1030 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1031 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1032 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1033 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1034 "vrsave", "vscr",
1035 /* SPE registers. */
7d5175e1
JJ
1036 "spe_acc", "spefscr",
1037 /* Soft frame pointer. */
1038 "sfp"
c81bebd7
MM
1039};
1040#endif
9878760c 1041\f
daf11973
MM
1042#ifndef MASK_STRICT_ALIGN
1043#define MASK_STRICT_ALIGN 0
1044#endif
ffcfcb5f
AM
1045#ifndef TARGET_PROFILE_KERNEL
1046#define TARGET_PROFILE_KERNEL 0
1047#endif
3961e8fe
RH
1048
1049/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1050#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
1051\f
1052/* Initialize the GCC target structure. */
91d231cb
JM
1053#undef TARGET_ATTRIBUTE_TABLE
1054#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1055#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1056#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1057
301d03af
RS
1058#undef TARGET_ASM_ALIGNED_DI_OP
1059#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1060
1061/* Default unaligned ops are only provided for ELF. Find the ops needed
1062 for non-ELF systems. */
1063#ifndef OBJECT_FORMAT_ELF
cbaaba19 1064#if TARGET_XCOFF
ae6c1efd 1065/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1066 64-bit targets. */
1067#undef TARGET_ASM_UNALIGNED_HI_OP
1068#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1069#undef TARGET_ASM_UNALIGNED_SI_OP
1070#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1071#undef TARGET_ASM_UNALIGNED_DI_OP
1072#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1073#else
1074/* For Darwin. */
1075#undef TARGET_ASM_UNALIGNED_HI_OP
1076#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1077#undef TARGET_ASM_UNALIGNED_SI_OP
1078#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1079#undef TARGET_ASM_UNALIGNED_DI_OP
1080#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1081#undef TARGET_ASM_ALIGNED_DI_OP
1082#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1083#endif
1084#endif
1085
1086/* This hook deals with fixups for relocatable code and DI-mode objects
1087 in 64-bit code. */
1088#undef TARGET_ASM_INTEGER
1089#define TARGET_ASM_INTEGER rs6000_assemble_integer
1090
93638d7a
AM
1091#ifdef HAVE_GAS_HIDDEN
1092#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1093#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1094#endif
1095
c4501e62
JJ
1096#undef TARGET_HAVE_TLS
1097#define TARGET_HAVE_TLS HAVE_AS_TLS
1098
1099#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1100#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1101
08c148a8
NB
1102#undef TARGET_ASM_FUNCTION_PROLOGUE
1103#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1104#undef TARGET_ASM_FUNCTION_EPILOGUE
1105#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1106
b54cf83a
DE
1107#undef TARGET_SCHED_VARIABLE_ISSUE
1108#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1109
c237e94a
ZW
1110#undef TARGET_SCHED_ISSUE_RATE
1111#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1112#undef TARGET_SCHED_ADJUST_COST
1113#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1114#undef TARGET_SCHED_ADJUST_PRIORITY
1115#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1116#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1117#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1118#undef TARGET_SCHED_INIT
1119#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1120#undef TARGET_SCHED_FINISH
1121#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1122#undef TARGET_SCHED_REORDER
1123#define TARGET_SCHED_REORDER rs6000_sched_reorder
1124#undef TARGET_SCHED_REORDER2
1125#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1126
be12c2b0
VM
1127#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1128#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1129
d296e02e
AP
1130#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1131#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1132
7ccf35ed
DN
1133#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1134#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1135#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1136#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1137#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1138#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1139#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1140#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 1141
5b900a4c
DN
1142#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1143#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1144
0ac081f6
AH
1145#undef TARGET_INIT_BUILTINS
1146#define TARGET_INIT_BUILTINS rs6000_init_builtins
1147
1148#undef TARGET_EXPAND_BUILTIN
1149#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1150
608063c3
JB
1151#undef TARGET_MANGLE_TYPE
1152#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1153
c15c90bb
ZW
1154#undef TARGET_INIT_LIBFUNCS
1155#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1156
f1384257 1157#if TARGET_MACHO
0e5dbd9b 1158#undef TARGET_BINDS_LOCAL_P
31920d83 1159#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1160#endif
0e5dbd9b 1161
77ccdfed
EC
1162#undef TARGET_MS_BITFIELD_LAYOUT_P
1163#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1164
3961e8fe
RH
1165#undef TARGET_ASM_OUTPUT_MI_THUNK
1166#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1167
3961e8fe 1168#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1169#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1170
4977bab6
ZW
1171#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1172#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1173
2e3f0db6
DJ
1174#undef TARGET_INVALID_WITHIN_DOLOOP
1175#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1176
3c50106f
RH
1177#undef TARGET_RTX_COSTS
1178#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1179#undef TARGET_ADDRESS_COST
1180#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1181
c8e4f0e9 1182#undef TARGET_VECTOR_OPAQUE_P
58646b77 1183#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1184
96714395
AH
1185#undef TARGET_DWARF_REGISTER_SPAN
1186#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1187
37ea0b7e
JM
1188#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1189#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1190
c6e8c921
GK
1191/* On rs6000, function arguments are promoted, as are function return
1192 values. */
1193#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1194#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1195#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1196#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1197
c6e8c921
GK
1198#undef TARGET_RETURN_IN_MEMORY
1199#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1200
1201#undef TARGET_SETUP_INCOMING_VARARGS
1202#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1203
1204/* Always strict argument naming on rs6000. */
1205#undef TARGET_STRICT_ARGUMENT_NAMING
1206#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1207#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1208#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1209#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1210#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1211#undef TARGET_MUST_PASS_IN_STACK
1212#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1213#undef TARGET_PASS_BY_REFERENCE
1214#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1215#undef TARGET_ARG_PARTIAL_BYTES
1216#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1217
c35d187f
RH
1218#undef TARGET_BUILD_BUILTIN_VA_LIST
1219#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1220
d7bd8aeb
JJ
1221#undef TARGET_EXPAND_BUILTIN_VA_START
1222#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1223
cd3ce9b4
JM
1224#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1225#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1226
93f90be6
FJ
1227#undef TARGET_EH_RETURN_FILTER_MODE
1228#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1229
00b79d54
BE
1230#undef TARGET_SCALAR_MODE_SUPPORTED_P
1231#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1232
f676971a
EC
1233#undef TARGET_VECTOR_MODE_SUPPORTED_P
1234#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1235
4d3e6fae
FJ
1236#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1237#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1238
78f5898b
AH
1239#undef TARGET_HANDLE_OPTION
1240#define TARGET_HANDLE_OPTION rs6000_handle_option
1241
1242#undef TARGET_DEFAULT_TARGET_FLAGS
1243#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1244 (TARGET_DEFAULT)
78f5898b 1245
3aebbe5f
JJ
1246#undef TARGET_STACK_PROTECT_FAIL
1247#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1248
445cf5eb
JM
1249/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1250 The PowerPC architecture requires only weak consistency among
1251 processors--that is, memory accesses between processors need not be
1252 sequentially consistent and memory accesses among processors can occur
1253 in any order. The ability to order memory accesses weakly provides
1254 opportunities for more efficient use of the system bus. Unless a
1255 dependency exists, the 604e allows read operations to precede store
1256 operations. */
1257#undef TARGET_RELAXED_ORDERING
1258#define TARGET_RELAXED_ORDERING true
1259
fdbe66f2
EB
1260#ifdef HAVE_AS_TLS
1261#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1262#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1263#endif
1264
aacd3885
RS
1265/* Use a 32-bit anchor range. This leads to sequences like:
1266
1267 addis tmp,anchor,high
1268 add dest,tmp,low
1269
1270 where tmp itself acts as an anchor, and can be shared between
1271 accesses to the same 64k page. */
1272#undef TARGET_MIN_ANCHOR_OFFSET
1273#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1274#undef TARGET_MAX_ANCHOR_OFFSET
1275#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1276#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1277#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1278
9c78b944
DE
1279#undef TARGET_BUILTIN_RECIPROCAL
1280#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1281
e41b2a33
PB
1282#undef TARGET_EXPAND_TO_RTL_HOOK
1283#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1284
1285#undef TARGET_INSTANTIATE_DECLS
1286#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1287
f6897b10 1288struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1289\f
0d1fbc8c
AH
1290
1291/* Value is 1 if hard register REGNO can hold a value of machine-mode
1292 MODE. */
1293static int
1294rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1295{
1296 /* The GPRs can hold any mode, but values bigger than one register
1297 cannot go past R31. */
1298 if (INT_REGNO_P (regno))
1299 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1300
a5a97921 1301 /* The float registers can only hold floating modes and DImode.
7393f7f8 1302 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1303 if (FP_REGNO_P (regno))
1304 return
96038623 1305 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1306 && (mode != TDmode || (regno % 2) == 0)
0d1fbc8c
AH
1307 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1308 || (GET_MODE_CLASS (mode) == MODE_INT
96038623
DE
1309 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1310 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1311 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1312
1313 /* The CR register can only hold CC modes. */
1314 if (CR_REGNO_P (regno))
1315 return GET_MODE_CLASS (mode) == MODE_CC;
1316
1317 if (XER_REGNO_P (regno))
1318 return mode == PSImode;
1319
1320 /* AltiVec only in AldyVec registers. */
1321 if (ALTIVEC_REGNO_P (regno))
1322 return ALTIVEC_VECTOR_MODE (mode);
1323
1324 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1325 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1326 return 1;
1327
1328 /* We cannot put TImode anywhere except general register and it must be
1329 able to fit within the register set. */
1330
1331 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1332}
1333
1334/* Initialize rs6000_hard_regno_mode_ok_p table. */
1335static void
1336rs6000_init_hard_regno_mode_ok (void)
1337{
1338 int r, m;
1339
1340 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1341 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1342 if (rs6000_hard_regno_mode_ok (r, m))
1343 rs6000_hard_regno_mode_ok_p[m][r] = true;
1344}
1345
e4cad568
GK
1346#if TARGET_MACHO
1347/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1348
1349static void
1350darwin_rs6000_override_options (void)
1351{
1352 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1353 off. */
1354 rs6000_altivec_abi = 1;
1355 TARGET_ALTIVEC_VRSAVE = 1;
1356 if (DEFAULT_ABI == ABI_DARWIN)
1357 {
1358 if (MACHO_DYNAMIC_NO_PIC_P)
1359 {
1360 if (flag_pic)
1361 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1362 flag_pic = 0;
1363 }
1364 else if (flag_pic == 1)
1365 {
1366 flag_pic = 2;
1367 }
1368 }
1369 if (TARGET_64BIT && ! TARGET_POWERPC64)
1370 {
1371 target_flags |= MASK_POWERPC64;
1372 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1373 }
1374 if (flag_mkernel)
1375 {
1376 rs6000_default_long_calls = 1;
1377 target_flags |= MASK_SOFT_FLOAT;
1378 }
1379
1380 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1381 Altivec. */
1382 if (!flag_mkernel && !flag_apple_kext
1383 && TARGET_64BIT
1384 && ! (target_flags_explicit & MASK_ALTIVEC))
1385 target_flags |= MASK_ALTIVEC;
1386
1387 /* Unless the user (not the configurer) has explicitly overridden
1388 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1389 G4 unless targetting the kernel. */
1390 if (!flag_mkernel
1391 && !flag_apple_kext
1392 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1393 && ! (target_flags_explicit & MASK_ALTIVEC)
1394 && ! rs6000_select[1].string)
1395 {
1396 target_flags |= MASK_ALTIVEC;
1397 }
1398}
1399#endif
1400
c1e55850
GK
1401/* If not otherwise specified by a target, make 'long double' equivalent to
1402 'double'. */
1403
1404#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1405#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1406#endif
1407
5248c961
RK
1408/* Override command line options. Mostly we process the processor
1409 type and sometimes adjust other TARGET_ options. */
1410
1411void
d779d0dc 1412rs6000_override_options (const char *default_cpu)
5248c961 1413{
c4d38ccb 1414 size_t i, j;
8e3f41e7 1415 struct rs6000_cpu_select *ptr;
66188a7e 1416 int set_masks;
5248c961 1417
66188a7e 1418 /* Simplifications for entries below. */
85638c0d 1419
66188a7e
GK
1420 enum {
1421 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1422 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1423 };
85638c0d 1424
66188a7e
GK
1425 /* This table occasionally claims that a processor does not support
1426 a particular feature even though it does, but the feature is slower
1427 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1428 complete description of the processor's support.
66188a7e
GK
1429
1430 Please keep this list in order, and don't forget to update the
1431 documentation in invoke.texi when adding a new processor or
1432 flag. */
5248c961
RK
1433 static struct ptt
1434 {
8b60264b
KG
1435 const char *const name; /* Canonical processor name. */
1436 const enum processor_type processor; /* Processor type enum value. */
1437 const int target_enable; /* Target flags to enable. */
8b60264b 1438 } const processor_target_table[]
66188a7e 1439 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1440 {"403", PROCESSOR_PPC403,
66188a7e 1441 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1442 {"405", PROCESSOR_PPC405,
716019c0
JM
1443 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1444 {"405fp", PROCESSOR_PPC405,
1445 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1446 {"440", PROCESSOR_PPC440,
716019c0
JM
1447 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1448 {"440fp", PROCESSOR_PPC440,
1449 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1450 {"464", PROCESSOR_PPC440,
1451 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1452 {"464fp", PROCESSOR_PPC440,
1453 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1454 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1455 {"601", PROCESSOR_PPC601,
66188a7e
GK
1456 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1457 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1458 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1459 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1460 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1461 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1462 {"620", PROCESSOR_PPC620,
1463 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1464 {"630", PROCESSOR_PPC630,
1465 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1466 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1467 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1468 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1469 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1471 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1472 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1473 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1474 /* 8548 has a dummy entry for now. */
a45bce6e 1475 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1476 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1477 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1478 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1479 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1480 {"970", PROCESSOR_POWER4,
66188a7e 1481 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1482 {"cell", PROCESSOR_CELL,
1483 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1484 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1485 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1486 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1487 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1488 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1489 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1490 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1491 {"power2", PROCESSOR_POWER,
1492 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1493 {"power3", PROCESSOR_PPC630,
1494 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1495 {"power4", PROCESSOR_POWER4,
9a8d7941
DE
1496 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1497 | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1498 {"power5", PROCESSOR_POWER5,
9a8d7941 1499 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1500 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1501 {"power5+", PROCESSOR_POWER5,
9a8d7941 1502 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1503 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1504 {"power6", PROCESSOR_POWER6,
9a8d7941
DE
1505 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1506 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1507 {"power6x", PROCESSOR_POWER6,
9a8d7941
DE
1508 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1509 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP | MASK_MFPGPR},
66188a7e
GK
1510 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1511 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1512 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1513 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1514 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1515 {"rios2", PROCESSOR_RIOS2,
1516 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1517 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1518 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1519 {"rs64", PROCESSOR_RS64A,
1520 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1521 };
5248c961 1522
ca7558fc 1523 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1524
66188a7e
GK
1525 /* Some OSs don't support saving the high part of 64-bit registers on
1526 context switch. Other OSs don't support saving Altivec registers.
1527 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1528 settings; if the user wants either, the user must explicitly specify
1529 them and we won't interfere with the user's specification. */
1530
1531 enum {
1532 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1533 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1534 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1535 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1536 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1537 };
0d1fbc8c
AH
1538
1539 rs6000_init_hard_regno_mode_ok ();
1540
c4ad648e 1541 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1542#ifdef OS_MISSING_POWERPC64
1543 if (OS_MISSING_POWERPC64)
1544 set_masks &= ~MASK_POWERPC64;
1545#endif
1546#ifdef OS_MISSING_ALTIVEC
1547 if (OS_MISSING_ALTIVEC)
1548 set_masks &= ~MASK_ALTIVEC;
1549#endif
1550
768875a8
AM
1551 /* Don't override by the processor default if given explicitly. */
1552 set_masks &= ~target_flags_explicit;
957211c3 1553
a4f6c312 1554 /* Identify the processor type. */
8e3f41e7 1555 rs6000_select[0].string = default_cpu;
3cb999d8 1556 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1557
b6a1cbae 1558 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1559 {
8e3f41e7
MM
1560 ptr = &rs6000_select[i];
1561 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1562 {
8e3f41e7
MM
1563 for (j = 0; j < ptt_size; j++)
1564 if (! strcmp (ptr->string, processor_target_table[j].name))
1565 {
1566 if (ptr->set_tune_p)
1567 rs6000_cpu = processor_target_table[j].processor;
1568
1569 if (ptr->set_arch_p)
1570 {
66188a7e
GK
1571 target_flags &= ~set_masks;
1572 target_flags |= (processor_target_table[j].target_enable
1573 & set_masks);
8e3f41e7
MM
1574 }
1575 break;
1576 }
1577
4406229e 1578 if (j == ptt_size)
8e3f41e7 1579 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1580 }
1581 }
8a61d227 1582
edae5fe3
DE
1583 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1584 && !rs6000_explicit_options.isel)
a3170dc6
AH
1585 rs6000_isel = 1;
1586
edae5fe3
DE
1587 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1588 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1589 {
1590 if (TARGET_ALTIVEC)
1591 error ("AltiVec not supported in this target");
1592 if (TARGET_SPE)
1593 error ("Spe not supported in this target");
1594 }
1595
dff9f1b6
DE
1596 /* If we are optimizing big endian systems for space, use the load/store
1597 multiple and string instructions. */
ef792183 1598 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1599 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1600
a4f6c312
SS
1601 /* Don't allow -mmultiple or -mstring on little endian systems
1602 unless the cpu is a 750, because the hardware doesn't support the
1603 instructions used in little endian mode, and causes an alignment
1604 trap. The 750 does not cause an alignment trap (except when the
1605 target is unaligned). */
bef84347 1606
b21fb038 1607 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1608 {
1609 if (TARGET_MULTIPLE)
1610 {
1611 target_flags &= ~MASK_MULTIPLE;
b21fb038 1612 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1613 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1614 }
1615
1616 if (TARGET_STRING)
1617 {
1618 target_flags &= ~MASK_STRING;
b21fb038 1619 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1620 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1621 }
1622 }
3933e0e1 1623
38c1f2d7
MM
1624 /* Set debug flags */
1625 if (rs6000_debug_name)
1626 {
bfc79d3b 1627 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1628 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1629 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1630 rs6000_debug_stack = 1;
bfc79d3b 1631 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1632 rs6000_debug_arg = 1;
1633 else
c725bd79 1634 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1635 }
1636
57ac7be9
AM
1637 if (rs6000_traceback_name)
1638 {
1639 if (! strncmp (rs6000_traceback_name, "full", 4))
1640 rs6000_traceback = traceback_full;
1641 else if (! strncmp (rs6000_traceback_name, "part", 4))
1642 rs6000_traceback = traceback_part;
1643 else if (! strncmp (rs6000_traceback_name, "no", 2))
1644 rs6000_traceback = traceback_none;
1645 else
9e637a26 1646 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1647 rs6000_traceback_name);
1648 }
1649
78f5898b
AH
1650 if (!rs6000_explicit_options.long_double)
1651 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1652
602ea4d3 1653#ifndef POWERPC_LINUX
d3603e8c 1654 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1655 rs6000_ieeequad = 1;
1656#endif
1657
0db747be
DE
1658 /* Enable Altivec ABI for AIX -maltivec. */
1659 if (TARGET_XCOFF && TARGET_ALTIVEC)
1660 rs6000_altivec_abi = 1;
1661
a2db2771
JJ
1662 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1663 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1664 be explicitly overridden in either case. */
1665 if (TARGET_ELF)
6d0ef01e 1666 {
a2db2771
JJ
1667 if (!rs6000_explicit_options.altivec_abi
1668 && (TARGET_64BIT || TARGET_ALTIVEC))
1669 rs6000_altivec_abi = 1;
1670
1671 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1672 if (!rs6000_explicit_options.vrsave)
1673 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1674 }
1675
594a51fe
SS
1676 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1677 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1678 {
1679 rs6000_darwin64_abi = 1;
9c7956fd 1680#if TARGET_MACHO
6ac49599 1681 darwin_one_byte_bool = 1;
9c7956fd 1682#endif
d9168963
SS
1683 /* Default to natural alignment, for better performance. */
1684 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1685 }
1686
194c524a
DE
1687 /* Place FP constants in the constant pool instead of TOC
1688 if section anchors enabled. */
1689 if (flag_section_anchors)
1690 TARGET_NO_FP_IN_TOC = 1;
1691
c4501e62
JJ
1692 /* Handle -mtls-size option. */
1693 rs6000_parse_tls_size_option ();
1694
a7ae18e2
AH
1695#ifdef SUBTARGET_OVERRIDE_OPTIONS
1696 SUBTARGET_OVERRIDE_OPTIONS;
1697#endif
1698#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1699 SUBSUBTARGET_OVERRIDE_OPTIONS;
1700#endif
4d4cbc0e
AH
1701#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1702 SUB3TARGET_OVERRIDE_OPTIONS;
1703#endif
a7ae18e2 1704
edae5fe3 1705 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1706 {
edae5fe3 1707 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1708 MASK_STRING above when optimizing for size. */
1709 if ((target_flags & MASK_STRING) != 0)
1710 target_flags = target_flags & ~MASK_STRING;
1711 }
1712 else if (rs6000_select[1].string != NULL)
1713 {
1714 /* For the powerpc-eabispe configuration, we set all these by
1715 default, so let's unset them if we manually set another
1716 CPU that is not the E500. */
a2db2771 1717 if (!rs6000_explicit_options.spe_abi)
5da702b1 1718 rs6000_spe_abi = 0;
78f5898b 1719 if (!rs6000_explicit_options.spe)
5da702b1 1720 rs6000_spe = 0;
78f5898b 1721 if (!rs6000_explicit_options.float_gprs)
5da702b1 1722 rs6000_float_gprs = 0;
78f5898b 1723 if (!rs6000_explicit_options.isel)
5da702b1
AH
1724 rs6000_isel = 0;
1725 }
b5044283 1726
eca0d5e8
JM
1727 /* Detect invalid option combinations with E500. */
1728 CHECK_E500_OPTIONS;
1729
ec507f2d 1730 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1731 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1732 && rs6000_cpu != PROCESSOR_POWER6
1733 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1734 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1735 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1736 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1737 || rs6000_cpu == PROCESSOR_POWER5
1738 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1739
ec507f2d
DE
1740 rs6000_sched_restricted_insns_priority
1741 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1742
569fa502 1743 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1744 rs6000_sched_costly_dep
1745 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1746
569fa502
DN
1747 if (rs6000_sched_costly_dep_str)
1748 {
f676971a 1749 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1750 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1751 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1752 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1753 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1754 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1755 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1756 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1757 else
c4ad648e 1758 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1759 }
1760
1761 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1762 rs6000_sched_insert_nops
1763 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1764
cbe26ab8
DN
1765 if (rs6000_sched_insert_nops_str)
1766 {
1767 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1768 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1769 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1770 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1771 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1772 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1773 else
c4ad648e 1774 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1775 }
1776
c81bebd7 1777#ifdef TARGET_REGNAMES
a4f6c312
SS
1778 /* If the user desires alternate register names, copy in the
1779 alternate names now. */
c81bebd7 1780 if (TARGET_REGNAMES)
4e135bdd 1781 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1782#endif
1783
df01da37 1784 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1785 If -maix-struct-return or -msvr4-struct-return was explicitly
1786 used, don't override with the ABI default. */
df01da37
DE
1787 if (!rs6000_explicit_options.aix_struct_ret)
1788 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1789
602ea4d3 1790 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1791 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1792
f676971a 1793 if (TARGET_TOC)
9ebbca7d 1794 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1795
301d03af
RS
1796 /* We can only guarantee the availability of DI pseudo-ops when
1797 assembling for 64-bit targets. */
ae6c1efd 1798 if (!TARGET_64BIT)
301d03af
RS
1799 {
1800 targetm.asm_out.aligned_op.di = NULL;
1801 targetm.asm_out.unaligned_op.di = NULL;
1802 }
1803
1494c534
DE
1804 /* Set branch target alignment, if not optimizing for size. */
1805 if (!optimize_size)
1806 {
d296e02e
AP
1807 /* Cell wants to be aligned 8byte for dual issue. */
1808 if (rs6000_cpu == PROCESSOR_CELL)
1809 {
1810 if (align_functions <= 0)
1811 align_functions = 8;
1812 if (align_jumps <= 0)
1813 align_jumps = 8;
1814 if (align_loops <= 0)
1815 align_loops = 8;
1816 }
44cd321e 1817 if (rs6000_align_branch_targets)
1494c534
DE
1818 {
1819 if (align_functions <= 0)
1820 align_functions = 16;
1821 if (align_jumps <= 0)
1822 align_jumps = 16;
1823 if (align_loops <= 0)
1824 align_loops = 16;
1825 }
1826 if (align_jumps_max_skip <= 0)
1827 align_jumps_max_skip = 15;
1828 if (align_loops_max_skip <= 0)
1829 align_loops_max_skip = 15;
1830 }
2792d578 1831
71f123ca
FS
1832 /* Arrange to save and restore machine status around nested functions. */
1833 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1834
1835 /* We should always be splitting complex arguments, but we can't break
1836 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1837 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1838 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1839
1840 /* Initialize rs6000_cost with the appropriate target costs. */
1841 if (optimize_size)
1842 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1843 else
1844 switch (rs6000_cpu)
1845 {
1846 case PROCESSOR_RIOS1:
1847 rs6000_cost = &rios1_cost;
1848 break;
1849
1850 case PROCESSOR_RIOS2:
1851 rs6000_cost = &rios2_cost;
1852 break;
1853
1854 case PROCESSOR_RS64A:
1855 rs6000_cost = &rs64a_cost;
1856 break;
1857
1858 case PROCESSOR_MPCCORE:
1859 rs6000_cost = &mpccore_cost;
1860 break;
1861
1862 case PROCESSOR_PPC403:
1863 rs6000_cost = &ppc403_cost;
1864 break;
1865
1866 case PROCESSOR_PPC405:
1867 rs6000_cost = &ppc405_cost;
1868 break;
1869
1870 case PROCESSOR_PPC440:
1871 rs6000_cost = &ppc440_cost;
1872 break;
1873
1874 case PROCESSOR_PPC601:
1875 rs6000_cost = &ppc601_cost;
1876 break;
1877
1878 case PROCESSOR_PPC603:
1879 rs6000_cost = &ppc603_cost;
1880 break;
1881
1882 case PROCESSOR_PPC604:
1883 rs6000_cost = &ppc604_cost;
1884 break;
1885
1886 case PROCESSOR_PPC604e:
1887 rs6000_cost = &ppc604e_cost;
1888 break;
1889
1890 case PROCESSOR_PPC620:
8b897cfa
RS
1891 rs6000_cost = &ppc620_cost;
1892 break;
1893
f0517163
RS
1894 case PROCESSOR_PPC630:
1895 rs6000_cost = &ppc630_cost;
1896 break;
1897
982afe02 1898 case PROCESSOR_CELL:
d296e02e
AP
1899 rs6000_cost = &ppccell_cost;
1900 break;
1901
8b897cfa
RS
1902 case PROCESSOR_PPC750:
1903 case PROCESSOR_PPC7400:
1904 rs6000_cost = &ppc750_cost;
1905 break;
1906
1907 case PROCESSOR_PPC7450:
1908 rs6000_cost = &ppc7450_cost;
1909 break;
1910
1911 case PROCESSOR_PPC8540:
1912 rs6000_cost = &ppc8540_cost;
1913 break;
1914
fa41c305
EW
1915 case PROCESSOR_PPCE300C2:
1916 case PROCESSOR_PPCE300C3:
1917 rs6000_cost = &ppce300c2c3_cost;
1918 break;
1919
edae5fe3
DE
1920 case PROCESSOR_PPCE500MC:
1921 rs6000_cost = &ppce500mc_cost;
1922 break;
1923
8b897cfa
RS
1924 case PROCESSOR_POWER4:
1925 case PROCESSOR_POWER5:
1926 rs6000_cost = &power4_cost;
1927 break;
1928
44cd321e
PS
1929 case PROCESSOR_POWER6:
1930 rs6000_cost = &power6_cost;
1931 break;
1932
8b897cfa 1933 default:
37409796 1934 gcc_unreachable ();
8b897cfa 1935 }
0b11da67
DE
1936
1937 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1938 set_param_value ("simultaneous-prefetches",
1939 rs6000_cost->simultaneous_prefetches);
1940 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1941 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1942 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1943 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1944 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1945 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1946
1947 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1948 can be optimized to ap = __builtin_next_arg (0). */
1949 if (DEFAULT_ABI != ABI_V4)
1950 targetm.expand_builtin_va_start = NULL;
5248c961 1951}
5accd822 1952
7ccf35ed
DN
1953/* Implement targetm.vectorize.builtin_mask_for_load. */
1954static tree
1955rs6000_builtin_mask_for_load (void)
1956{
1957 if (TARGET_ALTIVEC)
1958 return altivec_builtin_mask_for_load;
1959 else
1960 return 0;
1961}
1962
f57d17f1
TM
1963/* Implement targetm.vectorize.builtin_conversion. */
1964static tree
1965rs6000_builtin_conversion (enum tree_code code, tree type)
1966{
1967 if (!TARGET_ALTIVEC)
1968 return NULL_TREE;
982afe02 1969
f57d17f1
TM
1970 switch (code)
1971 {
1972 case FLOAT_EXPR:
1973 switch (TYPE_MODE (type))
1974 {
1975 case V4SImode:
982afe02 1976 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1977 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1978 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1979 default:
1980 return NULL_TREE;
1981 }
1982 default:
1983 return NULL_TREE;
1984 }
1985}
1986
89d67cca
DN
1987/* Implement targetm.vectorize.builtin_mul_widen_even. */
1988static tree
1989rs6000_builtin_mul_widen_even (tree type)
1990{
1991 if (!TARGET_ALTIVEC)
1992 return NULL_TREE;
1993
1994 switch (TYPE_MODE (type))
1995 {
1996 case V8HImode:
982afe02 1997 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1998 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1999 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
2000
2001 case V16QImode:
2002 return TYPE_UNSIGNED (type) ?
2003 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
2004 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
2005 default:
2006 return NULL_TREE;
2007 }
2008}
2009
2010/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2011static tree
2012rs6000_builtin_mul_widen_odd (tree type)
2013{
2014 if (!TARGET_ALTIVEC)
2015 return NULL_TREE;
2016
2017 switch (TYPE_MODE (type))
2018 {
2019 case V8HImode:
2020 return TYPE_UNSIGNED (type) ?
2021 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
2022 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
2023
2024 case V16QImode:
2025 return TYPE_UNSIGNED (type) ?
2026 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
2027 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
2028 default:
2029 return NULL_TREE;
2030 }
2031}
2032
5b900a4c
DN
2033
2034/* Return true iff, data reference of TYPE can reach vector alignment (16)
2035 after applying N number of iterations. This routine does not determine
2036 how may iterations are required to reach desired alignment. */
2037
2038static bool
3101faab 2039rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2040{
2041 if (is_packed)
2042 return false;
2043
2044 if (TARGET_32BIT)
2045 {
2046 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2047 return true;
2048
2049 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2050 return true;
2051
2052 return false;
2053 }
2054 else
2055 {
2056 if (TARGET_MACHO)
2057 return false;
2058
2059 /* Assuming that all other types are naturally aligned. CHECKME! */
2060 return true;
2061 }
2062}
2063
5da702b1
AH
2064/* Handle generic options of the form -mfoo=yes/no.
2065 NAME is the option name.
2066 VALUE is the option value.
2067 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2068 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2069static void
5da702b1 2070rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2071{
5da702b1 2072 if (value == 0)
993f19a8 2073 return;
5da702b1
AH
2074 else if (!strcmp (value, "yes"))
2075 *flag = 1;
2076 else if (!strcmp (value, "no"))
2077 *flag = 0;
08b57fb3 2078 else
5da702b1 2079 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2080}
2081
c4501e62
JJ
2082/* Validate and record the size specified with the -mtls-size option. */
2083
2084static void
863d938c 2085rs6000_parse_tls_size_option (void)
c4501e62
JJ
2086{
2087 if (rs6000_tls_size_string == 0)
2088 return;
2089 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2090 rs6000_tls_size = 16;
2091 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2092 rs6000_tls_size = 32;
2093 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2094 rs6000_tls_size = 64;
2095 else
9e637a26 2096 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2097}
2098
5accd822 2099void
a2369ed3 2100optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2101{
2e3f0db6
DJ
2102 if (DEFAULT_ABI == ABI_DARWIN)
2103 /* The Darwin libraries never set errno, so we might as well
2104 avoid calling them when that's the only reason we would. */
2105 flag_errno_math = 0;
59d6560b
DE
2106
2107 /* Double growth factor to counter reduced min jump length. */
2108 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2109
2110 /* Enable section anchors by default.
2111 Skip section anchors for Objective C and Objective C++
2112 until front-ends fixed. */
23f99493 2113 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 2114 flag_section_anchors = 1;
5accd822 2115}
78f5898b
AH
2116
2117/* Implement TARGET_HANDLE_OPTION. */
2118
2119static bool
2120rs6000_handle_option (size_t code, const char *arg, int value)
2121{
2122 switch (code)
2123 {
2124 case OPT_mno_power:
2125 target_flags &= ~(MASK_POWER | MASK_POWER2
2126 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2127 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2128 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2129 break;
2130 case OPT_mno_powerpc:
2131 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2132 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2133 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2134 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2135 break;
2136 case OPT_mfull_toc:
d2894ab5
DE
2137 target_flags &= ~MASK_MINIMAL_TOC;
2138 TARGET_NO_FP_IN_TOC = 0;
2139 TARGET_NO_SUM_IN_TOC = 0;
2140 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2141#ifdef TARGET_USES_SYSV4_OPT
2142 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2143 just the same as -mminimal-toc. */
2144 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2145 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2146#endif
2147 break;
2148
2149#ifdef TARGET_USES_SYSV4_OPT
2150 case OPT_mtoc:
2151 /* Make -mtoc behave like -mminimal-toc. */
2152 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2153 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2154 break;
2155#endif
2156
2157#ifdef TARGET_USES_AIX64_OPT
2158 case OPT_maix64:
2159#else
2160 case OPT_m64:
2161#endif
2c9c9afd
AM
2162 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2163 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2164 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2165 break;
2166
2167#ifdef TARGET_USES_AIX64_OPT
2168 case OPT_maix32:
2169#else
2170 case OPT_m32:
2171#endif
2172 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2173 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2174 break;
2175
2176 case OPT_minsert_sched_nops_:
2177 rs6000_sched_insert_nops_str = arg;
2178 break;
2179
2180 case OPT_mminimal_toc:
2181 if (value == 1)
2182 {
d2894ab5
DE
2183 TARGET_NO_FP_IN_TOC = 0;
2184 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2185 }
2186 break;
2187
2188 case OPT_mpower:
2189 if (value == 1)
c2dba4ab
AH
2190 {
2191 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2192 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2193 }
78f5898b
AH
2194 break;
2195
2196 case OPT_mpower2:
2197 if (value == 1)
c2dba4ab
AH
2198 {
2199 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2200 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2201 }
78f5898b
AH
2202 break;
2203
2204 case OPT_mpowerpc_gpopt:
2205 case OPT_mpowerpc_gfxopt:
2206 if (value == 1)
c2dba4ab
AH
2207 {
2208 target_flags |= MASK_POWERPC;
2209 target_flags_explicit |= MASK_POWERPC;
2210 }
78f5898b
AH
2211 break;
2212
df01da37
DE
2213 case OPT_maix_struct_return:
2214 case OPT_msvr4_struct_return:
2215 rs6000_explicit_options.aix_struct_ret = true;
2216 break;
2217
78f5898b 2218 case OPT_mvrsave_:
a2db2771 2219 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2220 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2221 break;
78f5898b 2222
94f4765c
NF
2223 case OPT_misel:
2224 rs6000_explicit_options.isel = true;
2225 rs6000_isel = value;
2226 break;
2227
78f5898b
AH
2228 case OPT_misel_:
2229 rs6000_explicit_options.isel = true;
2230 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2231 break;
2232
94f4765c
NF
2233 case OPT_mspe:
2234 rs6000_explicit_options.spe = true;
2235 rs6000_spe = value;
2236 break;
2237
78f5898b
AH
2238 case OPT_mspe_:
2239 rs6000_explicit_options.spe = true;
2240 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2241 break;
2242
2243 case OPT_mdebug_:
2244 rs6000_debug_name = arg;
2245 break;
2246
2247#ifdef TARGET_USES_SYSV4_OPT
2248 case OPT_mcall_:
2249 rs6000_abi_name = arg;
2250 break;
2251
2252 case OPT_msdata_:
2253 rs6000_sdata_name = arg;
2254 break;
2255
2256 case OPT_mtls_size_:
2257 rs6000_tls_size_string = arg;
2258 break;
2259
2260 case OPT_mrelocatable:
2261 if (value == 1)
c2dba4ab 2262 {
e0bf274f
AM
2263 target_flags |= MASK_MINIMAL_TOC;
2264 target_flags_explicit |= MASK_MINIMAL_TOC;
2265 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2266 }
78f5898b
AH
2267 break;
2268
2269 case OPT_mrelocatable_lib:
2270 if (value == 1)
c2dba4ab 2271 {
e0bf274f
AM
2272 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2273 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2274 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2275 }
78f5898b 2276 else
c2dba4ab
AH
2277 {
2278 target_flags &= ~MASK_RELOCATABLE;
2279 target_flags_explicit |= MASK_RELOCATABLE;
2280 }
78f5898b
AH
2281 break;
2282#endif
2283
2284 case OPT_mabi_:
78f5898b
AH
2285 if (!strcmp (arg, "altivec"))
2286 {
a2db2771 2287 rs6000_explicit_options.altivec_abi = true;
78f5898b 2288 rs6000_altivec_abi = 1;
a2db2771
JJ
2289
2290 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2291 rs6000_spe_abi = 0;
2292 }
2293 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2294 {
a2db2771 2295 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2296 rs6000_altivec_abi = 0;
2297 }
78f5898b
AH
2298 else if (! strcmp (arg, "spe"))
2299 {
a2db2771 2300 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2301 rs6000_spe_abi = 1;
2302 rs6000_altivec_abi = 0;
2303 if (!TARGET_SPE_ABI)
2304 error ("not configured for ABI: '%s'", arg);
2305 }
2306 else if (! strcmp (arg, "no-spe"))
d3603e8c 2307 {
a2db2771 2308 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2309 rs6000_spe_abi = 0;
2310 }
78f5898b
AH
2311
2312 /* These are here for testing during development only, do not
2313 document in the manual please. */
2314 else if (! strcmp (arg, "d64"))
2315 {
2316 rs6000_darwin64_abi = 1;
2317 warning (0, "Using darwin64 ABI");
2318 }
2319 else if (! strcmp (arg, "d32"))
2320 {
2321 rs6000_darwin64_abi = 0;
2322 warning (0, "Using old darwin ABI");
2323 }
2324
602ea4d3
JJ
2325 else if (! strcmp (arg, "ibmlongdouble"))
2326 {
d3603e8c 2327 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2328 rs6000_ieeequad = 0;
2329 warning (0, "Using IBM extended precision long double");
2330 }
2331 else if (! strcmp (arg, "ieeelongdouble"))
2332 {
d3603e8c 2333 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2334 rs6000_ieeequad = 1;
2335 warning (0, "Using IEEE extended precision long double");
2336 }
2337
78f5898b
AH
2338 else
2339 {
2340 error ("unknown ABI specified: '%s'", arg);
2341 return false;
2342 }
2343 break;
2344
2345 case OPT_mcpu_:
2346 rs6000_select[1].string = arg;
2347 break;
2348
2349 case OPT_mtune_:
2350 rs6000_select[2].string = arg;
2351 break;
2352
2353 case OPT_mtraceback_:
2354 rs6000_traceback_name = arg;
2355 break;
2356
2357 case OPT_mfloat_gprs_:
2358 rs6000_explicit_options.float_gprs = true;
2359 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2360 rs6000_float_gprs = 1;
2361 else if (! strcmp (arg, "double"))
2362 rs6000_float_gprs = 2;
2363 else if (! strcmp (arg, "no"))
2364 rs6000_float_gprs = 0;
2365 else
2366 {
2367 error ("invalid option for -mfloat-gprs: '%s'", arg);
2368 return false;
2369 }
2370 break;
2371
2372 case OPT_mlong_double_:
2373 rs6000_explicit_options.long_double = true;
2374 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2375 if (value != 64 && value != 128)
2376 {
2377 error ("Unknown switch -mlong-double-%s", arg);
2378 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2379 return false;
2380 }
2381 else
2382 rs6000_long_double_type_size = value;
2383 break;
2384
2385 case OPT_msched_costly_dep_:
2386 rs6000_sched_costly_dep_str = arg;
2387 break;
2388
2389 case OPT_malign_:
2390 rs6000_explicit_options.alignment = true;
2391 if (! strcmp (arg, "power"))
2392 {
2393 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2394 some C library functions, so warn about it. The flag may be
2395 useful for performance studies from time to time though, so
2396 don't disable it entirely. */
2397 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2398 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2399 " it is incompatible with the installed C and C++ libraries");
2400 rs6000_alignment_flags = MASK_ALIGN_POWER;
2401 }
2402 else if (! strcmp (arg, "natural"))
2403 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2404 else
2405 {
2406 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2407 return false;
2408 }
2409 break;
2410 }
2411 return true;
2412}
3cfa4909
MM
2413\f
2414/* Do anything needed at the start of the asm file. */
2415
1bc7c5b6 2416static void
863d938c 2417rs6000_file_start (void)
3cfa4909 2418{
c4d38ccb 2419 size_t i;
3cfa4909 2420 char buffer[80];
d330fd93 2421 const char *start = buffer;
3cfa4909 2422 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2423 const char *default_cpu = TARGET_CPU_DEFAULT;
2424 FILE *file = asm_out_file;
2425
2426 default_file_start ();
2427
2428#ifdef TARGET_BI_ARCH
2429 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2430 default_cpu = 0;
2431#endif
3cfa4909
MM
2432
2433 if (flag_verbose_asm)
2434 {
2435 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2436 rs6000_select[0].string = default_cpu;
2437
b6a1cbae 2438 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2439 {
2440 ptr = &rs6000_select[i];
2441 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2442 {
2443 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2444 start = "";
2445 }
2446 }
2447
9c6b4ed9 2448 if (PPC405_ERRATUM77)
b0bfee6e 2449 {
9c6b4ed9 2450 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2451 start = "";
2452 }
b0bfee6e 2453
b91da81f 2454#ifdef USING_ELFOS_H
3cfa4909
MM
2455 switch (rs6000_sdata)
2456 {
2457 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2458 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2459 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2460 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2461 }
2462
2463 if (rs6000_sdata && g_switch_value)
2464 {
307b599c
MK
2465 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2466 g_switch_value);
3cfa4909
MM
2467 start = "";
2468 }
2469#endif
2470
2471 if (*start == '\0')
949ea356 2472 putc ('\n', file);
3cfa4909 2473 }
b723e82f 2474
e51917ae
JM
2475#ifdef HAVE_AS_GNU_ATTRIBUTE
2476 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2477 {
2478 fprintf (file, "\t.gnu_attribute 4, %d\n",
2479 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2480 fprintf (file, "\t.gnu_attribute 8, %d\n",
2481 (TARGET_ALTIVEC_ABI ? 2
2482 : TARGET_SPE_ABI ? 3
2483 : 1));
2484 }
e51917ae
JM
2485#endif
2486
b723e82f
JJ
2487 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2488 {
d6b5193b
RS
2489 switch_to_section (toc_section);
2490 switch_to_section (text_section);
b723e82f 2491 }
3cfa4909 2492}
c4e18b1c 2493
5248c961 2494\f
a0ab749a 2495/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2496
2497int
863d938c 2498direct_return (void)
9878760c 2499{
4697a36c
MM
2500 if (reload_completed)
2501 {
2502 rs6000_stack_t *info = rs6000_stack_info ();
2503
2504 if (info->first_gp_reg_save == 32
2505 && info->first_fp_reg_save == 64
00b960c7 2506 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2507 && ! info->lr_save_p
2508 && ! info->cr_save_p
00b960c7 2509 && info->vrsave_mask == 0
c81fc13e 2510 && ! info->push_p)
4697a36c
MM
2511 return 1;
2512 }
2513
2514 return 0;
9878760c
RK
2515}
2516
4e74d8ec
MM
2517/* Return the number of instructions it takes to form a constant in an
2518 integer register. */
2519
48d72335 2520int
a2369ed3 2521num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2522{
2523 /* signed constant loadable with {cal|addi} */
547b216d 2524 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2525 return 1;
2526
4e74d8ec 2527 /* constant loadable with {cau|addis} */
547b216d
DE
2528 else if ((value & 0xffff) == 0
2529 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2530 return 1;
2531
5f59ecb7 2532#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2533 else if (TARGET_POWERPC64)
4e74d8ec 2534 {
a65c591c
DE
2535 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2536 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2537
a65c591c 2538 if (high == 0 || high == -1)
4e74d8ec
MM
2539 return 2;
2540
a65c591c 2541 high >>= 1;
4e74d8ec 2542
a65c591c 2543 if (low == 0)
4e74d8ec 2544 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2545 else
2546 return (num_insns_constant_wide (high)
e396202a 2547 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2548 }
2549#endif
2550
2551 else
2552 return 2;
2553}
2554
2555int
a2369ed3 2556num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2557{
37409796 2558 HOST_WIDE_INT low, high;
bb8df8a6 2559
37409796 2560 switch (GET_CODE (op))
0d30d435 2561 {
37409796 2562 case CONST_INT:
0d30d435 2563#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2564 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2565 && mask64_operand (op, mode))
c4ad648e 2566 return 2;
0d30d435
DE
2567 else
2568#endif
2569 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2570
37409796 2571 case CONST_DOUBLE:
e41b2a33 2572 if (mode == SFmode || mode == SDmode)
37409796
NS
2573 {
2574 long l;
2575 REAL_VALUE_TYPE rv;
bb8df8a6 2576
37409796 2577 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2578 if (DECIMAL_FLOAT_MODE_P (mode))
2579 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2580 else
2581 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2582 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2583 }
a260abc9 2584
37409796
NS
2585 if (mode == VOIDmode || mode == DImode)
2586 {
2587 high = CONST_DOUBLE_HIGH (op);
2588 low = CONST_DOUBLE_LOW (op);
2589 }
2590 else
2591 {
2592 long l[2];
2593 REAL_VALUE_TYPE rv;
bb8df8a6 2594
37409796 2595 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2596 if (DECIMAL_FLOAT_MODE_P (mode))
2597 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2598 else
2599 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2600 high = l[WORDS_BIG_ENDIAN == 0];
2601 low = l[WORDS_BIG_ENDIAN != 0];
2602 }
47ad8c61 2603
37409796
NS
2604 if (TARGET_32BIT)
2605 return (num_insns_constant_wide (low)
2606 + num_insns_constant_wide (high));
2607 else
2608 {
2609 if ((high == 0 && low >= 0)
2610 || (high == -1 && low < 0))
2611 return num_insns_constant_wide (low);
bb8df8a6 2612
1990cd79 2613 else if (mask64_operand (op, mode))
37409796 2614 return 2;
bb8df8a6 2615
37409796
NS
2616 else if (low == 0)
2617 return num_insns_constant_wide (high) + 1;
bb8df8a6 2618
37409796
NS
2619 else
2620 return (num_insns_constant_wide (high)
2621 + num_insns_constant_wide (low) + 1);
2622 }
bb8df8a6 2623
37409796
NS
2624 default:
2625 gcc_unreachable ();
4e74d8ec 2626 }
4e74d8ec
MM
2627}
2628
0972012c
RS
2629/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2630 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2631 corresponding element of the vector, but for V4SFmode and V2SFmode,
2632 the corresponding "float" is interpreted as an SImode integer. */
2633
2634static HOST_WIDE_INT
2635const_vector_elt_as_int (rtx op, unsigned int elt)
2636{
2637 rtx tmp = CONST_VECTOR_ELT (op, elt);
2638 if (GET_MODE (op) == V4SFmode
2639 || GET_MODE (op) == V2SFmode)
2640 tmp = gen_lowpart (SImode, tmp);
2641 return INTVAL (tmp);
2642}
452a7d36 2643
77ccdfed 2644/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2645 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2646 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2647 all items are set to the same value and contain COPIES replicas of the
2648 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2649 operand and the others are set to the value of the operand's msb. */
2650
2651static bool
2652vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2653{
66180ff3
PB
2654 enum machine_mode mode = GET_MODE (op);
2655 enum machine_mode inner = GET_MODE_INNER (mode);
2656
2657 unsigned i;
2658 unsigned nunits = GET_MODE_NUNITS (mode);
2659 unsigned bitsize = GET_MODE_BITSIZE (inner);
2660 unsigned mask = GET_MODE_MASK (inner);
2661
0972012c 2662 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2663 HOST_WIDE_INT splat_val = val;
2664 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2665
2666 /* Construct the value to be splatted, if possible. If not, return 0. */
2667 for (i = 2; i <= copies; i *= 2)
452a7d36 2668 {
66180ff3
PB
2669 HOST_WIDE_INT small_val;
2670 bitsize /= 2;
2671 small_val = splat_val >> bitsize;
2672 mask >>= bitsize;
2673 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2674 return false;
2675 splat_val = small_val;
2676 }
c4ad648e 2677
66180ff3
PB
2678 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2679 if (EASY_VECTOR_15 (splat_val))
2680 ;
2681
2682 /* Also check if we can splat, and then add the result to itself. Do so if
2683 the value is positive, of if the splat instruction is using OP's mode;
2684 for splat_val < 0, the splat and the add should use the same mode. */
2685 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2686 && (splat_val >= 0 || (step == 1 && copies == 1)))
2687 ;
2688
2689 else
2690 return false;
2691
2692 /* Check if VAL is present in every STEP-th element, and the
2693 other elements are filled with its most significant bit. */
2694 for (i = 0; i < nunits - 1; ++i)
2695 {
2696 HOST_WIDE_INT desired_val;
2697 if (((i + 1) & (step - 1)) == 0)
2698 desired_val = val;
2699 else
2700 desired_val = msb_val;
2701
0972012c 2702 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2703 return false;
452a7d36 2704 }
66180ff3
PB
2705
2706 return true;
452a7d36
HP
2707}
2708
69ef87e2 2709
77ccdfed 2710/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2711 with a vspltisb, vspltish or vspltisw. */
2712
2713bool
2714easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2715{
66180ff3 2716 unsigned step, copies;
d744e06e 2717
66180ff3
PB
2718 if (mode == VOIDmode)
2719 mode = GET_MODE (op);
2720 else if (mode != GET_MODE (op))
2721 return false;
d744e06e 2722
66180ff3
PB
2723 /* Start with a vspltisw. */
2724 step = GET_MODE_NUNITS (mode) / 4;
2725 copies = 1;
2726
2727 if (vspltis_constant (op, step, copies))
2728 return true;
2729
2730 /* Then try with a vspltish. */
2731 if (step == 1)
2732 copies <<= 1;
2733 else
2734 step >>= 1;
2735
2736 if (vspltis_constant (op, step, copies))
2737 return true;
2738
2739 /* And finally a vspltisb. */
2740 if (step == 1)
2741 copies <<= 1;
2742 else
2743 step >>= 1;
2744
2745 if (vspltis_constant (op, step, copies))
2746 return true;
2747
2748 return false;
d744e06e
AH
2749}
2750
66180ff3
PB
2751/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2752 result is OP. Abort if it is not possible. */
d744e06e 2753
f676971a 2754rtx
66180ff3 2755gen_easy_altivec_constant (rtx op)
452a7d36 2756{
66180ff3
PB
2757 enum machine_mode mode = GET_MODE (op);
2758 int nunits = GET_MODE_NUNITS (mode);
2759 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2760 unsigned step = nunits / 4;
2761 unsigned copies = 1;
2762
2763 /* Start with a vspltisw. */
2764 if (vspltis_constant (op, step, copies))
2765 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2766
2767 /* Then try with a vspltish. */
2768 if (step == 1)
2769 copies <<= 1;
2770 else
2771 step >>= 1;
2772
2773 if (vspltis_constant (op, step, copies))
2774 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2775
2776 /* And finally a vspltisb. */
2777 if (step == 1)
2778 copies <<= 1;
2779 else
2780 step >>= 1;
2781
2782 if (vspltis_constant (op, step, copies))
2783 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2784
2785 gcc_unreachable ();
d744e06e
AH
2786}
2787
2788const char *
a2369ed3 2789output_vec_const_move (rtx *operands)
d744e06e
AH
2790{
2791 int cst, cst2;
2792 enum machine_mode mode;
2793 rtx dest, vec;
2794
2795 dest = operands[0];
2796 vec = operands[1];
d744e06e 2797 mode = GET_MODE (dest);
69ef87e2 2798
d744e06e
AH
2799 if (TARGET_ALTIVEC)
2800 {
66180ff3 2801 rtx splat_vec;
d744e06e
AH
2802 if (zero_constant (vec, mode))
2803 return "vxor %0,%0,%0";
37409796 2804
66180ff3
PB
2805 splat_vec = gen_easy_altivec_constant (vec);
2806 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2807 operands[1] = XEXP (splat_vec, 0);
2808 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2809 return "#";
bb8df8a6 2810
66180ff3 2811 switch (GET_MODE (splat_vec))
98ef3137 2812 {
37409796 2813 case V4SImode:
66180ff3 2814 return "vspltisw %0,%1";
c4ad648e 2815
37409796 2816 case V8HImode:
66180ff3 2817 return "vspltish %0,%1";
c4ad648e 2818
37409796 2819 case V16QImode:
66180ff3 2820 return "vspltisb %0,%1";
bb8df8a6 2821
37409796
NS
2822 default:
2823 gcc_unreachable ();
98ef3137 2824 }
69ef87e2
AH
2825 }
2826
37409796 2827 gcc_assert (TARGET_SPE);
bb8df8a6 2828
37409796
NS
2829 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2830 pattern of V1DI, V4HI, and V2SF.
2831
2832 FIXME: We should probably return # and add post reload
2833 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2834 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2835 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2836 operands[1] = CONST_VECTOR_ELT (vec, 0);
2837 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2838 if (cst == cst2)
2839 return "li %0,%1\n\tevmergelo %0,%0,%0";
2840 else
2841 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2842}
2843
f5027409
RE
2844/* Initialize TARGET of vector PAIRED to VALS. */
2845
2846void
2847paired_expand_vector_init (rtx target, rtx vals)
2848{
2849 enum machine_mode mode = GET_MODE (target);
2850 int n_elts = GET_MODE_NUNITS (mode);
2851 int n_var = 0;
2852 rtx x, new, tmp, constant_op, op1, op2;
2853 int i;
2854
2855 for (i = 0; i < n_elts; ++i)
2856 {
2857 x = XVECEXP (vals, 0, i);
2858 if (!CONSTANT_P (x))
2859 ++n_var;
2860 }
2861 if (n_var == 0)
2862 {
2863 /* Load from constant pool. */
2864 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2865 return;
2866 }
2867
2868 if (n_var == 2)
2869 {
2870 /* The vector is initialized only with non-constants. */
2871 new = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
2872 XVECEXP (vals, 0, 1));
2873
2874 emit_move_insn (target, new);
2875 return;
2876 }
2877
2878 /* One field is non-constant and the other one is a constant. Load the
2879 constant from the constant pool and use ps_merge instruction to
2880 construct the whole vector. */
2881 op1 = XVECEXP (vals, 0, 0);
2882 op2 = XVECEXP (vals, 0, 1);
2883
2884 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2885
2886 tmp = gen_reg_rtx (GET_MODE (constant_op));
2887 emit_move_insn (tmp, constant_op);
2888
2889 if (CONSTANT_P (op1))
2890 new = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
2891 else
2892 new = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
2893
2894 emit_move_insn (target, new);
2895}
2896
e2e95f45
RE
2897void
2898paired_expand_vector_move (rtx operands[])
2899{
2900 rtx op0 = operands[0], op1 = operands[1];
2901
2902 emit_move_insn (op0, op1);
2903}
2904
2905/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2906 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
2907 operands for the relation operation COND. This is a recursive
2908 function. */
2909
2910static void
2911paired_emit_vector_compare (enum rtx_code rcode,
2912 rtx dest, rtx op0, rtx op1,
2913 rtx cc_op0, rtx cc_op1)
2914{
2915 rtx tmp = gen_reg_rtx (V2SFmode);
2916 rtx tmp1, max, min, equal_zero;
2917
2918 gcc_assert (TARGET_PAIRED_FLOAT);
2919 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2920
2921 switch (rcode)
2922 {
2923 case LT:
2924 case LTU:
2925 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2926 return;
2927 case GE:
2928 case GEU:
2929 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2930 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
2931 return;
2932 case LE:
2933 case LEU:
2934 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
2935 return;
2936 case GT:
2937 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2938 return;
2939 case EQ:
2940 tmp1 = gen_reg_rtx (V2SFmode);
2941 max = gen_reg_rtx (V2SFmode);
2942 min = gen_reg_rtx (V2SFmode);
2943 equal_zero = gen_reg_rtx (V2SFmode);
2944
2945 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
2946 emit_insn (gen_selv2sf4
2947 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2948 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
2949 emit_insn (gen_selv2sf4
2950 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
2951 emit_insn (gen_subv2sf3 (tmp1, min, max));
2952 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
2953 return;
2954 case NE:
2955 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
2956 return;
2957 case UNLE:
2958 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
2959 return;
2960 case UNLT:
2961 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
2962 return;
2963 case UNGE:
2964 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2965 return;
2966 case UNGT:
2967 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
2968 return;
2969 default:
2970 gcc_unreachable ();
2971 }
2972
2973 return;
2974}
2975
2976/* Emit vector conditional expression.
2977 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
2978 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
2979
2980int
2981paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
2982 rtx cond, rtx cc_op0, rtx cc_op1)
2983{
2984 enum rtx_code rcode = GET_CODE (cond);
2985
2986 if (!TARGET_PAIRED_FLOAT)
2987 return 0;
2988
2989 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
2990
2991 return 1;
2992}
2993
7a4eca66
DE
2994/* Initialize vector TARGET to VALS. */
2995
2996void
2997rs6000_expand_vector_init (rtx target, rtx vals)
2998{
2999 enum machine_mode mode = GET_MODE (target);
3000 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3001 int n_elts = GET_MODE_NUNITS (mode);
3002 int n_var = 0, one_var = -1;
3003 bool all_same = true, all_const_zero = true;
3004 rtx x, mem;
3005 int i;
3006
3007 for (i = 0; i < n_elts; ++i)
3008 {
3009 x = XVECEXP (vals, 0, i);
3010 if (!CONSTANT_P (x))
3011 ++n_var, one_var = i;
3012 else if (x != CONST0_RTX (inner_mode))
3013 all_const_zero = false;
3014
3015 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3016 all_same = false;
3017 }
3018
3019 if (n_var == 0)
3020 {
501fb355 3021 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
7a4eca66
DE
3022 if (mode != V4SFmode && all_const_zero)
3023 {
3024 /* Zero register. */
3025 emit_insn (gen_rtx_SET (VOIDmode, target,
3026 gen_rtx_XOR (mode, target, target)));
3027 return;
3028 }
501fb355 3029 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
7a4eca66
DE
3030 {
3031 /* Splat immediate. */
501fb355 3032 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
7a4eca66
DE
3033 return;
3034 }
3035 else if (all_same)
3036 ; /* Splat vector element. */
3037 else
3038 {
3039 /* Load from constant pool. */
501fb355 3040 emit_move_insn (target, const_vec);
7a4eca66
DE
3041 return;
3042 }
3043 }
3044
3045 /* Store value to stack temp. Load vector element. Splat. */
3046 if (all_same)
3047 {
3048 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3049 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3050 XVECEXP (vals, 0, 0));
3051 x = gen_rtx_UNSPEC (VOIDmode,
3052 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3053 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3054 gen_rtvec (2,
3055 gen_rtx_SET (VOIDmode,
3056 target, mem),
3057 x)));
3058 x = gen_rtx_VEC_SELECT (inner_mode, target,
3059 gen_rtx_PARALLEL (VOIDmode,
3060 gen_rtvec (1, const0_rtx)));
3061 emit_insn (gen_rtx_SET (VOIDmode, target,
3062 gen_rtx_VEC_DUPLICATE (mode, x)));
3063 return;
3064 }
3065
3066 /* One field is non-constant. Load constant then overwrite
3067 varying field. */
3068 if (n_var == 1)
3069 {
3070 rtx copy = copy_rtx (vals);
3071
57b51d4d 3072 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
3073 varying element. */
3074 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3075 rs6000_expand_vector_init (target, copy);
3076
3077 /* Insert variable. */
3078 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3079 return;
3080 }
3081
3082 /* Construct the vector in memory one field at a time
3083 and load the whole vector. */
3084 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3085 for (i = 0; i < n_elts; i++)
3086 emit_move_insn (adjust_address_nv (mem, inner_mode,
3087 i * GET_MODE_SIZE (inner_mode)),
3088 XVECEXP (vals, 0, i));
3089 emit_move_insn (target, mem);
3090}
3091
3092/* Set field ELT of TARGET to VAL. */
3093
3094void
3095rs6000_expand_vector_set (rtx target, rtx val, int elt)
3096{
3097 enum machine_mode mode = GET_MODE (target);
3098 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3099 rtx reg = gen_reg_rtx (mode);
3100 rtx mask, mem, x;
3101 int width = GET_MODE_SIZE (inner_mode);
3102 int i;
3103
3104 /* Load single variable value. */
3105 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3106 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3107 x = gen_rtx_UNSPEC (VOIDmode,
3108 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3109 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3110 gen_rtvec (2,
3111 gen_rtx_SET (VOIDmode,
3112 reg, mem),
3113 x)));
3114
3115 /* Linear sequence. */
3116 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3117 for (i = 0; i < 16; ++i)
3118 XVECEXP (mask, 0, i) = GEN_INT (i);
3119
3120 /* Set permute mask to insert element into target. */
3121 for (i = 0; i < width; ++i)
3122 XVECEXP (mask, 0, elt*width + i)
3123 = GEN_INT (i + 0x10);
3124 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3125 x = gen_rtx_UNSPEC (mode,
3126 gen_rtvec (3, target, reg,
3127 force_reg (V16QImode, x)),
3128 UNSPEC_VPERM);
3129 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3130}
3131
3132/* Extract field ELT from VEC into TARGET. */
3133
3134void
3135rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3136{
3137 enum machine_mode mode = GET_MODE (vec);
3138 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3139 rtx mem, x;
3140
3141 /* Allocate mode-sized buffer. */
3142 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3143
3144 /* Add offset to field within buffer matching vector element. */
3145 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3146
3147 /* Store single field into mode-sized buffer. */
3148 x = gen_rtx_UNSPEC (VOIDmode,
3149 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3150 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3151 gen_rtvec (2,
3152 gen_rtx_SET (VOIDmode,
3153 mem, vec),
3154 x)));
3155 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3156}
3157
0ba1b2ff
AM
3158/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3159 implement ANDing by the mask IN. */
3160void
a2369ed3 3161build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
3162{
3163#if HOST_BITS_PER_WIDE_INT >= 64
3164 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3165 int shift;
3166
37409796 3167 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
3168
3169 c = INTVAL (in);
3170 if (c & 1)
3171 {
3172 /* Assume c initially something like 0x00fff000000fffff. The idea
3173 is to rotate the word so that the middle ^^^^^^ group of zeros
3174 is at the MS end and can be cleared with an rldicl mask. We then
3175 rotate back and clear off the MS ^^ group of zeros with a
3176 second rldicl. */
3177 c = ~c; /* c == 0xff000ffffff00000 */
3178 lsb = c & -c; /* lsb == 0x0000000000100000 */
3179 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3180 c = ~c; /* c == 0x00fff000000fffff */
3181 c &= -lsb; /* c == 0x00fff00000000000 */
3182 lsb = c & -c; /* lsb == 0x0000100000000000 */
3183 c = ~c; /* c == 0xff000fffffffffff */
3184 c &= -lsb; /* c == 0xff00000000000000 */
3185 shift = 0;
3186 while ((lsb >>= 1) != 0)
3187 shift++; /* shift == 44 on exit from loop */
3188 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3189 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3190 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
3191 }
3192 else
0ba1b2ff
AM
3193 {
3194 /* Assume c initially something like 0xff000f0000000000. The idea
3195 is to rotate the word so that the ^^^ middle group of zeros
3196 is at the LS end and can be cleared with an rldicr mask. We then
3197 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3198 a second rldicr. */
3199 lsb = c & -c; /* lsb == 0x0000010000000000 */
3200 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3201 c = ~c; /* c == 0x00fff0ffffffffff */
3202 c &= -lsb; /* c == 0x00fff00000000000 */
3203 lsb = c & -c; /* lsb == 0x0000100000000000 */
3204 c = ~c; /* c == 0xff000fffffffffff */
3205 c &= -lsb; /* c == 0xff00000000000000 */
3206 shift = 0;
3207 while ((lsb >>= 1) != 0)
3208 shift++; /* shift == 44 on exit from loop */
3209 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3210 m1 >>= shift; /* m1 == 0x0000000000000fff */
3211 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3212 }
3213
3214 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3215 masks will be all 1's. We are guaranteed more than one transition. */
3216 out[0] = GEN_INT (64 - shift);
3217 out[1] = GEN_INT (m1);
3218 out[2] = GEN_INT (shift);
3219 out[3] = GEN_INT (m2);
3220#else
045572c7
GK
3221 (void)in;
3222 (void)out;
37409796 3223 gcc_unreachable ();
0ba1b2ff 3224#endif
a260abc9
DE
3225}
3226
54b695e7 3227/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
3228
3229bool
54b695e7
AH
3230invalid_e500_subreg (rtx op, enum machine_mode mode)
3231{
61c76239
JM
3232 if (TARGET_E500_DOUBLE)
3233 {
17caeff2 3234 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3235 subreg:TI and reg:TF. Decimal float modes are like integer
3236 modes (only low part of each register used) for this
3237 purpose. */
61c76239 3238 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3239 && (mode == SImode || mode == DImode || mode == TImode
3240 || mode == DDmode || mode == TDmode)
61c76239 3241 && REG_P (SUBREG_REG (op))
17caeff2 3242 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3243 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3244 return true;
3245
17caeff2
JM
3246 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3247 reg:TI. */
61c76239 3248 if (GET_CODE (op) == SUBREG
4f011e1e 3249 && (mode == DFmode || mode == TFmode)
61c76239 3250 && REG_P (SUBREG_REG (op))
17caeff2 3251 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3252 || GET_MODE (SUBREG_REG (op)) == TImode
3253 || GET_MODE (SUBREG_REG (op)) == DDmode
3254 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3255 return true;
3256 }
54b695e7 3257
61c76239
JM
3258 if (TARGET_SPE
3259 && GET_CODE (op) == SUBREG
3260 && mode == SImode
54b695e7 3261 && REG_P (SUBREG_REG (op))
14502dad 3262 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3263 return true;
3264
3265 return false;
3266}
3267
58182de3 3268/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
3269 field is an FP double while the FP fields remain word aligned. */
3270
19d66194 3271unsigned int
fa5b0972
AM
3272rs6000_special_round_type_align (tree type, unsigned int computed,
3273 unsigned int specified)
95727fb8 3274{
fa5b0972 3275 unsigned int align = MAX (computed, specified);
95727fb8 3276 tree field = TYPE_FIELDS (type);
95727fb8 3277
bb8df8a6 3278 /* Skip all non field decls */
85962ac8 3279 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
3280 field = TREE_CHAIN (field);
3281
fa5b0972
AM
3282 if (field != NULL && field != type)
3283 {
3284 type = TREE_TYPE (field);
3285 while (TREE_CODE (type) == ARRAY_TYPE)
3286 type = TREE_TYPE (type);
3287
3288 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3289 align = MAX (align, 64);
3290 }
95727fb8 3291
fa5b0972 3292 return align;
95727fb8
AP
3293}
3294
58182de3
GK
3295/* Darwin increases record alignment to the natural alignment of
3296 the first field. */
3297
3298unsigned int
3299darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3300 unsigned int specified)
3301{
3302 unsigned int align = MAX (computed, specified);
3303
3304 if (TYPE_PACKED (type))
3305 return align;
3306
3307 /* Find the first field, looking down into aggregates. */
3308 do {
3309 tree field = TYPE_FIELDS (type);
3310 /* Skip all non field decls */
3311 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3312 field = TREE_CHAIN (field);
3313 if (! field)
3314 break;
3315 type = TREE_TYPE (field);
3316 while (TREE_CODE (type) == ARRAY_TYPE)
3317 type = TREE_TYPE (type);
3318 } while (AGGREGATE_TYPE_P (type));
3319
3320 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3321 align = MAX (align, TYPE_ALIGN (type));
3322
3323 return align;
3324}
3325
a4f6c312 3326/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3327
3328int
f676971a 3329small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3330 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3331{
38c1f2d7 3332#if TARGET_ELF
5f59ecb7 3333 rtx sym_ref;
7509c759 3334
d9407988 3335 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3336 return 0;
a54d04b7 3337
f607bc57 3338 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3339 return 0;
3340
2aa42e6e
NF
3341 /* Vector and float memory instructions have a limited offset on the
3342 SPE, so using a vector or float variable directly as an operand is
3343 not useful. */
3344 if (TARGET_SPE
3345 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3346 return 0;
3347
88228c4b
MM
3348 if (GET_CODE (op) == SYMBOL_REF)
3349 sym_ref = op;
3350
3351 else if (GET_CODE (op) != CONST
3352 || GET_CODE (XEXP (op, 0)) != PLUS
3353 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3354 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3355 return 0;
3356
88228c4b 3357 else
dbf55e53
MM
3358 {
3359 rtx sum = XEXP (op, 0);
3360 HOST_WIDE_INT summand;
3361
3362 /* We have to be careful here, because it is the referenced address
c4ad648e 3363 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3364 summand = INTVAL (XEXP (sum, 1));
307b599c 3365 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3366 return 0;
dbf55e53
MM
3367
3368 sym_ref = XEXP (sum, 0);
3369 }
88228c4b 3370
20bfcd69 3371 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3372#else
3373 return 0;
3374#endif
7509c759 3375}
46c07df8 3376
3a1f863f 3377/* Return true if either operand is a general purpose register. */
46c07df8 3378
3a1f863f
DE
3379bool
3380gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3381{
3a1f863f
DE
3382 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3383 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3384}
3385
9ebbca7d 3386\f
4d588c14
RH
3387/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3388
f676971a
EC
3389static int
3390constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3391{
9390387d 3392 switch (GET_CODE (op))
9ebbca7d
GK
3393 {
3394 case SYMBOL_REF:
c4501e62
JJ
3395 if (RS6000_SYMBOL_REF_TLS_P (op))
3396 return 0;
3397 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3398 {
3399 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3400 {
3401 *have_sym = 1;
3402 return 1;
3403 }
3404 else
3405 return 0;
3406 }
3407 else if (! strcmp (XSTR (op, 0), toc_label_name))
3408 {
3409 *have_toc = 1;
3410 return 1;
3411 }
3412 else
3413 return 0;
9ebbca7d
GK
3414 case PLUS:
3415 case MINUS:
c1f11548
DE
3416 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3417 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3418 case CONST:
a4f6c312 3419 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3420 case CONST_INT:
a4f6c312 3421 return 1;
9ebbca7d 3422 default:
a4f6c312 3423 return 0;
9ebbca7d
GK
3424 }
3425}
3426
4d588c14 3427static bool
a2369ed3 3428constant_pool_expr_p (rtx op)
9ebbca7d
GK
3429{
3430 int have_sym = 0;
3431 int have_toc = 0;
3432 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3433}
3434
48d72335 3435bool
a2369ed3 3436toc_relative_expr_p (rtx op)
9ebbca7d 3437{
4d588c14
RH
3438 int have_sym = 0;
3439 int have_toc = 0;
3440 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3441}
3442
4d588c14 3443bool
a2369ed3 3444legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3445{
3446 return (TARGET_TOC
3447 && GET_CODE (x) == PLUS
3448 && GET_CODE (XEXP (x, 0)) == REG
3449 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3450 && constant_pool_expr_p (XEXP (x, 1)));
3451}
3452
d04b6e6e
EB
3453static bool
3454legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3455{
3456 return (DEFAULT_ABI == ABI_V4
3457 && !flag_pic && !TARGET_TOC
3458 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3459 && small_data_operand (x, mode));
3460}
3461
60cdabab
DE
3462/* SPE offset addressing is limited to 5-bits worth of double words. */
3463#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3464
76d2b81d
DJ
3465bool
3466rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3467{
3468 unsigned HOST_WIDE_INT offset, extra;
3469
3470 if (GET_CODE (x) != PLUS)
3471 return false;
3472 if (GET_CODE (XEXP (x, 0)) != REG)
3473 return false;
3474 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3475 return false;
60cdabab
DE
3476 if (legitimate_constant_pool_address_p (x))
3477 return true;
4d588c14
RH
3478 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3479 return false;
3480
3481 offset = INTVAL (XEXP (x, 1));
3482 extra = 0;
3483 switch (mode)
3484 {
3485 case V16QImode:
3486 case V8HImode:
3487 case V4SFmode:
3488 case V4SImode:
7a4eca66 3489 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3490 constant offset zero should not occur due to canonicalization. */
3491 return false;
4d588c14
RH
3492
3493 case V4HImode:
3494 case V2SImode:
3495 case V1DImode:
3496 case V2SFmode:
d42a3bae 3497 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3498 constant offset zero should not occur due to canonicalization. */
d42a3bae 3499 if (TARGET_PAIRED_FLOAT)
1a23970d 3500 return false;
4d588c14
RH
3501 /* SPE vector modes. */
3502 return SPE_CONST_OFFSET_OK (offset);
3503
3504 case DFmode:
4d4cbc0e
AH
3505 if (TARGET_E500_DOUBLE)
3506 return SPE_CONST_OFFSET_OK (offset);
3507
4f011e1e 3508 case DDmode:
4d588c14 3509 case DImode:
54b695e7
AH
3510 /* On e500v2, we may have:
3511
3512 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3513
3514 Which gets addressed with evldd instructions. */
3515 if (TARGET_E500_DOUBLE)
3516 return SPE_CONST_OFFSET_OK (offset);
3517
7393f7f8 3518 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3519 extra = 4;
3520 else if (offset & 3)
3521 return false;
3522 break;
3523
3524 case TFmode:
17caeff2
JM
3525 if (TARGET_E500_DOUBLE)
3526 return (SPE_CONST_OFFSET_OK (offset)
3527 && SPE_CONST_OFFSET_OK (offset + 8));
3528
4f011e1e 3529 case TDmode:
4d588c14 3530 case TImode:
7393f7f8 3531 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3532 extra = 12;
3533 else if (offset & 3)
3534 return false;
3535 else
3536 extra = 8;
3537 break;
3538
3539 default:
3540 break;
3541 }
3542
b1917422
AM
3543 offset += 0x8000;
3544 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3545}
3546
6fb5fa3c 3547bool
a2369ed3 3548legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3549{
3550 rtx op0, op1;
3551
3552 if (GET_CODE (x) != PLUS)
3553 return false;
850e8d3d 3554
4d588c14
RH
3555 op0 = XEXP (x, 0);
3556 op1 = XEXP (x, 1);
3557
bf00cc0f 3558 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3559 replaced with proper base and index regs. */
3560 if (!strict
3561 && reload_in_progress
3562 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3563 && REG_P (op1))
3564 return true;
3565
3566 return (REG_P (op0) && REG_P (op1)
3567 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3568 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3569 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3570 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3571}
3572
48d72335 3573inline bool
a2369ed3 3574legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3575{
3576 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3577}
3578
48d72335 3579bool
4c81e946
FJ
3580macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3581{
c4ad648e 3582 if (!TARGET_MACHO || !flag_pic
9390387d 3583 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3584 return false;
3585 x = XEXP (x, 0);
4c81e946
FJ
3586
3587 if (GET_CODE (x) != LO_SUM)
3588 return false;
3589 if (GET_CODE (XEXP (x, 0)) != REG)
3590 return false;
3591 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3592 return false;
3593 x = XEXP (x, 1);
3594
3595 return CONSTANT_P (x);
3596}
3597
4d588c14 3598static bool
a2369ed3 3599legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3600{
3601 if (GET_CODE (x) != LO_SUM)
3602 return false;
3603 if (GET_CODE (XEXP (x, 0)) != REG)
3604 return false;
3605 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3606 return false;
54b695e7 3607 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3608 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3609 || mode == DDmode || mode == TDmode
17caeff2 3610 || mode == DImode))
f82f556d 3611 return false;
4d588c14
RH
3612 x = XEXP (x, 1);
3613
8622e235 3614 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3615 {
a29077da 3616 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3617 return false;
3618 if (TARGET_TOC)
3619 return false;
3620 if (GET_MODE_NUNITS (mode) != 1)
3621 return false;
5e5f01b9 3622 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3623 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3624 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3625 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3626 return false;
3627
3628 return CONSTANT_P (x);
3629 }
3630
3631 return false;
3632}
3633
3634
9ebbca7d
GK
3635/* Try machine-dependent ways of modifying an illegitimate address
3636 to be legitimate. If we find one, return the new, valid address.
3637 This is used from only one place: `memory_address' in explow.c.
3638
a4f6c312
SS
3639 OLDX is the address as it was before break_out_memory_refs was
3640 called. In some cases it is useful to look at this to decide what
3641 needs to be done.
9ebbca7d 3642
a4f6c312 3643 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3644
a4f6c312
SS
3645 It is always safe for this function to do nothing. It exists to
3646 recognize opportunities to optimize the output.
9ebbca7d
GK
3647
3648 On RS/6000, first check for the sum of a register with a constant
3649 integer that is out of range. If so, generate code to add the
3650 constant with the low-order 16 bits masked to the register and force
3651 this result into another register (this can be done with `cau').
3652 Then generate an address of REG+(CONST&0xffff), allowing for the
3653 possibility of bit 16 being a one.
3654
3655 Then check for the sum of a register and something not constant, try to
3656 load the other things into a register and return the sum. */
4d588c14 3657
9ebbca7d 3658rtx
a2369ed3
DJ
3659rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3660 enum machine_mode mode)
0ac081f6 3661{
c4501e62
JJ
3662 if (GET_CODE (x) == SYMBOL_REF)
3663 {
3664 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3665 if (model != 0)
3666 return rs6000_legitimize_tls_address (x, model);
3667 }
3668
f676971a 3669 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3670 && GET_CODE (XEXP (x, 0)) == REG
3671 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3672 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3673 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3674 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3675 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3676 || mode == DImode || mode == DDmode
3677 || mode == TDmode))))
f676971a 3678 {
9ebbca7d
GK
3679 HOST_WIDE_INT high_int, low_int;
3680 rtx sum;
a65c591c
DE
3681 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3682 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3683 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3684 GEN_INT (high_int)), 0);
3685 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3686 }
f676971a 3687 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3688 && GET_CODE (XEXP (x, 0)) == REG
3689 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3690 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3691 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3692 || TARGET_POWERPC64
efc05e3c 3693 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3694 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3695 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3696 && mode != TImode
3697 && mode != TFmode
3698 && mode != TDmode)
9ebbca7d
GK
3699 {
3700 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3701 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3702 }
0ac081f6
AH
3703 else if (ALTIVEC_VECTOR_MODE (mode))
3704 {
3705 rtx reg;
3706
3707 /* Make sure both operands are registers. */
3708 if (GET_CODE (x) == PLUS)
9f85ed45 3709 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3710 force_reg (Pmode, XEXP (x, 1)));
3711
3712 reg = force_reg (Pmode, x);
3713 return reg;
3714 }
4d4cbc0e 3715 else if (SPE_VECTOR_MODE (mode)
17caeff2 3716 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3717 || mode == DDmode || mode == TDmode
54b695e7 3718 || mode == DImode)))
a3170dc6 3719 {
54b695e7
AH
3720 if (mode == DImode)
3721 return NULL_RTX;
a3170dc6
AH
3722 /* We accept [reg + reg] and [reg + OFFSET]. */
3723
3724 if (GET_CODE (x) == PLUS)
61dd226f
NF
3725 {
3726 rtx op1 = XEXP (x, 0);
3727 rtx op2 = XEXP (x, 1);
3728 rtx y;
3729
3730 op1 = force_reg (Pmode, op1);
3731
3732 if (GET_CODE (op2) != REG
3733 && (GET_CODE (op2) != CONST_INT
3734 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3735 || (GET_MODE_SIZE (mode) > 8
3736 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3737 op2 = force_reg (Pmode, op2);
3738
3739 /* We can't always do [reg + reg] for these, because [reg +
3740 reg + offset] is not a legitimate addressing mode. */
3741 y = gen_rtx_PLUS (Pmode, op1, op2);
3742
4f011e1e 3743 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3744 return force_reg (Pmode, y);
3745 else
3746 return y;
3747 }
a3170dc6
AH
3748
3749 return force_reg (Pmode, x);
3750 }
f1384257
AM
3751 else if (TARGET_ELF
3752 && TARGET_32BIT
3753 && TARGET_NO_TOC
3754 && ! flag_pic
9ebbca7d 3755 && GET_CODE (x) != CONST_INT
f676971a 3756 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3757 && CONSTANT_P (x)
6ac7bf2c
GK
3758 && GET_MODE_NUNITS (mode) == 1
3759 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3760 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3761 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3762 {
3763 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3764 emit_insn (gen_elf_high (reg, x));
3765 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3766 }
ee890fe2
SS
3767 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3768 && ! flag_pic
ab82a49f
AP
3769#if TARGET_MACHO
3770 && ! MACHO_DYNAMIC_NO_PIC_P
3771#endif
ee890fe2 3772 && GET_CODE (x) != CONST_INT
f676971a 3773 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3774 && CONSTANT_P (x)
4d4447b5
PB
3775 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3776 || (mode != DFmode && mode != DDmode))
f676971a 3777 && mode != DImode
ee890fe2
SS
3778 && mode != TImode)
3779 {
3780 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3781 emit_insn (gen_macho_high (reg, x));
3782 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3783 }
f676971a 3784 else if (TARGET_TOC
0cdc04e8 3785 && GET_CODE (x) == SYMBOL_REF
4d588c14 3786 && constant_pool_expr_p (x)
a9098fd0 3787 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3788 {
3789 return create_TOC_reference (x);
3790 }
3791 else
3792 return NULL_RTX;
3793}
258bfae2 3794
fdbe66f2 3795/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3796 We need to emit DTP-relative relocations. */
3797
fdbe66f2 3798static void
c973d557
JJ
3799rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3800{
3801 switch (size)
3802 {
3803 case 4:
3804 fputs ("\t.long\t", file);
3805 break;
3806 case 8:
3807 fputs (DOUBLE_INT_ASM_OP, file);
3808 break;
3809 default:
37409796 3810 gcc_unreachable ();
c973d557
JJ
3811 }
3812 output_addr_const (file, x);
3813 fputs ("@dtprel+0x8000", file);
3814}
3815
c4501e62
JJ
3816/* Construct the SYMBOL_REF for the tls_get_addr function. */
3817
3818static GTY(()) rtx rs6000_tls_symbol;
3819static rtx
863d938c 3820rs6000_tls_get_addr (void)
c4501e62
JJ
3821{
3822 if (!rs6000_tls_symbol)
3823 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3824
3825 return rs6000_tls_symbol;
3826}
3827
3828/* Construct the SYMBOL_REF for TLS GOT references. */
3829
3830static GTY(()) rtx rs6000_got_symbol;
3831static rtx
863d938c 3832rs6000_got_sym (void)
c4501e62
JJ
3833{
3834 if (!rs6000_got_symbol)
3835 {
3836 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3837 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3838 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3839 }
c4501e62
JJ
3840
3841 return rs6000_got_symbol;
3842}
3843
3844/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3845 this (thread-local) address. */
3846
3847static rtx
a2369ed3 3848rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3849{
3850 rtx dest, insn;
3851
3852 dest = gen_reg_rtx (Pmode);
3853 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3854 {
3855 rtx tlsreg;
3856
3857 if (TARGET_64BIT)
3858 {
3859 tlsreg = gen_rtx_REG (Pmode, 13);
3860 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3861 }
3862 else
3863 {
3864 tlsreg = gen_rtx_REG (Pmode, 2);
3865 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3866 }
3867 emit_insn (insn);
3868 }
3869 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3870 {
3871 rtx tlsreg, tmp;
3872
3873 tmp = gen_reg_rtx (Pmode);
3874 if (TARGET_64BIT)
3875 {
3876 tlsreg = gen_rtx_REG (Pmode, 13);
3877 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3878 }
3879 else
3880 {
3881 tlsreg = gen_rtx_REG (Pmode, 2);
3882 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3883 }
3884 emit_insn (insn);
3885 if (TARGET_64BIT)
3886 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3887 else
3888 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3889 emit_insn (insn);
3890 }
3891 else
3892 {
3893 rtx r3, got, tga, tmp1, tmp2, eqv;
3894
4fed8f8f
AM
3895 /* We currently use relocations like @got@tlsgd for tls, which
3896 means the linker will handle allocation of tls entries, placing
3897 them in the .got section. So use a pointer to the .got section,
3898 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3899 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3900 if (TARGET_64BIT)
972f427b 3901 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3902 else
3903 {
3904 if (flag_pic == 1)
3905 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3906 else
3907 {
3908 rtx gsym = rs6000_got_sym ();
3909 got = gen_reg_rtx (Pmode);
3910 if (flag_pic == 0)
3911 rs6000_emit_move (got, gsym, Pmode);
3912 else
3913 {
e65a3857 3914 rtx tmp3, mem;
c4501e62
JJ
3915 rtx first, last;
3916
c4501e62
JJ
3917 tmp1 = gen_reg_rtx (Pmode);
3918 tmp2 = gen_reg_rtx (Pmode);
3919 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3920 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3921
e65a3857
DE
3922 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3923 emit_move_insn (tmp1,
1de43f85 3924 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3925 emit_move_insn (tmp2, mem);
3926 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3927 last = emit_move_insn (got, tmp3);
bd94cb6e 3928 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
3929 }
3930 }
3931 }
3932
3933 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3934 {
3935 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
3936 tga = rs6000_tls_get_addr ();
3937
3938 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3939 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
3940 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
3941 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
3942 else if (DEFAULT_ABI == ABI_V4)
3943 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 3944 else
02135bc1
SB
3945 gcc_unreachable ();
3946
c4501e62 3947 start_sequence ();
c4501e62 3948 insn = emit_call_insn (insn);
becfd6e5 3949 RTL_CONST_CALL_P (insn) = 1;
c4501e62
JJ
3950 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3951 insn = get_insns ();
3952 end_sequence ();
3953 emit_libcall_block (insn, dest, r3, addr);
3954 }
3955 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3956 {
3957 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
3958 tga = rs6000_tls_get_addr ();
3959
3960 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3961 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
3962 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
3963 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
3964 else if (DEFAULT_ABI == ABI_V4)
3965 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 3966 else
02135bc1
SB
3967 gcc_unreachable ();
3968
c4501e62 3969 start_sequence ();
c4501e62 3970 insn = emit_call_insn (insn);
becfd6e5 3971 RTL_CONST_CALL_P (insn) = 1;
c4501e62
JJ
3972 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3973 insn = get_insns ();
3974 end_sequence ();
3975 tmp1 = gen_reg_rtx (Pmode);
3976 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3977 UNSPEC_TLSLD);
3978 emit_libcall_block (insn, tmp1, r3, eqv);
3979 if (rs6000_tls_size == 16)
3980 {
3981 if (TARGET_64BIT)
3982 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3983 else
3984 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3985 }
3986 else if (rs6000_tls_size == 32)
3987 {
3988 tmp2 = gen_reg_rtx (Pmode);
3989 if (TARGET_64BIT)
3990 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3991 else
3992 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3993 emit_insn (insn);
3994 if (TARGET_64BIT)
3995 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3996 else
3997 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3998 }
3999 else
4000 {
4001 tmp2 = gen_reg_rtx (Pmode);
4002 if (TARGET_64BIT)
4003 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4004 else
4005 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4006 emit_insn (insn);
4007 insn = gen_rtx_SET (Pmode, dest,
4008 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4009 }
4010 emit_insn (insn);
4011 }
4012 else
4013 {
a7b376ee 4014 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4015 tmp2 = gen_reg_rtx (Pmode);
4016 if (TARGET_64BIT)
4017 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4018 else
4019 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4020 emit_insn (insn);
4021 if (TARGET_64BIT)
4022 insn = gen_tls_tls_64 (dest, tmp2, addr);
4023 else
4024 insn = gen_tls_tls_32 (dest, tmp2, addr);
4025 emit_insn (insn);
4026 }
4027 }
4028
4029 return dest;
4030}
4031
c4501e62
JJ
4032/* Return 1 if X contains a thread-local symbol. */
4033
4034bool
a2369ed3 4035rs6000_tls_referenced_p (rtx x)
c4501e62 4036{
cd413cab
AP
4037 if (! TARGET_HAVE_TLS)
4038 return false;
4039
c4501e62
JJ
4040 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4041}
4042
4043/* Return 1 if *X is a thread-local symbol. This is the same as
4044 rs6000_tls_symbol_ref except for the type of the unused argument. */
4045
9390387d 4046static int
a2369ed3 4047rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4048{
4049 return RS6000_SYMBOL_REF_TLS_P (*x);
4050}
4051
24ea750e
DJ
4052/* The convention appears to be to define this wherever it is used.
4053 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4054 is now used here. */
4055#ifndef REG_MODE_OK_FOR_BASE_P
4056#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4057#endif
4058
4059/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4060 replace the input X, or the original X if no replacement is called for.
4061 The output parameter *WIN is 1 if the calling macro should goto WIN,
4062 0 if it should not.
4063
4064 For RS/6000, we wish to handle large displacements off a base
4065 register by splitting the addend across an addiu/addis and the mem insn.
4066 This cuts number of extra insns needed from 3 to 1.
4067
4068 On Darwin, we use this to generate code for floating point constants.
4069 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4070 The Darwin code is inside #if TARGET_MACHO because only then is
4071 machopic_function_base_name() defined. */
4072rtx
f676971a 4073rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4074 int opnum, int type,
4075 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4076{
f676971a 4077 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4078 if (GET_CODE (x) == PLUS
4079 && GET_CODE (XEXP (x, 0)) == PLUS
4080 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4081 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4082 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4083 {
4084 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4085 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4086 opnum, (enum reload_type)type);
24ea750e
DJ
4087 *win = 1;
4088 return x;
4089 }
3deb2758 4090
24ea750e
DJ
4091#if TARGET_MACHO
4092 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4093 && GET_CODE (x) == LO_SUM
4094 && GET_CODE (XEXP (x, 0)) == PLUS
4095 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4096 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4097 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4098 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4099 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4100 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4101 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4102 {
4103 /* Result of previous invocation of this function on Darwin
6f317ef3 4104 floating point constant. */
24ea750e 4105 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4106 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4107 opnum, (enum reload_type)type);
24ea750e
DJ
4108 *win = 1;
4109 return x;
4110 }
4111#endif
4937d02d
DE
4112
4113 /* Force ld/std non-word aligned offset into base register by wrapping
4114 in offset 0. */
4115 if (GET_CODE (x) == PLUS
4116 && GET_CODE (XEXP (x, 0)) == REG
4117 && REGNO (XEXP (x, 0)) < 32
4118 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4119 && GET_CODE (XEXP (x, 1)) == CONST_INT
4120 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4121 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4122 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4123 && TARGET_POWERPC64)
4124 {
4125 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4126 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4127 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4128 opnum, (enum reload_type) type);
4129 *win = 1;
4130 return x;
4131 }
4132
24ea750e
DJ
4133 if (GET_CODE (x) == PLUS
4134 && GET_CODE (XEXP (x, 0)) == REG
4135 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4136 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4137 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4138 && !SPE_VECTOR_MODE (mode)
17caeff2 4139 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4140 || mode == DDmode || mode == TDmode
54b695e7 4141 || mode == DImode))
78c875e8 4142 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4143 {
4144 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4145 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4146 HOST_WIDE_INT high
c4ad648e 4147 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4148
4149 /* Check for 32-bit overflow. */
4150 if (high + low != val)
c4ad648e 4151 {
24ea750e
DJ
4152 *win = 0;
4153 return x;
4154 }
4155
4156 /* Reload the high part into a base reg; leave the low part
c4ad648e 4157 in the mem directly. */
24ea750e
DJ
4158
4159 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4160 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4161 GEN_INT (high)),
4162 GEN_INT (low));
24ea750e
DJ
4163
4164 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4165 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4166 opnum, (enum reload_type)type);
24ea750e
DJ
4167 *win = 1;
4168 return x;
4169 }
4937d02d 4170
24ea750e 4171 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4172 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4173 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4174#if TARGET_MACHO
4175 && DEFAULT_ABI == ABI_DARWIN
a29077da 4176 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4177#else
4178 && DEFAULT_ABI == ABI_V4
4179 && !flag_pic
4180#endif
7393f7f8 4181 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4182 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4183 without fprs. */
0d8c1c97 4184 && mode != TFmode
7393f7f8 4185 && mode != TDmode
7b5d92b2 4186 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4187 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4188 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4189 {
8308679f 4190#if TARGET_MACHO
a29077da
GK
4191 if (flag_pic)
4192 {
4193 rtx offset = gen_rtx_CONST (Pmode,
4194 gen_rtx_MINUS (Pmode, x,
11abc112 4195 machopic_function_base_sym ()));
a29077da
GK
4196 x = gen_rtx_LO_SUM (GET_MODE (x),
4197 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4198 gen_rtx_HIGH (Pmode, offset)), offset);
4199 }
4200 else
8308679f 4201#endif
a29077da 4202 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4203 gen_rtx_HIGH (Pmode, x), x);
a29077da 4204
24ea750e 4205 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4206 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4207 opnum, (enum reload_type)type);
24ea750e
DJ
4208 *win = 1;
4209 return x;
4210 }
4937d02d 4211
dec1f3aa
DE
4212 /* Reload an offset address wrapped by an AND that represents the
4213 masking of the lower bits. Strip the outer AND and let reload
4214 convert the offset address into an indirect address. */
4215 if (TARGET_ALTIVEC
4216 && ALTIVEC_VECTOR_MODE (mode)
4217 && GET_CODE (x) == AND
4218 && GET_CODE (XEXP (x, 0)) == PLUS
4219 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4220 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4221 && GET_CODE (XEXP (x, 1)) == CONST_INT
4222 && INTVAL (XEXP (x, 1)) == -16)
4223 {
4224 x = XEXP (x, 0);
4225 *win = 1;
4226 return x;
4227 }
4228
24ea750e 4229 if (TARGET_TOC
0cdc04e8 4230 && GET_CODE (x) == SYMBOL_REF
4d588c14 4231 && constant_pool_expr_p (x)
c1f11548 4232 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4233 {
194c524a 4234 x = create_TOC_reference (x);
24ea750e
DJ
4235 *win = 1;
4236 return x;
4237 }
4238 *win = 0;
4239 return x;
f676971a 4240}
24ea750e 4241
258bfae2
FS
4242/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4243 that is a valid memory address for an instruction.
4244 The MODE argument is the machine mode for the MEM expression
4245 that wants to use this address.
4246
4247 On the RS/6000, there are four valid address: a SYMBOL_REF that
4248 refers to a constant pool entry of an address (or the sum of it
4249 plus a constant), a short (16-bit signed) constant plus a register,
4250 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4251 auto-increment. For DFmode, DDmode and DImode with a constant plus
4252 register, we must ensure that both words are addressable or PowerPC64
4253 with offset word aligned.
258bfae2 4254
4d4447b5 4255 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4256 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4257 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4258 during assembly output. */
4259int
a2369ed3 4260rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4261{
850e8d3d
DN
4262 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4263 if (TARGET_ALTIVEC
4264 && ALTIVEC_VECTOR_MODE (mode)
4265 && GET_CODE (x) == AND
4266 && GET_CODE (XEXP (x, 1)) == CONST_INT
4267 && INTVAL (XEXP (x, 1)) == -16)
4268 x = XEXP (x, 0);
4269
c4501e62
JJ
4270 if (RS6000_SYMBOL_REF_TLS_P (x))
4271 return 0;
4d588c14 4272 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4273 return 1;
4274 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4275 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4276 && !SPE_VECTOR_MODE (mode)
429ec7dc 4277 && mode != TFmode
7393f7f8 4278 && mode != TDmode
54b695e7 4279 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4280 && !(TARGET_E500_DOUBLE
4281 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4282 && TARGET_UPDATE
4d588c14 4283 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4284 return 1;
d04b6e6e 4285 if (legitimate_small_data_p (mode, x))
258bfae2 4286 return 1;
4d588c14 4287 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4288 return 1;
4289 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4290 if (! reg_ok_strict
4291 && GET_CODE (x) == PLUS
4292 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4293 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4294 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4295 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4296 return 1;
76d2b81d 4297 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4298 return 1;
4299 if (mode != TImode
76d2b81d 4300 && mode != TFmode
7393f7f8 4301 && mode != TDmode
a3170dc6
AH
4302 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4303 || TARGET_POWERPC64
4f011e1e
JM
4304 || (mode != DFmode && mode != DDmode)
4305 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4306 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4307 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4308 return 1;
6fb5fa3c
DB
4309 if (GET_CODE (x) == PRE_MODIFY
4310 && mode != TImode
4311 && mode != TFmode
4312 && mode != TDmode
4313 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4314 || TARGET_POWERPC64
4d4447b5 4315 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4316 && (TARGET_POWERPC64 || mode != DImode)
4317 && !ALTIVEC_VECTOR_MODE (mode)
4318 && !SPE_VECTOR_MODE (mode)
4319 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4320 && !(TARGET_E500_DOUBLE
4321 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4322 && TARGET_UPDATE
4323 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4324 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4325 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4326 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4327 return 1;
4d588c14 4328 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4329 return 1;
4330 return 0;
4331}
4d588c14
RH
4332
4333/* Go to LABEL if ADDR (a legitimate address expression)
4334 has an effect that depends on the machine mode it is used for.
4335
4336 On the RS/6000 this is true of all integral offsets (since AltiVec
4337 modes don't allow them) or is a pre-increment or decrement.
4338
4339 ??? Except that due to conceptual problems in offsettable_address_p
4340 we can't really report the problems of integral offsets. So leave
f676971a 4341 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4342 sub-words of a TFmode operand, which is what we had before. */
4343
4344bool
a2369ed3 4345rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4346{
4347 switch (GET_CODE (addr))
4348 {
4349 case PLUS:
4350 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4351 {
4352 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4353 return val + 12 + 0x8000 >= 0x10000;
4354 }
4355 break;
4356
4357 case LO_SUM:
4358 return true;
4359
619fe064 4360 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4361 case PRE_MODIFY:
4362 return TARGET_UPDATE;
4d588c14
RH
4363
4364 default:
4365 break;
4366 }
4367
4368 return false;
4369}
d8ecbcdb 4370
d04b6e6e
EB
4371/* More elaborate version of recog's offsettable_memref_p predicate
4372 that works around the ??? note of rs6000_mode_dependent_address.
4373 In particular it accepts
4374
4375 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4376
4377 in 32-bit mode, that the recog predicate rejects. */
4378
4379bool
4380rs6000_offsettable_memref_p (rtx op)
4381{
4382 if (!MEM_P (op))
4383 return false;
4384
4385 /* First mimic offsettable_memref_p. */
4386 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4387 return true;
4388
4389 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4390 the latter predicate knows nothing about the mode of the memory
4391 reference and, therefore, assumes that it is the largest supported
4392 mode (TFmode). As a consequence, legitimate offsettable memory
4393 references are rejected. rs6000_legitimate_offset_address_p contains
4394 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4395 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4396}
4397
d8ecbcdb
AH
4398/* Return number of consecutive hard regs needed starting at reg REGNO
4399 to hold something of mode MODE.
4400 This is ordinarily the length in words of a value of mode MODE
4401 but can be less for certain modes in special long registers.
4402
4403 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4404 scalar instructions. The upper 32 bits are only available to the
4405 SIMD instructions.
4406
4407 POWER and PowerPC GPRs hold 32 bits worth;
4408 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
4409
4410int
4411rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4412{
4413 if (FP_REGNO_P (regno))
4414 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4415
4416 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4417 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4418
4419 if (ALTIVEC_REGNO_P (regno))
4420 return
4421 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4422
8521c414
JM
4423 /* The value returned for SCmode in the E500 double case is 2 for
4424 ABI compatibility; storing an SCmode value in a single register
4425 would require function_arg and rs6000_spe_function_arg to handle
4426 SCmode so as to pass the value correctly in a pair of
4427 registers. */
4f011e1e
JM
4428 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4429 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4430 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4431
d8ecbcdb
AH
4432 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4433}
2aa4498c
AH
4434
4435/* Change register usage conditional on target flags. */
4436void
4437rs6000_conditional_register_usage (void)
4438{
4439 int i;
4440
4441 /* Set MQ register fixed (already call_used) if not POWER
4442 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4443 be allocated. */
4444 if (! TARGET_POWER)
4445 fixed_regs[64] = 1;
4446
7c9ac5c0 4447 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4448 if (TARGET_64BIT)
4449 fixed_regs[13] = call_used_regs[13]
4450 = call_really_used_regs[13] = 1;
4451
4452 /* Conditionally disable FPRs. */
4453 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4454 for (i = 32; i < 64; i++)
4455 fixed_regs[i] = call_used_regs[i]
c4ad648e 4456 = call_really_used_regs[i] = 1;
2aa4498c 4457
7c9ac5c0
PH
4458 /* The TOC register is not killed across calls in a way that is
4459 visible to the compiler. */
4460 if (DEFAULT_ABI == ABI_AIX)
4461 call_really_used_regs[2] = 0;
4462
2aa4498c
AH
4463 if (DEFAULT_ABI == ABI_V4
4464 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4465 && flag_pic == 2)
4466 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4467
4468 if (DEFAULT_ABI == ABI_V4
4469 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4470 && flag_pic == 1)
4471 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4472 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4473 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4474
4475 if (DEFAULT_ABI == ABI_DARWIN
4476 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4477 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4478 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4479 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4480
b4db40bf
JJ
4481 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4482 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4483 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4484
2aa4498c
AH
4485 if (TARGET_SPE)
4486 {
4487 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4488 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4489 registers in prologues and epilogues. We no longer use r14
4490 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4491 pool for link-compatibility with older versions of GCC. Once
4492 "old" code has died out, we can return r14 to the allocation
4493 pool. */
4494 fixed_regs[14]
4495 = call_used_regs[14]
4496 = call_really_used_regs[14] = 1;
2aa4498c
AH
4497 }
4498
0db747be 4499 if (!TARGET_ALTIVEC)
2aa4498c
AH
4500 {
4501 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4502 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4503 call_really_used_regs[VRSAVE_REGNO] = 1;
4504 }
4505
0db747be
DE
4506 if (TARGET_ALTIVEC)
4507 global_regs[VSCR_REGNO] = 1;
4508
2aa4498c 4509 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4510 {
4511 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4512 call_used_regs[i] = call_really_used_regs[i] = 1;
4513
4514 /* AIX reserves VR20:31 in non-extended ABI mode. */
4515 if (TARGET_XCOFF)
4516 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4517 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4518 }
2aa4498c 4519}
fb4d4348 4520\f
a4f6c312
SS
4521/* Try to output insns to set TARGET equal to the constant C if it can
4522 be done in less than N insns. Do all computations in MODE.
4523 Returns the place where the output has been placed if it can be
4524 done and the insns have been emitted. If it would take more than N
4525 insns, zero is returned and no insns and emitted. */
2bfcf297
DB
4526
4527rtx
f676971a 4528rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4529 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4530{
af8cb5c5 4531 rtx result, insn, set;
2bfcf297
DB
4532 HOST_WIDE_INT c0, c1;
4533
37409796 4534 switch (mode)
2bfcf297 4535 {
37409796
NS
4536 case QImode:
4537 case HImode:
2bfcf297 4538 if (dest == NULL)
c4ad648e 4539 dest = gen_reg_rtx (mode);
2bfcf297
DB
4540 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4541 return dest;
bb8df8a6 4542
37409796 4543 case SImode:
b3a13419 4544 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4545
d448860e 4546 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4547 GEN_INT (INTVAL (source)
4548 & (~ (HOST_WIDE_INT) 0xffff))));
4549 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4550 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4551 GEN_INT (INTVAL (source) & 0xffff))));
4552 result = dest;
37409796
NS
4553 break;
4554
4555 case DImode:
4556 switch (GET_CODE (source))
af8cb5c5 4557 {
37409796 4558 case CONST_INT:
af8cb5c5
DE
4559 c0 = INTVAL (source);
4560 c1 = -(c0 < 0);
37409796 4561 break;
bb8df8a6 4562
37409796 4563 case CONST_DOUBLE:
2bfcf297 4564#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4565 c0 = CONST_DOUBLE_LOW (source);
4566 c1 = -(c0 < 0);
2bfcf297 4567#else
af8cb5c5
DE
4568 c0 = CONST_DOUBLE_LOW (source);
4569 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4570#endif
37409796
NS
4571 break;
4572
4573 default:
4574 gcc_unreachable ();
af8cb5c5 4575 }
af8cb5c5
DE
4576
4577 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4578 break;
4579
4580 default:
4581 gcc_unreachable ();
2bfcf297 4582 }
2bfcf297 4583
af8cb5c5
DE
4584 insn = get_last_insn ();
4585 set = single_set (insn);
4586 if (! CONSTANT_P (SET_SRC (set)))
4587 set_unique_reg_note (insn, REG_EQUAL, source);
4588
4589 return result;
2bfcf297
DB
4590}
4591
4592/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4593 fall back to a straight forward decomposition. We do this to avoid
4594 exponential run times encountered when looking for longer sequences
4595 with rs6000_emit_set_const. */
4596static rtx
a2369ed3 4597rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4598{
4599 if (!TARGET_POWERPC64)
4600 {
4601 rtx operand1, operand2;
4602
4603 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4604 DImode);
d448860e 4605 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4606 DImode);
4607 emit_move_insn (operand1, GEN_INT (c1));
4608 emit_move_insn (operand2, GEN_INT (c2));
4609 }
4610 else
4611 {
bc06712d 4612 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4613
bc06712d 4614 ud1 = c1 & 0xffff;
f921c9c9 4615 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4616#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4617 c2 = c1 >> 32;
2bfcf297 4618#endif
bc06712d 4619 ud3 = c2 & 0xffff;
f921c9c9 4620 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4621
f676971a 4622 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4623 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4624 {
bc06712d 4625 if (ud1 & 0x8000)
b78d48dd 4626 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4627 else
4628 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4629 }
2bfcf297 4630
f676971a 4631 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4632 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4633 {
bc06712d 4634 if (ud2 & 0x8000)
f676971a 4635 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4636 - 0x80000000));
252b88f7 4637 else
bc06712d
TR
4638 emit_move_insn (dest, GEN_INT (ud2 << 16));
4639 if (ud1 != 0)
d448860e
JH
4640 emit_move_insn (copy_rtx (dest),
4641 gen_rtx_IOR (DImode, copy_rtx (dest),
4642 GEN_INT (ud1)));
252b88f7 4643 }
f676971a 4644 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4645 || (ud4 == 0 && ! (ud3 & 0x8000)))
4646 {
4647 if (ud3 & 0x8000)
f676971a 4648 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4649 - 0x80000000));
4650 else
4651 emit_move_insn (dest, GEN_INT (ud3 << 16));
4652
4653 if (ud2 != 0)
d448860e
JH
4654 emit_move_insn (copy_rtx (dest),
4655 gen_rtx_IOR (DImode, copy_rtx (dest),
4656 GEN_INT (ud2)));
4657 emit_move_insn (copy_rtx (dest),
4658 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4659 GEN_INT (16)));
bc06712d 4660 if (ud1 != 0)
d448860e
JH
4661 emit_move_insn (copy_rtx (dest),
4662 gen_rtx_IOR (DImode, copy_rtx (dest),
4663 GEN_INT (ud1)));
bc06712d 4664 }
f676971a 4665 else
bc06712d
TR
4666 {
4667 if (ud4 & 0x8000)
f676971a 4668 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4669 - 0x80000000));
4670 else
4671 emit_move_insn (dest, GEN_INT (ud4 << 16));
4672
4673 if (ud3 != 0)
d448860e
JH
4674 emit_move_insn (copy_rtx (dest),
4675 gen_rtx_IOR (DImode, copy_rtx (dest),
4676 GEN_INT (ud3)));
2bfcf297 4677
d448860e
JH
4678 emit_move_insn (copy_rtx (dest),
4679 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4680 GEN_INT (32)));
bc06712d 4681 if (ud2 != 0)
d448860e
JH
4682 emit_move_insn (copy_rtx (dest),
4683 gen_rtx_IOR (DImode, copy_rtx (dest),
4684 GEN_INT (ud2 << 16)));
bc06712d 4685 if (ud1 != 0)
d448860e
JH
4686 emit_move_insn (copy_rtx (dest),
4687 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4688 }
4689 }
2bfcf297
DB
4690 return dest;
4691}
4692
76d2b81d 4693/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4694 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4695
4696static void
4697rs6000_eliminate_indexed_memrefs (rtx operands[2])
4698{
4699 if (GET_CODE (operands[0]) == MEM
4700 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4701 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4702 && ! reload_in_progress)
4703 operands[0]
4704 = replace_equiv_address (operands[0],
4705 copy_addr_to_reg (XEXP (operands[0], 0)));
4706
4707 if (GET_CODE (operands[1]) == MEM
4708 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4709 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4710 && ! reload_in_progress)
4711 operands[1]
4712 = replace_equiv_address (operands[1],
4713 copy_addr_to_reg (XEXP (operands[1], 0)));
4714}
4715
fb4d4348
GK
4716/* Emit a move from SOURCE to DEST in mode MODE. */
4717void
a2369ed3 4718rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4719{
4720 rtx operands[2];
4721 operands[0] = dest;
4722 operands[1] = source;
f676971a 4723
fb4d4348
GK
4724 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4725 if (GET_CODE (operands[1]) == CONST_DOUBLE
4726 && ! FLOAT_MODE_P (mode)
4727 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4728 {
4729 /* FIXME. This should never happen. */
4730 /* Since it seems that it does, do the safe thing and convert
4731 to a CONST_INT. */
2496c7bd 4732 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4733 }
37409796
NS
4734 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4735 || FLOAT_MODE_P (mode)
4736 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4737 || CONST_DOUBLE_LOW (operands[1]) < 0)
4738 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4739 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4740
c9e8cb32
DD
4741 /* Check if GCC is setting up a block move that will end up using FP
4742 registers as temporaries. We must make sure this is acceptable. */
4743 if (GET_CODE (operands[0]) == MEM
4744 && GET_CODE (operands[1]) == MEM
4745 && mode == DImode
41543739
GK
4746 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4747 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4748 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4749 ? 32 : MEM_ALIGN (operands[0])))
4750 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4751 ? 32
41543739
GK
4752 : MEM_ALIGN (operands[1]))))
4753 && ! MEM_VOLATILE_P (operands [0])
4754 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4755 {
41543739
GK
4756 emit_move_insn (adjust_address (operands[0], SImode, 0),
4757 adjust_address (operands[1], SImode, 0));
d448860e
JH
4758 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4759 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4760 return;
4761 }
630d42a0 4762
b3a13419 4763 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4764 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4765 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4766
a3170dc6
AH
4767 if (mode == SFmode && ! TARGET_POWERPC
4768 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4769 && GET_CODE (operands[0]) == MEM)
fb4d4348 4770 {
ffc14f31
GK
4771 int regnum;
4772
4773 if (reload_in_progress || reload_completed)
4774 regnum = true_regnum (operands[1]);
4775 else if (GET_CODE (operands[1]) == REG)
4776 regnum = REGNO (operands[1]);
4777 else
4778 regnum = -1;
f676971a 4779
fb4d4348
GK
4780 /* If operands[1] is a register, on POWER it may have
4781 double-precision data in it, so truncate it to single
4782 precision. */
4783 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4784 {
4785 rtx newreg;
b3a13419 4786 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4787 : gen_reg_rtx (mode));
fb4d4348
GK
4788 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4789 operands[1] = newreg;
4790 }
4791 }
4792
c4501e62
JJ
4793 /* Recognize the case where operand[1] is a reference to thread-local
4794 data and load its address to a register. */
84f52ebd 4795 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4796 {
84f52ebd
RH
4797 enum tls_model model;
4798 rtx tmp = operands[1];
4799 rtx addend = NULL;
4800
4801 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4802 {
4803 addend = XEXP (XEXP (tmp, 0), 1);
4804 tmp = XEXP (XEXP (tmp, 0), 0);
4805 }
4806
4807 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4808 model = SYMBOL_REF_TLS_MODEL (tmp);
4809 gcc_assert (model != 0);
4810
4811 tmp = rs6000_legitimize_tls_address (tmp, model);
4812 if (addend)
4813 {
4814 tmp = gen_rtx_PLUS (mode, tmp, addend);
4815 tmp = force_operand (tmp, operands[0]);
4816 }
4817 operands[1] = tmp;
c4501e62
JJ
4818 }
4819
8f4e6caf
RH
4820 /* Handle the case where reload calls us with an invalid address. */
4821 if (reload_in_progress && mode == Pmode
69ef87e2 4822 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4823 || ! nonimmediate_operand (operands[0], mode)))
4824 goto emit_set;
4825
a9baceb1
GK
4826 /* 128-bit constant floating-point values on Darwin should really be
4827 loaded as two parts. */
8521c414 4828 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4829 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4830 {
4831 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4832 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4833 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4834 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4835 simplify_gen_subreg (imode, operands[1], mode, 0),
4836 imode);
4837 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4838 GET_MODE_SIZE (imode)),
4839 simplify_gen_subreg (imode, operands[1], mode,
4840 GET_MODE_SIZE (imode)),
4841 imode);
a9baceb1
GK
4842 return;
4843 }
4844
e41b2a33
PB
4845 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4846 cfun->machine->sdmode_stack_slot =
4847 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4848
4849 if (reload_in_progress
4850 && mode == SDmode
4851 && MEM_P (operands[0])
4852 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4853 && REG_P (operands[1]))
4854 {
4855 if (FP_REGNO_P (REGNO (operands[1])))
4856 {
4857 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4858 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4859 emit_insn (gen_movsd_store (mem, operands[1]));
4860 }
4861 else if (INT_REGNO_P (REGNO (operands[1])))
4862 {
4863 rtx mem = adjust_address_nv (operands[0], mode, 4);
4864 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4865 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4866 }
4867 else
4868 gcc_unreachable();
4869 return;
4870 }
4871 if (reload_in_progress
4872 && mode == SDmode
4873 && REG_P (operands[0])
4874 && MEM_P (operands[1])
4875 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4876 {
4877 if (FP_REGNO_P (REGNO (operands[0])))
4878 {
4879 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4880 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4881 emit_insn (gen_movsd_load (operands[0], mem));
4882 }
4883 else if (INT_REGNO_P (REGNO (operands[0])))
4884 {
4885 rtx mem = adjust_address_nv (operands[1], mode, 4);
4886 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4887 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4888 }
4889 else
4890 gcc_unreachable();
4891 return;
4892 }
4893
fb4d4348
GK
4894 /* FIXME: In the long term, this switch statement should go away
4895 and be replaced by a sequence of tests based on things like
4896 mode == Pmode. */
4897 switch (mode)
4898 {
4899 case HImode:
4900 case QImode:
4901 if (CONSTANT_P (operands[1])
4902 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4903 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4904 break;
4905
06f4e019 4906 case TFmode:
7393f7f8 4907 case TDmode:
76d2b81d
DJ
4908 rs6000_eliminate_indexed_memrefs (operands);
4909 /* fall through */
4910
fb4d4348 4911 case DFmode:
7393f7f8 4912 case DDmode:
fb4d4348 4913 case SFmode:
e41b2a33 4914 case SDmode:
f676971a 4915 if (CONSTANT_P (operands[1])
fb4d4348 4916 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4917 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4918 break;
f676971a 4919
0ac081f6
AH
4920 case V16QImode:
4921 case V8HImode:
4922 case V4SFmode:
4923 case V4SImode:
a3170dc6
AH
4924 case V4HImode:
4925 case V2SFmode:
4926 case V2SImode:
00a892b8 4927 case V1DImode:
69ef87e2 4928 if (CONSTANT_P (operands[1])
d744e06e 4929 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4930 operands[1] = force_const_mem (mode, operands[1]);
4931 break;
f676971a 4932
fb4d4348 4933 case SImode:
a9098fd0 4934 case DImode:
fb4d4348
GK
4935 /* Use default pattern for address of ELF small data */
4936 if (TARGET_ELF
a9098fd0 4937 && mode == Pmode
f607bc57 4938 && DEFAULT_ABI == ABI_V4
f676971a 4939 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4940 || GET_CODE (operands[1]) == CONST)
4941 && small_data_operand (operands[1], mode))
fb4d4348
GK
4942 {
4943 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4944 return;
4945 }
4946
f607bc57 4947 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4948 && mode == Pmode && mode == SImode
4949 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4950 {
4951 emit_insn (gen_movsi_got (operands[0], operands[1]));
4952 return;
4953 }
4954
ee890fe2 4955 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4956 && TARGET_NO_TOC
4957 && ! flag_pic
a9098fd0 4958 && mode == Pmode
fb4d4348
GK
4959 && CONSTANT_P (operands[1])
4960 && GET_CODE (operands[1]) != HIGH
4961 && GET_CODE (operands[1]) != CONST_INT)
4962 {
b3a13419
ILT
4963 rtx target = (!can_create_pseudo_p ()
4964 ? operands[0]
4965 : gen_reg_rtx (mode));
fb4d4348
GK
4966
4967 /* If this is a function address on -mcall-aixdesc,
4968 convert it to the address of the descriptor. */
4969 if (DEFAULT_ABI == ABI_AIX
4970 && GET_CODE (operands[1]) == SYMBOL_REF
4971 && XSTR (operands[1], 0)[0] == '.')
4972 {
4973 const char *name = XSTR (operands[1], 0);
4974 rtx new_ref;
4975 while (*name == '.')
4976 name++;
4977 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4978 CONSTANT_POOL_ADDRESS_P (new_ref)
4979 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4980 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4981 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4982 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4983 operands[1] = new_ref;
4984 }
7509c759 4985
ee890fe2
SS
4986 if (DEFAULT_ABI == ABI_DARWIN)
4987 {
ab82a49f
AP
4988#if TARGET_MACHO
4989 if (MACHO_DYNAMIC_NO_PIC_P)
4990 {
4991 /* Take care of any required data indirection. */
4992 operands[1] = rs6000_machopic_legitimize_pic_address (
4993 operands[1], mode, operands[0]);
4994 if (operands[0] != operands[1])
4995 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4996 operands[0], operands[1]));
ab82a49f
AP
4997 return;
4998 }
4999#endif
b8a55285
AP
5000 emit_insn (gen_macho_high (target, operands[1]));
5001 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5002 return;
5003 }
5004
fb4d4348
GK
5005 emit_insn (gen_elf_high (target, operands[1]));
5006 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5007 return;
5008 }
5009
a9098fd0
GK
5010 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5011 and we have put it in the TOC, we just need to make a TOC-relative
5012 reference to it. */
5013 if (TARGET_TOC
5014 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5015 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5016 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5017 get_pool_mode (operands[1])))
fb4d4348 5018 {
a9098fd0 5019 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5020 }
a9098fd0
GK
5021 else if (mode == Pmode
5022 && CONSTANT_P (operands[1])
38886f37
AO
5023 && ((GET_CODE (operands[1]) != CONST_INT
5024 && ! easy_fp_constant (operands[1], mode))
5025 || (GET_CODE (operands[1]) == CONST_INT
5026 && num_insns_constant (operands[1], mode) > 2)
5027 || (GET_CODE (operands[0]) == REG
5028 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5029 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5030 && ! legitimate_constant_pool_address_p (operands[1])
5031 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5032 {
5033 /* Emit a USE operation so that the constant isn't deleted if
5034 expensive optimizations are turned on because nobody
5035 references it. This should only be done for operands that
5036 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5037 This should not be done for operands that contain LABEL_REFs.
5038 For now, we just handle the obvious case. */
5039 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5040 emit_use (operands[1]);
fb4d4348 5041
c859cda6 5042#if TARGET_MACHO
ee890fe2 5043 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5044 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5045 {
ee890fe2
SS
5046 operands[1] =
5047 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5048 operands[0]);
5049 if (operands[0] != operands[1])
5050 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5051 return;
5052 }
c859cda6 5053#endif
ee890fe2 5054
fb4d4348
GK
5055 /* If we are to limit the number of things we put in the TOC and
5056 this is a symbol plus a constant we can add in one insn,
5057 just put the symbol in the TOC and add the constant. Don't do
5058 this if reload is in progress. */
5059 if (GET_CODE (operands[1]) == CONST
5060 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5061 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5062 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5063 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5064 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5065 && ! side_effects_p (operands[0]))
5066 {
a4f6c312
SS
5067 rtx sym =
5068 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5069 rtx other = XEXP (XEXP (operands[1], 0), 1);
5070
a9098fd0
GK
5071 sym = force_reg (mode, sym);
5072 if (mode == SImode)
5073 emit_insn (gen_addsi3 (operands[0], sym, other));
5074 else
5075 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5076 return;
5077 }
5078
a9098fd0 5079 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5080
f676971a 5081 if (TARGET_TOC
0cdc04e8 5082 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5083 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5084 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5085 get_pool_constant (XEXP (operands[1], 0)),
5086 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5087 {
ba4828e0 5088 operands[1]
542a8afa 5089 = gen_const_mem (mode,
c4ad648e 5090 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5091 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5092 }
fb4d4348
GK
5093 }
5094 break;
a9098fd0 5095
fb4d4348 5096 case TImode:
76d2b81d
DJ
5097 rs6000_eliminate_indexed_memrefs (operands);
5098
27dc0551
DE
5099 if (TARGET_POWER)
5100 {
5101 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5102 gen_rtvec (2,
5103 gen_rtx_SET (VOIDmode,
5104 operands[0], operands[1]),
5105 gen_rtx_CLOBBER (VOIDmode,
5106 gen_rtx_SCRATCH (SImode)))));
5107 return;
5108 }
fb4d4348
GK
5109 break;
5110
5111 default:
37409796 5112 gcc_unreachable ();
fb4d4348
GK
5113 }
5114
a9098fd0
GK
5115 /* Above, we may have called force_const_mem which may have returned
5116 an invalid address. If we can, fix this up; otherwise, reload will
5117 have to deal with it. */
8f4e6caf
RH
5118 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5119 operands[1] = validize_mem (operands[1]);
a9098fd0 5120
8f4e6caf 5121 emit_set:
fb4d4348
GK
5122 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5123}
4697a36c 5124\f
2858f73a
GK
5125/* Nonzero if we can use a floating-point register to pass this arg. */
5126#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5127 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5128 && (CUM)->fregno <= FP_ARG_MAX_REG \
5129 && TARGET_HARD_FLOAT && TARGET_FPRS)
5130
5131/* Nonzero if we can use an AltiVec register to pass this arg. */
5132#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5133 (ALTIVEC_VECTOR_MODE (MODE) \
5134 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5135 && TARGET_ALTIVEC_ABI \
83953138 5136 && (NAMED))
2858f73a 5137
c6e8c921
GK
5138/* Return a nonzero value to say to return the function value in
5139 memory, just as large structures are always returned. TYPE will be
5140 the data type of the value, and FNTYPE will be the type of the
5141 function doing the returning, or @code{NULL} for libcalls.
5142
5143 The AIX ABI for the RS/6000 specifies that all structures are
5144 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5145 specifies that structures <= 8 bytes are returned in r3/r4, but a
5146 draft put them in memory, and GCC used to implement the draft
df01da37 5147 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5148 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5149 compatibility can change DRAFT_V4_STRUCT_RET to override the
5150 default, and -m switches get the final word. See
5151 rs6000_override_options for more details.
5152
5153 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5154 long double support is enabled. These values are returned in memory.
5155
5156 int_size_in_bytes returns -1 for variable size objects, which go in
5157 memory always. The cast to unsigned makes -1 > 8. */
5158
5159static bool
586de218 5160rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5161{
594a51fe
SS
5162 /* In the darwin64 abi, try to use registers for larger structs
5163 if possible. */
0b5383eb 5164 if (rs6000_darwin64_abi
594a51fe 5165 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5166 && int_size_in_bytes (type) > 0)
5167 {
5168 CUMULATIVE_ARGS valcum;
5169 rtx valret;
5170
5171 valcum.words = 0;
5172 valcum.fregno = FP_ARG_MIN_REG;
5173 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5174 /* Do a trial code generation as if this were going to be passed
5175 as an argument; if any part goes in memory, we return NULL. */
5176 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5177 if (valret)
5178 return false;
5179 /* Otherwise fall through to more conventional ABI rules. */
5180 }
594a51fe 5181
c6e8c921 5182 if (AGGREGATE_TYPE_P (type)
df01da37 5183 && (aix_struct_return
c6e8c921
GK
5184 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5185 return true;
b693336b 5186
bada2eb8
DE
5187 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5188 modes only exist for GCC vector types if -maltivec. */
5189 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5190 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5191 return false;
5192
b693336b
PB
5193 /* Return synthetic vectors in memory. */
5194 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5195 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5196 {
5197 static bool warned_for_return_big_vectors = false;
5198 if (!warned_for_return_big_vectors)
5199 {
d4ee4d25 5200 warning (0, "GCC vector returned by reference: "
b693336b
PB
5201 "non-standard ABI extension with no compatibility guarantee");
5202 warned_for_return_big_vectors = true;
5203 }
5204 return true;
5205 }
5206
602ea4d3 5207 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5208 return true;
ad630bef 5209
c6e8c921
GK
5210 return false;
5211}
5212
4697a36c
MM
5213/* Initialize a variable CUM of type CUMULATIVE_ARGS
5214 for a call to a function whose data type is FNTYPE.
5215 For a library call, FNTYPE is 0.
5216
5217 For incoming args we set the number of arguments in the prototype large
1c20ae99 5218 so we never return a PARALLEL. */
4697a36c
MM
5219
5220void
f676971a 5221init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5222 rtx libname ATTRIBUTE_UNUSED, int incoming,
5223 int libcall, int n_named_args)
4697a36c
MM
5224{
5225 static CUMULATIVE_ARGS zero_cumulative;
5226
5227 *cum = zero_cumulative;
5228 cum->words = 0;
5229 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5230 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5231 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5232 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5233 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5234 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5235 cum->stdarg = fntype
5236 && (TYPE_ARG_TYPES (fntype) != 0
5237 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5238 != void_type_node));
4697a36c 5239
0f6937fe
AM
5240 cum->nargs_prototype = 0;
5241 if (incoming || cum->prototype)
5242 cum->nargs_prototype = n_named_args;
4697a36c 5243
a5c76ee6 5244 /* Check for a longcall attribute. */
3eb4e360
AM
5245 if ((!fntype && rs6000_default_long_calls)
5246 || (fntype
5247 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5248 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5249 cum->call_cookie |= CALL_LONG;
6a4cee5f 5250
4697a36c
MM
5251 if (TARGET_DEBUG_ARG)
5252 {
5253 fprintf (stderr, "\ninit_cumulative_args:");
5254 if (fntype)
5255 {
5256 tree ret_type = TREE_TYPE (fntype);
5257 fprintf (stderr, " ret code = %s,",
5258 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5259 }
5260
6a4cee5f
MM
5261 if (cum->call_cookie & CALL_LONG)
5262 fprintf (stderr, " longcall,");
5263
4697a36c
MM
5264 fprintf (stderr, " proto = %d, nargs = %d\n",
5265 cum->prototype, cum->nargs_prototype);
5266 }
f676971a 5267
c4ad648e
AM
5268 if (fntype
5269 && !TARGET_ALTIVEC
5270 && TARGET_ALTIVEC_ABI
5271 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5272 {
c85ce869 5273 error ("cannot return value in vector register because"
c4ad648e 5274 " altivec instructions are disabled, use -maltivec"
c85ce869 5275 " to enable them");
c4ad648e 5276 }
4697a36c
MM
5277}
5278\f
fe984136
RH
5279/* Return true if TYPE must be passed on the stack and not in registers. */
5280
5281static bool
586de218 5282rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5283{
5284 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5285 return must_pass_in_stack_var_size (mode, type);
5286 else
5287 return must_pass_in_stack_var_size_or_pad (mode, type);
5288}
5289
c229cba9
DE
5290/* If defined, a C expression which determines whether, and in which
5291 direction, to pad out an argument with extra space. The value
5292 should be of type `enum direction': either `upward' to pad above
5293 the argument, `downward' to pad below, or `none' to inhibit
5294 padding.
5295
5296 For the AIX ABI structs are always stored left shifted in their
5297 argument slot. */
5298
9ebbca7d 5299enum direction
586de218 5300function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5301{
6e985040
AM
5302#ifndef AGGREGATE_PADDING_FIXED
5303#define AGGREGATE_PADDING_FIXED 0
5304#endif
5305#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5306#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5307#endif
5308
5309 if (!AGGREGATE_PADDING_FIXED)
5310 {
5311 /* GCC used to pass structures of the same size as integer types as
5312 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5313 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5314 passed padded downward, except that -mstrict-align further
5315 muddied the water in that multi-component structures of 2 and 4
5316 bytes in size were passed padded upward.
5317
5318 The following arranges for best compatibility with previous
5319 versions of gcc, but removes the -mstrict-align dependency. */
5320 if (BYTES_BIG_ENDIAN)
5321 {
5322 HOST_WIDE_INT size = 0;
5323
5324 if (mode == BLKmode)
5325 {
5326 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5327 size = int_size_in_bytes (type);
5328 }
5329 else
5330 size = GET_MODE_SIZE (mode);
5331
5332 if (size == 1 || size == 2 || size == 4)
5333 return downward;
5334 }
5335 return upward;
5336 }
5337
5338 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5339 {
5340 if (type != 0 && AGGREGATE_TYPE_P (type))
5341 return upward;
5342 }
c229cba9 5343
d3704c46
KH
5344 /* Fall back to the default. */
5345 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5346}
5347
b6c9286a 5348/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5349 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5350 PARM_BOUNDARY is used for all arguments.
f676971a 5351
84e9ad15
AM
5352 V.4 wants long longs and doubles to be double word aligned. Just
5353 testing the mode size is a boneheaded way to do this as it means
5354 that other types such as complex int are also double word aligned.
5355 However, we're stuck with this because changing the ABI might break
5356 existing library interfaces.
5357
b693336b
PB
5358 Doubleword align SPE vectors.
5359 Quadword align Altivec vectors.
5360 Quadword align large synthetic vector types. */
b6c9286a
MM
5361
5362int
b693336b 5363function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5364{
84e9ad15
AM
5365 if (DEFAULT_ABI == ABI_V4
5366 && (GET_MODE_SIZE (mode) == 8
5367 || (TARGET_HARD_FLOAT
5368 && TARGET_FPRS
7393f7f8 5369 && (mode == TFmode || mode == TDmode))))
4ed78545 5370 return 64;
ad630bef
DE
5371 else if (SPE_VECTOR_MODE (mode)
5372 || (type && TREE_CODE (type) == VECTOR_TYPE
5373 && int_size_in_bytes (type) >= 8
5374 && int_size_in_bytes (type) < 16))
e1f83b4d 5375 return 64;
ad630bef
DE
5376 else if (ALTIVEC_VECTOR_MODE (mode)
5377 || (type && TREE_CODE (type) == VECTOR_TYPE
5378 && int_size_in_bytes (type) >= 16))
0ac081f6 5379 return 128;
0b5383eb
DJ
5380 else if (rs6000_darwin64_abi && mode == BLKmode
5381 && type && TYPE_ALIGN (type) > 64)
5382 return 128;
9ebbca7d 5383 else
b6c9286a 5384 return PARM_BOUNDARY;
b6c9286a 5385}
c53bdcf5 5386
294bd182
AM
5387/* For a function parm of MODE and TYPE, return the starting word in
5388 the parameter area. NWORDS of the parameter area are already used. */
5389
5390static unsigned int
5391rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5392{
5393 unsigned int align;
5394 unsigned int parm_offset;
5395
5396 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5397 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5398 return nwords + (-(parm_offset + nwords) & align);
5399}
5400
c53bdcf5
AM
5401/* Compute the size (in words) of a function argument. */
5402
5403static unsigned long
5404rs6000_arg_size (enum machine_mode mode, tree type)
5405{
5406 unsigned long size;
5407
5408 if (mode != BLKmode)
5409 size = GET_MODE_SIZE (mode);
5410 else
5411 size = int_size_in_bytes (type);
5412
5413 if (TARGET_32BIT)
5414 return (size + 3) >> 2;
5415 else
5416 return (size + 7) >> 3;
5417}
b6c9286a 5418\f
0b5383eb 5419/* Use this to flush pending int fields. */
594a51fe
SS
5420
5421static void
0b5383eb
DJ
5422rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5423 HOST_WIDE_INT bitpos)
594a51fe 5424{
0b5383eb
DJ
5425 unsigned int startbit, endbit;
5426 int intregs, intoffset;
5427 enum machine_mode mode;
594a51fe 5428
0b5383eb
DJ
5429 if (cum->intoffset == -1)
5430 return;
594a51fe 5431
0b5383eb
DJ
5432 intoffset = cum->intoffset;
5433 cum->intoffset = -1;
5434
5435 if (intoffset % BITS_PER_WORD != 0)
5436 {
5437 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5438 MODE_INT, 0);
5439 if (mode == BLKmode)
594a51fe 5440 {
0b5383eb
DJ
5441 /* We couldn't find an appropriate mode, which happens,
5442 e.g., in packed structs when there are 3 bytes to load.
5443 Back intoffset back to the beginning of the word in this
5444 case. */
5445 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5446 }
594a51fe 5447 }
0b5383eb
DJ
5448
5449 startbit = intoffset & -BITS_PER_WORD;
5450 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5451 intregs = (endbit - startbit) / BITS_PER_WORD;
5452 cum->words += intregs;
5453}
5454
5455/* The darwin64 ABI calls for us to recurse down through structs,
5456 looking for elements passed in registers. Unfortunately, we have
5457 to track int register count here also because of misalignments
5458 in powerpc alignment mode. */
5459
5460static void
5461rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5462 tree type,
5463 HOST_WIDE_INT startbitpos)
5464{
5465 tree f;
5466
5467 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5468 if (TREE_CODE (f) == FIELD_DECL)
5469 {
5470 HOST_WIDE_INT bitpos = startbitpos;
5471 tree ftype = TREE_TYPE (f);
70fb00df
AP
5472 enum machine_mode mode;
5473 if (ftype == error_mark_node)
5474 continue;
5475 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5476
5477 if (DECL_SIZE (f) != 0
5478 && host_integerp (bit_position (f), 1))
5479 bitpos += int_bit_position (f);
5480
5481 /* ??? FIXME: else assume zero offset. */
5482
5483 if (TREE_CODE (ftype) == RECORD_TYPE)
5484 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5485 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5486 {
5487 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5488 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5489 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5490 }
5491 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5492 {
5493 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5494 cum->vregno++;
5495 cum->words += 2;
5496 }
5497 else if (cum->intoffset == -1)
5498 cum->intoffset = bitpos;
5499 }
594a51fe
SS
5500}
5501
4697a36c
MM
5502/* Update the data in CUM to advance over an argument
5503 of mode MODE and data type TYPE.
b2d04ecf
AM
5504 (TYPE is null for libcalls where that information may not be available.)
5505
5506 Note that for args passed by reference, function_arg will be called
5507 with MODE and TYPE set to that of the pointer to the arg, not the arg
5508 itself. */
4697a36c
MM
5509
5510void
f676971a 5511function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5512 tree type, int named, int depth)
4697a36c 5513{
0b5383eb
DJ
5514 int size;
5515
594a51fe
SS
5516 /* Only tick off an argument if we're not recursing. */
5517 if (depth == 0)
5518 cum->nargs_prototype--;
4697a36c 5519
ad630bef
DE
5520 if (TARGET_ALTIVEC_ABI
5521 && (ALTIVEC_VECTOR_MODE (mode)
5522 || (type && TREE_CODE (type) == VECTOR_TYPE
5523 && int_size_in_bytes (type) == 16)))
0ac081f6 5524 {
4ed78545
AM
5525 bool stack = false;
5526
2858f73a 5527 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5528 {
6d0ef01e
HP
5529 cum->vregno++;
5530 if (!TARGET_ALTIVEC)
c85ce869 5531 error ("cannot pass argument in vector register because"
6d0ef01e 5532 " altivec instructions are disabled, use -maltivec"
c85ce869 5533 " to enable them");
4ed78545
AM
5534
5535 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5536 even if it is going to be passed in a vector register.
4ed78545
AM
5537 Darwin does the same for variable-argument functions. */
5538 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5539 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5540 stack = true;
6d0ef01e 5541 }
4ed78545
AM
5542 else
5543 stack = true;
5544
5545 if (stack)
c4ad648e 5546 {
a594a19c 5547 int align;
f676971a 5548
2858f73a
GK
5549 /* Vector parameters must be 16-byte aligned. This places
5550 them at 2 mod 4 in terms of words in 32-bit mode, since
5551 the parameter save area starts at offset 24 from the
5552 stack. In 64-bit mode, they just have to start on an
5553 even word, since the parameter save area is 16-byte
5554 aligned. Space for GPRs is reserved even if the argument
5555 will be passed in memory. */
5556 if (TARGET_32BIT)
4ed78545 5557 align = (2 - cum->words) & 3;
2858f73a
GK
5558 else
5559 align = cum->words & 1;
c53bdcf5 5560 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5561
a594a19c
GK
5562 if (TARGET_DEBUG_ARG)
5563 {
f676971a 5564 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5565 cum->words, align);
5566 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5567 cum->nargs_prototype, cum->prototype,
2858f73a 5568 GET_MODE_NAME (mode));
a594a19c
GK
5569 }
5570 }
0ac081f6 5571 }
a4b0320c 5572 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5573 && !cum->stdarg
5574 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5575 cum->sysv_gregno++;
594a51fe
SS
5576
5577 else if (rs6000_darwin64_abi
5578 && mode == BLKmode
0b5383eb
DJ
5579 && TREE_CODE (type) == RECORD_TYPE
5580 && (size = int_size_in_bytes (type)) > 0)
5581 {
5582 /* Variable sized types have size == -1 and are
5583 treated as if consisting entirely of ints.
5584 Pad to 16 byte boundary if needed. */
5585 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5586 && (cum->words % 2) != 0)
5587 cum->words++;
5588 /* For varargs, we can just go up by the size of the struct. */
5589 if (!named)
5590 cum->words += (size + 7) / 8;
5591 else
5592 {
5593 /* It is tempting to say int register count just goes up by
5594 sizeof(type)/8, but this is wrong in a case such as
5595 { int; double; int; } [powerpc alignment]. We have to
5596 grovel through the fields for these too. */
5597 cum->intoffset = 0;
5598 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5599 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5600 size * BITS_PER_UNIT);
5601 }
5602 }
f607bc57 5603 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5604 {
a3170dc6 5605 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5606 && (mode == SFmode || mode == DFmode
e41b2a33 5607 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5608 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5609 {
2d83f070
JJ
5610 /* _Decimal128 must use an even/odd register pair. This assumes
5611 that the register number is odd when fregno is odd. */
5612 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5613 cum->fregno++;
5614
5615 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5616 <= FP_ARG_V4_MAX_REG)
602ea4d3 5617 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5618 else
5619 {
602ea4d3 5620 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5621 if (mode == DFmode || mode == TFmode
5622 || mode == DDmode || mode == TDmode)
c4ad648e 5623 cum->words += cum->words & 1;
c53bdcf5 5624 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5625 }
4697a36c 5626 }
4cc833b7
RH
5627 else
5628 {
b2d04ecf 5629 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5630 int gregno = cum->sysv_gregno;
5631
4ed78545
AM
5632 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5633 (r7,r8) or (r9,r10). As does any other 2 word item such
5634 as complex int due to a historical mistake. */
5635 if (n_words == 2)
5636 gregno += (1 - gregno) & 1;
4cc833b7 5637
4ed78545 5638 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5639 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5640 {
4ed78545
AM
5641 /* Long long and SPE vectors are aligned on the stack.
5642 So are other 2 word items such as complex int due to
5643 a historical mistake. */
4cc833b7
RH
5644 if (n_words == 2)
5645 cum->words += cum->words & 1;
5646 cum->words += n_words;
5647 }
4697a36c 5648
4cc833b7
RH
5649 /* Note: continuing to accumulate gregno past when we've started
5650 spilling to the stack indicates the fact that we've started
5651 spilling to the stack to expand_builtin_saveregs. */
5652 cum->sysv_gregno = gregno + n_words;
5653 }
4697a36c 5654
4cc833b7
RH
5655 if (TARGET_DEBUG_ARG)
5656 {
5657 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5658 cum->words, cum->fregno);
5659 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5660 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5661 fprintf (stderr, "mode = %4s, named = %d\n",
5662 GET_MODE_NAME (mode), named);
5663 }
4697a36c
MM
5664 }
5665 else
4cc833b7 5666 {
b2d04ecf 5667 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5668 int start_words = cum->words;
5669 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5670
294bd182 5671 cum->words = align_words + n_words;
4697a36c 5672
ebb109ad 5673 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5674 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5675 {
5676 /* _Decimal128 must be passed in an even/odd float register pair.
5677 This assumes that the register number is odd when fregno is
5678 odd. */
5679 if (mode == TDmode && (cum->fregno % 2) == 1)
5680 cum->fregno++;
5681 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5682 }
4cc833b7
RH
5683
5684 if (TARGET_DEBUG_ARG)
5685 {
5686 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5687 cum->words, cum->fregno);
5688 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5689 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5690 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5691 named, align_words - start_words, depth);
4cc833b7
RH
5692 }
5693 }
4697a36c 5694}
a6c9bed4 5695
f82f556d
AH
5696static rtx
5697spe_build_register_parallel (enum machine_mode mode, int gregno)
5698{
17caeff2 5699 rtx r1, r3, r5, r7;
f82f556d 5700
37409796 5701 switch (mode)
f82f556d 5702 {
37409796 5703 case DFmode:
54b695e7
AH
5704 r1 = gen_rtx_REG (DImode, gregno);
5705 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5706 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5707
5708 case DCmode:
17caeff2 5709 case TFmode:
54b695e7
AH
5710 r1 = gen_rtx_REG (DImode, gregno);
5711 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5712 r3 = gen_rtx_REG (DImode, gregno + 2);
5713 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5714 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5715
17caeff2
JM
5716 case TCmode:
5717 r1 = gen_rtx_REG (DImode, gregno);
5718 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5719 r3 = gen_rtx_REG (DImode, gregno + 2);
5720 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5721 r5 = gen_rtx_REG (DImode, gregno + 4);
5722 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5723 r7 = gen_rtx_REG (DImode, gregno + 6);
5724 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5725 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5726
37409796
NS
5727 default:
5728 gcc_unreachable ();
f82f556d 5729 }
f82f556d 5730}
b78d48dd 5731
f82f556d 5732/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5733static rtx
f676971a 5734rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5735 tree type)
a6c9bed4 5736{
f82f556d
AH
5737 int gregno = cum->sysv_gregno;
5738
5739 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5740 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5741 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5742 || mode == DCmode || mode == TCmode))
f82f556d 5743 {
b5870bee
AH
5744 int n_words = rs6000_arg_size (mode, type);
5745
f82f556d 5746 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5747 if (mode == DFmode)
b5870bee 5748 gregno += (1 - gregno) & 1;
f82f556d 5749
b5870bee
AH
5750 /* Multi-reg args are not split between registers and stack. */
5751 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5752 return NULL_RTX;
5753
5754 return spe_build_register_parallel (mode, gregno);
5755 }
a6c9bed4
AH
5756 if (cum->stdarg)
5757 {
c53bdcf5 5758 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5759
5760 /* SPE vectors are put in odd registers. */
5761 if (n_words == 2 && (gregno & 1) == 0)
5762 gregno += 1;
5763
5764 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5765 {
5766 rtx r1, r2;
5767 enum machine_mode m = SImode;
5768
5769 r1 = gen_rtx_REG (m, gregno);
5770 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5771 r2 = gen_rtx_REG (m, gregno + 1);
5772 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5773 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5774 }
5775 else
b78d48dd 5776 return NULL_RTX;
a6c9bed4
AH
5777 }
5778 else
5779 {
f82f556d
AH
5780 if (gregno <= GP_ARG_MAX_REG)
5781 return gen_rtx_REG (mode, gregno);
a6c9bed4 5782 else
b78d48dd 5783 return NULL_RTX;
a6c9bed4
AH
5784 }
5785}
5786
0b5383eb
DJ
5787/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5788 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5789
0b5383eb 5790static void
bb8df8a6 5791rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5792 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5793{
0b5383eb
DJ
5794 enum machine_mode mode;
5795 unsigned int regno;
5796 unsigned int startbit, endbit;
5797 int this_regno, intregs, intoffset;
5798 rtx reg;
594a51fe 5799
0b5383eb
DJ
5800 if (cum->intoffset == -1)
5801 return;
5802
5803 intoffset = cum->intoffset;
5804 cum->intoffset = -1;
5805
5806 /* If this is the trailing part of a word, try to only load that
5807 much into the register. Otherwise load the whole register. Note
5808 that in the latter case we may pick up unwanted bits. It's not a
5809 problem at the moment but may wish to revisit. */
5810
5811 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5812 {
0b5383eb
DJ
5813 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5814 MODE_INT, 0);
5815 if (mode == BLKmode)
5816 {
5817 /* We couldn't find an appropriate mode, which happens,
5818 e.g., in packed structs when there are 3 bytes to load.
5819 Back intoffset back to the beginning of the word in this
5820 case. */
5821 intoffset = intoffset & -BITS_PER_WORD;
5822 mode = word_mode;
5823 }
5824 }
5825 else
5826 mode = word_mode;
5827
5828 startbit = intoffset & -BITS_PER_WORD;
5829 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5830 intregs = (endbit - startbit) / BITS_PER_WORD;
5831 this_regno = cum->words + intoffset / BITS_PER_WORD;
5832
5833 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5834 cum->use_stack = 1;
bb8df8a6 5835
0b5383eb
DJ
5836 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5837 if (intregs <= 0)
5838 return;
5839
5840 intoffset /= BITS_PER_UNIT;
5841 do
5842 {
5843 regno = GP_ARG_MIN_REG + this_regno;
5844 reg = gen_rtx_REG (mode, regno);
5845 rvec[(*k)++] =
5846 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5847
5848 this_regno += 1;
5849 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5850 mode = word_mode;
5851 intregs -= 1;
5852 }
5853 while (intregs > 0);
5854}
5855
5856/* Recursive workhorse for the following. */
5857
5858static void
586de218 5859rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5860 HOST_WIDE_INT startbitpos, rtx rvec[],
5861 int *k)
5862{
5863 tree f;
5864
5865 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5866 if (TREE_CODE (f) == FIELD_DECL)
5867 {
5868 HOST_WIDE_INT bitpos = startbitpos;
5869 tree ftype = TREE_TYPE (f);
70fb00df
AP
5870 enum machine_mode mode;
5871 if (ftype == error_mark_node)
5872 continue;
5873 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5874
5875 if (DECL_SIZE (f) != 0
5876 && host_integerp (bit_position (f), 1))
5877 bitpos += int_bit_position (f);
5878
5879 /* ??? FIXME: else assume zero offset. */
5880
5881 if (TREE_CODE (ftype) == RECORD_TYPE)
5882 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5883 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5884 {
0b5383eb
DJ
5885#if 0
5886 switch (mode)
594a51fe 5887 {
0b5383eb
DJ
5888 case SCmode: mode = SFmode; break;
5889 case DCmode: mode = DFmode; break;
5890 case TCmode: mode = TFmode; break;
5891 default: break;
594a51fe 5892 }
0b5383eb
DJ
5893#endif
5894 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5895 rvec[(*k)++]
bb8df8a6 5896 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5897 gen_rtx_REG (mode, cum->fregno++),
5898 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5899 if (mode == TFmode || mode == TDmode)
0b5383eb 5900 cum->fregno++;
594a51fe 5901 }
0b5383eb
DJ
5902 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5903 {
5904 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5905 rvec[(*k)++]
bb8df8a6
EC
5906 = gen_rtx_EXPR_LIST (VOIDmode,
5907 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5908 GEN_INT (bitpos / BITS_PER_UNIT));
5909 }
5910 else if (cum->intoffset == -1)
5911 cum->intoffset = bitpos;
5912 }
5913}
594a51fe 5914
0b5383eb
DJ
5915/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5916 the register(s) to be used for each field and subfield of a struct
5917 being passed by value, along with the offset of where the
5918 register's value may be found in the block. FP fields go in FP
5919 register, vector fields go in vector registers, and everything
bb8df8a6 5920 else goes in int registers, packed as in memory.
8ff40a74 5921
0b5383eb
DJ
5922 This code is also used for function return values. RETVAL indicates
5923 whether this is the case.
8ff40a74 5924
a4d05547 5925 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5926 calling convention. */
594a51fe 5927
0b5383eb 5928static rtx
586de218 5929rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
5930 int named, bool retval)
5931{
5932 rtx rvec[FIRST_PSEUDO_REGISTER];
5933 int k = 1, kbase = 1;
5934 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5935 /* This is a copy; modifications are not visible to our caller. */
5936 CUMULATIVE_ARGS copy_cum = *orig_cum;
5937 CUMULATIVE_ARGS *cum = &copy_cum;
5938
5939 /* Pad to 16 byte boundary if needed. */
5940 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5941 && (cum->words % 2) != 0)
5942 cum->words++;
5943
5944 cum->intoffset = 0;
5945 cum->use_stack = 0;
5946 cum->named = named;
5947
5948 /* Put entries into rvec[] for individual FP and vector fields, and
5949 for the chunks of memory that go in int regs. Note we start at
5950 element 1; 0 is reserved for an indication of using memory, and
5951 may or may not be filled in below. */
5952 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5953 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5954
5955 /* If any part of the struct went on the stack put all of it there.
5956 This hack is because the generic code for
5957 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5958 parts of the struct are not at the beginning. */
5959 if (cum->use_stack)
5960 {
5961 if (retval)
5962 return NULL_RTX; /* doesn't go in registers at all */
5963 kbase = 0;
5964 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5965 }
5966 if (k > 1 || cum->use_stack)
5967 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5968 else
5969 return NULL_RTX;
5970}
5971
b78d48dd
FJ
5972/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5973
5974static rtx
ec6376ab 5975rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5976{
ec6376ab
AM
5977 int n_units;
5978 int i, k;
5979 rtx rvec[GP_ARG_NUM_REG + 1];
5980
5981 if (align_words >= GP_ARG_NUM_REG)
5982 return NULL_RTX;
5983
5984 n_units = rs6000_arg_size (mode, type);
5985
5986 /* Optimize the simple case where the arg fits in one gpr, except in
5987 the case of BLKmode due to assign_parms assuming that registers are
5988 BITS_PER_WORD wide. */
5989 if (n_units == 0
5990 || (n_units == 1 && mode != BLKmode))
5991 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5992
5993 k = 0;
5994 if (align_words + n_units > GP_ARG_NUM_REG)
5995 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5996 using a magic NULL_RTX component.
79773478
AM
5997 This is not strictly correct. Only some of the arg belongs in
5998 memory, not all of it. However, the normal scheme using
5999 function_arg_partial_nregs can result in unusual subregs, eg.
6000 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6001 store the whole arg to memory is often more efficient than code
6002 to store pieces, and we know that space is available in the right
6003 place for the whole arg. */
ec6376ab
AM
6004 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6005
6006 i = 0;
6007 do
36a454e1 6008 {
ec6376ab
AM
6009 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6010 rtx off = GEN_INT (i++ * 4);
6011 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6012 }
ec6376ab
AM
6013 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6014
6015 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6016}
6017
4697a36c
MM
6018/* Determine where to put an argument to a function.
6019 Value is zero to push the argument on the stack,
6020 or a hard register in which to store the argument.
6021
6022 MODE is the argument's machine mode.
6023 TYPE is the data type of the argument (as a tree).
6024 This is null for libcalls where that information may
6025 not be available.
6026 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6027 the preceding args and about the function being called. It is
6028 not modified in this routine.
4697a36c
MM
6029 NAMED is nonzero if this argument is a named parameter
6030 (otherwise it is an extra parameter matching an ellipsis).
6031
6032 On RS/6000 the first eight words of non-FP are normally in registers
6033 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6034 Under V.4, the first 8 FP args are in registers.
6035
6036 If this is floating-point and no prototype is specified, we use
6037 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6038 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6039 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6040 doesn't support PARALLEL anyway.
6041
6042 Note that for args passed by reference, function_arg will be called
6043 with MODE and TYPE set to that of the pointer to the arg, not the arg
6044 itself. */
4697a36c 6045
9390387d 6046rtx
f676971a 6047function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6048 tree type, int named)
4697a36c 6049{
4cc833b7 6050 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6051
a4f6c312
SS
6052 /* Return a marker to indicate whether CR1 needs to set or clear the
6053 bit that V.4 uses to say fp args were passed in registers.
6054 Assume that we don't need the marker for software floating point,
6055 or compiler generated library calls. */
4697a36c
MM
6056 if (mode == VOIDmode)
6057 {
f607bc57 6058 if (abi == ABI_V4
b9599e46 6059 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6060 && (cum->stdarg
6061 || (cum->nargs_prototype < 0
6062 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6063 {
a3170dc6
AH
6064 /* For the SPE, we need to crxor CR6 always. */
6065 if (TARGET_SPE_ABI)
6066 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6067 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6068 return GEN_INT (cum->call_cookie
6069 | ((cum->fregno == FP_ARG_MIN_REG)
6070 ? CALL_V4_SET_FP_ARGS
6071 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6072 }
4697a36c 6073
7509c759 6074 return GEN_INT (cum->call_cookie);
4697a36c
MM
6075 }
6076
0b5383eb
DJ
6077 if (rs6000_darwin64_abi && mode == BLKmode
6078 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6079 {
0b5383eb 6080 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6081 if (rslt != NULL_RTX)
6082 return rslt;
6083 /* Else fall through to usual handling. */
6084 }
6085
2858f73a 6086 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6087 if (TARGET_64BIT && ! cum->prototype)
6088 {
c4ad648e
AM
6089 /* Vector parameters get passed in vector register
6090 and also in GPRs or memory, in absence of prototype. */
6091 int align_words;
6092 rtx slot;
6093 align_words = (cum->words + 1) & ~1;
6094
6095 if (align_words >= GP_ARG_NUM_REG)
6096 {
6097 slot = NULL_RTX;
6098 }
6099 else
6100 {
6101 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6102 }
6103 return gen_rtx_PARALLEL (mode,
6104 gen_rtvec (2,
6105 gen_rtx_EXPR_LIST (VOIDmode,
6106 slot, const0_rtx),
6107 gen_rtx_EXPR_LIST (VOIDmode,
6108 gen_rtx_REG (mode, cum->vregno),
6109 const0_rtx)));
c72d6c26
HP
6110 }
6111 else
6112 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6113 else if (TARGET_ALTIVEC_ABI
6114 && (ALTIVEC_VECTOR_MODE (mode)
6115 || (type && TREE_CODE (type) == VECTOR_TYPE
6116 && int_size_in_bytes (type) == 16)))
0ac081f6 6117 {
2858f73a 6118 if (named || abi == ABI_V4)
a594a19c 6119 return NULL_RTX;
0ac081f6 6120 else
a594a19c
GK
6121 {
6122 /* Vector parameters to varargs functions under AIX or Darwin
6123 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6124 int align, align_words, n_words;
6125 enum machine_mode part_mode;
a594a19c
GK
6126
6127 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6128 2 mod 4 in terms of words in 32-bit mode, since the parameter
6129 save area starts at offset 24 from the stack. In 64-bit mode,
6130 they just have to start on an even word, since the parameter
6131 save area is 16-byte aligned. */
6132 if (TARGET_32BIT)
4ed78545 6133 align = (2 - cum->words) & 3;
2858f73a
GK
6134 else
6135 align = cum->words & 1;
a594a19c
GK
6136 align_words = cum->words + align;
6137
6138 /* Out of registers? Memory, then. */
6139 if (align_words >= GP_ARG_NUM_REG)
6140 return NULL_RTX;
ec6376ab
AM
6141
6142 if (TARGET_32BIT && TARGET_POWERPC64)
6143 return rs6000_mixed_function_arg (mode, type, align_words);
6144
2858f73a
GK
6145 /* The vector value goes in GPRs. Only the part of the
6146 value in GPRs is reported here. */
ec6376ab
AM
6147 part_mode = mode;
6148 n_words = rs6000_arg_size (mode, type);
6149 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6150 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6151 is either wholly in GPRs or half in GPRs and half not. */
6152 part_mode = DImode;
ec6376ab
AM
6153
6154 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6155 }
0ac081f6 6156 }
f82f556d
AH
6157 else if (TARGET_SPE_ABI && TARGET_SPE
6158 && (SPE_VECTOR_MODE (mode)
18f63bfa 6159 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6160 || mode == DCmode
6161 || mode == TFmode
6162 || mode == TCmode))))
a6c9bed4 6163 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6164
f607bc57 6165 else if (abi == ABI_V4)
4697a36c 6166 {
a3170dc6 6167 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6168 && (mode == SFmode || mode == DFmode
7393f7f8 6169 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6170 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6171 {
2d83f070
JJ
6172 /* _Decimal128 must use an even/odd register pair. This assumes
6173 that the register number is odd when fregno is odd. */
6174 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6175 cum->fregno++;
6176
6177 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6178 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6179 return gen_rtx_REG (mode, cum->fregno);
6180 else
b78d48dd 6181 return NULL_RTX;
4cc833b7
RH
6182 }
6183 else
6184 {
b2d04ecf 6185 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6186 int gregno = cum->sysv_gregno;
6187
4ed78545
AM
6188 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6189 (r7,r8) or (r9,r10). As does any other 2 word item such
6190 as complex int due to a historical mistake. */
6191 if (n_words == 2)
6192 gregno += (1 - gregno) & 1;
4cc833b7 6193
4ed78545 6194 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6195 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6196 return NULL_RTX;
ec6376ab
AM
6197
6198 if (TARGET_32BIT && TARGET_POWERPC64)
6199 return rs6000_mixed_function_arg (mode, type,
6200 gregno - GP_ARG_MIN_REG);
6201 return gen_rtx_REG (mode, gregno);
4cc833b7 6202 }
4697a36c 6203 }
4cc833b7
RH
6204 else
6205 {
294bd182 6206 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6207
2d83f070
JJ
6208 /* _Decimal128 must be passed in an even/odd float register pair.
6209 This assumes that the register number is odd when fregno is odd. */
6210 if (mode == TDmode && (cum->fregno % 2) == 1)
6211 cum->fregno++;
6212
2858f73a 6213 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6214 {
ec6376ab
AM
6215 rtx rvec[GP_ARG_NUM_REG + 1];
6216 rtx r;
6217 int k;
c53bdcf5
AM
6218 bool needs_psave;
6219 enum machine_mode fmode = mode;
c53bdcf5
AM
6220 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6221
6222 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6223 {
c53bdcf5
AM
6224 /* Currently, we only ever need one reg here because complex
6225 doubles are split. */
7393f7f8
BE
6226 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6227 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6228
7393f7f8
BE
6229 /* Long double or _Decimal128 split over regs and memory. */
6230 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6231 }
c53bdcf5
AM
6232
6233 /* Do we also need to pass this arg in the parameter save
6234 area? */
6235 needs_psave = (type
6236 && (cum->nargs_prototype <= 0
6237 || (DEFAULT_ABI == ABI_AIX
de17c25f 6238 && TARGET_XL_COMPAT
c53bdcf5
AM
6239 && align_words >= GP_ARG_NUM_REG)));
6240
6241 if (!needs_psave && mode == fmode)
ec6376ab 6242 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6243
ec6376ab 6244 k = 0;
c53bdcf5
AM
6245 if (needs_psave)
6246 {
ec6376ab 6247 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6248 This piece must come first, before the fprs. */
c53bdcf5
AM
6249 if (align_words < GP_ARG_NUM_REG)
6250 {
6251 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6252
6253 if (align_words + n_words > GP_ARG_NUM_REG
6254 || (TARGET_32BIT && TARGET_POWERPC64))
6255 {
6256 /* If this is partially on the stack, then we only
6257 include the portion actually in registers here. */
6258 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6259 rtx off;
79773478
AM
6260 int i = 0;
6261 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6262 /* Not all of the arg fits in gprs. Say that it
6263 goes in memory too, using a magic NULL_RTX
6264 component. Also see comment in
6265 rs6000_mixed_function_arg for why the normal
6266 function_arg_partial_nregs scheme doesn't work
6267 in this case. */
6268 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6269 const0_rtx);
ec6376ab
AM
6270 do
6271 {
6272 r = gen_rtx_REG (rmode,
6273 GP_ARG_MIN_REG + align_words);
2e6c9641 6274 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6275 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6276 }
6277 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6278 }
6279 else
6280 {
6281 /* The whole arg fits in gprs. */
6282 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6283 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6284 }
c53bdcf5 6285 }
ec6376ab
AM
6286 else
6287 /* It's entirely in memory. */
6288 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6289 }
6290
ec6376ab
AM
6291 /* Describe where this piece goes in the fprs. */
6292 r = gen_rtx_REG (fmode, cum->fregno);
6293 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6294
6295 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6296 }
6297 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6298 {
ec6376ab
AM
6299 if (TARGET_32BIT && TARGET_POWERPC64)
6300 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6301
4eeca74f
AM
6302 if (mode == BLKmode)
6303 mode = Pmode;
6304
b2d04ecf
AM
6305 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6306 }
4cc833b7
RH
6307 else
6308 return NULL_RTX;
4697a36c 6309 }
4697a36c
MM
6310}
6311\f
ec6376ab 6312/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6313 the number of bytes passed in registers. For args passed entirely in
6314 registers or entirely in memory, zero. When an arg is described by a
6315 PARALLEL, perhaps using more than one register type, this function
6316 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6317
78a52f11
RH
6318static int
6319rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6320 tree type, bool named)
4697a36c 6321{
c53bdcf5 6322 int ret = 0;
ec6376ab 6323 int align_words;
c53bdcf5 6324
f607bc57 6325 if (DEFAULT_ABI == ABI_V4)
4697a36c 6326 return 0;
4697a36c 6327
c53bdcf5
AM
6328 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6329 && cum->nargs_prototype >= 0)
6330 return 0;
6331
0b5383eb
DJ
6332 /* In this complicated case we just disable the partial_nregs code. */
6333 if (rs6000_darwin64_abi && mode == BLKmode
6334 && TREE_CODE (type) == RECORD_TYPE
6335 && int_size_in_bytes (type) > 0)
6336 return 0;
6337
294bd182 6338 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6339
79773478
AM
6340 if (USE_FP_FOR_ARG_P (cum, mode, type))
6341 {
fb63c729
AM
6342 /* If we are passing this arg in the fixed parameter save area
6343 (gprs or memory) as well as fprs, then this function should
79773478
AM
6344 return the number of partial bytes passed in the parameter
6345 save area rather than partial bytes passed in fprs. */
6346 if (type
6347 && (cum->nargs_prototype <= 0
6348 || (DEFAULT_ABI == ABI_AIX
6349 && TARGET_XL_COMPAT
6350 && align_words >= GP_ARG_NUM_REG)))
6351 return 0;
6352 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6353 > FP_ARG_MAX_REG + 1)
ac7e839c 6354 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6355 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6356 return 0;
6357 }
6358
ec6376ab
AM
6359 if (align_words < GP_ARG_NUM_REG
6360 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6361 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6362
c53bdcf5 6363 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6364 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6365
c53bdcf5 6366 return ret;
4697a36c
MM
6367}
6368\f
6369/* A C expression that indicates when an argument must be passed by
6370 reference. If nonzero for an argument, a copy of that argument is
6371 made in memory and a pointer to the argument is passed instead of
6372 the argument itself. The pointer is passed in whatever way is
6373 appropriate for passing a pointer to that type.
6374
b2d04ecf
AM
6375 Under V.4, aggregates and long double are passed by reference.
6376
6377 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6378 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6379
6380 As an extension to all ABIs, variable sized types are passed by
6381 reference. */
4697a36c 6382
8cd5a4e0 6383static bool
f676971a 6384rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6385 enum machine_mode mode, const_tree type,
bada2eb8 6386 bool named ATTRIBUTE_UNUSED)
4697a36c 6387{
602ea4d3 6388 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6389 {
6390 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6391 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6392 return 1;
6393 }
6394
6395 if (!type)
6396 return 0;
4697a36c 6397
bada2eb8
DE
6398 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6399 {
6400 if (TARGET_DEBUG_ARG)
6401 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6402 return 1;
6403 }
6404
6405 if (int_size_in_bytes (type) < 0)
6406 {
6407 if (TARGET_DEBUG_ARG)
6408 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6409 return 1;
6410 }
6411
6412 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6413 modes only exist for GCC vector types if -maltivec. */
6414 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6415 {
6416 if (TARGET_DEBUG_ARG)
6417 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6418 return 1;
6419 }
b693336b
PB
6420
6421 /* Pass synthetic vectors in memory. */
bada2eb8 6422 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6423 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6424 {
6425 static bool warned_for_pass_big_vectors = false;
6426 if (TARGET_DEBUG_ARG)
6427 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6428 if (!warned_for_pass_big_vectors)
6429 {
d4ee4d25 6430 warning (0, "GCC vector passed by reference: "
b693336b
PB
6431 "non-standard ABI extension with no compatibility guarantee");
6432 warned_for_pass_big_vectors = true;
6433 }
6434 return 1;
6435 }
6436
b2d04ecf 6437 return 0;
4697a36c 6438}
5985c7a6
FJ
6439
6440static void
2d9db8eb 6441rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6442{
6443 int i;
6444 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6445
6446 if (nregs == 0)
6447 return;
6448
c4ad648e 6449 for (i = 0; i < nregs; i++)
5985c7a6 6450 {
9390387d 6451 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6452 if (reload_completed)
c4ad648e
AM
6453 {
6454 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6455 tem = NULL_RTX;
6456 else
6457 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6458 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6459 }
5985c7a6
FJ
6460 else
6461 tem = replace_equiv_address (tem, XEXP (tem, 0));
6462
37409796 6463 gcc_assert (tem);
5985c7a6
FJ
6464
6465 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6466 }
6467}
4697a36c
MM
6468\f
6469/* Perform any needed actions needed for a function that is receiving a
f676971a 6470 variable number of arguments.
4697a36c
MM
6471
6472 CUM is as above.
6473
6474 MODE and TYPE are the mode and type of the current parameter.
6475
6476 PRETEND_SIZE is a variable that should be set to the amount of stack
6477 that must be pushed by the prolog to pretend that our caller pushed
6478 it.
6479
6480 Normally, this macro will push all remaining incoming registers on the
6481 stack and set PRETEND_SIZE to the length of the registers pushed. */
6482
c6e8c921 6483static void
f676971a 6484setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6485 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6486 int no_rtl)
4697a36c 6487{
4cc833b7
RH
6488 CUMULATIVE_ARGS next_cum;
6489 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6490 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6491 int first_reg_offset;
6492 alias_set_type set;
4697a36c 6493
f31bf321 6494 /* Skip the last named argument. */
d34c5b80 6495 next_cum = *cum;
594a51fe 6496 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6497
f607bc57 6498 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6499 {
5b667039
JJ
6500 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6501
60e2d0ca 6502 if (! no_rtl)
5b667039
JJ
6503 {
6504 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6505 HOST_WIDE_INT offset = 0;
6506
6507 /* Try to optimize the size of the varargs save area.
6508 The ABI requires that ap.reg_save_area is doubleword
6509 aligned, but we don't need to allocate space for all
6510 the bytes, only those to which we actually will save
6511 anything. */
6512 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6513 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6514 if (TARGET_HARD_FLOAT && TARGET_FPRS
6515 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6516 && cfun->va_list_fpr_size)
6517 {
6518 if (gpr_reg_num)
6519 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6520 * UNITS_PER_FP_WORD;
6521 if (cfun->va_list_fpr_size
6522 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6523 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6524 else
6525 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6526 * UNITS_PER_FP_WORD;
6527 }
6528 if (gpr_reg_num)
6529 {
6530 offset = -((first_reg_offset * reg_size) & ~7);
6531 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6532 {
6533 gpr_reg_num = cfun->va_list_gpr_size;
6534 if (reg_size == 4 && (first_reg_offset & 1))
6535 gpr_reg_num++;
6536 }
6537 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6538 }
6539 else if (fpr_size)
6540 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6541 * UNITS_PER_FP_WORD
6542 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6543
5b667039
JJ
6544 if (gpr_size + fpr_size)
6545 {
6546 rtx reg_save_area
6547 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6548 gcc_assert (GET_CODE (reg_save_area) == MEM);
6549 reg_save_area = XEXP (reg_save_area, 0);
6550 if (GET_CODE (reg_save_area) == PLUS)
6551 {
6552 gcc_assert (XEXP (reg_save_area, 0)
6553 == virtual_stack_vars_rtx);
6554 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6555 offset += INTVAL (XEXP (reg_save_area, 1));
6556 }
6557 else
6558 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6559 }
6560
6561 cfun->machine->varargs_save_offset = offset;
6562 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6563 }
4697a36c 6564 }
60e2d0ca 6565 else
4697a36c 6566 {
d34c5b80 6567 first_reg_offset = next_cum.words;
4cc833b7 6568 save_area = virtual_incoming_args_rtx;
4697a36c 6569
fe984136 6570 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6571 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6572 }
4697a36c 6573
dfafc897 6574 set = get_varargs_alias_set ();
9d30f3c1
JJ
6575 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6576 && cfun->va_list_gpr_size)
4cc833b7 6577 {
9d30f3c1
JJ
6578 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6579
6580 if (va_list_gpr_counter_field)
6581 {
6582 /* V4 va_list_gpr_size counts number of registers needed. */
6583 if (nregs > cfun->va_list_gpr_size)
6584 nregs = cfun->va_list_gpr_size;
6585 }
6586 else
6587 {
6588 /* char * va_list instead counts number of bytes needed. */
6589 if (nregs > cfun->va_list_gpr_size / reg_size)
6590 nregs = cfun->va_list_gpr_size / reg_size;
6591 }
6592
dfafc897 6593 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6594 plus_constant (save_area,
13e2e16e
DE
6595 first_reg_offset * reg_size));
6596 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6597 set_mem_alias_set (mem, set);
8ac61af7 6598 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6599
f676971a 6600 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6601 nregs);
4697a36c
MM
6602 }
6603
4697a36c 6604 /* Save FP registers if needed. */
f607bc57 6605 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6606 && TARGET_HARD_FLOAT && TARGET_FPRS
6607 && ! no_rtl
9d30f3c1
JJ
6608 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6609 && cfun->va_list_fpr_size)
4697a36c 6610 {
9d30f3c1 6611 int fregno = next_cum.fregno, nregs;
9ebbca7d 6612 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6613 rtx lab = gen_label_rtx ();
5b667039
JJ
6614 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6615 * UNITS_PER_FP_WORD);
4697a36c 6616
c4ad648e
AM
6617 emit_jump_insn
6618 (gen_rtx_SET (VOIDmode,
6619 pc_rtx,
6620 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6621 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6622 const0_rtx),
39403d82 6623 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6624 pc_rtx)));
6625
9d30f3c1
JJ
6626 for (nregs = 0;
6627 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6628 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6629 {
5496b36f 6630 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6631 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6632 set_mem_alias_set (mem, set);
94ff898d 6633 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6634 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6635 }
4cc833b7
RH
6636
6637 emit_label (lab);
4697a36c 6638 }
4697a36c 6639}
4697a36c 6640
dfafc897 6641/* Create the va_list data type. */
2c4974b7 6642
c35d187f
RH
6643static tree
6644rs6000_build_builtin_va_list (void)
dfafc897 6645{
64c2816f 6646 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6647
9ebbca7d
GK
6648 /* For AIX, prefer 'char *' because that's what the system
6649 header files like. */
f607bc57 6650 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6651 return build_pointer_type (char_type_node);
dfafc897 6652
f1e639b1 6653 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6654 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6655
f676971a 6656 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6657 unsigned_char_type_node);
f676971a 6658 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6659 unsigned_char_type_node);
64c2816f
DT
6660 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6661 every user file. */
6662 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6663 short_unsigned_type_node);
dfafc897
FS
6664 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6665 ptr_type_node);
6666 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6667 ptr_type_node);
6668
9d30f3c1
JJ
6669 va_list_gpr_counter_field = f_gpr;
6670 va_list_fpr_counter_field = f_fpr;
6671
dfafc897
FS
6672 DECL_FIELD_CONTEXT (f_gpr) = record;
6673 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6674 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6675 DECL_FIELD_CONTEXT (f_ovf) = record;
6676 DECL_FIELD_CONTEXT (f_sav) = record;
6677
bab45a51
FS
6678 TREE_CHAIN (record) = type_decl;
6679 TYPE_NAME (record) = type_decl;
dfafc897
FS
6680 TYPE_FIELDS (record) = f_gpr;
6681 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6682 TREE_CHAIN (f_fpr) = f_res;
6683 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6684 TREE_CHAIN (f_ovf) = f_sav;
6685
6686 layout_type (record);
6687
6688 /* The correct type is an array type of one element. */
6689 return build_array_type (record, build_index_type (size_zero_node));
6690}
6691
6692/* Implement va_start. */
6693
d7bd8aeb 6694static void
a2369ed3 6695rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6696{
dfafc897 6697 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6698 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6699 tree gpr, fpr, ovf, sav, t;
2c4974b7 6700
dfafc897 6701 /* Only SVR4 needs something special. */
f607bc57 6702 if (DEFAULT_ABI != ABI_V4)
dfafc897 6703 {
e5faf155 6704 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6705 return;
6706 }
6707
973a648b 6708 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6709 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6710 f_res = TREE_CHAIN (f_fpr);
6711 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6712 f_sav = TREE_CHAIN (f_ovf);
6713
872a65b5 6714 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6715 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6716 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6717 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6718 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6719
6720 /* Count number of gp and fp argument registers used. */
38173d38
JH
6721 words = crtl->args.info.words;
6722 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6723 GP_ARG_NUM_REG);
38173d38 6724 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6725 FP_ARG_NUM_REG);
dfafc897
FS
6726
6727 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6728 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6729 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6730 words, n_gpr, n_fpr);
dfafc897 6731
9d30f3c1
JJ
6732 if (cfun->va_list_gpr_size)
6733 {
07beea0d 6734 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6735 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6736 TREE_SIDE_EFFECTS (t) = 1;
6737 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6738 }
58c8adc1 6739
9d30f3c1
JJ
6740 if (cfun->va_list_fpr_size)
6741 {
07beea0d 6742 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6743 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6744 TREE_SIDE_EFFECTS (t) = 1;
6745 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6746 }
dfafc897
FS
6747
6748 /* Find the overflow area. */
6749 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6750 if (words != 0)
5be014d5
AP
6751 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6752 size_int (words * UNITS_PER_WORD));
07beea0d 6753 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6754 TREE_SIDE_EFFECTS (t) = 1;
6755 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6756
9d30f3c1
JJ
6757 /* If there were no va_arg invocations, don't set up the register
6758 save area. */
6759 if (!cfun->va_list_gpr_size
6760 && !cfun->va_list_fpr_size
6761 && n_gpr < GP_ARG_NUM_REG
6762 && n_fpr < FP_ARG_V4_MAX_REG)
6763 return;
6764
dfafc897
FS
6765 /* Find the register save area. */
6766 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6767 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6768 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6769 size_int (cfun->machine->varargs_save_offset));
07beea0d 6770 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6771 TREE_SIDE_EFFECTS (t) = 1;
6772 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6773}
6774
6775/* Implement va_arg. */
6776
23a60a04
JM
6777tree
6778rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6779{
cd3ce9b4
JM
6780 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6781 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6782 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6783 tree lab_false, lab_over, addr;
6784 int align;
6785 tree ptrtype = build_pointer_type (type);
7393f7f8 6786 int regalign = 0;
cd3ce9b4 6787
08b0dc1b
RH
6788 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6789 {
6790 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6791 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6792 }
6793
cd3ce9b4
JM
6794 if (DEFAULT_ABI != ABI_V4)
6795 {
08b0dc1b 6796 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6797 {
6798 tree elem_type = TREE_TYPE (type);
6799 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6800 int elem_size = GET_MODE_SIZE (elem_mode);
6801
6802 if (elem_size < UNITS_PER_WORD)
6803 {
23a60a04 6804 tree real_part, imag_part;
cd3ce9b4
JM
6805 tree post = NULL_TREE;
6806
23a60a04
JM
6807 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6808 &post);
6809 /* Copy the value into a temporary, lest the formal temporary
6810 be reused out from under us. */
6811 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6812 append_to_statement_list (post, pre_p);
6813
23a60a04
JM
6814 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6815 post_p);
cd3ce9b4 6816
47a25a46 6817 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6818 }
6819 }
6820
23a60a04 6821 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6822 }
6823
6824 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6825 f_fpr = TREE_CHAIN (f_gpr);
6826 f_res = TREE_CHAIN (f_fpr);
6827 f_ovf = TREE_CHAIN (f_res);
6828 f_sav = TREE_CHAIN (f_ovf);
6829
872a65b5 6830 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6831 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6832 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6833 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6834 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6835
6836 size = int_size_in_bytes (type);
6837 rsize = (size + 3) / 4;
6838 align = 1;
6839
08b0dc1b 6840 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6841 && (TYPE_MODE (type) == SFmode
6842 || TYPE_MODE (type) == DFmode
7393f7f8 6843 || TYPE_MODE (type) == TFmode
e41b2a33 6844 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6845 || TYPE_MODE (type) == DDmode
6846 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6847 {
6848 /* FP args go in FP registers, if present. */
cd3ce9b4 6849 reg = fpr;
602ea4d3 6850 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6851 sav_ofs = 8*4;
6852 sav_scale = 8;
e41b2a33 6853 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6854 align = 8;
6855 }
6856 else
6857 {
6858 /* Otherwise into GP registers. */
cd3ce9b4
JM
6859 reg = gpr;
6860 n_reg = rsize;
6861 sav_ofs = 0;
6862 sav_scale = 4;
6863 if (n_reg == 2)
6864 align = 8;
6865 }
6866
6867 /* Pull the value out of the saved registers.... */
6868
6869 lab_over = NULL;
6870 addr = create_tmp_var (ptr_type_node, "addr");
6871 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6872
6873 /* AltiVec vectors never go in registers when -mabi=altivec. */
6874 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6875 align = 16;
6876 else
6877 {
6878 lab_false = create_artificial_label ();
6879 lab_over = create_artificial_label ();
6880
6881 /* Long long and SPE vectors are aligned in the registers.
6882 As are any other 2 gpr item such as complex int due to a
6883 historical mistake. */
6884 u = reg;
602ea4d3 6885 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6886 {
7393f7f8 6887 regalign = 1;
cd3ce9b4 6888 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6889 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6890 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6891 }
7393f7f8
BE
6892 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6893 reg number is 0 for f1, so we want to make it odd. */
6894 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6895 {
6896 regalign = 1;
383e91e4
JJ
6897 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg,
6898 build_int_cst (TREE_TYPE (reg), 1));
7393f7f8
BE
6899 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6900 }
cd3ce9b4 6901
95674810 6902 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6903 t = build2 (GE_EXPR, boolean_type_node, u, t);
6904 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6905 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6906 gimplify_and_add (t, pre_p);
6907
6908 t = sav;
6909 if (sav_ofs)
5be014d5 6910 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6911
8fb632eb
ZD
6912 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6913 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6914 u = fold_convert (sizetype, u);
6915 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6916 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6917
e41b2a33
PB
6918 /* _Decimal32 varargs are located in the second word of the 64-bit
6919 FP register for 32-bit binaries. */
4f011e1e
JM
6920 if (!TARGET_POWERPC64
6921 && TARGET_HARD_FLOAT && TARGET_FPRS
6922 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
6923 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
6924
07beea0d 6925 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6926 gimplify_and_add (t, pre_p);
6927
6928 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6929 gimplify_and_add (t, pre_p);
6930
6931 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6932 append_to_statement_list (t, pre_p);
6933
7393f7f8 6934 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6935 {
6936 /* Ensure that we don't find any more args in regs.
7393f7f8 6937 Alignment has taken care of for special cases. */
383e91e4
JJ
6938 t = build_gimple_modify_stmt (reg,
6939 build_int_cst (TREE_TYPE (reg), 8));
cd3ce9b4
JM
6940 gimplify_and_add (t, pre_p);
6941 }
6942 }
6943
6944 /* ... otherwise out of the overflow area. */
6945
6946 /* Care for on-stack alignment if needed. */
6947 t = ovf;
6948 if (align != 1)
6949 {
5be014d5
AP
6950 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6951 t = fold_convert (sizetype, t);
4a90aeeb 6952 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6953 size_int (-align));
6954 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6955 }
6956 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6957
07beea0d 6958 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6959 gimplify_and_add (u, pre_p);
6960
5be014d5 6961 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6962 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6963 gimplify_and_add (t, pre_p);
6964
6965 if (lab_over)
6966 {
6967 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6968 append_to_statement_list (t, pre_p);
6969 }
6970
0cfbc62b
JM
6971 if (STRICT_ALIGNMENT
6972 && (TYPE_ALIGN (type)
6973 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6974 {
6975 /* The value (of type complex double, for example) may not be
6976 aligned in memory in the saved registers, so copy via a
6977 temporary. (This is the same code as used for SPARC.) */
6978 tree tmp = create_tmp_var (type, "va_arg_tmp");
6979 tree dest_addr = build_fold_addr_expr (tmp);
6980
5039610b
SL
6981 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6982 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6983
6984 gimplify_and_add (copy, pre_p);
6985 addr = dest_addr;
6986 }
6987
08b0dc1b 6988 addr = fold_convert (ptrtype, addr);
872a65b5 6989 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6990}
6991
0ac081f6
AH
6992/* Builtins. */
6993
58646b77
PB
6994static void
6995def_builtin (int mask, const char *name, tree type, int code)
6996{
96038623 6997 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
6998 {
6999 if (rs6000_builtin_decls[code])
7000 abort ();
7001
7002 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7003 add_builtin_function (name, type, code, BUILT_IN_MD,
7004 NULL, NULL_TREE);
58646b77
PB
7005 }
7006}
0ac081f6 7007
24408032
AH
7008/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7009
2212663f 7010static const struct builtin_description bdesc_3arg[] =
24408032
AH
7011{
7012 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7013 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7014 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7015 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7016 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7017 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7018 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7019 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7020 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7021 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7022 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7023 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7024 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7025 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7026 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7027 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7028 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7029 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7030 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7031 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7032 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7033 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7034 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7035
7036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7051
7052 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7053 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7054 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7055 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7056 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7057 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7058 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7059 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7060 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7061};
2212663f 7062
95385cbb
AH
7063/* DST operations: void foo (void *, const int, const char). */
7064
7065static const struct builtin_description bdesc_dst[] =
7066{
7067 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7068 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7069 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7070 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7071
7072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7076};
7077
2212663f 7078/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7079
a3170dc6 7080static struct builtin_description bdesc_2arg[] =
0ac081f6 7081{
f18c054f
DB
7082 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7083 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7084 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7085 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7086 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7087 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7088 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7089 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7090 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7092 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7093 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7094 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7096 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7097 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7098 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7101 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7102 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7103 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7104 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7105 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7106 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7107 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7108 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7109 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7110 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7111 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7112 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7113 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7114 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7115 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7117 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7118 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7119 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7120 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7121 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7122 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7123 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7124 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7125 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7126 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7127 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7128 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7129 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7130 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7131 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7132 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7133 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7134 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7135 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7136 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7137 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7138 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7139 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7140 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7141 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7142 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7143 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7144 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7145 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7146 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7147 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7148 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7149 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7150 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7151 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7152 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7153 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7154 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7155 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7156 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7157 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7158 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7159 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7160 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7161 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7162 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7163 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7164 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7165 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7166 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7167 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7168 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7169 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7170 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7171 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7172 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7173 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7174 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7175 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7176 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7177 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7178 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7179 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7180 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7181 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7182 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7183 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7184 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7185 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7186 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7187 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7188 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7189 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7190 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7191 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7192 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7193
58646b77
PB
7194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7210 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7211 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7212 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7213 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7214 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7215 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7216 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7217 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7218 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7219 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7220 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7221 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7222 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7223 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7239 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7240 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7241 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7242 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7243 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7244 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7245 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7246 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7247 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7248 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7249 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7250 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7251 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7252 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7253 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7254 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7255 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7256 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7257 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7258 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7259 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7260 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7261 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7262 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7263 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7264 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7265 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7266 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7267 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7268 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7269 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7270 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7271 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7272 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7273 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7274 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7275 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7276 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7277 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7278 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7279 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7280 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7281 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7282 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7283 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7284 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7285 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7286 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7287 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7288 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7289 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7290 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7291 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7292 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7293 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7294 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7295 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7296 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7297 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7298 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7299 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7300 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7301 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7302 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7303 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7304 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7305 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7306 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7307 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7308 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7309 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7310 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7311 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7312 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7313 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7314 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7315 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7316 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7317 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7318 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7319 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7320 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7321
96038623
DE
7322 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7323 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7324 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7325 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7326 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7327 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7328 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7329 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7330 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7331 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7332
a3170dc6
AH
7333 /* Place holder, leave as first spe builtin. */
7334 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7335 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7336 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7337 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7338 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7339 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7340 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7341 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7342 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7343 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7344 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7345 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7346 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7347 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7348 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7349 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7350 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7351 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7352 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7353 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7354 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7355 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7356 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7357 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7358 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7359 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7360 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7361 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7362 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7363 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7364 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7365 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7366 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7367 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7368 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7369 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7370 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7371 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7372 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7373 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7374 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7375 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7376 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7377 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7378 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7379 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7380 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7381 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7382 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7383 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7384 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7385 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7386 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7387 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7388 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7389 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7390 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7391 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7392 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7393 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7394 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7395 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7396 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7397 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7398 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7399 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7400 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7401 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7402 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7403 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7404 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7405 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7406 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7407 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7408 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7409 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7410 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7411 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7412 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7413 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7414 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7415 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7416 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7417 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7418 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7419 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7420 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7421 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7422 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7423 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7424 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7425 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7426 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7427 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7428 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7429 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7430 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7431 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7432 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7433 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7434 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7435 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7436 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7437 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7438 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7439 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7440 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7441 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7442 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7443
7444 /* SPE binary operations expecting a 5-bit unsigned literal. */
7445 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7446
7447 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7448 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7449 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7450 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7451 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7452 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7453 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7454 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7455 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7456 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7457 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7458 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7459 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7460 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7461 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7462 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7463 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7464 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7465 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7466 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7467 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7468 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7469 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7470 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7471 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7472 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7473
7474 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7475 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7476};
7477
7478/* AltiVec predicates. */
7479
7480struct builtin_description_predicates
7481{
7482 const unsigned int mask;
7483 const enum insn_code icode;
7484 const char *opcode;
7485 const char *const name;
7486 const enum rs6000_builtins code;
7487};
7488
7489static const struct builtin_description_predicates bdesc_altivec_preds[] =
7490{
7491 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7492 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7493 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7494 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7495 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7496 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7497 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7498 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7499 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7500 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7501 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7502 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7503 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7504
7505 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7506 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7507 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7508};
24408032 7509
a3170dc6
AH
7510/* SPE predicates. */
7511static struct builtin_description bdesc_spe_predicates[] =
7512{
7513 /* Place-holder. Leave as first. */
7514 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7515 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7516 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7517 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7518 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7519 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7520 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7521 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7522 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7523 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7524 /* Place-holder. Leave as last. */
7525 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7526};
7527
7528/* SPE evsel predicates. */
7529static struct builtin_description bdesc_spe_evsel[] =
7530{
7531 /* Place-holder. Leave as first. */
7532 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7533 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7534 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7535 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7536 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7537 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7538 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7539 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7540 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7541 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7542 /* Place-holder. Leave as last. */
7543 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7544};
7545
96038623
DE
7546/* PAIRED predicates. */
7547static const struct builtin_description bdesc_paired_preds[] =
7548{
7549 /* Place-holder. Leave as first. */
7550 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7551 /* Place-holder. Leave as last. */
7552 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7553};
7554
b6d08ca1 7555/* ABS* operations. */
100c4561
AH
7556
7557static const struct builtin_description bdesc_abs[] =
7558{
7559 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7560 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7561 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7562 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7563 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7564 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7565 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7566};
7567
617e0e1d
DB
7568/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7569 foo (VECa). */
24408032 7570
a3170dc6 7571static struct builtin_description bdesc_1arg[] =
2212663f 7572{
617e0e1d
DB
7573 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7574 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7575 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7576 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7577 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7578 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7579 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7580 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7581 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7582 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7583 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7584 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7585 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7586 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7587 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7588 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7589 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7590
58646b77
PB
7591 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7592 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7593 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7594 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7595 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7596 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7597 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7598 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7599 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7600 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7601 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7602 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7603 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7604 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7605 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7606 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7607 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7608 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7609 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7610
a3170dc6
AH
7611 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7612 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7613 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7614 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7615 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7616 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7617 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7618 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7619 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7620 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7621 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7622 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7623 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7624 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7625 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7626 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7627 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7628 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7629 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7630 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7631 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7632 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7633 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7634 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7635 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7636 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7637 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7638 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7639 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7640 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7641
7642 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7643 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7644
7645 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7646 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7647 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7648 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7649 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7650};
7651
7652static rtx
5039610b 7653rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7654{
7655 rtx pat;
5039610b 7656 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7657 rtx op0 = expand_normal (arg0);
2212663f
DB
7658 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7659 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7660
0559cc77
DE
7661 if (icode == CODE_FOR_nothing)
7662 /* Builtin not supported on this processor. */
7663 return 0;
7664
20e26713
AH
7665 /* If we got invalid arguments bail out before generating bad rtl. */
7666 if (arg0 == error_mark_node)
9a171fcd 7667 return const0_rtx;
20e26713 7668
0559cc77
DE
7669 if (icode == CODE_FOR_altivec_vspltisb
7670 || icode == CODE_FOR_altivec_vspltish
7671 || icode == CODE_FOR_altivec_vspltisw
7672 || icode == CODE_FOR_spe_evsplatfi
7673 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7674 {
7675 /* Only allow 5-bit *signed* literals. */
b44140e7 7676 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7677 || INTVAL (op0) > 15
7678 || INTVAL (op0) < -16)
b44140e7
AH
7679 {
7680 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7681 return const0_rtx;
b44140e7 7682 }
b44140e7
AH
7683 }
7684
c62f2db5 7685 if (target == 0
2212663f
DB
7686 || GET_MODE (target) != tmode
7687 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7688 target = gen_reg_rtx (tmode);
7689
7690 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7691 op0 = copy_to_mode_reg (mode0, op0);
7692
7693 pat = GEN_FCN (icode) (target, op0);
7694 if (! pat)
7695 return 0;
7696 emit_insn (pat);
0ac081f6 7697
2212663f
DB
7698 return target;
7699}
ae4b4a02 7700
100c4561 7701static rtx
5039610b 7702altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7703{
7704 rtx pat, scratch1, scratch2;
5039610b 7705 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7706 rtx op0 = expand_normal (arg0);
100c4561
AH
7707 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7708 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7709
7710 /* If we have invalid arguments, bail out before generating bad rtl. */
7711 if (arg0 == error_mark_node)
9a171fcd 7712 return const0_rtx;
100c4561
AH
7713
7714 if (target == 0
7715 || GET_MODE (target) != tmode
7716 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7717 target = gen_reg_rtx (tmode);
7718
7719 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7720 op0 = copy_to_mode_reg (mode0, op0);
7721
7722 scratch1 = gen_reg_rtx (mode0);
7723 scratch2 = gen_reg_rtx (mode0);
7724
7725 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7726 if (! pat)
7727 return 0;
7728 emit_insn (pat);
7729
7730 return target;
7731}
7732
0ac081f6 7733static rtx
5039610b 7734rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7735{
7736 rtx pat;
5039610b
SL
7737 tree arg0 = CALL_EXPR_ARG (exp, 0);
7738 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7739 rtx op0 = expand_normal (arg0);
7740 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7741 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7742 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7743 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7744
0559cc77
DE
7745 if (icode == CODE_FOR_nothing)
7746 /* Builtin not supported on this processor. */
7747 return 0;
7748
20e26713
AH
7749 /* If we got invalid arguments bail out before generating bad rtl. */
7750 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7751 return const0_rtx;
20e26713 7752
0559cc77
DE
7753 if (icode == CODE_FOR_altivec_vcfux
7754 || icode == CODE_FOR_altivec_vcfsx
7755 || icode == CODE_FOR_altivec_vctsxs
7756 || icode == CODE_FOR_altivec_vctuxs
7757 || icode == CODE_FOR_altivec_vspltb
7758 || icode == CODE_FOR_altivec_vsplth
7759 || icode == CODE_FOR_altivec_vspltw
7760 || icode == CODE_FOR_spe_evaddiw
7761 || icode == CODE_FOR_spe_evldd
7762 || icode == CODE_FOR_spe_evldh
7763 || icode == CODE_FOR_spe_evldw
7764 || icode == CODE_FOR_spe_evlhhesplat
7765 || icode == CODE_FOR_spe_evlhhossplat
7766 || icode == CODE_FOR_spe_evlhhousplat
7767 || icode == CODE_FOR_spe_evlwhe
7768 || icode == CODE_FOR_spe_evlwhos
7769 || icode == CODE_FOR_spe_evlwhou
7770 || icode == CODE_FOR_spe_evlwhsplat
7771 || icode == CODE_FOR_spe_evlwwsplat
7772 || icode == CODE_FOR_spe_evrlwi
7773 || icode == CODE_FOR_spe_evslwi
7774 || icode == CODE_FOR_spe_evsrwis
f5119d10 7775 || icode == CODE_FOR_spe_evsubifw
0559cc77 7776 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7777 {
7778 /* Only allow 5-bit unsigned literals. */
8bb418a3 7779 STRIP_NOPS (arg1);
b44140e7
AH
7780 if (TREE_CODE (arg1) != INTEGER_CST
7781 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7782 {
7783 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7784 return const0_rtx;
b44140e7 7785 }
b44140e7
AH
7786 }
7787
c62f2db5 7788 if (target == 0
0ac081f6
AH
7789 || GET_MODE (target) != tmode
7790 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7791 target = gen_reg_rtx (tmode);
7792
7793 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7794 op0 = copy_to_mode_reg (mode0, op0);
7795 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7796 op1 = copy_to_mode_reg (mode1, op1);
7797
7798 pat = GEN_FCN (icode) (target, op0, op1);
7799 if (! pat)
7800 return 0;
7801 emit_insn (pat);
7802
7803 return target;
7804}
6525c0e7 7805
ae4b4a02 7806static rtx
f676971a 7807altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7808 tree exp, rtx target)
ae4b4a02
AH
7809{
7810 rtx pat, scratch;
5039610b
SL
7811 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7812 tree arg0 = CALL_EXPR_ARG (exp, 1);
7813 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7814 rtx op0 = expand_normal (arg0);
7815 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7816 enum machine_mode tmode = SImode;
7817 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7818 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7819 int cr6_form_int;
7820
7821 if (TREE_CODE (cr6_form) != INTEGER_CST)
7822 {
7823 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7824 return const0_rtx;
ae4b4a02
AH
7825 }
7826 else
7827 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7828
37409796 7829 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7830
7831 /* If we have invalid arguments, bail out before generating bad rtl. */
7832 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7833 return const0_rtx;
ae4b4a02
AH
7834
7835 if (target == 0
7836 || GET_MODE (target) != tmode
7837 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7838 target = gen_reg_rtx (tmode);
7839
7840 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7841 op0 = copy_to_mode_reg (mode0, op0);
7842 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7843 op1 = copy_to_mode_reg (mode1, op1);
7844
7845 scratch = gen_reg_rtx (mode0);
7846
7847 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7848 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7849 if (! pat)
7850 return 0;
7851 emit_insn (pat);
7852
7853 /* The vec_any* and vec_all* predicates use the same opcodes for two
7854 different operations, but the bits in CR6 will be different
7855 depending on what information we want. So we have to play tricks
7856 with CR6 to get the right bits out.
7857
7858 If you think this is disgusting, look at the specs for the
7859 AltiVec predicates. */
7860
c4ad648e
AM
7861 switch (cr6_form_int)
7862 {
7863 case 0:
7864 emit_insn (gen_cr6_test_for_zero (target));
7865 break;
7866 case 1:
7867 emit_insn (gen_cr6_test_for_zero_reverse (target));
7868 break;
7869 case 2:
7870 emit_insn (gen_cr6_test_for_lt (target));
7871 break;
7872 case 3:
7873 emit_insn (gen_cr6_test_for_lt_reverse (target));
7874 break;
7875 default:
7876 error ("argument 1 of __builtin_altivec_predicate is out of range");
7877 break;
7878 }
ae4b4a02
AH
7879
7880 return target;
7881}
7882
96038623
DE
7883static rtx
7884paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7885{
7886 rtx pat, addr;
7887 tree arg0 = CALL_EXPR_ARG (exp, 0);
7888 tree arg1 = CALL_EXPR_ARG (exp, 1);
7889 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7890 enum machine_mode mode0 = Pmode;
7891 enum machine_mode mode1 = Pmode;
7892 rtx op0 = expand_normal (arg0);
7893 rtx op1 = expand_normal (arg1);
7894
7895 if (icode == CODE_FOR_nothing)
7896 /* Builtin not supported on this processor. */
7897 return 0;
7898
7899 /* If we got invalid arguments bail out before generating bad rtl. */
7900 if (arg0 == error_mark_node || arg1 == error_mark_node)
7901 return const0_rtx;
7902
7903 if (target == 0
7904 || GET_MODE (target) != tmode
7905 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7906 target = gen_reg_rtx (tmode);
7907
7908 op1 = copy_to_mode_reg (mode1, op1);
7909
7910 if (op0 == const0_rtx)
7911 {
7912 addr = gen_rtx_MEM (tmode, op1);
7913 }
7914 else
7915 {
7916 op0 = copy_to_mode_reg (mode0, op0);
7917 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7918 }
7919
7920 pat = GEN_FCN (icode) (target, addr);
7921
7922 if (! pat)
7923 return 0;
7924 emit_insn (pat);
7925
7926 return target;
7927}
7928
b4a62fa0 7929static rtx
5039610b 7930altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7931{
7932 rtx pat, addr;
5039610b
SL
7933 tree arg0 = CALL_EXPR_ARG (exp, 0);
7934 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7935 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7936 enum machine_mode mode0 = Pmode;
7937 enum machine_mode mode1 = Pmode;
84217346
MD
7938 rtx op0 = expand_normal (arg0);
7939 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7940
7941 if (icode == CODE_FOR_nothing)
7942 /* Builtin not supported on this processor. */
7943 return 0;
7944
7945 /* If we got invalid arguments bail out before generating bad rtl. */
7946 if (arg0 == error_mark_node || arg1 == error_mark_node)
7947 return const0_rtx;
7948
7949 if (target == 0
7950 || GET_MODE (target) != tmode
7951 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7952 target = gen_reg_rtx (tmode);
7953
f676971a 7954 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7955
7956 if (op0 == const0_rtx)
7957 {
7958 addr = gen_rtx_MEM (tmode, op1);
7959 }
7960 else
7961 {
7962 op0 = copy_to_mode_reg (mode0, op0);
7963 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7964 }
7965
7966 pat = GEN_FCN (icode) (target, addr);
7967
7968 if (! pat)
7969 return 0;
7970 emit_insn (pat);
7971
7972 return target;
7973}
7974
61bea3b0 7975static rtx
5039610b 7976spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7977{
5039610b
SL
7978 tree arg0 = CALL_EXPR_ARG (exp, 0);
7979 tree arg1 = CALL_EXPR_ARG (exp, 1);
7980 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7981 rtx op0 = expand_normal (arg0);
7982 rtx op1 = expand_normal (arg1);
7983 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7984 rtx pat;
7985 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7986 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7987 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7988
7989 /* Invalid arguments. Bail before doing anything stoopid! */
7990 if (arg0 == error_mark_node
7991 || arg1 == error_mark_node
7992 || arg2 == error_mark_node)
7993 return const0_rtx;
7994
7995 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7996 op0 = copy_to_mode_reg (mode2, op0);
7997 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7998 op1 = copy_to_mode_reg (mode0, op1);
7999 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8000 op2 = copy_to_mode_reg (mode1, op2);
8001
8002 pat = GEN_FCN (icode) (op1, op2, op0);
8003 if (pat)
8004 emit_insn (pat);
8005 return NULL_RTX;
8006}
8007
96038623
DE
8008static rtx
8009paired_expand_stv_builtin (enum insn_code icode, tree exp)
8010{
8011 tree arg0 = CALL_EXPR_ARG (exp, 0);
8012 tree arg1 = CALL_EXPR_ARG (exp, 1);
8013 tree arg2 = CALL_EXPR_ARG (exp, 2);
8014 rtx op0 = expand_normal (arg0);
8015 rtx op1 = expand_normal (arg1);
8016 rtx op2 = expand_normal (arg2);
8017 rtx pat, addr;
8018 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8019 enum machine_mode mode1 = Pmode;
8020 enum machine_mode mode2 = Pmode;
8021
8022 /* Invalid arguments. Bail before doing anything stoopid! */
8023 if (arg0 == error_mark_node
8024 || arg1 == error_mark_node
8025 || arg2 == error_mark_node)
8026 return const0_rtx;
8027
8028 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8029 op0 = copy_to_mode_reg (tmode, op0);
8030
8031 op2 = copy_to_mode_reg (mode2, op2);
8032
8033 if (op1 == const0_rtx)
8034 {
8035 addr = gen_rtx_MEM (tmode, op2);
8036 }
8037 else
8038 {
8039 op1 = copy_to_mode_reg (mode1, op1);
8040 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8041 }
8042
8043 pat = GEN_FCN (icode) (addr, op0);
8044 if (pat)
8045 emit_insn (pat);
8046 return NULL_RTX;
8047}
8048
6525c0e7 8049static rtx
5039610b 8050altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8051{
5039610b
SL
8052 tree arg0 = CALL_EXPR_ARG (exp, 0);
8053 tree arg1 = CALL_EXPR_ARG (exp, 1);
8054 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8055 rtx op0 = expand_normal (arg0);
8056 rtx op1 = expand_normal (arg1);
8057 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8058 rtx pat, addr;
8059 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8060 enum machine_mode mode1 = Pmode;
8061 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8062
8063 /* Invalid arguments. Bail before doing anything stoopid! */
8064 if (arg0 == error_mark_node
8065 || arg1 == error_mark_node
8066 || arg2 == error_mark_node)
9a171fcd 8067 return const0_rtx;
6525c0e7 8068
b4a62fa0
SB
8069 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8070 op0 = copy_to_mode_reg (tmode, op0);
8071
f676971a 8072 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8073
8074 if (op1 == const0_rtx)
8075 {
8076 addr = gen_rtx_MEM (tmode, op2);
8077 }
8078 else
8079 {
8080 op1 = copy_to_mode_reg (mode1, op1);
8081 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8082 }
6525c0e7 8083
b4a62fa0 8084 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8085 if (pat)
8086 emit_insn (pat);
8087 return NULL_RTX;
8088}
8089
2212663f 8090static rtx
5039610b 8091rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8092{
8093 rtx pat;
5039610b
SL
8094 tree arg0 = CALL_EXPR_ARG (exp, 0);
8095 tree arg1 = CALL_EXPR_ARG (exp, 1);
8096 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8097 rtx op0 = expand_normal (arg0);
8098 rtx op1 = expand_normal (arg1);
8099 rtx op2 = expand_normal (arg2);
2212663f
DB
8100 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8101 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8102 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8103 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8104
774b5662
DE
8105 if (icode == CODE_FOR_nothing)
8106 /* Builtin not supported on this processor. */
8107 return 0;
8108
20e26713
AH
8109 /* If we got invalid arguments bail out before generating bad rtl. */
8110 if (arg0 == error_mark_node
8111 || arg1 == error_mark_node
8112 || arg2 == error_mark_node)
9a171fcd 8113 return const0_rtx;
20e26713 8114
aba5fb01
NS
8115 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8116 || icode == CODE_FOR_altivec_vsldoi_v4si
8117 || icode == CODE_FOR_altivec_vsldoi_v8hi
8118 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8119 {
8120 /* Only allow 4-bit unsigned literals. */
8bb418a3 8121 STRIP_NOPS (arg2);
b44140e7
AH
8122 if (TREE_CODE (arg2) != INTEGER_CST
8123 || TREE_INT_CST_LOW (arg2) & ~0xf)
8124 {
8125 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8126 return const0_rtx;
b44140e7 8127 }
b44140e7
AH
8128 }
8129
c62f2db5 8130 if (target == 0
2212663f
DB
8131 || GET_MODE (target) != tmode
8132 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8133 target = gen_reg_rtx (tmode);
8134
8135 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8136 op0 = copy_to_mode_reg (mode0, op0);
8137 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8138 op1 = copy_to_mode_reg (mode1, op1);
8139 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8140 op2 = copy_to_mode_reg (mode2, op2);
8141
49e39588
RE
8142 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8143 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8144 else
8145 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8146 if (! pat)
8147 return 0;
8148 emit_insn (pat);
8149
8150 return target;
8151}
92898235 8152
3a9b8c7e 8153/* Expand the lvx builtins. */
0ac081f6 8154static rtx
a2369ed3 8155altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8156{
5039610b 8157 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8158 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8159 tree arg0;
8160 enum machine_mode tmode, mode0;
7c3abc73 8161 rtx pat, op0;
3a9b8c7e 8162 enum insn_code icode;
92898235 8163
0ac081f6
AH
8164 switch (fcode)
8165 {
f18c054f 8166 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8167 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8168 break;
f18c054f 8169 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8170 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8171 break;
8172 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8173 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8174 break;
8175 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8176 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8177 break;
8178 default:
8179 *expandedp = false;
8180 return NULL_RTX;
8181 }
0ac081f6 8182
3a9b8c7e 8183 *expandedp = true;
f18c054f 8184
5039610b 8185 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8186 op0 = expand_normal (arg0);
3a9b8c7e
AH
8187 tmode = insn_data[icode].operand[0].mode;
8188 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8189
3a9b8c7e
AH
8190 if (target == 0
8191 || GET_MODE (target) != tmode
8192 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8193 target = gen_reg_rtx (tmode);
24408032 8194
3a9b8c7e
AH
8195 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8196 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8197
3a9b8c7e
AH
8198 pat = GEN_FCN (icode) (target, op0);
8199 if (! pat)
8200 return 0;
8201 emit_insn (pat);
8202 return target;
8203}
f18c054f 8204
3a9b8c7e
AH
8205/* Expand the stvx builtins. */
8206static rtx
f676971a 8207altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8208 bool *expandedp)
3a9b8c7e 8209{
5039610b 8210 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8211 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8212 tree arg0, arg1;
8213 enum machine_mode mode0, mode1;
7c3abc73 8214 rtx pat, op0, op1;
3a9b8c7e 8215 enum insn_code icode;
f18c054f 8216
3a9b8c7e
AH
8217 switch (fcode)
8218 {
8219 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8220 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8221 break;
8222 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8223 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8224 break;
8225 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8226 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8227 break;
8228 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8229 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8230 break;
8231 default:
8232 *expandedp = false;
8233 return NULL_RTX;
8234 }
24408032 8235
5039610b
SL
8236 arg0 = CALL_EXPR_ARG (exp, 0);
8237 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8238 op0 = expand_normal (arg0);
8239 op1 = expand_normal (arg1);
3a9b8c7e
AH
8240 mode0 = insn_data[icode].operand[0].mode;
8241 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8242
3a9b8c7e
AH
8243 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8244 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8245 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8246 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8247
3a9b8c7e
AH
8248 pat = GEN_FCN (icode) (op0, op1);
8249 if (pat)
8250 emit_insn (pat);
f18c054f 8251
3a9b8c7e
AH
8252 *expandedp = true;
8253 return NULL_RTX;
8254}
f18c054f 8255
3a9b8c7e
AH
8256/* Expand the dst builtins. */
8257static rtx
f676971a 8258altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8259 bool *expandedp)
3a9b8c7e 8260{
5039610b 8261 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8262 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8263 tree arg0, arg1, arg2;
8264 enum machine_mode mode0, mode1, mode2;
7c3abc73 8265 rtx pat, op0, op1, op2;
586de218 8266 const struct builtin_description *d;
a3170dc6 8267 size_t i;
f18c054f 8268
3a9b8c7e 8269 *expandedp = false;
f18c054f 8270
3a9b8c7e 8271 /* Handle DST variants. */
586de218 8272 d = bdesc_dst;
3a9b8c7e
AH
8273 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8274 if (d->code == fcode)
8275 {
5039610b
SL
8276 arg0 = CALL_EXPR_ARG (exp, 0);
8277 arg1 = CALL_EXPR_ARG (exp, 1);
8278 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8279 op0 = expand_normal (arg0);
8280 op1 = expand_normal (arg1);
8281 op2 = expand_normal (arg2);
3a9b8c7e
AH
8282 mode0 = insn_data[d->icode].operand[0].mode;
8283 mode1 = insn_data[d->icode].operand[1].mode;
8284 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8285
3a9b8c7e
AH
8286 /* Invalid arguments, bail out before generating bad rtl. */
8287 if (arg0 == error_mark_node
8288 || arg1 == error_mark_node
8289 || arg2 == error_mark_node)
8290 return const0_rtx;
f18c054f 8291
86e7df90 8292 *expandedp = true;
8bb418a3 8293 STRIP_NOPS (arg2);
3a9b8c7e
AH
8294 if (TREE_CODE (arg2) != INTEGER_CST
8295 || TREE_INT_CST_LOW (arg2) & ~0x3)
8296 {
9e637a26 8297 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8298 return const0_rtx;
8299 }
f18c054f 8300
3a9b8c7e 8301 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8302 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8303 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8304 op1 = copy_to_mode_reg (mode1, op1);
24408032 8305
3a9b8c7e
AH
8306 pat = GEN_FCN (d->icode) (op0, op1, op2);
8307 if (pat != 0)
8308 emit_insn (pat);
f18c054f 8309
3a9b8c7e
AH
8310 return NULL_RTX;
8311 }
f18c054f 8312
3a9b8c7e
AH
8313 return NULL_RTX;
8314}
24408032 8315
7a4eca66
DE
8316/* Expand vec_init builtin. */
8317static rtx
5039610b 8318altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8319{
8320 enum machine_mode tmode = TYPE_MODE (type);
8321 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8322 int i, n_elt = GET_MODE_NUNITS (tmode);
8323 rtvec v = rtvec_alloc (n_elt);
8324
8325 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8326 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8327
5039610b 8328 for (i = 0; i < n_elt; ++i)
7a4eca66 8329 {
5039610b 8330 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8331 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8332 }
8333
7a4eca66
DE
8334 if (!target || !register_operand (target, tmode))
8335 target = gen_reg_rtx (tmode);
8336
8337 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8338 return target;
8339}
8340
8341/* Return the integer constant in ARG. Constrain it to be in the range
8342 of the subparts of VEC_TYPE; issue an error if not. */
8343
8344static int
8345get_element_number (tree vec_type, tree arg)
8346{
8347 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8348
8349 if (!host_integerp (arg, 1)
8350 || (elt = tree_low_cst (arg, 1), elt > max))
8351 {
8352 error ("selector must be an integer constant in the range 0..%wi", max);
8353 return 0;
8354 }
8355
8356 return elt;
8357}
8358
8359/* Expand vec_set builtin. */
8360static rtx
5039610b 8361altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8362{
8363 enum machine_mode tmode, mode1;
8364 tree arg0, arg1, arg2;
8365 int elt;
8366 rtx op0, op1;
8367
5039610b
SL
8368 arg0 = CALL_EXPR_ARG (exp, 0);
8369 arg1 = CALL_EXPR_ARG (exp, 1);
8370 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8371
8372 tmode = TYPE_MODE (TREE_TYPE (arg0));
8373 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8374 gcc_assert (VECTOR_MODE_P (tmode));
8375
8376 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8377 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8378 elt = get_element_number (TREE_TYPE (arg0), arg2);
8379
8380 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8381 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8382
8383 op0 = force_reg (tmode, op0);
8384 op1 = force_reg (mode1, op1);
8385
8386 rs6000_expand_vector_set (op0, op1, elt);
8387
8388 return op0;
8389}
8390
8391/* Expand vec_ext builtin. */
8392static rtx
5039610b 8393altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8394{
8395 enum machine_mode tmode, mode0;
8396 tree arg0, arg1;
8397 int elt;
8398 rtx op0;
8399
5039610b
SL
8400 arg0 = CALL_EXPR_ARG (exp, 0);
8401 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8402
84217346 8403 op0 = expand_normal (arg0);
7a4eca66
DE
8404 elt = get_element_number (TREE_TYPE (arg0), arg1);
8405
8406 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8407 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8408 gcc_assert (VECTOR_MODE_P (mode0));
8409
8410 op0 = force_reg (mode0, op0);
8411
8412 if (optimize || !target || !register_operand (target, tmode))
8413 target = gen_reg_rtx (tmode);
8414
8415 rs6000_expand_vector_extract (target, op0, elt);
8416
8417 return target;
8418}
8419
3a9b8c7e
AH
8420/* Expand the builtin in EXP and store the result in TARGET. Store
8421 true in *EXPANDEDP if we found a builtin to expand. */
8422static rtx
a2369ed3 8423altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8424{
586de218
KG
8425 const struct builtin_description *d;
8426 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8427 size_t i;
8428 enum insn_code icode;
5039610b 8429 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8430 tree arg0;
8431 rtx op0, pat;
8432 enum machine_mode tmode, mode0;
3a9b8c7e 8433 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8434
58646b77
PB
8435 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8436 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8437 {
8438 *expandedp = true;
ea40ba9c 8439 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8440 return const0_rtx;
8441 }
8442
3a9b8c7e
AH
8443 target = altivec_expand_ld_builtin (exp, target, expandedp);
8444 if (*expandedp)
8445 return target;
0ac081f6 8446
3a9b8c7e
AH
8447 target = altivec_expand_st_builtin (exp, target, expandedp);
8448 if (*expandedp)
8449 return target;
8450
8451 target = altivec_expand_dst_builtin (exp, target, expandedp);
8452 if (*expandedp)
8453 return target;
8454
8455 *expandedp = true;
95385cbb 8456
3a9b8c7e
AH
8457 switch (fcode)
8458 {
6525c0e7 8459 case ALTIVEC_BUILTIN_STVX:
5039610b 8460 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8461 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8462 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8463 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8464 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8465 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8466 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8467 case ALTIVEC_BUILTIN_STVXL:
5039610b 8468 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8469
95385cbb
AH
8470 case ALTIVEC_BUILTIN_MFVSCR:
8471 icode = CODE_FOR_altivec_mfvscr;
8472 tmode = insn_data[icode].operand[0].mode;
8473
8474 if (target == 0
8475 || GET_MODE (target) != tmode
8476 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8477 target = gen_reg_rtx (tmode);
f676971a 8478
95385cbb 8479 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8480 if (! pat)
8481 return 0;
8482 emit_insn (pat);
95385cbb
AH
8483 return target;
8484
8485 case ALTIVEC_BUILTIN_MTVSCR:
8486 icode = CODE_FOR_altivec_mtvscr;
5039610b 8487 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8488 op0 = expand_normal (arg0);
95385cbb
AH
8489 mode0 = insn_data[icode].operand[0].mode;
8490
8491 /* If we got invalid arguments bail out before generating bad rtl. */
8492 if (arg0 == error_mark_node)
9a171fcd 8493 return const0_rtx;
95385cbb
AH
8494
8495 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8496 op0 = copy_to_mode_reg (mode0, op0);
8497
8498 pat = GEN_FCN (icode) (op0);
8499 if (pat)
8500 emit_insn (pat);
8501 return NULL_RTX;
3a9b8c7e 8502
95385cbb
AH
8503 case ALTIVEC_BUILTIN_DSSALL:
8504 emit_insn (gen_altivec_dssall ());
8505 return NULL_RTX;
8506
8507 case ALTIVEC_BUILTIN_DSS:
8508 icode = CODE_FOR_altivec_dss;
5039610b 8509 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8510 STRIP_NOPS (arg0);
84217346 8511 op0 = expand_normal (arg0);
95385cbb
AH
8512 mode0 = insn_data[icode].operand[0].mode;
8513
8514 /* If we got invalid arguments bail out before generating bad rtl. */
8515 if (arg0 == error_mark_node)
9a171fcd 8516 return const0_rtx;
95385cbb 8517
b44140e7
AH
8518 if (TREE_CODE (arg0) != INTEGER_CST
8519 || TREE_INT_CST_LOW (arg0) & ~0x3)
8520 {
8521 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8522 return const0_rtx;
b44140e7
AH
8523 }
8524
95385cbb
AH
8525 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8526 op0 = copy_to_mode_reg (mode0, op0);
8527
8528 emit_insn (gen_altivec_dss (op0));
0ac081f6 8529 return NULL_RTX;
7a4eca66
DE
8530
8531 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8532 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8533 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8534 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8535 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8536
8537 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8538 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8539 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8540 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8541 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8542
8543 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8544 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8545 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8546 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8547 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8548
8549 default:
8550 break;
8551 /* Fall through. */
0ac081f6 8552 }
24408032 8553
100c4561 8554 /* Expand abs* operations. */
586de218 8555 d = bdesc_abs;
ca7558fc 8556 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8557 if (d->code == fcode)
5039610b 8558 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8559
ae4b4a02 8560 /* Expand the AltiVec predicates. */
586de218 8561 dp = bdesc_altivec_preds;
ca7558fc 8562 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8563 if (dp->code == fcode)
c4ad648e 8564 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8565 exp, target);
ae4b4a02 8566
6525c0e7
AH
8567 /* LV* are funky. We initialized them differently. */
8568 switch (fcode)
8569 {
8570 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8571 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8572 exp, target);
6525c0e7 8573 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8574 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8575 exp, target);
6525c0e7 8576 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8577 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8578 exp, target);
6525c0e7 8579 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8580 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8581 exp, target);
6525c0e7 8582 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8583 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8584 exp, target);
6525c0e7 8585 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8586 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8587 exp, target);
6525c0e7 8588 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8589 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8590 exp, target);
6525c0e7
AH
8591 default:
8592 break;
8593 /* Fall through. */
8594 }
95385cbb 8595
92898235 8596 *expandedp = false;
0ac081f6
AH
8597 return NULL_RTX;
8598}
8599
96038623
DE
8600/* Expand the builtin in EXP and store the result in TARGET. Store
8601 true in *EXPANDEDP if we found a builtin to expand. */
8602static rtx
8603paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8604{
8605 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8606 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8607 const struct builtin_description *d;
96038623
DE
8608 size_t i;
8609
8610 *expandedp = true;
8611
8612 switch (fcode)
8613 {
8614 case PAIRED_BUILTIN_STX:
8615 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8616 case PAIRED_BUILTIN_LX:
8617 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8618 default:
8619 break;
8620 /* Fall through. */
8621 }
8622
8623 /* Expand the paired predicates. */
23a651fc 8624 d = bdesc_paired_preds;
96038623
DE
8625 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8626 if (d->code == fcode)
8627 return paired_expand_predicate_builtin (d->icode, exp, target);
8628
8629 *expandedp = false;
8630 return NULL_RTX;
8631}
8632
a3170dc6
AH
8633/* Binops that need to be initialized manually, but can be expanded
8634 automagically by rs6000_expand_binop_builtin. */
8635static struct builtin_description bdesc_2arg_spe[] =
8636{
8637 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8638 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8639 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8640 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8641 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8642 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8643 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8644 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8645 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8646 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8647 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8648 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8649 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8650 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8651 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8652 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8653 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8654 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8655 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8656 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8657 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8658 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8659};
8660
8661/* Expand the builtin in EXP and store the result in TARGET. Store
8662 true in *EXPANDEDP if we found a builtin to expand.
8663
8664 This expands the SPE builtins that are not simple unary and binary
8665 operations. */
8666static rtx
a2369ed3 8667spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8668{
5039610b 8669 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8670 tree arg1, arg0;
8671 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8672 enum insn_code icode;
8673 enum machine_mode tmode, mode0;
8674 rtx pat, op0;
8675 struct builtin_description *d;
8676 size_t i;
8677
8678 *expandedp = true;
8679
8680 /* Syntax check for a 5-bit unsigned immediate. */
8681 switch (fcode)
8682 {
8683 case SPE_BUILTIN_EVSTDD:
8684 case SPE_BUILTIN_EVSTDH:
8685 case SPE_BUILTIN_EVSTDW:
8686 case SPE_BUILTIN_EVSTWHE:
8687 case SPE_BUILTIN_EVSTWHO:
8688 case SPE_BUILTIN_EVSTWWE:
8689 case SPE_BUILTIN_EVSTWWO:
5039610b 8690 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8691 if (TREE_CODE (arg1) != INTEGER_CST
8692 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8693 {
8694 error ("argument 2 must be a 5-bit unsigned literal");
8695 return const0_rtx;
8696 }
8697 break;
8698 default:
8699 break;
8700 }
8701
00332c9f
AH
8702 /* The evsplat*i instructions are not quite generic. */
8703 switch (fcode)
8704 {
8705 case SPE_BUILTIN_EVSPLATFI:
8706 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8707 exp, target);
00332c9f
AH
8708 case SPE_BUILTIN_EVSPLATI:
8709 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8710 exp, target);
00332c9f
AH
8711 default:
8712 break;
8713 }
8714
a3170dc6
AH
8715 d = (struct builtin_description *) bdesc_2arg_spe;
8716 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8717 if (d->code == fcode)
5039610b 8718 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8719
8720 d = (struct builtin_description *) bdesc_spe_predicates;
8721 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8722 if (d->code == fcode)
5039610b 8723 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8724
8725 d = (struct builtin_description *) bdesc_spe_evsel;
8726 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8727 if (d->code == fcode)
5039610b 8728 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8729
8730 switch (fcode)
8731 {
8732 case SPE_BUILTIN_EVSTDDX:
5039610b 8733 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8734 case SPE_BUILTIN_EVSTDHX:
5039610b 8735 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8736 case SPE_BUILTIN_EVSTDWX:
5039610b 8737 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8738 case SPE_BUILTIN_EVSTWHEX:
5039610b 8739 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8740 case SPE_BUILTIN_EVSTWHOX:
5039610b 8741 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8742 case SPE_BUILTIN_EVSTWWEX:
5039610b 8743 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8744 case SPE_BUILTIN_EVSTWWOX:
5039610b 8745 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8746 case SPE_BUILTIN_EVSTDD:
5039610b 8747 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8748 case SPE_BUILTIN_EVSTDH:
5039610b 8749 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8750 case SPE_BUILTIN_EVSTDW:
5039610b 8751 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8752 case SPE_BUILTIN_EVSTWHE:
5039610b 8753 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8754 case SPE_BUILTIN_EVSTWHO:
5039610b 8755 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8756 case SPE_BUILTIN_EVSTWWE:
5039610b 8757 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8758 case SPE_BUILTIN_EVSTWWO:
5039610b 8759 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8760 case SPE_BUILTIN_MFSPEFSCR:
8761 icode = CODE_FOR_spe_mfspefscr;
8762 tmode = insn_data[icode].operand[0].mode;
8763
8764 if (target == 0
8765 || GET_MODE (target) != tmode
8766 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8767 target = gen_reg_rtx (tmode);
f676971a 8768
a3170dc6
AH
8769 pat = GEN_FCN (icode) (target);
8770 if (! pat)
8771 return 0;
8772 emit_insn (pat);
8773 return target;
8774 case SPE_BUILTIN_MTSPEFSCR:
8775 icode = CODE_FOR_spe_mtspefscr;
5039610b 8776 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8777 op0 = expand_normal (arg0);
a3170dc6
AH
8778 mode0 = insn_data[icode].operand[0].mode;
8779
8780 if (arg0 == error_mark_node)
8781 return const0_rtx;
8782
8783 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8784 op0 = copy_to_mode_reg (mode0, op0);
8785
8786 pat = GEN_FCN (icode) (op0);
8787 if (pat)
8788 emit_insn (pat);
8789 return NULL_RTX;
8790 default:
8791 break;
8792 }
8793
8794 *expandedp = false;
8795 return NULL_RTX;
8796}
8797
96038623
DE
8798static rtx
8799paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8800{
8801 rtx pat, scratch, tmp;
8802 tree form = CALL_EXPR_ARG (exp, 0);
8803 tree arg0 = CALL_EXPR_ARG (exp, 1);
8804 tree arg1 = CALL_EXPR_ARG (exp, 2);
8805 rtx op0 = expand_normal (arg0);
8806 rtx op1 = expand_normal (arg1);
8807 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8808 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8809 int form_int;
8810 enum rtx_code code;
8811
8812 if (TREE_CODE (form) != INTEGER_CST)
8813 {
8814 error ("argument 1 of __builtin_paired_predicate must be a constant");
8815 return const0_rtx;
8816 }
8817 else
8818 form_int = TREE_INT_CST_LOW (form);
8819
8820 gcc_assert (mode0 == mode1);
8821
8822 if (arg0 == error_mark_node || arg1 == error_mark_node)
8823 return const0_rtx;
8824
8825 if (target == 0
8826 || GET_MODE (target) != SImode
8827 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8828 target = gen_reg_rtx (SImode);
8829 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8830 op0 = copy_to_mode_reg (mode0, op0);
8831 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8832 op1 = copy_to_mode_reg (mode1, op1);
8833
8834 scratch = gen_reg_rtx (CCFPmode);
8835
8836 pat = GEN_FCN (icode) (scratch, op0, op1);
8837 if (!pat)
8838 return const0_rtx;
8839
8840 emit_insn (pat);
8841
8842 switch (form_int)
8843 {
8844 /* LT bit. */
8845 case 0:
8846 code = LT;
8847 break;
8848 /* GT bit. */
8849 case 1:
8850 code = GT;
8851 break;
8852 /* EQ bit. */
8853 case 2:
8854 code = EQ;
8855 break;
8856 /* UN bit. */
8857 case 3:
8858 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8859 return target;
8860 default:
8861 error ("argument 1 of __builtin_paired_predicate is out of range");
8862 return const0_rtx;
8863 }
8864
8865 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8866 emit_move_insn (target, tmp);
8867 return target;
8868}
8869
a3170dc6 8870static rtx
5039610b 8871spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8872{
8873 rtx pat, scratch, tmp;
5039610b
SL
8874 tree form = CALL_EXPR_ARG (exp, 0);
8875 tree arg0 = CALL_EXPR_ARG (exp, 1);
8876 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8877 rtx op0 = expand_normal (arg0);
8878 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8879 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8880 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8881 int form_int;
8882 enum rtx_code code;
8883
8884 if (TREE_CODE (form) != INTEGER_CST)
8885 {
8886 error ("argument 1 of __builtin_spe_predicate must be a constant");
8887 return const0_rtx;
8888 }
8889 else
8890 form_int = TREE_INT_CST_LOW (form);
8891
37409796 8892 gcc_assert (mode0 == mode1);
a3170dc6
AH
8893
8894 if (arg0 == error_mark_node || arg1 == error_mark_node)
8895 return const0_rtx;
8896
8897 if (target == 0
8898 || GET_MODE (target) != SImode
8899 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8900 target = gen_reg_rtx (SImode);
8901
8902 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8903 op0 = copy_to_mode_reg (mode0, op0);
8904 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8905 op1 = copy_to_mode_reg (mode1, op1);
8906
8907 scratch = gen_reg_rtx (CCmode);
8908
8909 pat = GEN_FCN (icode) (scratch, op0, op1);
8910 if (! pat)
8911 return const0_rtx;
8912 emit_insn (pat);
8913
8914 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8915 _lower_. We use one compare, but look in different bits of the
8916 CR for each variant.
8917
8918 There are 2 elements in each SPE simd type (upper/lower). The CR
8919 bits are set as follows:
8920
8921 BIT0 | BIT 1 | BIT 2 | BIT 3
8922 U | L | (U | L) | (U & L)
8923
8924 So, for an "all" relationship, BIT 3 would be set.
8925 For an "any" relationship, BIT 2 would be set. Etc.
8926
8927 Following traditional nomenclature, these bits map to:
8928
8929 BIT0 | BIT 1 | BIT 2 | BIT 3
8930 LT | GT | EQ | OV
8931
8932 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
8933 */
8934
8935 switch (form_int)
8936 {
8937 /* All variant. OV bit. */
8938 case 0:
8939 /* We need to get to the OV bit, which is the ORDERED bit. We
8940 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8941 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8942 So let's just use another pattern. */
8943 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8944 return target;
8945 /* Any variant. EQ bit. */
8946 case 1:
8947 code = EQ;
8948 break;
8949 /* Upper variant. LT bit. */
8950 case 2:
8951 code = LT;
8952 break;
8953 /* Lower variant. GT bit. */
8954 case 3:
8955 code = GT;
8956 break;
8957 default:
8958 error ("argument 1 of __builtin_spe_predicate is out of range");
8959 return const0_rtx;
8960 }
8961
8962 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8963 emit_move_insn (target, tmp);
8964
8965 return target;
8966}
8967
8968/* The evsel builtins look like this:
8969
8970 e = __builtin_spe_evsel_OP (a, b, c, d);
8971
8972 and work like this:
8973
8974 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8975 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8976*/
8977
8978static rtx
5039610b 8979spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8980{
8981 rtx pat, scratch;
5039610b
SL
8982 tree arg0 = CALL_EXPR_ARG (exp, 0);
8983 tree arg1 = CALL_EXPR_ARG (exp, 1);
8984 tree arg2 = CALL_EXPR_ARG (exp, 2);
8985 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8986 rtx op0 = expand_normal (arg0);
8987 rtx op1 = expand_normal (arg1);
8988 rtx op2 = expand_normal (arg2);
8989 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8990 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8991 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8992
37409796 8993 gcc_assert (mode0 == mode1);
a3170dc6
AH
8994
8995 if (arg0 == error_mark_node || arg1 == error_mark_node
8996 || arg2 == error_mark_node || arg3 == error_mark_node)
8997 return const0_rtx;
8998
8999 if (target == 0
9000 || GET_MODE (target) != mode0
9001 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9002 target = gen_reg_rtx (mode0);
9003
9004 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9005 op0 = copy_to_mode_reg (mode0, op0);
9006 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9007 op1 = copy_to_mode_reg (mode0, op1);
9008 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9009 op2 = copy_to_mode_reg (mode0, op2);
9010 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9011 op3 = copy_to_mode_reg (mode0, op3);
9012
9013 /* Generate the compare. */
9014 scratch = gen_reg_rtx (CCmode);
9015 pat = GEN_FCN (icode) (scratch, op0, op1);
9016 if (! pat)
9017 return const0_rtx;
9018 emit_insn (pat);
9019
9020 if (mode0 == V2SImode)
9021 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9022 else
9023 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9024
9025 return target;
9026}
9027
0ac081f6
AH
9028/* Expand an expression EXP that calls a built-in function,
9029 with result going to TARGET if that's convenient
9030 (and in mode MODE if that's convenient).
9031 SUBTARGET may be used as the target for computing one of EXP's operands.
9032 IGNORE is nonzero if the value is to be ignored. */
9033
9034static rtx
a2369ed3 9035rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9036 enum machine_mode mode ATTRIBUTE_UNUSED,
9037 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9038{
5039610b 9039 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9040 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9041 const struct builtin_description *d;
92898235
AH
9042 size_t i;
9043 rtx ret;
9044 bool success;
f676971a 9045
9c78b944
DE
9046 if (fcode == RS6000_BUILTIN_RECIP)
9047 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9048
9049 if (fcode == RS6000_BUILTIN_RECIPF)
9050 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9051
9052 if (fcode == RS6000_BUILTIN_RSQRTF)
9053 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9054
7ccf35ed
DN
9055 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9056 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9057 {
9058 int icode = (int) CODE_FOR_altivec_lvsr;
9059 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9060 enum machine_mode mode = insn_data[icode].operand[1].mode;
9061 tree arg;
9062 rtx op, addr, pat;
9063
37409796 9064 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9065
5039610b 9066 arg = CALL_EXPR_ARG (exp, 0);
37409796 9067 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9068 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9069 addr = memory_address (mode, op);
9070 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9071 op = addr;
9072 else
9073 {
9074 /* For the load case need to negate the address. */
9075 op = gen_reg_rtx (GET_MODE (addr));
9076 emit_insn (gen_rtx_SET (VOIDmode, op,
9077 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9078 }
7ccf35ed
DN
9079 op = gen_rtx_MEM (mode, op);
9080
9081 if (target == 0
9082 || GET_MODE (target) != tmode
9083 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9084 target = gen_reg_rtx (tmode);
9085
9086 /*pat = gen_altivec_lvsr (target, op);*/
9087 pat = GEN_FCN (icode) (target, op);
9088 if (!pat)
9089 return 0;
9090 emit_insn (pat);
9091
9092 return target;
9093 }
5039610b
SL
9094
9095 /* FIXME: There's got to be a nicer way to handle this case than
9096 constructing a new CALL_EXPR. */
f57d17f1
TM
9097 if (fcode == ALTIVEC_BUILTIN_VCFUX
9098 || fcode == ALTIVEC_BUILTIN_VCFSX)
9099 {
5039610b
SL
9100 if (call_expr_nargs (exp) == 1)
9101 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9102 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9103 }
7ccf35ed 9104
0ac081f6 9105 if (TARGET_ALTIVEC)
92898235
AH
9106 {
9107 ret = altivec_expand_builtin (exp, target, &success);
9108
a3170dc6
AH
9109 if (success)
9110 return ret;
9111 }
9112 if (TARGET_SPE)
9113 {
9114 ret = spe_expand_builtin (exp, target, &success);
9115
92898235
AH
9116 if (success)
9117 return ret;
9118 }
96038623
DE
9119 if (TARGET_PAIRED_FLOAT)
9120 {
9121 ret = paired_expand_builtin (exp, target, &success);
9122
9123 if (success)
9124 return ret;
9125 }
92898235 9126
96038623 9127 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9128
37409796
NS
9129 /* Handle simple unary operations. */
9130 d = (struct builtin_description *) bdesc_1arg;
9131 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9132 if (d->code == fcode)
5039610b 9133 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9134
37409796
NS
9135 /* Handle simple binary operations. */
9136 d = (struct builtin_description *) bdesc_2arg;
9137 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9138 if (d->code == fcode)
5039610b 9139 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9140
37409796 9141 /* Handle simple ternary operations. */
586de218 9142 d = bdesc_3arg;
37409796
NS
9143 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9144 if (d->code == fcode)
5039610b 9145 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9146
37409796 9147 gcc_unreachable ();
0ac081f6
AH
9148}
9149
7c62e993
PB
9150static tree
9151build_opaque_vector_type (tree node, int nunits)
9152{
9153 node = copy_node (node);
9154 TYPE_MAIN_VARIANT (node) = node;
0627bfb4 9155 TYPE_CANONICAL (node) = node;
7c62e993
PB
9156 return build_vector_type (node, nunits);
9157}
9158
0ac081f6 9159static void
863d938c 9160rs6000_init_builtins (void)
0ac081f6 9161{
4a5eab38
PB
9162 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9163 V2SF_type_node = build_vector_type (float_type_node, 2);
9164 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9165 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9166 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9167 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9168 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9169
9170 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9171 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9172 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9173
7c62e993
PB
9174 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9175 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9176 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9177 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9178
8bb418a3
ZL
9179 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9180 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9181 'vector unsigned short'. */
9182
8dd16ecc
NS
9183 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9184 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9185 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9186 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9187
58646b77
PB
9188 long_integer_type_internal_node = long_integer_type_node;
9189 long_unsigned_type_internal_node = long_unsigned_type_node;
9190 intQI_type_internal_node = intQI_type_node;
9191 uintQI_type_internal_node = unsigned_intQI_type_node;
9192 intHI_type_internal_node = intHI_type_node;
9193 uintHI_type_internal_node = unsigned_intHI_type_node;
9194 intSI_type_internal_node = intSI_type_node;
9195 uintSI_type_internal_node = unsigned_intSI_type_node;
9196 float_type_internal_node = float_type_node;
9197 void_type_internal_node = void_type_node;
9198
8bb418a3
ZL
9199 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9200 get_identifier ("__bool char"),
9201 bool_char_type_node));
9202 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9203 get_identifier ("__bool short"),
9204 bool_short_type_node));
9205 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9206 get_identifier ("__bool int"),
9207 bool_int_type_node));
9208 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9209 get_identifier ("__pixel"),
9210 pixel_type_node));
9211
4a5eab38
PB
9212 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9213 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9214 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9215 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9216
9217 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9218 get_identifier ("__vector unsigned char"),
9219 unsigned_V16QI_type_node));
9220 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9221 get_identifier ("__vector signed char"),
9222 V16QI_type_node));
9223 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9224 get_identifier ("__vector __bool char"),
9225 bool_V16QI_type_node));
9226
9227 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9228 get_identifier ("__vector unsigned short"),
9229 unsigned_V8HI_type_node));
9230 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9231 get_identifier ("__vector signed short"),
9232 V8HI_type_node));
9233 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9234 get_identifier ("__vector __bool short"),
9235 bool_V8HI_type_node));
9236
9237 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9238 get_identifier ("__vector unsigned int"),
9239 unsigned_V4SI_type_node));
9240 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9241 get_identifier ("__vector signed int"),
9242 V4SI_type_node));
9243 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9244 get_identifier ("__vector __bool int"),
9245 bool_V4SI_type_node));
9246
9247 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9248 get_identifier ("__vector float"),
9249 V4SF_type_node));
9250 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9251 get_identifier ("__vector __pixel"),
9252 pixel_V8HI_type_node));
9253
96038623
DE
9254 if (TARGET_PAIRED_FLOAT)
9255 paired_init_builtins ();
a3170dc6 9256 if (TARGET_SPE)
3fdaa45a 9257 spe_init_builtins ();
0ac081f6
AH
9258 if (TARGET_ALTIVEC)
9259 altivec_init_builtins ();
96038623 9260 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9261 rs6000_common_init_builtins ();
9c78b944
DE
9262 if (TARGET_PPC_GFXOPT)
9263 {
9264 tree ftype = build_function_type_list (float_type_node,
9265 float_type_node,
9266 float_type_node,
9267 NULL_TREE);
9268 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9269 RS6000_BUILTIN_RECIPF);
9270
9271 ftype = build_function_type_list (float_type_node,
9272 float_type_node,
9273 NULL_TREE);
9274 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9275 RS6000_BUILTIN_RSQRTF);
9276 }
9277 if (TARGET_POPCNTB)
9278 {
9279 tree ftype = build_function_type_list (double_type_node,
9280 double_type_node,
9281 double_type_node,
9282 NULL_TREE);
9283 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9284 RS6000_BUILTIN_RECIP);
9285
9286 }
69ca3549
DE
9287
9288#if TARGET_XCOFF
9289 /* AIX libm provides clog as __clog. */
9290 if (built_in_decls [BUILT_IN_CLOG])
9291 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9292#endif
fb220235
FXC
9293
9294#ifdef SUBTARGET_INIT_BUILTINS
9295 SUBTARGET_INIT_BUILTINS;
9296#endif
0ac081f6
AH
9297}
9298
a3170dc6
AH
9299/* Search through a set of builtins and enable the mask bits.
9300 DESC is an array of builtins.
b6d08ca1 9301 SIZE is the total number of builtins.
a3170dc6
AH
9302 START is the builtin enum at which to start.
9303 END is the builtin enum at which to end. */
0ac081f6 9304static void
a2369ed3 9305enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9306 enum rs6000_builtins start,
a2369ed3 9307 enum rs6000_builtins end)
a3170dc6
AH
9308{
9309 int i;
9310
9311 for (i = 0; i < size; ++i)
9312 if (desc[i].code == start)
9313 break;
9314
9315 if (i == size)
9316 return;
9317
9318 for (; i < size; ++i)
9319 {
9320 /* Flip all the bits on. */
9321 desc[i].mask = target_flags;
9322 if (desc[i].code == end)
9323 break;
9324 }
9325}
9326
9327static void
863d938c 9328spe_init_builtins (void)
0ac081f6 9329{
a3170dc6
AH
9330 tree endlink = void_list_node;
9331 tree puint_type_node = build_pointer_type (unsigned_type_node);
9332 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9333 struct builtin_description *d;
0ac081f6
AH
9334 size_t i;
9335
a3170dc6
AH
9336 tree v2si_ftype_4_v2si
9337 = build_function_type
3fdaa45a
AH
9338 (opaque_V2SI_type_node,
9339 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9340 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9341 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9342 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9343 endlink)))));
9344
9345 tree v2sf_ftype_4_v2sf
9346 = build_function_type
3fdaa45a
AH
9347 (opaque_V2SF_type_node,
9348 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9349 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9350 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9351 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9352 endlink)))));
9353
9354 tree int_ftype_int_v2si_v2si
9355 = build_function_type
9356 (integer_type_node,
9357 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9358 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9359 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9360 endlink))));
9361
9362 tree int_ftype_int_v2sf_v2sf
9363 = build_function_type
9364 (integer_type_node,
9365 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9366 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9367 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9368 endlink))));
9369
9370 tree void_ftype_v2si_puint_int
9371 = build_function_type (void_type_node,
3fdaa45a 9372 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9373 tree_cons (NULL_TREE, puint_type_node,
9374 tree_cons (NULL_TREE,
9375 integer_type_node,
9376 endlink))));
9377
9378 tree void_ftype_v2si_puint_char
9379 = build_function_type (void_type_node,
3fdaa45a 9380 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9381 tree_cons (NULL_TREE, puint_type_node,
9382 tree_cons (NULL_TREE,
9383 char_type_node,
9384 endlink))));
9385
9386 tree void_ftype_v2si_pv2si_int
9387 = build_function_type (void_type_node,
3fdaa45a 9388 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9389 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9390 tree_cons (NULL_TREE,
9391 integer_type_node,
9392 endlink))));
9393
9394 tree void_ftype_v2si_pv2si_char
9395 = build_function_type (void_type_node,
3fdaa45a 9396 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9397 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9398 tree_cons (NULL_TREE,
9399 char_type_node,
9400 endlink))));
9401
9402 tree void_ftype_int
9403 = build_function_type (void_type_node,
9404 tree_cons (NULL_TREE, integer_type_node, endlink));
9405
9406 tree int_ftype_void
36e8d515 9407 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9408
9409 tree v2si_ftype_pv2si_int
3fdaa45a 9410 = build_function_type (opaque_V2SI_type_node,
6035d635 9411 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9412 tree_cons (NULL_TREE, integer_type_node,
9413 endlink)));
9414
9415 tree v2si_ftype_puint_int
3fdaa45a 9416 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9417 tree_cons (NULL_TREE, puint_type_node,
9418 tree_cons (NULL_TREE, integer_type_node,
9419 endlink)));
9420
9421 tree v2si_ftype_pushort_int
3fdaa45a 9422 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9423 tree_cons (NULL_TREE, pushort_type_node,
9424 tree_cons (NULL_TREE, integer_type_node,
9425 endlink)));
9426
00332c9f
AH
9427 tree v2si_ftype_signed_char
9428 = build_function_type (opaque_V2SI_type_node,
9429 tree_cons (NULL_TREE, signed_char_type_node,
9430 endlink));
9431
a3170dc6
AH
9432 /* The initialization of the simple binary and unary builtins is
9433 done in rs6000_common_init_builtins, but we have to enable the
9434 mask bits here manually because we have run out of `target_flags'
9435 bits. We really need to redesign this mask business. */
9436
9437 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9438 ARRAY_SIZE (bdesc_2arg),
9439 SPE_BUILTIN_EVADDW,
9440 SPE_BUILTIN_EVXOR);
9441 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9442 ARRAY_SIZE (bdesc_1arg),
9443 SPE_BUILTIN_EVABS,
9444 SPE_BUILTIN_EVSUBFUSIAAW);
9445 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9446 ARRAY_SIZE (bdesc_spe_predicates),
9447 SPE_BUILTIN_EVCMPEQ,
9448 SPE_BUILTIN_EVFSTSTLT);
9449 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9450 ARRAY_SIZE (bdesc_spe_evsel),
9451 SPE_BUILTIN_EVSEL_CMPGTS,
9452 SPE_BUILTIN_EVSEL_FSTSTEQ);
9453
36252949
AH
9454 (*lang_hooks.decls.pushdecl)
9455 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9456 opaque_V2SI_type_node));
9457
a3170dc6 9458 /* Initialize irregular SPE builtins. */
f676971a 9459
a3170dc6
AH
9460 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9461 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9462 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9463 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9464 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9465 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9466 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9467 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9468 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9469 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9470 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9471 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9472 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9473 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9474 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9475 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9476 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9477 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9478
9479 /* Loads. */
9480 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9481 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9482 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9483 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9484 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9485 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9486 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9487 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9488 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9489 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9490 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9491 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9492 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9493 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9494 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9495 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9496 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9497 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9498 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9499 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9500 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9501 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9502
9503 /* Predicates. */
9504 d = (struct builtin_description *) bdesc_spe_predicates;
9505 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9506 {
9507 tree type;
9508
9509 switch (insn_data[d->icode].operand[1].mode)
9510 {
9511 case V2SImode:
9512 type = int_ftype_int_v2si_v2si;
9513 break;
9514 case V2SFmode:
9515 type = int_ftype_int_v2sf_v2sf;
9516 break;
9517 default:
37409796 9518 gcc_unreachable ();
a3170dc6
AH
9519 }
9520
9521 def_builtin (d->mask, d->name, type, d->code);
9522 }
9523
9524 /* Evsel predicates. */
9525 d = (struct builtin_description *) bdesc_spe_evsel;
9526 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9527 {
9528 tree type;
9529
9530 switch (insn_data[d->icode].operand[1].mode)
9531 {
9532 case V2SImode:
9533 type = v2si_ftype_4_v2si;
9534 break;
9535 case V2SFmode:
9536 type = v2sf_ftype_4_v2sf;
9537 break;
9538 default:
37409796 9539 gcc_unreachable ();
a3170dc6
AH
9540 }
9541
9542 def_builtin (d->mask, d->name, type, d->code);
9543 }
9544}
9545
96038623
DE
9546static void
9547paired_init_builtins (void)
9548{
23a651fc 9549 const struct builtin_description *d;
96038623
DE
9550 size_t i;
9551 tree endlink = void_list_node;
9552
9553 tree int_ftype_int_v2sf_v2sf
9554 = build_function_type
9555 (integer_type_node,
9556 tree_cons (NULL_TREE, integer_type_node,
9557 tree_cons (NULL_TREE, V2SF_type_node,
9558 tree_cons (NULL_TREE, V2SF_type_node,
9559 endlink))));
9560 tree pcfloat_type_node =
9561 build_pointer_type (build_qualified_type
9562 (float_type_node, TYPE_QUAL_CONST));
9563
9564 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9565 long_integer_type_node,
9566 pcfloat_type_node,
9567 NULL_TREE);
9568 tree void_ftype_v2sf_long_pcfloat =
9569 build_function_type_list (void_type_node,
9570 V2SF_type_node,
9571 long_integer_type_node,
9572 pcfloat_type_node,
9573 NULL_TREE);
9574
9575
9576 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9577 PAIRED_BUILTIN_LX);
9578
9579
9580 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9581 PAIRED_BUILTIN_STX);
9582
9583 /* Predicates. */
23a651fc 9584 d = bdesc_paired_preds;
96038623
DE
9585 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9586 {
9587 tree type;
9588
9589 switch (insn_data[d->icode].operand[1].mode)
9590 {
9591 case V2SFmode:
9592 type = int_ftype_int_v2sf_v2sf;
9593 break;
9594 default:
9595 gcc_unreachable ();
9596 }
9597
9598 def_builtin (d->mask, d->name, type, d->code);
9599 }
9600}
9601
a3170dc6 9602static void
863d938c 9603altivec_init_builtins (void)
a3170dc6 9604{
586de218
KG
9605 const struct builtin_description *d;
9606 const struct builtin_description_predicates *dp;
a3170dc6 9607 size_t i;
7a4eca66
DE
9608 tree ftype;
9609
a3170dc6
AH
9610 tree pfloat_type_node = build_pointer_type (float_type_node);
9611 tree pint_type_node = build_pointer_type (integer_type_node);
9612 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9613 tree pchar_type_node = build_pointer_type (char_type_node);
9614
9615 tree pvoid_type_node = build_pointer_type (void_type_node);
9616
0dbc3651
ZW
9617 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9618 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9619 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9620 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9621
9622 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9623
58646b77
PB
9624 tree int_ftype_opaque
9625 = build_function_type_list (integer_type_node,
9626 opaque_V4SI_type_node, NULL_TREE);
9627
9628 tree opaque_ftype_opaque_int
9629 = build_function_type_list (opaque_V4SI_type_node,
9630 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9631 tree opaque_ftype_opaque_opaque_int
9632 = build_function_type_list (opaque_V4SI_type_node,
9633 opaque_V4SI_type_node, opaque_V4SI_type_node,
9634 integer_type_node, NULL_TREE);
9635 tree int_ftype_int_opaque_opaque
9636 = build_function_type_list (integer_type_node,
9637 integer_type_node, opaque_V4SI_type_node,
9638 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9639 tree int_ftype_int_v4si_v4si
9640 = build_function_type_list (integer_type_node,
9641 integer_type_node, V4SI_type_node,
9642 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9643 tree v4sf_ftype_pcfloat
9644 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9645 tree void_ftype_pfloat_v4sf
b4de2f7d 9646 = build_function_type_list (void_type_node,
a3170dc6 9647 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9648 tree v4si_ftype_pcint
9649 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9650 tree void_ftype_pint_v4si
b4de2f7d
AH
9651 = build_function_type_list (void_type_node,
9652 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9653 tree v8hi_ftype_pcshort
9654 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9655 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9656 = build_function_type_list (void_type_node,
9657 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9658 tree v16qi_ftype_pcchar
9659 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9660 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9661 = build_function_type_list (void_type_node,
9662 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9663 tree void_ftype_v4si
b4de2f7d 9664 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9665 tree v8hi_ftype_void
9666 = build_function_type (V8HI_type_node, void_list_node);
9667 tree void_ftype_void
9668 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9669 tree void_ftype_int
9670 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9671
58646b77
PB
9672 tree opaque_ftype_long_pcvoid
9673 = build_function_type_list (opaque_V4SI_type_node,
9674 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9675 tree v16qi_ftype_long_pcvoid
a3170dc6 9676 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9677 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9678 tree v8hi_ftype_long_pcvoid
a3170dc6 9679 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9680 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9681 tree v4si_ftype_long_pcvoid
a3170dc6 9682 = build_function_type_list (V4SI_type_node,
b4a62fa0 9683 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9684
58646b77
PB
9685 tree void_ftype_opaque_long_pvoid
9686 = build_function_type_list (void_type_node,
9687 opaque_V4SI_type_node, long_integer_type_node,
9688 pvoid_type_node, NULL_TREE);
b4a62fa0 9689 tree void_ftype_v4si_long_pvoid
b4de2f7d 9690 = build_function_type_list (void_type_node,
b4a62fa0 9691 V4SI_type_node, long_integer_type_node,
b4de2f7d 9692 pvoid_type_node, NULL_TREE);
b4a62fa0 9693 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9694 = build_function_type_list (void_type_node,
b4a62fa0 9695 V16QI_type_node, long_integer_type_node,
b4de2f7d 9696 pvoid_type_node, NULL_TREE);
b4a62fa0 9697 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9698 = build_function_type_list (void_type_node,
b4a62fa0 9699 V8HI_type_node, long_integer_type_node,
b4de2f7d 9700 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9701 tree int_ftype_int_v8hi_v8hi
9702 = build_function_type_list (integer_type_node,
9703 integer_type_node, V8HI_type_node,
9704 V8HI_type_node, NULL_TREE);
9705 tree int_ftype_int_v16qi_v16qi
9706 = build_function_type_list (integer_type_node,
9707 integer_type_node, V16QI_type_node,
9708 V16QI_type_node, NULL_TREE);
9709 tree int_ftype_int_v4sf_v4sf
9710 = build_function_type_list (integer_type_node,
9711 integer_type_node, V4SF_type_node,
9712 V4SF_type_node, NULL_TREE);
9713 tree v4si_ftype_v4si
9714 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9715 tree v8hi_ftype_v8hi
9716 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9717 tree v16qi_ftype_v16qi
9718 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9719 tree v4sf_ftype_v4sf
9720 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9721 tree void_ftype_pcvoid_int_int
a3170dc6 9722 = build_function_type_list (void_type_node,
0dbc3651 9723 pcvoid_type_node, integer_type_node,
8bb418a3 9724 integer_type_node, NULL_TREE);
8bb418a3 9725
0dbc3651
ZW
9726 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9727 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9728 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9729 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9730 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9731 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9732 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9733 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9734 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9735 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9736 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9737 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9738 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9739 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9740 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9741 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9742 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9743 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9744 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9745 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9746 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9747 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9748 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9749 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9750 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9751 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9752 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9753 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9754 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9755 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9756 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9757 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9758 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9759 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9760 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9761 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9762 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9763 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9764 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9765 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9766 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9767 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9768 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9769 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9770 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9771 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9772
9773 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9774
9775 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9776 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9777 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9778 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9779 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9780 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9781 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9782 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9783 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9784 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9785
a3170dc6 9786 /* Add the DST variants. */
586de218 9787 d = bdesc_dst;
a3170dc6 9788 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9789 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9790
9791 /* Initialize the predicates. */
586de218 9792 dp = bdesc_altivec_preds;
a3170dc6
AH
9793 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9794 {
9795 enum machine_mode mode1;
9796 tree type;
58646b77
PB
9797 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9798 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9799
58646b77
PB
9800 if (is_overloaded)
9801 mode1 = VOIDmode;
9802 else
9803 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9804
9805 switch (mode1)
9806 {
58646b77
PB
9807 case VOIDmode:
9808 type = int_ftype_int_opaque_opaque;
9809 break;
a3170dc6
AH
9810 case V4SImode:
9811 type = int_ftype_int_v4si_v4si;
9812 break;
9813 case V8HImode:
9814 type = int_ftype_int_v8hi_v8hi;
9815 break;
9816 case V16QImode:
9817 type = int_ftype_int_v16qi_v16qi;
9818 break;
9819 case V4SFmode:
9820 type = int_ftype_int_v4sf_v4sf;
9821 break;
9822 default:
37409796 9823 gcc_unreachable ();
a3170dc6 9824 }
f676971a 9825
a3170dc6
AH
9826 def_builtin (dp->mask, dp->name, type, dp->code);
9827 }
9828
9829 /* Initialize the abs* operators. */
586de218 9830 d = bdesc_abs;
a3170dc6
AH
9831 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9832 {
9833 enum machine_mode mode0;
9834 tree type;
9835
9836 mode0 = insn_data[d->icode].operand[0].mode;
9837
9838 switch (mode0)
9839 {
9840 case V4SImode:
9841 type = v4si_ftype_v4si;
9842 break;
9843 case V8HImode:
9844 type = v8hi_ftype_v8hi;
9845 break;
9846 case V16QImode:
9847 type = v16qi_ftype_v16qi;
9848 break;
9849 case V4SFmode:
9850 type = v4sf_ftype_v4sf;
9851 break;
9852 default:
37409796 9853 gcc_unreachable ();
a3170dc6 9854 }
f676971a 9855
a3170dc6
AH
9856 def_builtin (d->mask, d->name, type, d->code);
9857 }
7ccf35ed 9858
13c62176
DN
9859 if (TARGET_ALTIVEC)
9860 {
9861 tree decl;
9862
9863 /* Initialize target builtin that implements
9864 targetm.vectorize.builtin_mask_for_load. */
9865
c79efc4d
RÁE
9866 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9867 v16qi_ftype_long_pcvoid,
9868 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9869 BUILT_IN_MD, NULL, NULL_TREE);
9870 TREE_READONLY (decl) = 1;
13c62176
DN
9871 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9872 altivec_builtin_mask_for_load = decl;
13c62176 9873 }
7a4eca66
DE
9874
9875 /* Access to the vec_init patterns. */
9876 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9877 integer_type_node, integer_type_node,
9878 integer_type_node, NULL_TREE);
9879 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9880 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9881
9882 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9883 short_integer_type_node,
9884 short_integer_type_node,
9885 short_integer_type_node,
9886 short_integer_type_node,
9887 short_integer_type_node,
9888 short_integer_type_node,
9889 short_integer_type_node, NULL_TREE);
9890 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9891 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9892
9893 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9894 char_type_node, char_type_node,
9895 char_type_node, char_type_node,
9896 char_type_node, char_type_node,
9897 char_type_node, char_type_node,
9898 char_type_node, char_type_node,
9899 char_type_node, char_type_node,
9900 char_type_node, char_type_node,
9901 char_type_node, NULL_TREE);
9902 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9903 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9904
9905 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9906 float_type_node, float_type_node,
9907 float_type_node, NULL_TREE);
9908 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9909 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9910
9911 /* Access to the vec_set patterns. */
9912 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9913 intSI_type_node,
9914 integer_type_node, NULL_TREE);
9915 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9916 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9917
9918 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9919 intHI_type_node,
9920 integer_type_node, NULL_TREE);
9921 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9922 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9923
9924 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9925 intQI_type_node,
9926 integer_type_node, NULL_TREE);
9927 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9928 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9929
9930 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9931 float_type_node,
9932 integer_type_node, NULL_TREE);
9933 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9934 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9935
9936 /* Access to the vec_extract patterns. */
9937 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9938 integer_type_node, NULL_TREE);
9939 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9940 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9941
9942 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9943 integer_type_node, NULL_TREE);
9944 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9945 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9946
9947 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9948 integer_type_node, NULL_TREE);
9949 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9950 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9951
9952 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9953 integer_type_node, NULL_TREE);
9954 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9955 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9956}
9957
9958static void
863d938c 9959rs6000_common_init_builtins (void)
a3170dc6 9960{
586de218 9961 const struct builtin_description *d;
a3170dc6
AH
9962 size_t i;
9963
96038623
DE
9964 tree v2sf_ftype_v2sf_v2sf_v2sf
9965 = build_function_type_list (V2SF_type_node,
9966 V2SF_type_node, V2SF_type_node,
9967 V2SF_type_node, NULL_TREE);
9968
a3170dc6
AH
9969 tree v4sf_ftype_v4sf_v4sf_v16qi
9970 = build_function_type_list (V4SF_type_node,
9971 V4SF_type_node, V4SF_type_node,
9972 V16QI_type_node, NULL_TREE);
9973 tree v4si_ftype_v4si_v4si_v16qi
9974 = build_function_type_list (V4SI_type_node,
9975 V4SI_type_node, V4SI_type_node,
9976 V16QI_type_node, NULL_TREE);
9977 tree v8hi_ftype_v8hi_v8hi_v16qi
9978 = build_function_type_list (V8HI_type_node,
9979 V8HI_type_node, V8HI_type_node,
9980 V16QI_type_node, NULL_TREE);
9981 tree v16qi_ftype_v16qi_v16qi_v16qi
9982 = build_function_type_list (V16QI_type_node,
9983 V16QI_type_node, V16QI_type_node,
9984 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9985 tree v4si_ftype_int
9986 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9987 tree v8hi_ftype_int
9988 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9989 tree v16qi_ftype_int
9990 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9991 tree v8hi_ftype_v16qi
9992 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9993 tree v4sf_ftype_v4sf
9994 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9995
9996 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9997 = build_function_type_list (opaque_V2SI_type_node,
9998 opaque_V2SI_type_node,
9999 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10000
96038623 10001 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10002 = build_function_type_list (opaque_V2SF_type_node,
10003 opaque_V2SF_type_node,
10004 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10005
96038623
DE
10006 tree v2sf_ftype_v2sf_v2sf
10007 = build_function_type_list (V2SF_type_node,
10008 V2SF_type_node,
10009 V2SF_type_node, NULL_TREE);
10010
10011
a3170dc6 10012 tree v2si_ftype_int_int
2abe3e28 10013 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10014 integer_type_node, integer_type_node,
10015 NULL_TREE);
10016
58646b77
PB
10017 tree opaque_ftype_opaque
10018 = build_function_type_list (opaque_V4SI_type_node,
10019 opaque_V4SI_type_node, NULL_TREE);
10020
a3170dc6 10021 tree v2si_ftype_v2si
2abe3e28
AH
10022 = build_function_type_list (opaque_V2SI_type_node,
10023 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10024
96038623 10025 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10026 = build_function_type_list (opaque_V2SF_type_node,
10027 opaque_V2SF_type_node, NULL_TREE);
f676971a 10028
96038623
DE
10029 tree v2sf_ftype_v2sf
10030 = build_function_type_list (V2SF_type_node,
10031 V2SF_type_node, NULL_TREE);
10032
a3170dc6 10033 tree v2sf_ftype_v2si
2abe3e28
AH
10034 = build_function_type_list (opaque_V2SF_type_node,
10035 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10036
10037 tree v2si_ftype_v2sf
2abe3e28
AH
10038 = build_function_type_list (opaque_V2SI_type_node,
10039 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10040
10041 tree v2si_ftype_v2si_char
2abe3e28
AH
10042 = build_function_type_list (opaque_V2SI_type_node,
10043 opaque_V2SI_type_node,
10044 char_type_node, NULL_TREE);
a3170dc6
AH
10045
10046 tree v2si_ftype_int_char
2abe3e28 10047 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10048 integer_type_node, char_type_node, NULL_TREE);
10049
10050 tree v2si_ftype_char
2abe3e28
AH
10051 = build_function_type_list (opaque_V2SI_type_node,
10052 char_type_node, NULL_TREE);
a3170dc6
AH
10053
10054 tree int_ftype_int_int
10055 = build_function_type_list (integer_type_node,
10056 integer_type_node, integer_type_node,
10057 NULL_TREE);
95385cbb 10058
58646b77
PB
10059 tree opaque_ftype_opaque_opaque
10060 = build_function_type_list (opaque_V4SI_type_node,
10061 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10062 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10063 = build_function_type_list (V4SI_type_node,
10064 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10065 tree v4sf_ftype_v4si_int
b4de2f7d 10066 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10067 V4SI_type_node, integer_type_node, NULL_TREE);
10068 tree v4si_ftype_v4sf_int
b4de2f7d 10069 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10070 V4SF_type_node, integer_type_node, NULL_TREE);
10071 tree v4si_ftype_v4si_int
b4de2f7d 10072 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10073 V4SI_type_node, integer_type_node, NULL_TREE);
10074 tree v8hi_ftype_v8hi_int
b4de2f7d 10075 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10076 V8HI_type_node, integer_type_node, NULL_TREE);
10077 tree v16qi_ftype_v16qi_int
b4de2f7d 10078 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10079 V16QI_type_node, integer_type_node, NULL_TREE);
10080 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10081 = build_function_type_list (V16QI_type_node,
10082 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10083 integer_type_node, NULL_TREE);
10084 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10085 = build_function_type_list (V8HI_type_node,
10086 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10087 integer_type_node, NULL_TREE);
10088 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10089 = build_function_type_list (V4SI_type_node,
10090 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10091 integer_type_node, NULL_TREE);
10092 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10093 = build_function_type_list (V4SF_type_node,
10094 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10095 integer_type_node, NULL_TREE);
0ac081f6 10096 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10097 = build_function_type_list (V4SF_type_node,
10098 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10099 tree opaque_ftype_opaque_opaque_opaque
10100 = build_function_type_list (opaque_V4SI_type_node,
10101 opaque_V4SI_type_node, opaque_V4SI_type_node,
10102 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10103 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10104 = build_function_type_list (V4SF_type_node,
10105 V4SF_type_node, V4SF_type_node,
10106 V4SI_type_node, NULL_TREE);
2212663f 10107 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10108 = build_function_type_list (V4SF_type_node,
10109 V4SF_type_node, V4SF_type_node,
10110 V4SF_type_node, NULL_TREE);
f676971a 10111 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10112 = build_function_type_list (V4SI_type_node,
10113 V4SI_type_node, V4SI_type_node,
10114 V4SI_type_node, NULL_TREE);
0ac081f6 10115 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10116 = build_function_type_list (V8HI_type_node,
10117 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10118 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10119 = build_function_type_list (V8HI_type_node,
10120 V8HI_type_node, V8HI_type_node,
10121 V8HI_type_node, NULL_TREE);
c4ad648e 10122 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10123 = build_function_type_list (V4SI_type_node,
10124 V8HI_type_node, V8HI_type_node,
10125 V4SI_type_node, NULL_TREE);
c4ad648e 10126 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10127 = build_function_type_list (V4SI_type_node,
10128 V16QI_type_node, V16QI_type_node,
10129 V4SI_type_node, NULL_TREE);
0ac081f6 10130 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10131 = build_function_type_list (V16QI_type_node,
10132 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10133 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10134 = build_function_type_list (V4SI_type_node,
10135 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10136 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10137 = build_function_type_list (V8HI_type_node,
10138 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10139 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10140 = build_function_type_list (V4SI_type_node,
10141 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10142 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10143 = build_function_type_list (V8HI_type_node,
10144 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10145 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10146 = build_function_type_list (V16QI_type_node,
10147 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10148 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10149 = build_function_type_list (V4SI_type_node,
10150 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10151 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10152 = build_function_type_list (V4SI_type_node,
10153 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10154 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10155 = build_function_type_list (V4SI_type_node,
10156 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10157 tree v4si_ftype_v8hi
10158 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10159 tree int_ftype_v4si_v4si
10160 = build_function_type_list (integer_type_node,
10161 V4SI_type_node, V4SI_type_node, NULL_TREE);
10162 tree int_ftype_v4sf_v4sf
10163 = build_function_type_list (integer_type_node,
10164 V4SF_type_node, V4SF_type_node, NULL_TREE);
10165 tree int_ftype_v16qi_v16qi
10166 = build_function_type_list (integer_type_node,
10167 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10168 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10169 = build_function_type_list (integer_type_node,
10170 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10171
6f317ef3 10172 /* Add the simple ternary operators. */
586de218 10173 d = bdesc_3arg;
ca7558fc 10174 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10175 {
2212663f
DB
10176 enum machine_mode mode0, mode1, mode2, mode3;
10177 tree type;
58646b77
PB
10178 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10179 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10180
58646b77
PB
10181 if (is_overloaded)
10182 {
10183 mode0 = VOIDmode;
10184 mode1 = VOIDmode;
10185 mode2 = VOIDmode;
10186 mode3 = VOIDmode;
10187 }
10188 else
10189 {
10190 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10191 continue;
f676971a 10192
58646b77
PB
10193 mode0 = insn_data[d->icode].operand[0].mode;
10194 mode1 = insn_data[d->icode].operand[1].mode;
10195 mode2 = insn_data[d->icode].operand[2].mode;
10196 mode3 = insn_data[d->icode].operand[3].mode;
10197 }
bb8df8a6 10198
2212663f
DB
10199 /* When all four are of the same mode. */
10200 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10201 {
10202 switch (mode0)
10203 {
58646b77
PB
10204 case VOIDmode:
10205 type = opaque_ftype_opaque_opaque_opaque;
10206 break;
617e0e1d
DB
10207 case V4SImode:
10208 type = v4si_ftype_v4si_v4si_v4si;
10209 break;
2212663f
DB
10210 case V4SFmode:
10211 type = v4sf_ftype_v4sf_v4sf_v4sf;
10212 break;
10213 case V8HImode:
10214 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10215 break;
2212663f
DB
10216 case V16QImode:
10217 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10218 break;
96038623
DE
10219 case V2SFmode:
10220 type = v2sf_ftype_v2sf_v2sf_v2sf;
10221 break;
2212663f 10222 default:
37409796 10223 gcc_unreachable ();
2212663f
DB
10224 }
10225 }
10226 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10227 {
2212663f
DB
10228 switch (mode0)
10229 {
10230 case V4SImode:
10231 type = v4si_ftype_v4si_v4si_v16qi;
10232 break;
10233 case V4SFmode:
10234 type = v4sf_ftype_v4sf_v4sf_v16qi;
10235 break;
10236 case V8HImode:
10237 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10238 break;
2212663f
DB
10239 case V16QImode:
10240 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10241 break;
2212663f 10242 default:
37409796 10243 gcc_unreachable ();
2212663f
DB
10244 }
10245 }
f676971a 10246 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10247 && mode3 == V4SImode)
24408032 10248 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10249 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10250 && mode3 == V4SImode)
24408032 10251 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10252 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10253 && mode3 == V4SImode)
24408032
AH
10254 type = v4sf_ftype_v4sf_v4sf_v4si;
10255
a7b376ee 10256 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10257 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10258 && mode3 == QImode)
b9e4e5d1 10259 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10260
a7b376ee 10261 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10262 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10263 && mode3 == QImode)
b9e4e5d1 10264 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10265
a7b376ee 10266 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10267 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10268 && mode3 == QImode)
b9e4e5d1 10269 type = v4si_ftype_v4si_v4si_int;
24408032 10270
a7b376ee 10271 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10272 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10273 && mode3 == QImode)
b9e4e5d1 10274 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10275
2212663f 10276 else
37409796 10277 gcc_unreachable ();
2212663f
DB
10278
10279 def_builtin (d->mask, d->name, type, d->code);
10280 }
10281
0ac081f6 10282 /* Add the simple binary operators. */
00b960c7 10283 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10284 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10285 {
10286 enum machine_mode mode0, mode1, mode2;
10287 tree type;
58646b77
PB
10288 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10289 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10290
58646b77
PB
10291 if (is_overloaded)
10292 {
10293 mode0 = VOIDmode;
10294 mode1 = VOIDmode;
10295 mode2 = VOIDmode;
10296 }
10297 else
bb8df8a6 10298 {
58646b77
PB
10299 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10300 continue;
f676971a 10301
58646b77
PB
10302 mode0 = insn_data[d->icode].operand[0].mode;
10303 mode1 = insn_data[d->icode].operand[1].mode;
10304 mode2 = insn_data[d->icode].operand[2].mode;
10305 }
0ac081f6
AH
10306
10307 /* When all three operands are of the same mode. */
10308 if (mode0 == mode1 && mode1 == mode2)
10309 {
10310 switch (mode0)
10311 {
58646b77
PB
10312 case VOIDmode:
10313 type = opaque_ftype_opaque_opaque;
10314 break;
0ac081f6
AH
10315 case V4SFmode:
10316 type = v4sf_ftype_v4sf_v4sf;
10317 break;
10318 case V4SImode:
10319 type = v4si_ftype_v4si_v4si;
10320 break;
10321 case V16QImode:
10322 type = v16qi_ftype_v16qi_v16qi;
10323 break;
10324 case V8HImode:
10325 type = v8hi_ftype_v8hi_v8hi;
10326 break;
a3170dc6
AH
10327 case V2SImode:
10328 type = v2si_ftype_v2si_v2si;
10329 break;
96038623
DE
10330 case V2SFmode:
10331 if (TARGET_PAIRED_FLOAT)
10332 type = v2sf_ftype_v2sf_v2sf;
10333 else
10334 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10335 break;
10336 case SImode:
10337 type = int_ftype_int_int;
10338 break;
0ac081f6 10339 default:
37409796 10340 gcc_unreachable ();
0ac081f6
AH
10341 }
10342 }
10343
10344 /* A few other combos we really don't want to do manually. */
10345
10346 /* vint, vfloat, vfloat. */
10347 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10348 type = v4si_ftype_v4sf_v4sf;
10349
10350 /* vshort, vchar, vchar. */
10351 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10352 type = v8hi_ftype_v16qi_v16qi;
10353
10354 /* vint, vshort, vshort. */
10355 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10356 type = v4si_ftype_v8hi_v8hi;
10357
10358 /* vshort, vint, vint. */
10359 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10360 type = v8hi_ftype_v4si_v4si;
10361
10362 /* vchar, vshort, vshort. */
10363 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10364 type = v16qi_ftype_v8hi_v8hi;
10365
10366 /* vint, vchar, vint. */
10367 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10368 type = v4si_ftype_v16qi_v4si;
10369
fa066a23
AH
10370 /* vint, vchar, vchar. */
10371 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10372 type = v4si_ftype_v16qi_v16qi;
10373
0ac081f6
AH
10374 /* vint, vshort, vint. */
10375 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10376 type = v4si_ftype_v8hi_v4si;
f676971a 10377
a7b376ee 10378 /* vint, vint, 5-bit literal. */
2212663f 10379 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10380 type = v4si_ftype_v4si_int;
f676971a 10381
a7b376ee 10382 /* vshort, vshort, 5-bit literal. */
2212663f 10383 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10384 type = v8hi_ftype_v8hi_int;
f676971a 10385
a7b376ee 10386 /* vchar, vchar, 5-bit literal. */
2212663f 10387 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10388 type = v16qi_ftype_v16qi_int;
0ac081f6 10389
a7b376ee 10390 /* vfloat, vint, 5-bit literal. */
617e0e1d 10391 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10392 type = v4sf_ftype_v4si_int;
f676971a 10393
a7b376ee 10394 /* vint, vfloat, 5-bit literal. */
617e0e1d 10395 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10396 type = v4si_ftype_v4sf_int;
617e0e1d 10397
a3170dc6
AH
10398 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10399 type = v2si_ftype_int_int;
10400
10401 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10402 type = v2si_ftype_v2si_char;
10403
10404 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10405 type = v2si_ftype_int_char;
10406
37409796 10407 else
0ac081f6 10408 {
37409796
NS
10409 /* int, x, x. */
10410 gcc_assert (mode0 == SImode);
0ac081f6
AH
10411 switch (mode1)
10412 {
10413 case V4SImode:
10414 type = int_ftype_v4si_v4si;
10415 break;
10416 case V4SFmode:
10417 type = int_ftype_v4sf_v4sf;
10418 break;
10419 case V16QImode:
10420 type = int_ftype_v16qi_v16qi;
10421 break;
10422 case V8HImode:
10423 type = int_ftype_v8hi_v8hi;
10424 break;
10425 default:
37409796 10426 gcc_unreachable ();
0ac081f6
AH
10427 }
10428 }
10429
2212663f
DB
10430 def_builtin (d->mask, d->name, type, d->code);
10431 }
24408032 10432
2212663f
DB
10433 /* Add the simple unary operators. */
10434 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10435 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10436 {
10437 enum machine_mode mode0, mode1;
10438 tree type;
58646b77
PB
10439 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10440 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10441
10442 if (is_overloaded)
10443 {
10444 mode0 = VOIDmode;
10445 mode1 = VOIDmode;
10446 }
10447 else
10448 {
10449 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10450 continue;
bb8df8a6 10451
58646b77
PB
10452 mode0 = insn_data[d->icode].operand[0].mode;
10453 mode1 = insn_data[d->icode].operand[1].mode;
10454 }
2212663f
DB
10455
10456 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10457 type = v4si_ftype_int;
2212663f 10458 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10459 type = v8hi_ftype_int;
2212663f 10460 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10461 type = v16qi_ftype_int;
58646b77
PB
10462 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10463 type = opaque_ftype_opaque;
617e0e1d
DB
10464 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10465 type = v4sf_ftype_v4sf;
20e26713
AH
10466 else if (mode0 == V8HImode && mode1 == V16QImode)
10467 type = v8hi_ftype_v16qi;
10468 else if (mode0 == V4SImode && mode1 == V8HImode)
10469 type = v4si_ftype_v8hi;
a3170dc6
AH
10470 else if (mode0 == V2SImode && mode1 == V2SImode)
10471 type = v2si_ftype_v2si;
10472 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10473 {
10474 if (TARGET_PAIRED_FLOAT)
10475 type = v2sf_ftype_v2sf;
10476 else
10477 type = v2sf_ftype_v2sf_spe;
10478 }
a3170dc6
AH
10479 else if (mode0 == V2SFmode && mode1 == V2SImode)
10480 type = v2sf_ftype_v2si;
10481 else if (mode0 == V2SImode && mode1 == V2SFmode)
10482 type = v2si_ftype_v2sf;
10483 else if (mode0 == V2SImode && mode1 == QImode)
10484 type = v2si_ftype_char;
2212663f 10485 else
37409796 10486 gcc_unreachable ();
2212663f 10487
0ac081f6
AH
10488 def_builtin (d->mask, d->name, type, d->code);
10489 }
10490}
10491
c15c90bb
ZW
10492static void
10493rs6000_init_libfuncs (void)
10494{
602ea4d3
JJ
10495 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10496 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10497 {
602ea4d3
JJ
10498 /* AIX library routines for float->int conversion. */
10499 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10500 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10501 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10502 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10503 }
c15c90bb 10504
602ea4d3 10505 if (!TARGET_IEEEQUAD)
98c41d98 10506 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10507 if (!TARGET_XL_COMPAT)
10508 {
10509 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10510 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10511 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10512 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10513
17caeff2 10514 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10515 {
10516 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10517 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10518 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10519 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10520 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10521 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10522 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10523
10524 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10525 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10526 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10527 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10528 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10529 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10530 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10531 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10532 }
b26941b4
JM
10533
10534 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10535 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10536 }
10537 else
10538 {
10539 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10540 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10541 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10542 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10543 }
c9034561 10544 else
c15c90bb 10545 {
c9034561 10546 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10547
10548 set_optab_libfunc (add_optab, TFmode, "_q_add");
10549 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10550 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10551 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10552 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10553 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10554 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10555
c9034561
ZW
10556 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10557 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10558 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10559 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10560 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10561 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10562
85363ca0
ZW
10563 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10564 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10565 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10566 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10567 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10568 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10569 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10570 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10571 }
10572}
fba73eb1
DE
10573
10574\f
10575/* Expand a block clear operation, and return 1 if successful. Return 0
10576 if we should let the compiler generate normal code.
10577
10578 operands[0] is the destination
10579 operands[1] is the length
57e84f18 10580 operands[3] is the alignment */
fba73eb1
DE
10581
10582int
10583expand_block_clear (rtx operands[])
10584{
10585 rtx orig_dest = operands[0];
10586 rtx bytes_rtx = operands[1];
57e84f18 10587 rtx align_rtx = operands[3];
5514620a
GK
10588 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10589 HOST_WIDE_INT align;
10590 HOST_WIDE_INT bytes;
fba73eb1
DE
10591 int offset;
10592 int clear_bytes;
5514620a 10593 int clear_step;
fba73eb1
DE
10594
10595 /* If this is not a fixed size move, just call memcpy */
10596 if (! constp)
10597 return 0;
10598
37409796
NS
10599 /* This must be a fixed size alignment */
10600 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10601 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10602
10603 /* Anything to clear? */
10604 bytes = INTVAL (bytes_rtx);
10605 if (bytes <= 0)
10606 return 1;
10607
5514620a
GK
10608 /* Use the builtin memset after a point, to avoid huge code bloat.
10609 When optimize_size, avoid any significant code bloat; calling
10610 memset is about 4 instructions, so allow for one instruction to
10611 load zero and three to do clearing. */
10612 if (TARGET_ALTIVEC && align >= 128)
10613 clear_step = 16;
10614 else if (TARGET_POWERPC64 && align >= 32)
10615 clear_step = 8;
21d818ff
NF
10616 else if (TARGET_SPE && align >= 64)
10617 clear_step = 8;
5514620a
GK
10618 else
10619 clear_step = 4;
fba73eb1 10620
5514620a
GK
10621 if (optimize_size && bytes > 3 * clear_step)
10622 return 0;
10623 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10624 return 0;
10625
10626 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10627 {
fba73eb1
DE
10628 enum machine_mode mode = BLKmode;
10629 rtx dest;
f676971a 10630
5514620a
GK
10631 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10632 {
10633 clear_bytes = 16;
10634 mode = V4SImode;
10635 }
21d818ff
NF
10636 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10637 {
10638 clear_bytes = 8;
10639 mode = V2SImode;
10640 }
5514620a 10641 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10642 /* 64-bit loads and stores require word-aligned
10643 displacements. */
10644 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10645 {
10646 clear_bytes = 8;
10647 mode = DImode;
fba73eb1 10648 }
5514620a 10649 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10650 { /* move 4 bytes */
10651 clear_bytes = 4;
10652 mode = SImode;
fba73eb1 10653 }
ec53fc93 10654 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10655 { /* move 2 bytes */
10656 clear_bytes = 2;
10657 mode = HImode;
fba73eb1
DE
10658 }
10659 else /* move 1 byte at a time */
10660 {
10661 clear_bytes = 1;
10662 mode = QImode;
fba73eb1 10663 }
f676971a 10664
fba73eb1 10665 dest = adjust_address (orig_dest, mode, offset);
f676971a 10666
5514620a 10667 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10668 }
10669
10670 return 1;
10671}
10672
35aff10b 10673\f
7e69e155
MM
10674/* Expand a block move operation, and return 1 if successful. Return 0
10675 if we should let the compiler generate normal code.
10676
10677 operands[0] is the destination
10678 operands[1] is the source
10679 operands[2] is the length
10680 operands[3] is the alignment */
10681
3933e0e1
MM
10682#define MAX_MOVE_REG 4
10683
7e69e155 10684int
a2369ed3 10685expand_block_move (rtx operands[])
7e69e155 10686{
b6c9286a
MM
10687 rtx orig_dest = operands[0];
10688 rtx orig_src = operands[1];
7e69e155 10689 rtx bytes_rtx = operands[2];
7e69e155 10690 rtx align_rtx = operands[3];
3933e0e1 10691 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10692 int align;
3933e0e1
MM
10693 int bytes;
10694 int offset;
7e69e155 10695 int move_bytes;
cabfd258
GK
10696 rtx stores[MAX_MOVE_REG];
10697 int num_reg = 0;
7e69e155 10698
3933e0e1 10699 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10700 if (! constp)
3933e0e1
MM
10701 return 0;
10702
37409796
NS
10703 /* This must be a fixed size alignment */
10704 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10705 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10706
7e69e155 10707 /* Anything to move? */
3933e0e1
MM
10708 bytes = INTVAL (bytes_rtx);
10709 if (bytes <= 0)
7e69e155
MM
10710 return 1;
10711
ea9982a8 10712 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10713 reg_parm_stack_space. */
ea9982a8 10714 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10715 return 0;
10716
cabfd258 10717 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10718 {
cabfd258 10719 union {
70128ad9 10720 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10721 rtx (*mov) (rtx, rtx);
cabfd258
GK
10722 } gen_func;
10723 enum machine_mode mode = BLKmode;
10724 rtx src, dest;
f676971a 10725
5514620a
GK
10726 /* Altivec first, since it will be faster than a string move
10727 when it applies, and usually not significantly larger. */
10728 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10729 {
10730 move_bytes = 16;
10731 mode = V4SImode;
10732 gen_func.mov = gen_movv4si;
10733 }
21d818ff
NF
10734 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10735 {
10736 move_bytes = 8;
10737 mode = V2SImode;
10738 gen_func.mov = gen_movv2si;
10739 }
5514620a 10740 else if (TARGET_STRING
cabfd258
GK
10741 && bytes > 24 /* move up to 32 bytes at a time */
10742 && ! fixed_regs[5]
10743 && ! fixed_regs[6]
10744 && ! fixed_regs[7]
10745 && ! fixed_regs[8]
10746 && ! fixed_regs[9]
10747 && ! fixed_regs[10]
10748 && ! fixed_regs[11]
10749 && ! fixed_regs[12])
7e69e155 10750 {
cabfd258 10751 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10752 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10753 }
10754 else if (TARGET_STRING
10755 && bytes > 16 /* move up to 24 bytes at a time */
10756 && ! fixed_regs[5]
10757 && ! fixed_regs[6]
10758 && ! fixed_regs[7]
10759 && ! fixed_regs[8]
10760 && ! fixed_regs[9]
10761 && ! fixed_regs[10])
10762 {
10763 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10764 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10765 }
10766 else if (TARGET_STRING
10767 && bytes > 8 /* move up to 16 bytes at a time */
10768 && ! fixed_regs[5]
10769 && ! fixed_regs[6]
10770 && ! fixed_regs[7]
10771 && ! fixed_regs[8])
10772 {
10773 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10774 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10775 }
10776 else if (bytes >= 8 && TARGET_POWERPC64
10777 /* 64-bit loads and stores require word-aligned
10778 displacements. */
fba73eb1 10779 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10780 {
10781 move_bytes = 8;
10782 mode = DImode;
10783 gen_func.mov = gen_movdi;
10784 }
10785 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10786 { /* move up to 8 bytes at a time */
10787 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10788 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10789 }
cd7d9ca4 10790 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10791 { /* move 4 bytes */
10792 move_bytes = 4;
10793 mode = SImode;
10794 gen_func.mov = gen_movsi;
10795 }
ec53fc93 10796 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10797 { /* move 2 bytes */
10798 move_bytes = 2;
10799 mode = HImode;
10800 gen_func.mov = gen_movhi;
10801 }
10802 else if (TARGET_STRING && bytes > 1)
10803 { /* move up to 4 bytes at a time */
10804 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10805 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10806 }
10807 else /* move 1 byte at a time */
10808 {
10809 move_bytes = 1;
10810 mode = QImode;
10811 gen_func.mov = gen_movqi;
10812 }
f676971a 10813
cabfd258
GK
10814 src = adjust_address (orig_src, mode, offset);
10815 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10816
10817 if (mode != BLKmode)
cabfd258
GK
10818 {
10819 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10820
cabfd258
GK
10821 emit_insn ((*gen_func.mov) (tmp_reg, src));
10822 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10823 }
3933e0e1 10824
cabfd258
GK
10825 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10826 {
10827 int i;
10828 for (i = 0; i < num_reg; i++)
10829 emit_insn (stores[i]);
10830 num_reg = 0;
10831 }
35aff10b 10832
cabfd258 10833 if (mode == BLKmode)
7e69e155 10834 {
70128ad9 10835 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10836 patterns require zero offset. */
10837 if (!REG_P (XEXP (src, 0)))
b6c9286a 10838 {
cabfd258
GK
10839 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10840 src = replace_equiv_address (src, src_reg);
b6c9286a 10841 }
cabfd258 10842 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10843
cabfd258 10844 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10845 {
cabfd258
GK
10846 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10847 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10848 }
cabfd258 10849 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10850
70128ad9 10851 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10852 GEN_INT (move_bytes & 31),
10853 align_rtx));
7e69e155 10854 }
7e69e155
MM
10855 }
10856
10857 return 1;
10858}
10859
d62294f5 10860\f
9caa3eb2
DE
10861/* Return a string to perform a load_multiple operation.
10862 operands[0] is the vector.
10863 operands[1] is the source address.
10864 operands[2] is the first destination register. */
10865
10866const char *
a2369ed3 10867rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10868{
10869 /* We have to handle the case where the pseudo used to contain the address
10870 is assigned to one of the output registers. */
10871 int i, j;
10872 int words = XVECLEN (operands[0], 0);
10873 rtx xop[10];
10874
10875 if (XVECLEN (operands[0], 0) == 1)
10876 return "{l|lwz} %2,0(%1)";
10877
10878 for (i = 0; i < words; i++)
10879 if (refers_to_regno_p (REGNO (operands[2]) + i,
10880 REGNO (operands[2]) + i + 1, operands[1], 0))
10881 {
10882 if (i == words-1)
10883 {
10884 xop[0] = GEN_INT (4 * (words-1));
10885 xop[1] = operands[1];
10886 xop[2] = operands[2];
10887 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10888 return "";
10889 }
10890 else if (i == 0)
10891 {
10892 xop[0] = GEN_INT (4 * (words-1));
10893 xop[1] = operands[1];
10894 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10895 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10896 return "";
10897 }
10898 else
10899 {
10900 for (j = 0; j < words; j++)
10901 if (j != i)
10902 {
10903 xop[0] = GEN_INT (j * 4);
10904 xop[1] = operands[1];
10905 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10906 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10907 }
10908 xop[0] = GEN_INT (i * 4);
10909 xop[1] = operands[1];
10910 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10911 return "";
10912 }
10913 }
10914
10915 return "{lsi|lswi} %2,%1,%N0";
10916}
10917
9878760c 10918\f
a4f6c312
SS
10919/* A validation routine: say whether CODE, a condition code, and MODE
10920 match. The other alternatives either don't make sense or should
10921 never be generated. */
39a10a29 10922
48d72335 10923void
a2369ed3 10924validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10925{
37409796
NS
10926 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10927 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10928 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10929
10930 /* These don't make sense. */
37409796
NS
10931 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10932 || mode != CCUNSmode);
39a10a29 10933
37409796
NS
10934 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10935 || mode == CCUNSmode);
39a10a29 10936
37409796
NS
10937 gcc_assert (mode == CCFPmode
10938 || (code != ORDERED && code != UNORDERED
10939 && code != UNEQ && code != LTGT
10940 && code != UNGT && code != UNLT
10941 && code != UNGE && code != UNLE));
f676971a
EC
10942
10943 /* These should never be generated except for
bc9ec0e0 10944 flag_finite_math_only. */
37409796
NS
10945 gcc_assert (mode != CCFPmode
10946 || flag_finite_math_only
10947 || (code != LE && code != GE
10948 && code != UNEQ && code != LTGT
10949 && code != UNGT && code != UNLT));
39a10a29
GK
10950
10951 /* These are invalid; the information is not there. */
37409796 10952 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10953}
10954
9878760c
RK
10955\f
10956/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10957 mask required to convert the result of a rotate insn into a shift
b1765bde 10958 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10959
10960int
a2369ed3 10961includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10962{
e2c953b6
DE
10963 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10964
10965 shift_mask <<= INTVAL (shiftop);
9878760c 10966
b1765bde 10967 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10968}
10969
10970/* Similar, but for right shift. */
10971
10972int
a2369ed3 10973includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10974{
a7653a2c 10975 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10976
10977 shift_mask >>= INTVAL (shiftop);
10978
b1765bde 10979 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10980}
10981
c5059423
AM
10982/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10983 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10984 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10985
10986int
a2369ed3 10987includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10988{
c5059423
AM
10989 if (GET_CODE (andop) == CONST_INT)
10990 {
02071907 10991 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10992
c5059423 10993 c = INTVAL (andop);
02071907 10994 if (c == 0 || c == ~0)
c5059423 10995 return 0;
e2c953b6 10996
02071907 10997 shift_mask = ~0;
c5059423
AM
10998 shift_mask <<= INTVAL (shiftop);
10999
b6d08ca1 11000 /* Find the least significant one bit. */
c5059423
AM
11001 lsb = c & -c;
11002
11003 /* It must coincide with the LSB of the shift mask. */
11004 if (-lsb != shift_mask)
11005 return 0;
e2c953b6 11006
c5059423
AM
11007 /* Invert to look for the next transition (if any). */
11008 c = ~c;
11009
11010 /* Remove the low group of ones (originally low group of zeros). */
11011 c &= -lsb;
11012
11013 /* Again find the lsb, and check we have all 1's above. */
11014 lsb = c & -c;
11015 return c == -lsb;
11016 }
11017 else if (GET_CODE (andop) == CONST_DOUBLE
11018 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11019 {
02071907
AM
11020 HOST_WIDE_INT low, high, lsb;
11021 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11022
11023 low = CONST_DOUBLE_LOW (andop);
11024 if (HOST_BITS_PER_WIDE_INT < 64)
11025 high = CONST_DOUBLE_HIGH (andop);
11026
11027 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11028 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11029 return 0;
11030
11031 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11032 {
02071907 11033 shift_mask_high = ~0;
c5059423
AM
11034 if (INTVAL (shiftop) > 32)
11035 shift_mask_high <<= INTVAL (shiftop) - 32;
11036
11037 lsb = high & -high;
11038
11039 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11040 return 0;
11041
11042 high = ~high;
11043 high &= -lsb;
11044
11045 lsb = high & -high;
11046 return high == -lsb;
11047 }
11048
02071907 11049 shift_mask_low = ~0;
c5059423
AM
11050 shift_mask_low <<= INTVAL (shiftop);
11051
11052 lsb = low & -low;
11053
11054 if (-lsb != shift_mask_low)
11055 return 0;
11056
11057 if (HOST_BITS_PER_WIDE_INT < 64)
11058 high = ~high;
11059 low = ~low;
11060 low &= -lsb;
11061
11062 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11063 {
11064 lsb = high & -high;
11065 return high == -lsb;
11066 }
11067
11068 lsb = low & -low;
11069 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11070 }
11071 else
11072 return 0;
11073}
e2c953b6 11074
c5059423
AM
11075/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11076 to perform a left shift. It must have SHIFTOP or more least
c1207243 11077 significant 0's, with the remainder of the word 1's. */
e2c953b6 11078
c5059423 11079int
a2369ed3 11080includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11081{
e2c953b6 11082 if (GET_CODE (andop) == CONST_INT)
c5059423 11083 {
02071907 11084 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11085
02071907 11086 shift_mask = ~0;
c5059423
AM
11087 shift_mask <<= INTVAL (shiftop);
11088 c = INTVAL (andop);
11089
c1207243 11090 /* Find the least significant one bit. */
c5059423
AM
11091 lsb = c & -c;
11092
11093 /* It must be covered by the shift mask.
a4f6c312 11094 This test also rejects c == 0. */
c5059423
AM
11095 if ((lsb & shift_mask) == 0)
11096 return 0;
11097
11098 /* Check we have all 1's above the transition, and reject all 1's. */
11099 return c == -lsb && lsb != 1;
11100 }
11101 else if (GET_CODE (andop) == CONST_DOUBLE
11102 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11103 {
02071907 11104 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11105
11106 low = CONST_DOUBLE_LOW (andop);
11107
11108 if (HOST_BITS_PER_WIDE_INT < 64)
11109 {
02071907 11110 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11111
11112 high = CONST_DOUBLE_HIGH (andop);
11113
11114 if (low == 0)
11115 {
02071907 11116 shift_mask_high = ~0;
c5059423
AM
11117 if (INTVAL (shiftop) > 32)
11118 shift_mask_high <<= INTVAL (shiftop) - 32;
11119
11120 lsb = high & -high;
11121
11122 if ((lsb & shift_mask_high) == 0)
11123 return 0;
11124
11125 return high == -lsb;
11126 }
11127 if (high != ~0)
11128 return 0;
11129 }
11130
02071907 11131 shift_mask_low = ~0;
c5059423
AM
11132 shift_mask_low <<= INTVAL (shiftop);
11133
11134 lsb = low & -low;
11135
11136 if ((lsb & shift_mask_low) == 0)
11137 return 0;
11138
11139 return low == -lsb && lsb != 1;
11140 }
e2c953b6 11141 else
c5059423 11142 return 0;
9878760c 11143}
35068b43 11144
11ac38b2
DE
11145/* Return 1 if operands will generate a valid arguments to rlwimi
11146instruction for insert with right shift in 64-bit mode. The mask may
11147not start on the first bit or stop on the last bit because wrap-around
11148effects of instruction do not correspond to semantics of RTL insn. */
11149
11150int
11151insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11152{
429ec7dc
DE
11153 if (INTVAL (startop) > 32
11154 && INTVAL (startop) < 64
11155 && INTVAL (sizeop) > 1
11156 && INTVAL (sizeop) + INTVAL (startop) < 64
11157 && INTVAL (shiftop) > 0
11158 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11159 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11160 return 1;
11161
11162 return 0;
11163}
11164
35068b43 11165/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 11166 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
11167
11168int
a2369ed3 11169registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11170{
11171 /* We might have been passed a SUBREG. */
f676971a 11172 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11173 return 0;
f676971a 11174
90f81f99
AP
11175 /* We might have been passed non floating point registers. */
11176 if (!FP_REGNO_P (REGNO (reg1))
11177 || !FP_REGNO_P (REGNO (reg2)))
11178 return 0;
35068b43
RK
11179
11180 return (REGNO (reg1) == REGNO (reg2) - 1);
11181}
11182
a4f6c312
SS
11183/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11184 addr1 and addr2 must be in consecutive memory locations
11185 (addr2 == addr1 + 8). */
35068b43
RK
11186
11187int
90f81f99 11188mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11189{
90f81f99 11190 rtx addr1, addr2;
bb8df8a6
EC
11191 unsigned int reg1, reg2;
11192 int offset1, offset2;
35068b43 11193
90f81f99
AP
11194 /* The mems cannot be volatile. */
11195 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11196 return 0;
f676971a 11197
90f81f99
AP
11198 addr1 = XEXP (mem1, 0);
11199 addr2 = XEXP (mem2, 0);
11200
35068b43
RK
11201 /* Extract an offset (if used) from the first addr. */
11202 if (GET_CODE (addr1) == PLUS)
11203 {
11204 /* If not a REG, return zero. */
11205 if (GET_CODE (XEXP (addr1, 0)) != REG)
11206 return 0;
11207 else
11208 {
c4ad648e 11209 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11210 /* The offset must be constant! */
11211 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11212 return 0;
11213 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11214 }
11215 }
11216 else if (GET_CODE (addr1) != REG)
11217 return 0;
11218 else
11219 {
11220 reg1 = REGNO (addr1);
11221 /* This was a simple (mem (reg)) expression. Offset is 0. */
11222 offset1 = 0;
11223 }
11224
bb8df8a6
EC
11225 /* And now for the second addr. */
11226 if (GET_CODE (addr2) == PLUS)
11227 {
11228 /* If not a REG, return zero. */
11229 if (GET_CODE (XEXP (addr2, 0)) != REG)
11230 return 0;
11231 else
11232 {
11233 reg2 = REGNO (XEXP (addr2, 0));
11234 /* The offset must be constant. */
11235 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11236 return 0;
11237 offset2 = INTVAL (XEXP (addr2, 1));
11238 }
11239 }
11240 else if (GET_CODE (addr2) != REG)
35068b43 11241 return 0;
bb8df8a6
EC
11242 else
11243 {
11244 reg2 = REGNO (addr2);
11245 /* This was a simple (mem (reg)) expression. Offset is 0. */
11246 offset2 = 0;
11247 }
35068b43 11248
bb8df8a6
EC
11249 /* Both of these must have the same base register. */
11250 if (reg1 != reg2)
35068b43
RK
11251 return 0;
11252
11253 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11254 if (offset2 != offset1 + 8)
35068b43
RK
11255 return 0;
11256
11257 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11258 instructions. */
11259 return 1;
11260}
9878760c 11261\f
e41b2a33
PB
11262
11263rtx
11264rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11265{
11266 static bool eliminated = false;
11267 if (mode != SDmode)
11268 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11269 else
11270 {
11271 rtx mem = cfun->machine->sdmode_stack_slot;
11272 gcc_assert (mem != NULL_RTX);
11273
11274 if (!eliminated)
11275 {
11276 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11277 cfun->machine->sdmode_stack_slot = mem;
11278 eliminated = true;
11279 }
11280 return mem;
11281 }
11282}
11283
11284static tree
11285rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11286{
11287 /* Don't walk into types. */
11288 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11289 {
11290 *walk_subtrees = 0;
11291 return NULL_TREE;
11292 }
11293
11294 switch (TREE_CODE (*tp))
11295 {
11296 case VAR_DECL:
11297 case PARM_DECL:
11298 case FIELD_DECL:
11299 case RESULT_DECL:
11300 case REAL_CST:
fdf4f148 11301 case INDIRECT_REF:
a0f39282
JJ
11302 case ALIGN_INDIRECT_REF:
11303 case MISALIGNED_INDIRECT_REF:
fdf4f148 11304 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11305 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11306 return *tp;
11307 break;
11308 default:
11309 break;
11310 }
11311
11312 return NULL_TREE;
11313}
11314
11315
11316/* Allocate a 64-bit stack slot to be used for copying SDmode
11317 values through if this function has any SDmode references. */
11318
11319static void
11320rs6000_alloc_sdmode_stack_slot (void)
11321{
11322 tree t;
11323 basic_block bb;
11324 block_stmt_iterator bsi;
11325
11326 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11327
11328 FOR_EACH_BB (bb)
11329 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
11330 {
11331 tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
11332 rs6000_check_sdmode, NULL);
11333 if (ret)
11334 {
11335 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11336 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11337 SDmode, 0);
11338 return;
11339 }
11340 }
11341
11342 /* Check for any SDmode parameters of the function. */
11343 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11344 {
11345 if (TREE_TYPE (t) == error_mark_node)
11346 continue;
11347
11348 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11349 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11350 {
11351 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11352 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11353 SDmode, 0);
11354 return;
11355 }
11356 }
11357}
11358
11359static void
11360rs6000_instantiate_decls (void)
11361{
11362 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11363 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11364}
11365
9878760c
RK
11366/* Return the register class of a scratch register needed to copy IN into
11367 or out of a register in CLASS in MODE. If it can be done directly,
11368 NO_REGS is returned. */
11369
11370enum reg_class
3c4774e0
R
11371rs6000_secondary_reload_class (enum reg_class class,
11372 enum machine_mode mode ATTRIBUTE_UNUSED,
11373 rtx in)
9878760c 11374{
5accd822 11375 int regno;
9878760c 11376
ab82a49f
AP
11377 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11378#if TARGET_MACHO
c4ad648e 11379 && MACHOPIC_INDIRECT
ab82a49f 11380#endif
c4ad648e 11381 ))
46fad5b7
DJ
11382 {
11383 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11384 other than BASE_REGS for TARGET_ELF. So indicate that a
11385 register from BASE_REGS is needed as an intermediate
11386 register.
f676971a 11387
46fad5b7
DJ
11388 On Darwin, pic addresses require a load from memory, which
11389 needs a base register. */
11390 if (class != BASE_REGS
c4ad648e
AM
11391 && (GET_CODE (in) == SYMBOL_REF
11392 || GET_CODE (in) == HIGH
11393 || GET_CODE (in) == LABEL_REF
11394 || GET_CODE (in) == CONST))
11395 return BASE_REGS;
46fad5b7 11396 }
e7b7998a 11397
5accd822
DE
11398 if (GET_CODE (in) == REG)
11399 {
11400 regno = REGNO (in);
11401 if (regno >= FIRST_PSEUDO_REGISTER)
11402 {
11403 regno = true_regnum (in);
11404 if (regno >= FIRST_PSEUDO_REGISTER)
11405 regno = -1;
11406 }
11407 }
11408 else if (GET_CODE (in) == SUBREG)
11409 {
11410 regno = true_regnum (in);
11411 if (regno >= FIRST_PSEUDO_REGISTER)
11412 regno = -1;
11413 }
11414 else
11415 regno = -1;
11416
9878760c
RK
11417 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11418 into anything. */
11419 if (class == GENERAL_REGS || class == BASE_REGS
11420 || (regno >= 0 && INT_REGNO_P (regno)))
11421 return NO_REGS;
11422
11423 /* Constants, memory, and FP registers can go into FP registers. */
11424 if ((regno == -1 || FP_REGNO_P (regno))
11425 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
e41b2a33 11426 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11427
0ac081f6
AH
11428 /* Memory, and AltiVec registers can go into AltiVec registers. */
11429 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11430 && class == ALTIVEC_REGS)
11431 return NO_REGS;
11432
9878760c
RK
11433 /* We can copy among the CR registers. */
11434 if ((class == CR_REGS || class == CR0_REGS)
11435 && regno >= 0 && CR_REGNO_P (regno))
11436 return NO_REGS;
11437
11438 /* Otherwise, we need GENERAL_REGS. */
11439 return GENERAL_REGS;
11440}
11441\f
11442/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11443 know this is a valid comparison.
9878760c
RK
11444
11445 SCC_P is 1 if this is for an scc. That means that %D will have been
11446 used instead of %C, so the bits will be in different places.
11447
b4ac57ab 11448 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11449
11450int
a2369ed3 11451ccr_bit (rtx op, int scc_p)
9878760c
RK
11452{
11453 enum rtx_code code = GET_CODE (op);
11454 enum machine_mode cc_mode;
11455 int cc_regnum;
11456 int base_bit;
9ebbca7d 11457 rtx reg;
9878760c 11458
ec8e098d 11459 if (!COMPARISON_P (op))
9878760c
RK
11460 return -1;
11461
9ebbca7d
GK
11462 reg = XEXP (op, 0);
11463
37409796 11464 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11465
11466 cc_mode = GET_MODE (reg);
11467 cc_regnum = REGNO (reg);
11468 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11469
39a10a29 11470 validate_condition_mode (code, cc_mode);
c5defebb 11471
b7053a3f
GK
11472 /* When generating a sCOND operation, only positive conditions are
11473 allowed. */
37409796
NS
11474 gcc_assert (!scc_p
11475 || code == EQ || code == GT || code == LT || code == UNORDERED
11476 || code == GTU || code == LTU);
f676971a 11477
9878760c
RK
11478 switch (code)
11479 {
11480 case NE:
11481 return scc_p ? base_bit + 3 : base_bit + 2;
11482 case EQ:
11483 return base_bit + 2;
1c882ea4 11484 case GT: case GTU: case UNLE:
9878760c 11485 return base_bit + 1;
1c882ea4 11486 case LT: case LTU: case UNGE:
9878760c 11487 return base_bit;
1c882ea4
GK
11488 case ORDERED: case UNORDERED:
11489 return base_bit + 3;
9878760c
RK
11490
11491 case GE: case GEU:
39a10a29 11492 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11493 unordered position. So test that bit. For integer, this is ! LT
11494 unless this is an scc insn. */
39a10a29 11495 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11496
11497 case LE: case LEU:
39a10a29 11498 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11499
9878760c 11500 default:
37409796 11501 gcc_unreachable ();
9878760c
RK
11502 }
11503}
1ff7789b 11504\f
8d30c4ee 11505/* Return the GOT register. */
1ff7789b 11506
9390387d 11507rtx
a2369ed3 11508rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11509{
a4f6c312
SS
11510 /* The second flow pass currently (June 1999) can't update
11511 regs_ever_live without disturbing other parts of the compiler, so
11512 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11513 if (!can_create_pseudo_p ()
11514 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11515 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11516
e3b5732b 11517 crtl->uses_pic_offset_table = 1;
3cb999d8 11518
1ff7789b
MM
11519 return pic_offset_table_rtx;
11520}
a7df97e6 11521\f
e2500fed
GK
11522/* Function to init struct machine_function.
11523 This will be called, via a pointer variable,
11524 from push_function_context. */
a7df97e6 11525
e2500fed 11526static struct machine_function *
863d938c 11527rs6000_init_machine_status (void)
a7df97e6 11528{
5ead67f6 11529 return GGC_CNEW (machine_function);
a7df97e6 11530}
9878760c 11531\f
0ba1b2ff
AM
11532/* These macros test for integers and extract the low-order bits. */
11533#define INT_P(X) \
11534((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11535 && GET_MODE (X) == VOIDmode)
11536
11537#define INT_LOWPART(X) \
11538 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11539
11540int
a2369ed3 11541extract_MB (rtx op)
0ba1b2ff
AM
11542{
11543 int i;
11544 unsigned long val = INT_LOWPART (op);
11545
11546 /* If the high bit is zero, the value is the first 1 bit we find
11547 from the left. */
11548 if ((val & 0x80000000) == 0)
11549 {
37409796 11550 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11551
11552 i = 1;
11553 while (((val <<= 1) & 0x80000000) == 0)
11554 ++i;
11555 return i;
11556 }
11557
11558 /* If the high bit is set and the low bit is not, or the mask is all
11559 1's, the value is zero. */
11560 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11561 return 0;
11562
11563 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11564 from the right. */
11565 i = 31;
11566 while (((val >>= 1) & 1) != 0)
11567 --i;
11568
11569 return i;
11570}
11571
11572int
a2369ed3 11573extract_ME (rtx op)
0ba1b2ff
AM
11574{
11575 int i;
11576 unsigned long val = INT_LOWPART (op);
11577
11578 /* If the low bit is zero, the value is the first 1 bit we find from
11579 the right. */
11580 if ((val & 1) == 0)
11581 {
37409796 11582 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
11583
11584 i = 30;
11585 while (((val >>= 1) & 1) == 0)
11586 --i;
11587
11588 return i;
11589 }
11590
11591 /* If the low bit is set and the high bit is not, or the mask is all
11592 1's, the value is 31. */
11593 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11594 return 31;
11595
11596 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11597 from the left. */
11598 i = 0;
11599 while (((val <<= 1) & 0x80000000) != 0)
11600 ++i;
11601
11602 return i;
11603}
11604
c4501e62
JJ
11605/* Locate some local-dynamic symbol still in use by this function
11606 so that we can print its name in some tls_ld pattern. */
11607
11608static const char *
863d938c 11609rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
11610{
11611 rtx insn;
11612
11613 if (cfun->machine->some_ld_name)
11614 return cfun->machine->some_ld_name;
11615
11616 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11617 if (INSN_P (insn)
11618 && for_each_rtx (&PATTERN (insn),
11619 rs6000_get_some_local_dynamic_name_1, 0))
11620 return cfun->machine->some_ld_name;
11621
37409796 11622 gcc_unreachable ();
c4501e62
JJ
11623}
11624
11625/* Helper function for rs6000_get_some_local_dynamic_name. */
11626
11627static int
a2369ed3 11628rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
11629{
11630 rtx x = *px;
11631
11632 if (GET_CODE (x) == SYMBOL_REF)
11633 {
11634 const char *str = XSTR (x, 0);
11635 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11636 {
11637 cfun->machine->some_ld_name = str;
11638 return 1;
11639 }
11640 }
11641
11642 return 0;
11643}
11644
85b776df
AM
11645/* Write out a function code label. */
11646
11647void
11648rs6000_output_function_entry (FILE *file, const char *fname)
11649{
11650 if (fname[0] != '.')
11651 {
11652 switch (DEFAULT_ABI)
11653 {
11654 default:
37409796 11655 gcc_unreachable ();
85b776df
AM
11656
11657 case ABI_AIX:
11658 if (DOT_SYMBOLS)
11659 putc ('.', file);
11660 else
11661 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11662 break;
11663
11664 case ABI_V4:
11665 case ABI_DARWIN:
11666 break;
11667 }
11668 }
11669 if (TARGET_AIX)
11670 RS6000_OUTPUT_BASENAME (file, fname);
11671 else
11672 assemble_name (file, fname);
11673}
11674
9878760c
RK
11675/* Print an operand. Recognize special options, documented below. */
11676
38c1f2d7 11677#if TARGET_ELF
d9407988 11678#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11679#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
11680#else
11681#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11682#define SMALL_DATA_REG 0
ba5e43aa
MM
11683#endif
11684
9878760c 11685void
a2369ed3 11686print_operand (FILE *file, rtx x, int code)
9878760c
RK
11687{
11688 int i;
a260abc9 11689 HOST_WIDE_INT val;
0ba1b2ff 11690 unsigned HOST_WIDE_INT uval;
9878760c
RK
11691
11692 switch (code)
11693 {
a8b3aeda 11694 case '.':
a85d226b
RK
11695 /* Write out an instruction after the call which may be replaced
11696 with glue code by the loader. This depends on the AIX version. */
11697 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
11698 return;
11699
81eace42
GK
11700 /* %a is output_address. */
11701
9854d9ed
RK
11702 case 'A':
11703 /* If X is a constant integer whose low-order 5 bits are zero,
11704 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11705 in the AIX assembler where "sri" with a zero shift count
20e26713 11706 writes a trash instruction. */
9854d9ed 11707 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11708 putc ('l', file);
9854d9ed 11709 else
76229ac8 11710 putc ('r', file);
9854d9ed
RK
11711 return;
11712
11713 case 'b':
e2c953b6
DE
11714 /* If constant, low-order 16 bits of constant, unsigned.
11715 Otherwise, write normally. */
11716 if (INT_P (x))
11717 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11718 else
11719 print_operand (file, x, 0);
cad12a8d
RK
11720 return;
11721
a260abc9
DE
11722 case 'B':
11723 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11724 for 64-bit mask direction. */
9390387d 11725 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11726 return;
a260abc9 11727
81eace42
GK
11728 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11729 output_operand. */
11730
423c1189
AH
11731 case 'c':
11732 /* X is a CR register. Print the number of the GT bit of the CR. */
11733 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11734 output_operand_lossage ("invalid %%E value");
11735 else
11736 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11737 return;
11738
11739 case 'D':
cef6b86c 11740 /* Like 'J' but get to the GT bit only. */
37409796 11741 gcc_assert (GET_CODE (x) == REG);
423c1189 11742
cef6b86c
EB
11743 /* Bit 1 is GT bit. */
11744 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11745
cef6b86c
EB
11746 /* Add one for shift count in rlinm for scc. */
11747 fprintf (file, "%d", i + 1);
423c1189
AH
11748 return;
11749
9854d9ed 11750 case 'E':
39a10a29 11751 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
11752 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11753 output_operand_lossage ("invalid %%E value");
78fbdbf7 11754 else
39a10a29 11755 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11756 return;
9854d9ed
RK
11757
11758 case 'f':
11759 /* X is a CR register. Print the shift count needed to move it
11760 to the high-order four bits. */
11761 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11762 output_operand_lossage ("invalid %%f value");
11763 else
9ebbca7d 11764 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11765 return;
11766
11767 case 'F':
11768 /* Similar, but print the count for the rotate in the opposite
11769 direction. */
11770 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11771 output_operand_lossage ("invalid %%F value");
11772 else
9ebbca7d 11773 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
11774 return;
11775
11776 case 'G':
11777 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11778 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
11779 if (GET_CODE (x) != CONST_INT)
11780 output_operand_lossage ("invalid %%G value");
11781 else if (INTVAL (x) >= 0)
76229ac8 11782 putc ('z', file);
9854d9ed 11783 else
76229ac8 11784 putc ('m', file);
9854d9ed 11785 return;
e2c953b6 11786
9878760c 11787 case 'h':
a4f6c312
SS
11788 /* If constant, output low-order five bits. Otherwise, write
11789 normally. */
9878760c 11790 if (INT_P (x))
5f59ecb7 11791 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
11792 else
11793 print_operand (file, x, 0);
11794 return;
11795
64305719 11796 case 'H':
a4f6c312
SS
11797 /* If constant, output low-order six bits. Otherwise, write
11798 normally. */
64305719 11799 if (INT_P (x))
5f59ecb7 11800 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
11801 else
11802 print_operand (file, x, 0);
11803 return;
11804
9854d9ed
RK
11805 case 'I':
11806 /* Print `i' if this is a constant, else nothing. */
9878760c 11807 if (INT_P (x))
76229ac8 11808 putc ('i', file);
9878760c
RK
11809 return;
11810
9854d9ed
RK
11811 case 'j':
11812 /* Write the bit number in CCR for jump. */
11813 i = ccr_bit (x, 0);
11814 if (i == -1)
11815 output_operand_lossage ("invalid %%j code");
9878760c 11816 else
9854d9ed 11817 fprintf (file, "%d", i);
9878760c
RK
11818 return;
11819
9854d9ed
RK
11820 case 'J':
11821 /* Similar, but add one for shift count in rlinm for scc and pass
11822 scc flag to `ccr_bit'. */
11823 i = ccr_bit (x, 1);
11824 if (i == -1)
11825 output_operand_lossage ("invalid %%J code");
11826 else
a0466a68
RK
11827 /* If we want bit 31, write a shift count of zero, not 32. */
11828 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
11829 return;
11830
9854d9ed
RK
11831 case 'k':
11832 /* X must be a constant. Write the 1's complement of the
11833 constant. */
9878760c 11834 if (! INT_P (x))
9854d9ed 11835 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
11836 else
11837 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
11838 return;
11839
81eace42 11840 case 'K':
9ebbca7d
GK
11841 /* X must be a symbolic constant on ELF. Write an
11842 expression suitable for an 'addi' that adds in the low 16
11843 bits of the MEM. */
11844 if (GET_CODE (x) != CONST)
11845 {
11846 print_operand_address (file, x);
11847 fputs ("@l", file);
11848 }
11849 else
11850 {
11851 if (GET_CODE (XEXP (x, 0)) != PLUS
11852 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11853 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11854 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11855 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
11856 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11857 fputs ("@l", file);
ed8d2920
MM
11858 /* For GNU as, there must be a non-alphanumeric character
11859 between 'l' and the number. The '-' is added by
11860 print_operand() already. */
11861 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11862 fputs ("+", file);
9ebbca7d
GK
11863 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11864 }
81eace42
GK
11865 return;
11866
11867 /* %l is output_asm_label. */
9ebbca7d 11868
9854d9ed
RK
11869 case 'L':
11870 /* Write second word of DImode or DFmode reference. Works on register
11871 or non-indexed memory only. */
11872 if (GET_CODE (x) == REG)
fb5c67a7 11873 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
11874 else if (GET_CODE (x) == MEM)
11875 {
11876 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11877 we have already done it, we can just use an offset of word. */
9854d9ed
RK
11878 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11879 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
11880 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11881 UNITS_PER_WORD));
6fb5fa3c
DB
11882 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11883 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11884 UNITS_PER_WORD));
9854d9ed 11885 else
d7624dc0
RK
11886 output_address (XEXP (adjust_address_nv (x, SImode,
11887 UNITS_PER_WORD),
11888 0));
ed8908e7 11889
ba5e43aa 11890 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11891 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11892 reg_names[SMALL_DATA_REG]);
9854d9ed 11893 }
9878760c 11894 return;
f676971a 11895
9878760c
RK
11896 case 'm':
11897 /* MB value for a mask operand. */
b1765bde 11898 if (! mask_operand (x, SImode))
9878760c
RK
11899 output_operand_lossage ("invalid %%m value");
11900
0ba1b2ff 11901 fprintf (file, "%d", extract_MB (x));
9878760c
RK
11902 return;
11903
11904 case 'M':
11905 /* ME value for a mask operand. */
b1765bde 11906 if (! mask_operand (x, SImode))
a260abc9 11907 output_operand_lossage ("invalid %%M value");
9878760c 11908
0ba1b2ff 11909 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11910 return;
11911
81eace42
GK
11912 /* %n outputs the negative of its operand. */
11913
9878760c
RK
11914 case 'N':
11915 /* Write the number of elements in the vector times 4. */
11916 if (GET_CODE (x) != PARALLEL)
11917 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11918 else
11919 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11920 return;
11921
11922 case 'O':
11923 /* Similar, but subtract 1 first. */
11924 if (GET_CODE (x) != PARALLEL)
1427100a 11925 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11926 else
11927 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11928 return;
11929
9854d9ed
RK
11930 case 'p':
11931 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11932 if (! INT_P (x)
2bfcf297 11933 || INT_LOWPART (x) < 0
9854d9ed
RK
11934 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11935 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11936 else
11937 fprintf (file, "%d", i);
9854d9ed
RK
11938 return;
11939
9878760c
RK
11940 case 'P':
11941 /* The operand must be an indirect memory reference. The result
8bb418a3 11942 is the register name. */
9878760c
RK
11943 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11944 || REGNO (XEXP (x, 0)) >= 32)
11945 output_operand_lossage ("invalid %%P value");
e2c953b6 11946 else
fb5c67a7 11947 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11948 return;
11949
dfbdccdb
GK
11950 case 'q':
11951 /* This outputs the logical code corresponding to a boolean
11952 expression. The expression may have one or both operands
39a10a29 11953 negated (if one, only the first one). For condition register
c4ad648e
AM
11954 logical operations, it will also treat the negated
11955 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11956 {
63bc1d05 11957 const char *const *t = 0;
dfbdccdb
GK
11958 const char *s;
11959 enum rtx_code code = GET_CODE (x);
11960 static const char * const tbl[3][3] = {
11961 { "and", "andc", "nor" },
11962 { "or", "orc", "nand" },
11963 { "xor", "eqv", "xor" } };
11964
11965 if (code == AND)
11966 t = tbl[0];
11967 else if (code == IOR)
11968 t = tbl[1];
11969 else if (code == XOR)
11970 t = tbl[2];
11971 else
11972 output_operand_lossage ("invalid %%q value");
11973
11974 if (GET_CODE (XEXP (x, 0)) != NOT)
11975 s = t[0];
11976 else
11977 {
11978 if (GET_CODE (XEXP (x, 1)) == NOT)
11979 s = t[2];
11980 else
11981 s = t[1];
11982 }
f676971a 11983
dfbdccdb
GK
11984 fputs (s, file);
11985 }
11986 return;
11987
2c4a9cff
DE
11988 case 'Q':
11989 if (TARGET_MFCRF)
3b6ce0af 11990 fputc (',', file);
5efb1046 11991 /* FALLTHRU */
2c4a9cff
DE
11992 else
11993 return;
11994
9854d9ed
RK
11995 case 'R':
11996 /* X is a CR register. Print the mask for `mtcrf'. */
11997 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11998 output_operand_lossage ("invalid %%R value");
11999 else
9ebbca7d 12000 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12001 return;
9854d9ed
RK
12002
12003 case 's':
12004 /* Low 5 bits of 32 - value */
12005 if (! INT_P (x))
12006 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
12007 else
12008 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12009 return;
9854d9ed 12010
a260abc9 12011 case 'S':
0ba1b2ff 12012 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
12013 CONST_INT 32-bit mask is considered sign-extended so any
12014 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12015 if (! mask64_operand (x, DImode))
a260abc9
DE
12016 output_operand_lossage ("invalid %%S value");
12017
0ba1b2ff 12018 uval = INT_LOWPART (x);
a260abc9 12019
0ba1b2ff 12020 if (uval & 1) /* Clear Left */
a260abc9 12021 {
f099d360
GK
12022#if HOST_BITS_PER_WIDE_INT > 64
12023 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12024#endif
0ba1b2ff 12025 i = 64;
a260abc9 12026 }
0ba1b2ff 12027 else /* Clear Right */
a260abc9 12028 {
0ba1b2ff 12029 uval = ~uval;
f099d360
GK
12030#if HOST_BITS_PER_WIDE_INT > 64
12031 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12032#endif
0ba1b2ff 12033 i = 63;
a260abc9 12034 }
0ba1b2ff
AM
12035 while (uval != 0)
12036 --i, uval >>= 1;
37409796 12037 gcc_assert (i >= 0);
0ba1b2ff
AM
12038 fprintf (file, "%d", i);
12039 return;
a260abc9 12040
a3170dc6
AH
12041 case 't':
12042 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12043 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
12044
12045 /* Bit 3 is OV bit. */
12046 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12047
12048 /* If we want bit 31, write a shift count of zero, not 32. */
12049 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12050 return;
12051
cccf3bdc
DE
12052 case 'T':
12053 /* Print the symbolic name of a branch target register. */
1de43f85
DE
12054 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12055 && REGNO (x) != CTR_REGNO))
cccf3bdc 12056 output_operand_lossage ("invalid %%T value");
1de43f85 12057 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
12058 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12059 else
12060 fputs ("ctr", file);
12061 return;
12062
9854d9ed 12063 case 'u':
802a0058 12064 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
12065 if (! INT_P (x))
12066 output_operand_lossage ("invalid %%u value");
e2c953b6 12067 else
f676971a 12068 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12069 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
12070 return;
12071
802a0058
MM
12072 case 'v':
12073 /* High-order 16 bits of constant for use in signed operand. */
12074 if (! INT_P (x))
12075 output_operand_lossage ("invalid %%v value");
e2c953b6 12076 else
134c32f6
DE
12077 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12078 (INT_LOWPART (x) >> 16) & 0xffff);
12079 return;
802a0058 12080
9854d9ed
RK
12081 case 'U':
12082 /* Print `u' if this has an auto-increment or auto-decrement. */
12083 if (GET_CODE (x) == MEM
12084 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
12085 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12086 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12087 putc ('u', file);
9854d9ed 12088 return;
9878760c 12089
e0cd0770
JC
12090 case 'V':
12091 /* Print the trap code for this operand. */
12092 switch (GET_CODE (x))
12093 {
12094 case EQ:
12095 fputs ("eq", file); /* 4 */
12096 break;
12097 case NE:
12098 fputs ("ne", file); /* 24 */
12099 break;
12100 case LT:
12101 fputs ("lt", file); /* 16 */
12102 break;
12103 case LE:
12104 fputs ("le", file); /* 20 */
12105 break;
12106 case GT:
12107 fputs ("gt", file); /* 8 */
12108 break;
12109 case GE:
12110 fputs ("ge", file); /* 12 */
12111 break;
12112 case LTU:
12113 fputs ("llt", file); /* 2 */
12114 break;
12115 case LEU:
12116 fputs ("lle", file); /* 6 */
12117 break;
12118 case GTU:
12119 fputs ("lgt", file); /* 1 */
12120 break;
12121 case GEU:
12122 fputs ("lge", file); /* 5 */
12123 break;
12124 default:
37409796 12125 gcc_unreachable ();
e0cd0770
JC
12126 }
12127 break;
12128
9854d9ed
RK
12129 case 'w':
12130 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12131 normally. */
12132 if (INT_P (x))
f676971a 12133 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12134 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
12135 else
12136 print_operand (file, x, 0);
9878760c
RK
12137 return;
12138
9854d9ed 12139 case 'W':
e2c953b6 12140 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
12141 val = (GET_CODE (x) == CONST_INT
12142 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12143
12144 if (val < 0)
12145 i = -1;
9854d9ed 12146 else
e2c953b6
DE
12147 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12148 if ((val <<= 1) < 0)
12149 break;
12150
12151#if HOST_BITS_PER_WIDE_INT == 32
12152 if (GET_CODE (x) == CONST_INT && i >= 0)
12153 i += 32; /* zero-extend high-part was all 0's */
12154 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12155 {
12156 val = CONST_DOUBLE_LOW (x);
12157
37409796
NS
12158 gcc_assert (val);
12159 if (val < 0)
e2c953b6
DE
12160 --i;
12161 else
12162 for ( ; i < 64; i++)
12163 if ((val <<= 1) < 0)
12164 break;
12165 }
12166#endif
12167
12168 fprintf (file, "%d", i + 1);
9854d9ed 12169 return;
9878760c 12170
9854d9ed
RK
12171 case 'X':
12172 if (GET_CODE (x) == MEM
6fb5fa3c
DB
12173 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12174 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12175 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12176 putc ('x', file);
9854d9ed 12177 return;
9878760c 12178
9854d9ed
RK
12179 case 'Y':
12180 /* Like 'L', for third word of TImode */
12181 if (GET_CODE (x) == REG)
fb5c67a7 12182 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12183 else if (GET_CODE (x) == MEM)
9878760c 12184 {
9854d9ed
RK
12185 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12186 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12187 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
12188 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12189 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12190 else
d7624dc0 12191 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12192 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12193 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12194 reg_names[SMALL_DATA_REG]);
9878760c
RK
12195 }
12196 return;
f676971a 12197
9878760c 12198 case 'z':
b4ac57ab
RS
12199 /* X is a SYMBOL_REF. Write out the name preceded by a
12200 period and without any trailing data in brackets. Used for function
4d30c363
MM
12201 names. If we are configured for System V (or the embedded ABI) on
12202 the PowerPC, do not emit the period, since those systems do not use
12203 TOCs and the like. */
37409796 12204 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12205
c4ad648e
AM
12206 /* Mark the decl as referenced so that cgraph will output the
12207 function. */
9bf6462a 12208 if (SYMBOL_REF_DECL (x))
c4ad648e 12209 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12210
85b776df 12211 /* For macho, check to see if we need a stub. */
f9da97f0
AP
12212 if (TARGET_MACHO)
12213 {
12214 const char *name = XSTR (x, 0);
a031e781 12215#if TARGET_MACHO
3b48085e 12216 if (MACHOPIC_INDIRECT
11abc112
MM
12217 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12218 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
12219#endif
12220 assemble_name (file, name);
12221 }
85b776df 12222 else if (!DOT_SYMBOLS)
9739c90c 12223 assemble_name (file, XSTR (x, 0));
85b776df
AM
12224 else
12225 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
12226 return;
12227
9854d9ed
RK
12228 case 'Z':
12229 /* Like 'L', for last word of TImode. */
12230 if (GET_CODE (x) == REG)
fb5c67a7 12231 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
12232 else if (GET_CODE (x) == MEM)
12233 {
12234 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12235 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12236 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
12237 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12238 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12239 else
d7624dc0 12240 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12241 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12242 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12243 reg_names[SMALL_DATA_REG]);
9854d9ed 12244 }
5c23c401 12245 return;
0ac081f6 12246
a3170dc6 12247 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
12248 case 'y':
12249 {
12250 rtx tmp;
12251
37409796 12252 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
12253
12254 tmp = XEXP (x, 0);
12255
90d3ff1c 12256 /* Ugly hack because %y is overloaded. */
8ef65e3d 12257 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
12258 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12259 || GET_MODE (x) == TFmode
12260 || GET_MODE (x) == TImode))
a3170dc6
AH
12261 {
12262 /* Handle [reg]. */
12263 if (GET_CODE (tmp) == REG)
12264 {
12265 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12266 break;
12267 }
12268 /* Handle [reg+UIMM]. */
12269 else if (GET_CODE (tmp) == PLUS &&
12270 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12271 {
12272 int x;
12273
37409796 12274 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
12275
12276 x = INTVAL (XEXP (tmp, 1));
12277 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12278 break;
12279 }
12280
12281 /* Fall through. Must be [reg+reg]. */
12282 }
850e8d3d
DN
12283 if (TARGET_ALTIVEC
12284 && GET_CODE (tmp) == AND
12285 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12286 && INTVAL (XEXP (tmp, 1)) == -16)
12287 tmp = XEXP (tmp, 0);
0ac081f6 12288 if (GET_CODE (tmp) == REG)
c62f2db5 12289 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12290 else
0ac081f6 12291 {
cb8cc791
AP
12292 if (!GET_CODE (tmp) == PLUS
12293 || !REG_P (XEXP (tmp, 0))
12294 || !REG_P (XEXP (tmp, 1)))
12295 {
12296 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12297 break;
12298 }
bb8df8a6 12299
0ac081f6
AH
12300 if (REGNO (XEXP (tmp, 0)) == 0)
12301 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12302 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12303 else
12304 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12305 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12306 }
0ac081f6
AH
12307 break;
12308 }
f676971a 12309
9878760c
RK
12310 case 0:
12311 if (GET_CODE (x) == REG)
12312 fprintf (file, "%s", reg_names[REGNO (x)]);
12313 else if (GET_CODE (x) == MEM)
12314 {
12315 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12316 know the width from the mode. */
12317 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
12318 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12319 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12320 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
12321 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12322 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
12323 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12324 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12325 else
a54d04b7 12326 output_address (XEXP (x, 0));
9878760c
RK
12327 }
12328 else
a54d04b7 12329 output_addr_const (file, x);
a85d226b 12330 return;
9878760c 12331
c4501e62
JJ
12332 case '&':
12333 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12334 return;
12335
9878760c
RK
12336 default:
12337 output_operand_lossage ("invalid %%xn code");
12338 }
12339}
12340\f
12341/* Print the address of an operand. */
12342
12343void
a2369ed3 12344print_operand_address (FILE *file, rtx x)
9878760c
RK
12345{
12346 if (GET_CODE (x) == REG)
4697a36c 12347 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
12348 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12349 || GET_CODE (x) == LABEL_REF)
9878760c
RK
12350 {
12351 output_addr_const (file, x);
ba5e43aa 12352 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
12353 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12354 reg_names[SMALL_DATA_REG]);
37409796
NS
12355 else
12356 gcc_assert (!TARGET_TOC);
9878760c
RK
12357 }
12358 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12359 {
9024f4b8 12360 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12361 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
12362 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12363 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12364 else
4697a36c
MM
12365 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12366 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
12367 }
12368 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
12369 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12370 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
12371#if TARGET_ELF
12372 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12373 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
12374 {
12375 output_addr_const (file, XEXP (x, 1));
12376 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12377 }
c859cda6
DJ
12378#endif
12379#if TARGET_MACHO
12380 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12381 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
12382 {
12383 fprintf (file, "lo16(");
12384 output_addr_const (file, XEXP (x, 1));
12385 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12386 }
3cb999d8 12387#endif
4d588c14 12388 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12389 {
2bfcf297 12390 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12391 {
2bfcf297
DB
12392 rtx contains_minus = XEXP (x, 1);
12393 rtx minus, symref;
12394 const char *name;
f676971a 12395
9ebbca7d 12396 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12397 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
12398 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12399 contains_minus = XEXP (contains_minus, 0);
12400
2bfcf297
DB
12401 minus = XEXP (contains_minus, 0);
12402 symref = XEXP (minus, 0);
0cdc04e8 12403 gcc_assert (GET_CODE (XEXP (minus, 1)) == SYMBOL_REF);
2bfcf297
DB
12404 XEXP (contains_minus, 0) = symref;
12405 if (TARGET_ELF)
12406 {
12407 char *newname;
12408
12409 name = XSTR (symref, 0);
5ead67f6 12410 newname = XALLOCAVEC (char, strlen (name) + sizeof ("@toc"));
2bfcf297
DB
12411 strcpy (newname, name);
12412 strcat (newname, "@toc");
12413 XSTR (symref, 0) = newname;
12414 }
12415 output_addr_const (file, XEXP (x, 1));
12416 if (TARGET_ELF)
12417 XSTR (symref, 0) = name;
9ebbca7d
GK
12418 XEXP (contains_minus, 0) = minus;
12419 }
12420 else
12421 output_addr_const (file, XEXP (x, 1));
12422
12423 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12424 }
9878760c 12425 else
37409796 12426 gcc_unreachable ();
9878760c
RK
12427}
12428\f
88cad84b 12429/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
12430 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12431 is defined. It also needs to handle DI-mode objects on 64-bit
12432 targets. */
12433
12434static bool
a2369ed3 12435rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12436{
f4f4921e 12437#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12438 /* Special handling for SI values. */
84dcde01 12439 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12440 {
301d03af 12441 static int recurse = 0;
f676971a 12442
301d03af
RS
12443 /* For -mrelocatable, we mark all addresses that need to be fixed up
12444 in the .fixup section. */
12445 if (TARGET_RELOCATABLE
d6b5193b
RS
12446 && in_section != toc_section
12447 && in_section != text_section
4325ca90 12448 && !unlikely_text_section_p (in_section)
301d03af
RS
12449 && !recurse
12450 && GET_CODE (x) != CONST_INT
12451 && GET_CODE (x) != CONST_DOUBLE
12452 && CONSTANT_P (x))
12453 {
12454 char buf[256];
12455
12456 recurse = 1;
12457 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12458 fixuplabelno++;
12459 ASM_OUTPUT_LABEL (asm_out_file, buf);
12460 fprintf (asm_out_file, "\t.long\t(");
12461 output_addr_const (asm_out_file, x);
12462 fprintf (asm_out_file, ")@fixup\n");
12463 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12464 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12465 fprintf (asm_out_file, "\t.long\t");
12466 assemble_name (asm_out_file, buf);
12467 fprintf (asm_out_file, "\n\t.previous\n");
12468 recurse = 0;
12469 return true;
12470 }
12471 /* Remove initial .'s to turn a -mcall-aixdesc function
12472 address into the address of the descriptor, not the function
12473 itself. */
12474 else if (GET_CODE (x) == SYMBOL_REF
12475 && XSTR (x, 0)[0] == '.'
12476 && DEFAULT_ABI == ABI_AIX)
12477 {
12478 const char *name = XSTR (x, 0);
12479 while (*name == '.')
12480 name++;
12481
12482 fprintf (asm_out_file, "\t.long\t%s\n", name);
12483 return true;
12484 }
12485 }
f4f4921e 12486#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
12487 return default_assemble_integer (x, size, aligned_p);
12488}
93638d7a
AM
12489
12490#ifdef HAVE_GAS_HIDDEN
12491/* Emit an assembler directive to set symbol visibility for DECL to
12492 VISIBILITY_TYPE. */
12493
5add3202 12494static void
a2369ed3 12495rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12496{
93638d7a
AM
12497 /* Functions need to have their entry point symbol visibility set as
12498 well as their descriptor symbol visibility. */
85b776df
AM
12499 if (DEFAULT_ABI == ABI_AIX
12500 && DOT_SYMBOLS
12501 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12502 {
25fdb4dc 12503 static const char * const visibility_types[] = {
c4ad648e 12504 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
12505 };
12506
12507 const char *name, *type;
93638d7a
AM
12508
12509 name = ((* targetm.strip_name_encoding)
12510 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12511 type = visibility_types[vis];
93638d7a 12512
25fdb4dc
RH
12513 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12514 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12515 }
25fdb4dc
RH
12516 else
12517 default_assemble_visibility (decl, vis);
93638d7a
AM
12518}
12519#endif
301d03af 12520\f
39a10a29 12521enum rtx_code
a2369ed3 12522rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
12523{
12524 /* Reversal of FP compares takes care -- an ordered compare
12525 becomes an unordered compare and vice versa. */
f676971a 12526 if (mode == CCFPmode
bc9ec0e0
GK
12527 && (!flag_finite_math_only
12528 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12529 || code == UNEQ || code == LTGT))
bab6226b 12530 return reverse_condition_maybe_unordered (code);
39a10a29 12531 else
bab6226b 12532 return reverse_condition (code);
39a10a29
GK
12533}
12534
39a10a29
GK
12535/* Generate a compare for CODE. Return a brand-new rtx that
12536 represents the result of the compare. */
a4f6c312 12537
39a10a29 12538static rtx
a2369ed3 12539rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
12540{
12541 enum machine_mode comp_mode;
12542 rtx compare_result;
12543
12544 if (rs6000_compare_fp_p)
12545 comp_mode = CCFPmode;
12546 else if (code == GTU || code == LTU
c4ad648e 12547 || code == GEU || code == LEU)
39a10a29 12548 comp_mode = CCUNSmode;
60934f9c
NS
12549 else if ((code == EQ || code == NE)
12550 && GET_CODE (rs6000_compare_op0) == SUBREG
12551 && GET_CODE (rs6000_compare_op1) == SUBREG
12552 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12553 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12554 /* These are unsigned values, perhaps there will be a later
12555 ordering compare that can be shared with this one.
12556 Unfortunately we cannot detect the signedness of the operands
12557 for non-subregs. */
12558 comp_mode = CCUNSmode;
39a10a29
GK
12559 else
12560 comp_mode = CCmode;
12561
12562 /* First, the compare. */
12563 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12564
cef6b86c 12565 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12566 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12567 && rs6000_compare_fp_p)
a3170dc6 12568 {
64022b5d 12569 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
12570 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12571
12572 if (op_mode == VOIDmode)
12573 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12574
cef6b86c
EB
12575 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12576 This explains the following mess. */
423c1189 12577
a3170dc6
AH
12578 switch (code)
12579 {
423c1189 12580 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
12581 switch (op_mode)
12582 {
12583 case SFmode:
12584 cmp = flag_unsafe_math_optimizations
12585 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12586 rs6000_compare_op1)
12587 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12588 rs6000_compare_op1);
12589 break;
12590
12591 case DFmode:
12592 cmp = flag_unsafe_math_optimizations
12593 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12594 rs6000_compare_op1)
12595 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12596 rs6000_compare_op1);
12597 break;
12598
17caeff2
JM
12599 case TFmode:
12600 cmp = flag_unsafe_math_optimizations
12601 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12602 rs6000_compare_op1)
12603 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12604 rs6000_compare_op1);
12605 break;
12606
37409796
NS
12607 default:
12608 gcc_unreachable ();
12609 }
a3170dc6 12610 break;
bb8df8a6 12611
423c1189 12612 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
12613 switch (op_mode)
12614 {
12615 case SFmode:
12616 cmp = flag_unsafe_math_optimizations
12617 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12618 rs6000_compare_op1)
12619 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12620 rs6000_compare_op1);
12621 break;
bb8df8a6 12622
37409796
NS
12623 case DFmode:
12624 cmp = flag_unsafe_math_optimizations
12625 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12626 rs6000_compare_op1)
12627 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12628 rs6000_compare_op1);
12629 break;
12630
17caeff2
JM
12631 case TFmode:
12632 cmp = flag_unsafe_math_optimizations
12633 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12634 rs6000_compare_op1)
12635 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12636 rs6000_compare_op1);
12637 break;
12638
37409796
NS
12639 default:
12640 gcc_unreachable ();
12641 }
a3170dc6 12642 break;
bb8df8a6 12643
423c1189 12644 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
12645 switch (op_mode)
12646 {
12647 case SFmode:
12648 cmp = flag_unsafe_math_optimizations
12649 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12650 rs6000_compare_op1)
12651 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12652 rs6000_compare_op1);
12653 break;
bb8df8a6 12654
37409796
NS
12655 case DFmode:
12656 cmp = flag_unsafe_math_optimizations
12657 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12658 rs6000_compare_op1)
12659 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12660 rs6000_compare_op1);
12661 break;
12662
17caeff2
JM
12663 case TFmode:
12664 cmp = flag_unsafe_math_optimizations
12665 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12666 rs6000_compare_op1)
12667 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12668 rs6000_compare_op1);
12669 break;
12670
37409796
NS
12671 default:
12672 gcc_unreachable ();
12673 }
a3170dc6 12674 break;
4d4cbc0e 12675 default:
37409796 12676 gcc_unreachable ();
a3170dc6
AH
12677 }
12678
12679 /* Synthesize LE and GE from LT/GT || EQ. */
12680 if (code == LE || code == GE || code == LEU || code == GEU)
12681 {
a3170dc6
AH
12682 emit_insn (cmp);
12683
12684 switch (code)
12685 {
12686 case LE: code = LT; break;
12687 case GE: code = GT; break;
12688 case LEU: code = LT; break;
12689 case GEU: code = GT; break;
37409796 12690 default: gcc_unreachable ();
a3170dc6
AH
12691 }
12692
a3170dc6
AH
12693 compare_result2 = gen_reg_rtx (CCFPmode);
12694
12695 /* Do the EQ. */
37409796
NS
12696 switch (op_mode)
12697 {
12698 case SFmode:
12699 cmp = flag_unsafe_math_optimizations
12700 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12701 rs6000_compare_op1)
12702 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12703 rs6000_compare_op1);
12704 break;
12705
12706 case DFmode:
12707 cmp = flag_unsafe_math_optimizations
12708 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12709 rs6000_compare_op1)
12710 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12711 rs6000_compare_op1);
12712 break;
12713
17caeff2
JM
12714 case TFmode:
12715 cmp = flag_unsafe_math_optimizations
12716 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12717 rs6000_compare_op1)
12718 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12719 rs6000_compare_op1);
12720 break;
12721
37409796
NS
12722 default:
12723 gcc_unreachable ();
12724 }
a3170dc6
AH
12725 emit_insn (cmp);
12726
a3170dc6 12727 /* OR them together. */
64022b5d
AH
12728 or_result = gen_reg_rtx (CCFPmode);
12729 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12730 compare_result2);
a3170dc6
AH
12731 compare_result = or_result;
12732 code = EQ;
12733 }
12734 else
12735 {
a3170dc6 12736 if (code == NE || code == LTGT)
a3170dc6 12737 code = NE;
423c1189
AH
12738 else
12739 code = EQ;
a3170dc6
AH
12740 }
12741
12742 emit_insn (cmp);
12743 }
12744 else
de17c25f
DE
12745 {
12746 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12747 CLOBBERs to match cmptf_internal2 pattern. */
12748 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12749 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12750 && !TARGET_IEEEQUAD
de17c25f
DE
12751 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12752 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12753 gen_rtvec (9,
12754 gen_rtx_SET (VOIDmode,
12755 compare_result,
12756 gen_rtx_COMPARE (comp_mode,
12757 rs6000_compare_op0,
12758 rs6000_compare_op1)),
12759 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12760 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12761 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12762 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12763 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12764 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12765 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12766 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
12767 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12768 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12769 {
12770 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12771 comp_mode = CCEQmode;
12772 compare_result = gen_reg_rtx (CCEQmode);
12773 if (TARGET_64BIT)
12774 emit_insn (gen_stack_protect_testdi (compare_result,
12775 rs6000_compare_op0, op1));
12776 else
12777 emit_insn (gen_stack_protect_testsi (compare_result,
12778 rs6000_compare_op0, op1));
12779 }
de17c25f
DE
12780 else
12781 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12782 gen_rtx_COMPARE (comp_mode,
12783 rs6000_compare_op0,
12784 rs6000_compare_op1)));
12785 }
f676971a 12786
ca5adc63 12787 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12788 under flag_finite_math_only we don't bother. */
39a10a29 12789 if (rs6000_compare_fp_p
e7108df9 12790 && !flag_finite_math_only
8ef65e3d 12791 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
12792 && (code == LE || code == GE
12793 || code == UNEQ || code == LTGT
12794 || code == UNGT || code == UNLT))
12795 {
12796 enum rtx_code or1, or2;
12797 rtx or1_rtx, or2_rtx, compare2_rtx;
12798 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12799
39a10a29
GK
12800 switch (code)
12801 {
12802 case LE: or1 = LT; or2 = EQ; break;
12803 case GE: or1 = GT; or2 = EQ; break;
12804 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12805 case LTGT: or1 = LT; or2 = GT; break;
12806 case UNGT: or1 = UNORDERED; or2 = GT; break;
12807 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12808 default: gcc_unreachable ();
39a10a29
GK
12809 }
12810 validate_condition_mode (or1, comp_mode);
12811 validate_condition_mode (or2, comp_mode);
1c563bed
KH
12812 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12813 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
12814 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12815 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12816 const_true_rtx);
12817 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12818
12819 compare_result = or_result;
12820 code = EQ;
12821 }
12822
12823 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12824
1c563bed 12825 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
12826}
12827
12828
12829/* Emit the RTL for an sCOND pattern. */
12830
12831void
a2369ed3 12832rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
12833{
12834 rtx condition_rtx;
12835 enum machine_mode op_mode;
b7053a3f 12836 enum rtx_code cond_code;
39a10a29
GK
12837
12838 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
12839 cond_code = GET_CODE (condition_rtx);
12840
8ef65e3d 12841 if (rs6000_compare_fp_p
423c1189
AH
12842 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12843 {
12844 rtx t;
12845
12846 PUT_MODE (condition_rtx, SImode);
12847 t = XEXP (condition_rtx, 0);
12848
37409796 12849 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
12850
12851 if (cond_code == NE)
64022b5d 12852 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12853
64022b5d 12854 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
12855 return;
12856 }
12857
b7053a3f
GK
12858 if (cond_code == NE
12859 || cond_code == GE || cond_code == LE
12860 || cond_code == GEU || cond_code == LEU
12861 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12862 {
12863 rtx not_result = gen_reg_rtx (CCEQmode);
12864 rtx not_op, rev_cond_rtx;
12865 enum machine_mode cc_mode;
f676971a 12866
b7053a3f
GK
12867 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12868
1c563bed 12869 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12870 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
12871 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12872 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12873 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12874 }
39a10a29
GK
12875
12876 op_mode = GET_MODE (rs6000_compare_op0);
12877 if (op_mode == VOIDmode)
12878 op_mode = GET_MODE (rs6000_compare_op1);
12879
12880 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12881 {
12882 PUT_MODE (condition_rtx, DImode);
12883 convert_move (result, condition_rtx, 0);
12884 }
12885 else
12886 {
12887 PUT_MODE (condition_rtx, SImode);
12888 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12889 }
12890}
12891
39a10a29
GK
12892/* Emit a branch of kind CODE to location LOC. */
12893
12894void
a2369ed3 12895rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
12896{
12897 rtx condition_rtx, loc_ref;
12898
12899 condition_rtx = rs6000_generate_compare (code);
12900 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12901 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12902 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12903 loc_ref, pc_rtx)));
12904}
12905
12a4e8c5
GK
12906/* Return the string to output a conditional branch to LABEL, which is
12907 the operand number of the label, or -1 if the branch is really a
f676971a 12908 conditional return.
12a4e8c5
GK
12909
12910 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12911 condition code register and its mode specifies what kind of
12912 comparison we made.
12913
a0ab749a 12914 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12915
12916 INSN is the insn. */
12917
12918char *
a2369ed3 12919output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12920{
12921 static char string[64];
12922 enum rtx_code code = GET_CODE (op);
12923 rtx cc_reg = XEXP (op, 0);
12924 enum machine_mode mode = GET_MODE (cc_reg);
12925 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12926 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12927 int really_reversed = reversed ^ need_longbranch;
12928 char *s = string;
12929 const char *ccode;
12930 const char *pred;
12931 rtx note;
12932
39a10a29
GK
12933 validate_condition_mode (code, mode);
12934
12935 /* Work out which way this really branches. We could use
12936 reverse_condition_maybe_unordered here always but this
12937 makes the resulting assembler clearer. */
12a4e8c5 12938 if (really_reversed)
de40e1df
DJ
12939 {
12940 /* Reversal of FP compares takes care -- an ordered compare
12941 becomes an unordered compare and vice versa. */
12942 if (mode == CCFPmode)
12943 code = reverse_condition_maybe_unordered (code);
12944 else
12945 code = reverse_condition (code);
12946 }
12a4e8c5 12947
8ef65e3d 12948 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12949 {
12950 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12951 to the GT bit. */
37409796
NS
12952 switch (code)
12953 {
12954 case EQ:
12955 /* Opposite of GT. */
12956 code = GT;
12957 break;
12958
12959 case NE:
12960 code = UNLE;
12961 break;
12962
12963 default:
12964 gcc_unreachable ();
12965 }
a3170dc6
AH
12966 }
12967
39a10a29 12968 switch (code)
12a4e8c5
GK
12969 {
12970 /* Not all of these are actually distinct opcodes, but
12971 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12972 case NE: case LTGT:
12973 ccode = "ne"; break;
12974 case EQ: case UNEQ:
12975 ccode = "eq"; break;
f676971a 12976 case GE: case GEU:
50a0b056 12977 ccode = "ge"; break;
f676971a 12978 case GT: case GTU: case UNGT:
50a0b056 12979 ccode = "gt"; break;
f676971a 12980 case LE: case LEU:
50a0b056 12981 ccode = "le"; break;
f676971a 12982 case LT: case LTU: case UNLT:
50a0b056 12983 ccode = "lt"; break;
12a4e8c5
GK
12984 case UNORDERED: ccode = "un"; break;
12985 case ORDERED: ccode = "nu"; break;
12986 case UNGE: ccode = "nl"; break;
12987 case UNLE: ccode = "ng"; break;
12988 default:
37409796 12989 gcc_unreachable ();
12a4e8c5 12990 }
f676971a
EC
12991
12992 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12993 The old mnemonics don't have a way to specify this information. */
f4857b9b 12994 pred = "";
12a4e8c5
GK
12995 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12996 if (note != NULL_RTX)
12997 {
12998 /* PROB is the difference from 50%. */
12999 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
13000
13001 /* Only hint for highly probable/improbable branches on newer
13002 cpus as static prediction overrides processor dynamic
13003 prediction. For older cpus we may as well always hint, but
13004 assume not taken for branches that are very close to 50% as a
13005 mispredicted taken branch is more expensive than a
f676971a 13006 mispredicted not-taken branch. */
ec507f2d 13007 if (rs6000_always_hint
2c9e13f3
JH
13008 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13009 && br_prob_note_reliable_p (note)))
f4857b9b
AM
13010 {
13011 if (abs (prob) > REG_BR_PROB_BASE / 20
13012 && ((prob > 0) ^ need_longbranch))
c4ad648e 13013 pred = "+";
f4857b9b
AM
13014 else
13015 pred = "-";
13016 }
12a4e8c5 13017 }
12a4e8c5
GK
13018
13019 if (label == NULL)
94a54f47 13020 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13021 else
94a54f47 13022 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13023
37c67319 13024 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13025 Assume they'd only be the first character.... */
37c67319
GK
13026 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13027 *s++ = '%';
94a54f47 13028 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
13029
13030 if (label != NULL)
13031 {
13032 /* If the branch distance was too far, we may have to use an
13033 unconditional branch to go the distance. */
13034 if (need_longbranch)
44518ddd 13035 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
13036 else
13037 s += sprintf (s, ",%s", label);
13038 }
13039
13040 return string;
13041}
50a0b056 13042
64022b5d 13043/* Return the string to flip the GT bit on a CR. */
423c1189 13044char *
64022b5d 13045output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
13046{
13047 static char string[64];
13048 int a, b;
13049
37409796
NS
13050 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13051 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13052
64022b5d
AH
13053 /* GT bit. */
13054 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13055 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
13056
13057 sprintf (string, "crnot %d,%d", a, b);
13058 return string;
13059}
13060
21213b4c
DP
13061/* Return insn index for the vector compare instruction for given CODE,
13062 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
13063 not available. */
13064
13065static int
94ff898d 13066get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
13067 enum machine_mode dest_mode,
13068 enum machine_mode op_mode)
13069{
13070 if (!TARGET_ALTIVEC)
13071 return INSN_NOT_AVAILABLE;
13072
13073 switch (code)
13074 {
13075 case EQ:
13076 if (dest_mode == V16QImode && op_mode == V16QImode)
13077 return UNSPEC_VCMPEQUB;
13078 if (dest_mode == V8HImode && op_mode == V8HImode)
13079 return UNSPEC_VCMPEQUH;
13080 if (dest_mode == V4SImode && op_mode == V4SImode)
13081 return UNSPEC_VCMPEQUW;
13082 if (dest_mode == V4SImode && op_mode == V4SFmode)
13083 return UNSPEC_VCMPEQFP;
13084 break;
13085 case GE:
13086 if (dest_mode == V4SImode && op_mode == V4SFmode)
13087 return UNSPEC_VCMPGEFP;
13088 case GT:
13089 if (dest_mode == V16QImode && op_mode == V16QImode)
13090 return UNSPEC_VCMPGTSB;
13091 if (dest_mode == V8HImode && op_mode == V8HImode)
13092 return UNSPEC_VCMPGTSH;
13093 if (dest_mode == V4SImode && op_mode == V4SImode)
13094 return UNSPEC_VCMPGTSW;
13095 if (dest_mode == V4SImode && op_mode == V4SFmode)
13096 return UNSPEC_VCMPGTFP;
13097 break;
13098 case GTU:
13099 if (dest_mode == V16QImode && op_mode == V16QImode)
13100 return UNSPEC_VCMPGTUB;
13101 if (dest_mode == V8HImode && op_mode == V8HImode)
13102 return UNSPEC_VCMPGTUH;
13103 if (dest_mode == V4SImode && op_mode == V4SImode)
13104 return UNSPEC_VCMPGTUW;
13105 break;
13106 default:
13107 break;
13108 }
13109 return INSN_NOT_AVAILABLE;
13110}
13111
13112/* Emit vector compare for operands OP0 and OP1 using code RCODE.
13113 DMODE is expected destination mode. This is a recursive function. */
13114
13115static rtx
13116rs6000_emit_vector_compare (enum rtx_code rcode,
13117 rtx op0, rtx op1,
13118 enum machine_mode dmode)
13119{
13120 int vec_cmp_insn;
13121 rtx mask;
13122 enum machine_mode dest_mode;
13123 enum machine_mode op_mode = GET_MODE (op1);
13124
37409796
NS
13125 gcc_assert (TARGET_ALTIVEC);
13126 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
13127
13128 /* Floating point vector compare instructions uses destination V4SImode.
13129 Move destination to appropriate mode later. */
13130 if (dmode == V4SFmode)
13131 dest_mode = V4SImode;
13132 else
13133 dest_mode = dmode;
13134
13135 mask = gen_reg_rtx (dest_mode);
13136 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13137
13138 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13139 {
13140 bool swap_operands = false;
13141 bool try_again = false;
13142 switch (rcode)
13143 {
13144 case LT:
13145 rcode = GT;
13146 swap_operands = true;
13147 try_again = true;
13148 break;
13149 case LTU:
13150 rcode = GTU;
13151 swap_operands = true;
13152 try_again = true;
13153 break;
13154 case NE:
370df7db
JC
13155 case UNLE:
13156 case UNLT:
13157 case UNGE:
13158 case UNGT:
13159 /* Invert condition and try again.
13160 e.g., A != B becomes ~(A==B). */
21213b4c 13161 {
370df7db 13162 enum rtx_code rev_code;
21213b4c 13163 enum insn_code nor_code;
d1123cde 13164 rtx eq_rtx;
370df7db
JC
13165
13166 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
13167 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13168 dest_mode);
94ff898d 13169
166cdb08 13170 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13171 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
13172 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13173
13174 if (dmode != dest_mode)
13175 {
13176 rtx temp = gen_reg_rtx (dest_mode);
13177 convert_move (temp, mask, 0);
13178 return temp;
13179 }
13180 return mask;
13181 }
13182 break;
13183 case GE:
13184 case GEU:
13185 case LE:
13186 case LEU:
13187 /* Try GT/GTU/LT/LTU OR EQ */
13188 {
13189 rtx c_rtx, eq_rtx;
13190 enum insn_code ior_code;
13191 enum rtx_code new_code;
13192
37409796
NS
13193 switch (rcode)
13194 {
13195 case GE:
13196 new_code = GT;
13197 break;
13198
13199 case GEU:
13200 new_code = GTU;
13201 break;
13202
13203 case LE:
13204 new_code = LT;
13205 break;
13206
13207 case LEU:
13208 new_code = LTU;
13209 break;
13210
13211 default:
13212 gcc_unreachable ();
13213 }
21213b4c
DP
13214
13215 c_rtx = rs6000_emit_vector_compare (new_code,
13216 op0, op1, dest_mode);
13217 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13218 dest_mode);
13219
166cdb08 13220 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13221 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13222 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13223 if (dmode != dest_mode)
13224 {
13225 rtx temp = gen_reg_rtx (dest_mode);
13226 convert_move (temp, mask, 0);
13227 return temp;
13228 }
13229 return mask;
13230 }
13231 break;
13232 default:
37409796 13233 gcc_unreachable ();
21213b4c
DP
13234 }
13235
13236 if (try_again)
13237 {
13238 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13239 /* You only get two chances. */
13240 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13241 }
13242
13243 if (swap_operands)
13244 {
13245 rtx tmp;
13246 tmp = op0;
13247 op0 = op1;
13248 op1 = tmp;
13249 }
13250 }
13251
915167f5
GK
13252 emit_insn (gen_rtx_SET (VOIDmode, mask,
13253 gen_rtx_UNSPEC (dest_mode,
13254 gen_rtvec (2, op0, op1),
13255 vec_cmp_insn)));
21213b4c
DP
13256 if (dmode != dest_mode)
13257 {
13258 rtx temp = gen_reg_rtx (dest_mode);
13259 convert_move (temp, mask, 0);
13260 return temp;
13261 }
13262 return mask;
13263}
13264
13265/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
13266 valid insn doesn exist for given mode. */
13267
13268static int
13269get_vsel_insn (enum machine_mode mode)
13270{
13271 switch (mode)
13272 {
13273 case V4SImode:
13274 return UNSPEC_VSEL4SI;
13275 break;
13276 case V4SFmode:
13277 return UNSPEC_VSEL4SF;
13278 break;
13279 case V8HImode:
13280 return UNSPEC_VSEL8HI;
13281 break;
13282 case V16QImode:
13283 return UNSPEC_VSEL16QI;
13284 break;
13285 default:
13286 return INSN_NOT_AVAILABLE;
13287 break;
13288 }
13289 return INSN_NOT_AVAILABLE;
13290}
13291
13292/* Emit vector select insn where DEST is destination using
13293 operands OP1, OP2 and MASK. */
13294
13295static void
13296rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13297{
13298 rtx t, temp;
13299 enum machine_mode dest_mode = GET_MODE (dest);
13300 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13301
13302 temp = gen_reg_rtx (dest_mode);
94ff898d 13303
bb8df8a6 13304 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 13305 select op2. */
915167f5
GK
13306 t = gen_rtx_SET (VOIDmode, temp,
13307 gen_rtx_UNSPEC (dest_mode,
13308 gen_rtvec (3, op2, op1, mask),
13309 vsel_insn_index));
21213b4c
DP
13310 emit_insn (t);
13311 emit_move_insn (dest, temp);
13312 return;
13313}
13314
94ff898d 13315/* Emit vector conditional expression.
21213b4c
DP
13316 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13317 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13318
13319int
13320rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13321 rtx cond, rtx cc_op0, rtx cc_op1)
13322{
13323 enum machine_mode dest_mode = GET_MODE (dest);
13324 enum rtx_code rcode = GET_CODE (cond);
13325 rtx mask;
13326
13327 if (!TARGET_ALTIVEC)
13328 return 0;
13329
13330 /* Get the vector mask for the given relational operations. */
13331 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13332
13333 rs6000_emit_vector_select (dest, op1, op2, mask);
13334
13335 return 1;
13336}
13337
50a0b056
GK
13338/* Emit a conditional move: move TRUE_COND to DEST if OP of the
13339 operands of the last comparison is nonzero/true, FALSE_COND if it
13340 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13341
50a0b056 13342int
a2369ed3 13343rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13344{
13345 enum rtx_code code = GET_CODE (op);
13346 rtx op0 = rs6000_compare_op0;
13347 rtx op1 = rs6000_compare_op1;
13348 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13349 enum machine_mode compare_mode = GET_MODE (op0);
13350 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13351 rtx temp;
add2402e 13352 bool is_against_zero;
50a0b056 13353
a3c9585f 13354 /* These modes should always match. */
a3170dc6
AH
13355 if (GET_MODE (op1) != compare_mode
13356 /* In the isel case however, we can use a compare immediate, so
13357 op1 may be a small constant. */
13358 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13359 return 0;
178c3eff 13360 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13361 return 0;
178c3eff 13362 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13363 return 0;
13364
50a0b056 13365 /* First, work out if the hardware can do this at all, or
a3c9585f 13366 if it's too slow.... */
50a0b056 13367 if (! rs6000_compare_fp_p)
a3170dc6
AH
13368 {
13369 if (TARGET_ISEL)
13370 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13371 return 0;
13372 }
8ef65e3d 13373 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13374 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13375 return 0;
50a0b056 13376
add2402e 13377 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13378
add2402e
GK
13379 /* A floating-point subtract might overflow, underflow, or produce
13380 an inexact result, thus changing the floating-point flags, so it
13381 can't be generated if we care about that. It's safe if one side
13382 of the construct is zero, since then no subtract will be
13383 generated. */
ebb109ad 13384 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13385 && flag_trapping_math && ! is_against_zero)
13386 return 0;
13387
50a0b056
GK
13388 /* Eliminate half of the comparisons by switching operands, this
13389 makes the remaining code simpler. */
13390 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13391 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13392 {
13393 code = reverse_condition_maybe_unordered (code);
13394 temp = true_cond;
13395 true_cond = false_cond;
13396 false_cond = temp;
13397 }
13398
13399 /* UNEQ and LTGT take four instructions for a comparison with zero,
13400 it'll probably be faster to use a branch here too. */
bc9ec0e0 13401 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13402 return 0;
f676971a 13403
50a0b056
GK
13404 if (GET_CODE (op1) == CONST_DOUBLE)
13405 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13406
b6d08ca1 13407 /* We're going to try to implement comparisons by performing
50a0b056
GK
13408 a subtract, then comparing against zero. Unfortunately,
13409 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 13410 know that the operand is finite and the comparison
50a0b056 13411 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 13412 if (HONOR_INFINITIES (compare_mode)
50a0b056 13413 && code != GT && code != UNGE
045572c7 13414 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13415 /* Constructs of the form (a OP b ? a : b) are safe. */
13416 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13417 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13418 && ! rtx_equal_p (op1, true_cond))))
13419 return 0;
add2402e 13420
50a0b056
GK
13421 /* At this point we know we can use fsel. */
13422
13423 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13424 if (! is_against_zero)
13425 {
13426 temp = gen_reg_rtx (compare_mode);
13427 emit_insn (gen_rtx_SET (VOIDmode, temp,
13428 gen_rtx_MINUS (compare_mode, op0, op1)));
13429 op0 = temp;
13430 op1 = CONST0_RTX (compare_mode);
13431 }
50a0b056
GK
13432
13433 /* If we don't care about NaNs we can reduce some of the comparisons
13434 down to faster ones. */
bc9ec0e0 13435 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13436 switch (code)
13437 {
13438 case GT:
13439 code = LE;
13440 temp = true_cond;
13441 true_cond = false_cond;
13442 false_cond = temp;
13443 break;
13444 case UNGE:
13445 code = GE;
13446 break;
13447 case UNEQ:
13448 code = EQ;
13449 break;
13450 default:
13451 break;
13452 }
13453
13454 /* Now, reduce everything down to a GE. */
13455 switch (code)
13456 {
13457 case GE:
13458 break;
13459
13460 case LE:
3148ad6d
DJ
13461 temp = gen_reg_rtx (compare_mode);
13462 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13463 op0 = temp;
13464 break;
13465
13466 case ORDERED:
3148ad6d
DJ
13467 temp = gen_reg_rtx (compare_mode);
13468 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13469 op0 = temp;
13470 break;
13471
13472 case EQ:
3148ad6d 13473 temp = gen_reg_rtx (compare_mode);
f676971a 13474 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13475 gen_rtx_NEG (compare_mode,
13476 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13477 op0 = temp;
13478 break;
13479
13480 case UNGE:
bc9ec0e0 13481 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13482 temp = gen_reg_rtx (result_mode);
50a0b056 13483 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13484 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13485 gen_rtx_GE (VOIDmode,
13486 op0, op1),
13487 true_cond, false_cond)));
bc9ec0e0
GK
13488 false_cond = true_cond;
13489 true_cond = temp;
50a0b056 13490
3148ad6d
DJ
13491 temp = gen_reg_rtx (compare_mode);
13492 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13493 op0 = temp;
13494 break;
13495
13496 case GT:
bc9ec0e0 13497 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13498 temp = gen_reg_rtx (result_mode);
50a0b056 13499 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13500 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13501 gen_rtx_GE (VOIDmode,
13502 op0, op1),
13503 true_cond, false_cond)));
bc9ec0e0
GK
13504 true_cond = false_cond;
13505 false_cond = temp;
50a0b056 13506
3148ad6d
DJ
13507 temp = gen_reg_rtx (compare_mode);
13508 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13509 op0 = temp;
13510 break;
13511
13512 default:
37409796 13513 gcc_unreachable ();
50a0b056
GK
13514 }
13515
13516 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13517 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13518 gen_rtx_GE (VOIDmode,
13519 op0, op1),
13520 true_cond, false_cond)));
13521 return 1;
13522}
13523
a3170dc6
AH
13524/* Same as above, but for ints (isel). */
13525
13526static int
a2369ed3 13527rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13528{
13529 rtx condition_rtx, cr;
13530
13531 /* All isel implementations thus far are 32-bits. */
13532 if (GET_MODE (rs6000_compare_op0) != SImode)
13533 return 0;
13534
13535 /* We still have to do the compare, because isel doesn't do a
13536 compare, it just looks at the CRx bits set by a previous compare
13537 instruction. */
13538 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13539 cr = XEXP (condition_rtx, 0);
13540
13541 if (GET_MODE (cr) == CCmode)
13542 emit_insn (gen_isel_signed (dest, condition_rtx,
13543 true_cond, false_cond, cr));
13544 else
13545 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13546 true_cond, false_cond, cr));
13547
13548 return 1;
13549}
13550
13551const char *
a2369ed3 13552output_isel (rtx *operands)
a3170dc6
AH
13553{
13554 enum rtx_code code;
13555
13556 code = GET_CODE (operands[1]);
13557 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13558 {
13559 PUT_CODE (operands[1], reverse_condition (code));
13560 return "isel %0,%3,%2,%j1";
13561 }
13562 else
13563 return "isel %0,%2,%3,%j1";
13564}
13565
50a0b056 13566void
a2369ed3 13567rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13568{
13569 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13570 enum rtx_code c;
50a0b056 13571 rtx target;
5dc8d536
AH
13572
13573 if (code == SMAX || code == SMIN)
13574 c = GE;
13575 else
13576 c = GEU;
13577
50a0b056 13578 if (code == SMAX || code == UMAX)
f676971a 13579 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13580 op0, op1, mode, 0);
13581 else
f676971a 13582 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13583 op1, op0, mode, 0);
37409796 13584 gcc_assert (target);
50a0b056
GK
13585 if (target != dest)
13586 emit_move_insn (dest, target);
13587}
46c07df8 13588
915167f5
GK
13589/* Emit instructions to perform a load-reserved/store-conditional operation.
13590 The operation performed is an atomic
13591 (set M (CODE:MODE M OP))
13592 If not NULL, BEFORE is atomically set to M before the operation, and
13593 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13594 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13595 Either OP or M may be wrapped in a NOT operation. */
13596
13597void
13598rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13599 rtx m, rtx op, rtx before_param, rtx after_param,
13600 bool sync_p)
13601{
13602 enum machine_mode used_mode;
13603 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13604 rtx used_m;
13605 rtvec vec;
13606 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13607 rtx shift = NULL_RTX;
bb8df8a6 13608
915167f5
GK
13609 if (sync_p)
13610 emit_insn (gen_memory_barrier ());
bb8df8a6 13611
915167f5
GK
13612 if (GET_CODE (m) == NOT)
13613 used_m = XEXP (m, 0);
13614 else
13615 used_m = m;
13616
13617 /* If this is smaller than SImode, we'll have to use SImode with
13618 adjustments. */
13619 if (mode == QImode || mode == HImode)
13620 {
13621 rtx newop, oldop;
13622
13623 if (MEM_ALIGN (used_m) >= 32)
13624 {
13625 int ishift = 0;
13626 if (BYTES_BIG_ENDIAN)
13627 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13628
915167f5 13629 shift = GEN_INT (ishift);
c75c6d11 13630 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13631 }
13632 else
13633 {
13634 rtx addrSI, aligned_addr;
a9c9d3fa 13635 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13636
c75c6d11
JJ
13637 addrSI = gen_lowpart_common (SImode,
13638 force_reg (Pmode, XEXP (used_m, 0)));
13639 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13640 shift = gen_reg_rtx (SImode);
13641
13642 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13643 GEN_INT (shift_mask)));
13644 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13645
13646 aligned_addr = expand_binop (Pmode, and_optab,
13647 XEXP (used_m, 0),
13648 GEN_INT (-4), NULL_RTX,
13649 1, OPTAB_LIB_WIDEN);
13650 used_m = change_address (used_m, SImode, aligned_addr);
13651 set_mem_align (used_m, 32);
915167f5 13652 }
c75c6d11
JJ
13653 /* It's safe to keep the old alias set of USED_M, because
13654 the operation is atomic and only affects the original
13655 USED_M. */
13656 if (GET_CODE (m) == NOT)
13657 m = gen_rtx_NOT (SImode, used_m);
13658 else
13659 m = used_m;
915167f5
GK
13660
13661 if (GET_CODE (op) == NOT)
13662 {
13663 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13664 oldop = gen_rtx_NOT (SImode, oldop);
13665 }
13666 else
13667 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13668
915167f5
GK
13669 switch (code)
13670 {
13671 case IOR:
13672 case XOR:
13673 newop = expand_binop (SImode, and_optab,
13674 oldop, GEN_INT (imask), NULL_RTX,
13675 1, OPTAB_LIB_WIDEN);
13676 emit_insn (gen_ashlsi3 (newop, newop, shift));
13677 break;
13678
13679 case AND:
13680 newop = expand_binop (SImode, ior_optab,
13681 oldop, GEN_INT (~imask), NULL_RTX,
13682 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13683 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13684 break;
13685
13686 case PLUS:
9f0076e5 13687 case MINUS:
915167f5
GK
13688 {
13689 rtx mask;
bb8df8a6 13690
915167f5
GK
13691 newop = expand_binop (SImode, and_optab,
13692 oldop, GEN_INT (imask), NULL_RTX,
13693 1, OPTAB_LIB_WIDEN);
13694 emit_insn (gen_ashlsi3 (newop, newop, shift));
13695
13696 mask = gen_reg_rtx (SImode);
13697 emit_move_insn (mask, GEN_INT (imask));
13698 emit_insn (gen_ashlsi3 (mask, mask, shift));
13699
9f0076e5
DE
13700 if (code == PLUS)
13701 newop = gen_rtx_PLUS (SImode, m, newop);
13702 else
13703 newop = gen_rtx_MINUS (SImode, m, newop);
13704 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13705 newop = gen_rtx_IOR (SImode, newop,
13706 gen_rtx_AND (SImode,
13707 gen_rtx_NOT (SImode, mask),
13708 m));
13709 break;
13710 }
13711
13712 default:
13713 gcc_unreachable ();
13714 }
13715
a9c9d3fa
GK
13716 if (GET_CODE (m) == NOT)
13717 {
13718 rtx mask, xorm;
13719
13720 mask = gen_reg_rtx (SImode);
13721 emit_move_insn (mask, GEN_INT (imask));
13722 emit_insn (gen_ashlsi3 (mask, mask, shift));
13723
13724 xorm = gen_rtx_XOR (SImode, used_m, mask);
13725 /* Depending on the value of 'op', the XOR or the operation might
13726 be able to be simplified away. */
13727 newop = simplify_gen_binary (code, SImode, xorm, newop);
13728 }
915167f5
GK
13729 op = newop;
13730 used_mode = SImode;
13731 before = gen_reg_rtx (used_mode);
13732 after = gen_reg_rtx (used_mode);
13733 }
13734 else
13735 {
13736 used_mode = mode;
13737 before = before_param;
13738 after = after_param;
13739
13740 if (before == NULL_RTX)
13741 before = gen_reg_rtx (used_mode);
13742 if (after == NULL_RTX)
13743 after = gen_reg_rtx (used_mode);
13744 }
bb8df8a6 13745
9f0076e5
DE
13746 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13747 && used_mode != mode)
915167f5
GK
13748 the_op = op; /* Computed above. */
13749 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13750 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13751 else
13752 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13753
13754 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13755 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13756 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13757 gen_rtx_UNSPEC (used_mode,
13758 gen_rtvec (1, the_op),
13759 UNSPEC_SYNC_OP));
915167f5
GK
13760 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13761
9f0076e5 13762 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13763 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13764 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13765 else
13766 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13767 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13768
13769 /* Shift and mask the return values properly. */
13770 if (used_mode != mode && before_param)
13771 {
13772 emit_insn (gen_lshrsi3 (before, before, shift));
13773 convert_move (before_param, before, 1);
13774 }
13775
13776 if (used_mode != mode && after_param)
13777 {
13778 emit_insn (gen_lshrsi3 (after, after, shift));
13779 convert_move (after_param, after, 1);
13780 }
13781
13782 /* The previous sequence will end with a branch that's dependent on
13783 the conditional store, so placing an isync will ensure that no
13784 other instructions (especially, no load or store instructions)
13785 can start before the atomic operation completes. */
13786 if (sync_p)
13787 emit_insn (gen_isync ());
13788}
13789
b52110d4
DE
13790/* A subroutine of the atomic operation splitters. Jump to LABEL if
13791 COND is true. Mark the jump as unlikely to be taken. */
13792
13793static void
13794emit_unlikely_jump (rtx cond, rtx label)
13795{
13796 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13797 rtx x;
13798
13799 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13800 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13801 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13802}
13803
13804/* A subroutine of the atomic operation splitters. Emit a load-locked
13805 instruction in MODE. */
13806
13807static void
13808emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13809{
13810 rtx (*fn) (rtx, rtx) = NULL;
13811 if (mode == SImode)
13812 fn = gen_load_locked_si;
13813 else if (mode == DImode)
13814 fn = gen_load_locked_di;
13815 emit_insn (fn (reg, mem));
13816}
13817
13818/* A subroutine of the atomic operation splitters. Emit a store-conditional
13819 instruction in MODE. */
13820
13821static void
13822emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13823{
13824 rtx (*fn) (rtx, rtx, rtx) = NULL;
13825 if (mode == SImode)
13826 fn = gen_store_conditional_si;
13827 else if (mode == DImode)
13828 fn = gen_store_conditional_di;
13829
9f0076e5 13830 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13831 if (PPC405_ERRATUM77)
13832 emit_insn (gen_memory_barrier ());
13833
13834 emit_insn (fn (res, mem, val));
13835}
13836
ea2c620c 13837/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13838 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13839 operand of the binary operator. BEFORE and AFTER are optional locations to
13840 return the value of MEM either before of after the operation. SCRATCH is
13841 a scratch register. */
13842
13843void
13844rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13845 rtx before, rtx after, rtx scratch)
13846{
13847 enum machine_mode mode = GET_MODE (mem);
13848 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13849
13850 emit_insn (gen_memory_barrier ());
13851
13852 label = gen_label_rtx ();
13853 emit_label (label);
13854 label = gen_rtx_LABEL_REF (VOIDmode, label);
13855
13856 if (before == NULL_RTX)
13857 before = scratch;
13858 emit_load_locked (mode, before, mem);
13859
13860 if (code == NOT)
13861 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13862 else if (code == AND)
13863 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13864 else
13865 x = gen_rtx_fmt_ee (code, mode, before, val);
13866
13867 if (after != NULL_RTX)
13868 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13869 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13870
13871 emit_store_conditional (mode, cond, mem, scratch);
13872
13873 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13874 emit_unlikely_jump (x, label);
13875
13876 emit_insn (gen_isync ());
13877}
13878
b52110d4
DE
13879/* Expand an atomic compare and swap operation. MEM is the memory on which
13880 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13881 value to be stored. SCRATCH is a scratch GPR. */
13882
13883void
13884rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13885 rtx scratch)
13886{
13887 enum machine_mode mode = GET_MODE (mem);
13888 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13889
13890 emit_insn (gen_memory_barrier ());
13891
13892 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13893 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13894 emit_label (XEXP (label1, 0));
13895
13896 emit_load_locked (mode, retval, mem);
13897
13898 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13899 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13900
13901 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13902 emit_unlikely_jump (x, label2);
13903
13904 emit_move_insn (scratch, newval);
13905 emit_store_conditional (mode, cond, mem, scratch);
13906
13907 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13908 emit_unlikely_jump (x, label1);
13909
13910 emit_insn (gen_isync ());
13911 emit_label (XEXP (label2, 0));
13912}
13913
13914/* Expand an atomic test and set operation. MEM is the memory on which
13915 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13916
13917void
13918rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13919{
13920 enum machine_mode mode = GET_MODE (mem);
13921 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13922
13923 emit_insn (gen_memory_barrier ());
13924
13925 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13926 emit_label (XEXP (label, 0));
13927
13928 emit_load_locked (mode, retval, mem);
13929 emit_move_insn (scratch, val);
13930 emit_store_conditional (mode, cond, mem, scratch);
13931
13932 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13933 emit_unlikely_jump (x, label);
13934
13935 emit_insn (gen_isync ());
13936}
13937
9fc75b97
DE
13938void
13939rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13940{
13941 enum machine_mode mode = GET_MODE (mem);
13942 rtx addrSI, align, wdst, shift, mask;
13943 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13944 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13945
13946 /* Shift amount for subword relative to aligned word. */
13947 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13948 shift = gen_reg_rtx (SImode);
13949 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13950 GEN_INT (shift_mask)));
13951 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13952
13953 /* Shift and mask old value into position within word. */
13954 oldval = convert_modes (SImode, mode, oldval, 1);
13955 oldval = expand_binop (SImode, and_optab,
13956 oldval, GEN_INT (imask), NULL_RTX,
13957 1, OPTAB_LIB_WIDEN);
13958 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13959
13960 /* Shift and mask new value into position within word. */
13961 newval = convert_modes (SImode, mode, newval, 1);
13962 newval = expand_binop (SImode, and_optab,
13963 newval, GEN_INT (imask), NULL_RTX,
13964 1, OPTAB_LIB_WIDEN);
13965 emit_insn (gen_ashlsi3 (newval, newval, shift));
13966
13967 /* Mask for insertion. */
13968 mask = gen_reg_rtx (SImode);
13969 emit_move_insn (mask, GEN_INT (imask));
13970 emit_insn (gen_ashlsi3 (mask, mask, shift));
13971
13972 /* Address of aligned word containing subword. */
13973 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13974 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13975 mem = change_address (mem, SImode, align);
13976 set_mem_align (mem, 32);
13977 MEM_VOLATILE_P (mem) = 1;
13978
13979 wdst = gen_reg_rtx (SImode);
13980 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13981 oldval, newval, mem));
13982
2725b75c
JJ
13983 /* Shift the result back. */
13984 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
13985
9fc75b97
DE
13986 emit_move_insn (dst, gen_lowpart (mode, wdst));
13987}
13988
13989void
13990rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13991 rtx oldval, rtx newval, rtx mem,
13992 rtx scratch)
13993{
13994 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13995
13996 emit_insn (gen_memory_barrier ());
13997 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13998 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13999 emit_label (XEXP (label1, 0));
14000
14001 emit_load_locked (SImode, scratch, mem);
14002
14003 /* Mask subword within loaded value for comparison with oldval.
14004 Use UNSPEC_AND to avoid clobber.*/
14005 emit_insn (gen_rtx_SET (SImode, dest,
14006 gen_rtx_UNSPEC (SImode,
14007 gen_rtvec (2, scratch, mask),
14008 UNSPEC_AND)));
14009
14010 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14011 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14012
14013 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14014 emit_unlikely_jump (x, label2);
14015
14016 /* Clear subword within loaded value for insertion of new value. */
14017 emit_insn (gen_rtx_SET (SImode, scratch,
14018 gen_rtx_AND (SImode,
14019 gen_rtx_NOT (SImode, mask), scratch)));
14020 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14021 emit_store_conditional (SImode, cond, mem, scratch);
14022
14023 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14024 emit_unlikely_jump (x, label1);
14025
14026 emit_insn (gen_isync ());
14027 emit_label (XEXP (label2, 0));
14028}
14029
14030
b52110d4 14031 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14032 multi-register moves. It will emit at most one instruction for
14033 each register that is accessed; that is, it won't emit li/lis pairs
14034 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14035 register. */
46c07df8 14036
46c07df8 14037void
a9baceb1 14038rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14039{
a9baceb1
GK
14040 /* The register number of the first register being moved. */
14041 int reg;
14042 /* The mode that is to be moved. */
14043 enum machine_mode mode;
14044 /* The mode that the move is being done in, and its size. */
14045 enum machine_mode reg_mode;
14046 int reg_mode_size;
14047 /* The number of registers that will be moved. */
14048 int nregs;
14049
14050 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14051 mode = GET_MODE (dst);
c8b622ff 14052 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14053 if (FP_REGNO_P (reg))
7393f7f8 14054 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
14055 else if (ALTIVEC_REGNO_P (reg))
14056 reg_mode = V16QImode;
4f011e1e
JM
14057 else if (TARGET_E500_DOUBLE && mode == TFmode)
14058 reg_mode = DFmode;
a9baceb1
GK
14059 else
14060 reg_mode = word_mode;
14061 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14062
37409796 14063 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14064
a9baceb1
GK
14065 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14066 {
14067 /* Move register range backwards, if we might have destructive
14068 overlap. */
14069 int i;
14070 for (i = nregs - 1; i >= 0; i--)
f676971a 14071 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14072 simplify_gen_subreg (reg_mode, dst, mode,
14073 i * reg_mode_size),
14074 simplify_gen_subreg (reg_mode, src, mode,
14075 i * reg_mode_size)));
14076 }
46c07df8
HP
14077 else
14078 {
a9baceb1
GK
14079 int i;
14080 int j = -1;
14081 bool used_update = false;
46c07df8 14082
c1e55850 14083 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14084 {
14085 rtx breg;
3a1f863f 14086
a9baceb1
GK
14087 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14088 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14089 {
14090 rtx delta_rtx;
a9baceb1 14091 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14092 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14093 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14094 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14095 emit_insn (TARGET_32BIT
14096 ? gen_addsi3 (breg, breg, delta_rtx)
14097 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14098 src = replace_equiv_address (src, breg);
3a1f863f 14099 }
d04b6e6e 14100 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14101 {
13e2e16e 14102 rtx basereg;
c1e55850
GK
14103 basereg = gen_rtx_REG (Pmode, reg);
14104 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14105 src = replace_equiv_address (src, basereg);
c1e55850 14106 }
3a1f863f 14107
0423421f
AM
14108 breg = XEXP (src, 0);
14109 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14110 breg = XEXP (breg, 0);
14111
14112 /* If the base register we are using to address memory is
14113 also a destination reg, then change that register last. */
14114 if (REG_P (breg)
14115 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14116 && REGNO (breg) < REGNO (dst) + nregs)
14117 j = REGNO (breg) - REGNO (dst);
c4ad648e 14118 }
46c07df8 14119
a9baceb1 14120 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14121 {
14122 rtx breg;
14123
a9baceb1
GK
14124 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14125 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14126 {
14127 rtx delta_rtx;
a9baceb1 14128 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14129 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14130 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14131 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14132
14133 /* We have to update the breg before doing the store.
14134 Use store with update, if available. */
14135
14136 if (TARGET_UPDATE)
14137 {
a9baceb1 14138 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14139 emit_insn (TARGET_32BIT
14140 ? (TARGET_POWERPC64
14141 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14142 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14143 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14144 used_update = true;
3a1f863f
DE
14145 }
14146 else
a9baceb1
GK
14147 emit_insn (TARGET_32BIT
14148 ? gen_addsi3 (breg, breg, delta_rtx)
14149 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14150 dst = replace_equiv_address (dst, breg);
3a1f863f 14151 }
37409796 14152 else
d04b6e6e 14153 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14154 }
14155
46c07df8 14156 for (i = 0; i < nregs; i++)
f676971a 14157 {
3a1f863f
DE
14158 /* Calculate index to next subword. */
14159 ++j;
f676971a 14160 if (j == nregs)
3a1f863f 14161 j = 0;
46c07df8 14162
112cdef5 14163 /* If compiler already emitted move of first word by
a9baceb1 14164 store with update, no need to do anything. */
3a1f863f 14165 if (j == 0 && used_update)
a9baceb1 14166 continue;
f676971a 14167
a9baceb1
GK
14168 emit_insn (gen_rtx_SET (VOIDmode,
14169 simplify_gen_subreg (reg_mode, dst, mode,
14170 j * reg_mode_size),
14171 simplify_gen_subreg (reg_mode, src, mode,
14172 j * reg_mode_size)));
3a1f863f 14173 }
46c07df8
HP
14174 }
14175}
14176
12a4e8c5 14177\f
a4f6c312
SS
14178/* This page contains routines that are used to determine what the
14179 function prologue and epilogue code will do and write them out. */
9878760c 14180
a4f6c312
SS
14181/* Return the first fixed-point register that is required to be
14182 saved. 32 if none. */
9878760c
RK
14183
14184int
863d938c 14185first_reg_to_save (void)
9878760c
RK
14186{
14187 int first_reg;
14188
14189 /* Find lowest numbered live register. */
14190 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14191 if (df_regs_ever_live_p (first_reg)
a38d360d 14192 && (! call_used_regs[first_reg]
1db02437 14193 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14194 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14195 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14196 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14197 break;
14198
ee890fe2 14199#if TARGET_MACHO
93638d7a 14200 if (flag_pic
e3b5732b 14201 && crtl->uses_pic_offset_table
93638d7a 14202 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14203 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14204#endif
14205
9878760c
RK
14206 return first_reg;
14207}
14208
14209/* Similar, for FP regs. */
14210
14211int
863d938c 14212first_fp_reg_to_save (void)
9878760c
RK
14213{
14214 int first_reg;
14215
14216 /* Find lowest numbered live register. */
14217 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14218 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14219 break;
14220
14221 return first_reg;
14222}
00b960c7
AH
14223
14224/* Similar, for AltiVec regs. */
14225
14226static int
863d938c 14227first_altivec_reg_to_save (void)
00b960c7
AH
14228{
14229 int i;
14230
14231 /* Stack frame remains as is unless we are in AltiVec ABI. */
14232 if (! TARGET_ALTIVEC_ABI)
14233 return LAST_ALTIVEC_REGNO + 1;
14234
22fa69da 14235 /* On Darwin, the unwind routines are compiled without
982afe02 14236 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14237 altivec registers when necessary. */
e3b5732b 14238 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14239 && ! TARGET_ALTIVEC)
14240 return FIRST_ALTIVEC_REGNO + 20;
14241
00b960c7
AH
14242 /* Find lowest numbered live register. */
14243 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14244 if (df_regs_ever_live_p (i))
00b960c7
AH
14245 break;
14246
14247 return i;
14248}
14249
14250/* Return a 32-bit mask of the AltiVec registers we need to set in
14251 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14252 the 32-bit word is 0. */
14253
14254static unsigned int
863d938c 14255compute_vrsave_mask (void)
00b960c7
AH
14256{
14257 unsigned int i, mask = 0;
14258
22fa69da 14259 /* On Darwin, the unwind routines are compiled without
982afe02 14260 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14261 call-saved altivec registers when necessary. */
e3b5732b 14262 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14263 && ! TARGET_ALTIVEC)
14264 mask |= 0xFFF;
14265
00b960c7
AH
14266 /* First, find out if we use _any_ altivec registers. */
14267 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14268 if (df_regs_ever_live_p (i))
00b960c7
AH
14269 mask |= ALTIVEC_REG_BIT (i);
14270
14271 if (mask == 0)
14272 return mask;
14273
00b960c7
AH
14274 /* Next, remove the argument registers from the set. These must
14275 be in the VRSAVE mask set by the caller, so we don't need to add
14276 them in again. More importantly, the mask we compute here is
14277 used to generate CLOBBERs in the set_vrsave insn, and we do not
14278 wish the argument registers to die. */
38173d38 14279 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14280 mask &= ~ALTIVEC_REG_BIT (i);
14281
14282 /* Similarly, remove the return value from the set. */
14283 {
14284 bool yes = false;
14285 diddle_return_value (is_altivec_return_reg, &yes);
14286 if (yes)
14287 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14288 }
14289
14290 return mask;
14291}
14292
d62294f5 14293/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14294 size of prologues/epilogues by calling our own save/restore-the-world
14295 routines. */
d62294f5
FJ
14296
14297static void
f57fe068
AM
14298compute_save_world_info (rs6000_stack_t *info_ptr)
14299{
14300 info_ptr->world_save_p = 1;
14301 info_ptr->world_save_p
14302 = (WORLD_SAVE_P (info_ptr)
14303 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14304 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14305 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14306 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14307 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14308 && info_ptr->cr_save_p);
f676971a 14309
d62294f5
FJ
14310 /* This will not work in conjunction with sibcalls. Make sure there
14311 are none. (This check is expensive, but seldom executed.) */
f57fe068 14312 if (WORLD_SAVE_P (info_ptr))
f676971a 14313 {
d62294f5
FJ
14314 rtx insn;
14315 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14316 if ( GET_CODE (insn) == CALL_INSN
14317 && SIBLING_CALL_P (insn))
14318 {
14319 info_ptr->world_save_p = 0;
14320 break;
14321 }
d62294f5 14322 }
f676971a 14323
f57fe068 14324 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14325 {
14326 /* Even if we're not touching VRsave, make sure there's room on the
14327 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14328 will attempt to save it. */
d62294f5
FJ
14329 info_ptr->vrsave_size = 4;
14330
298ac1dd
AP
14331 /* If we are going to save the world, we need to save the link register too. */
14332 info_ptr->lr_save_p = 1;
14333
d62294f5
FJ
14334 /* "Save" the VRsave register too if we're saving the world. */
14335 if (info_ptr->vrsave_mask == 0)
c4ad648e 14336 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14337
14338 /* Because the Darwin register save/restore routines only handle
c4ad648e 14339 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14340 check. */
37409796
NS
14341 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14342 && (info_ptr->first_altivec_reg_save
14343 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14344 }
f676971a 14345 return;
d62294f5
FJ
14346}
14347
14348
00b960c7 14349static void
a2369ed3 14350is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14351{
14352 bool *yes = (bool *) xyes;
14353 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14354 *yes = true;
14355}
14356
4697a36c
MM
14357\f
14358/* Calculate the stack information for the current function. This is
14359 complicated by having two separate calling sequences, the AIX calling
14360 sequence and the V.4 calling sequence.
14361
592696dd 14362 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14363 32-bit 64-bit
4697a36c 14364 SP----> +---------------------------------------+
a260abc9 14365 | back chain to caller | 0 0
4697a36c 14366 +---------------------------------------+
a260abc9 14367 | saved CR | 4 8 (8-11)
4697a36c 14368 +---------------------------------------+
a260abc9 14369 | saved LR | 8 16
4697a36c 14370 +---------------------------------------+
a260abc9 14371 | reserved for compilers | 12 24
4697a36c 14372 +---------------------------------------+
a260abc9 14373 | reserved for binders | 16 32
4697a36c 14374 +---------------------------------------+
a260abc9 14375 | saved TOC pointer | 20 40
4697a36c 14376 +---------------------------------------+
a260abc9 14377 | Parameter save area (P) | 24 48
4697a36c 14378 +---------------------------------------+
a260abc9 14379 | Alloca space (A) | 24+P etc.
802a0058 14380 +---------------------------------------+
a7df97e6 14381 | Local variable space (L) | 24+P+A
4697a36c 14382 +---------------------------------------+
a7df97e6 14383 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14384 +---------------------------------------+
00b960c7
AH
14385 | Save area for AltiVec registers (W) | 24+P+A+L+X
14386 +---------------------------------------+
14387 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14388 +---------------------------------------+
14389 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14390 +---------------------------------------+
00b960c7
AH
14391 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
14392 +---------------------------------------+
14393 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
14394 +---------------------------------------+
14395 old SP->| back chain to caller's caller |
14396 +---------------------------------------+
14397
5376a30c
KR
14398 The required alignment for AIX configurations is two words (i.e., 8
14399 or 16 bytes).
14400
14401
4697a36c
MM
14402 V.4 stack frames look like:
14403
14404 SP----> +---------------------------------------+
14405 | back chain to caller | 0
14406 +---------------------------------------+
5eb387b8 14407 | caller's saved LR | 4
4697a36c
MM
14408 +---------------------------------------+
14409 | Parameter save area (P) | 8
14410 +---------------------------------------+
a7df97e6 14411 | Alloca space (A) | 8+P
f676971a 14412 +---------------------------------------+
a7df97e6 14413 | Varargs save area (V) | 8+P+A
f676971a 14414 +---------------------------------------+
a7df97e6 14415 | Local variable space (L) | 8+P+A+V
f676971a 14416 +---------------------------------------+
a7df97e6 14417 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14418 +---------------------------------------+
00b960c7
AH
14419 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14420 +---------------------------------------+
14421 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14422 +---------------------------------------+
14423 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14424 +---------------------------------------+
c4ad648e
AM
14425 | SPE: area for 64-bit GP registers |
14426 +---------------------------------------+
14427 | SPE alignment padding |
14428 +---------------------------------------+
00b960c7 14429 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14430 +---------------------------------------+
00b960c7 14431 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14432 +---------------------------------------+
00b960c7 14433 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14434 +---------------------------------------+
14435 old SP->| back chain to caller's caller |
14436 +---------------------------------------+
b6c9286a 14437
5376a30c
KR
14438 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14439 given. (But note below and in sysv4.h that we require only 8 and
14440 may round up the size of our stack frame anyways. The historical
14441 reason is early versions of powerpc-linux which didn't properly
14442 align the stack at program startup. A happy side-effect is that
14443 -mno-eabi libraries can be used with -meabi programs.)
14444
50d440bc 14445 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14446 the stack alignment requirements may differ. If -mno-eabi is not
14447 given, the required stack alignment is 8 bytes; if -mno-eabi is
14448 given, the required alignment is 16 bytes. (But see V.4 comment
14449 above.) */
4697a36c 14450
61b2fbe7
MM
14451#ifndef ABI_STACK_BOUNDARY
14452#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14453#endif
14454
d1d0c603 14455static rs6000_stack_t *
863d938c 14456rs6000_stack_info (void)
4697a36c 14457{
022123e6 14458 static rs6000_stack_t info;
4697a36c 14459 rs6000_stack_t *info_ptr = &info;
327e5343 14460 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14461 int ehrd_size;
64045029 14462 int save_align;
8070c91a 14463 int first_gp;
44688022 14464 HOST_WIDE_INT non_fixed_size;
4697a36c 14465
022123e6 14466 memset (&info, 0, sizeof (info));
4697a36c 14467
c19de7aa
AH
14468 if (TARGET_SPE)
14469 {
14470 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14471 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14472 cfun->machine->insn_chain_scanned_p
14473 = spe_func_has_64bit_regs_p () + 1;
14474 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14475 }
14476
a4f6c312 14477 /* Select which calling sequence. */
178274da 14478 info_ptr->abi = DEFAULT_ABI;
9878760c 14479
a4f6c312 14480 /* Calculate which registers need to be saved & save area size. */
4697a36c 14481 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14482 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14483 even if it currently looks like we won't. Reload may need it to
14484 get at a constant; if so, it will have already created a constant
14485 pool entry for it. */
2bfcf297 14486 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14487 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14488 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14489 && crtl->uses_const_pool
1db02437 14490 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14491 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14492 else
8070c91a
DJ
14493 first_gp = info_ptr->first_gp_reg_save;
14494
14495 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14496
a3170dc6
AH
14497 /* For the SPE, we have an additional upper 32-bits on each GPR.
14498 Ideally we should save the entire 64-bits only when the upper
14499 half is used in SIMD instructions. Since we only record
14500 registers live (not the size they are used in), this proves
14501 difficult because we'd have to traverse the instruction chain at
14502 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14503 so we opt to save the GPRs in 64-bits always if but one register
14504 gets used in 64-bits. Otherwise, all the registers in the frame
14505 get saved in 32-bits.
a3170dc6 14506
c19de7aa 14507 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14508 traditional GP save area will be empty. */
c19de7aa 14509 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14510 info_ptr->gp_size = 0;
14511
4697a36c
MM
14512 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14513 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14514
00b960c7
AH
14515 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14516 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14517 - info_ptr->first_altivec_reg_save);
14518
592696dd 14519 /* Does this function call anything? */
71f123ca
FS
14520 info_ptr->calls_p = (! current_function_is_leaf
14521 || cfun->machine->ra_needs_full_frame);
b6c9286a 14522
a4f6c312 14523 /* Determine if we need to save the link register. */
022123e6 14524 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14525 && crtl->profile
022123e6 14526 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14527#ifdef TARGET_RELOCATABLE
14528 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14529#endif
14530 || (info_ptr->first_fp_reg_save != 64
14531 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14532 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14533 || info_ptr->calls_p
14534 || rs6000_ra_ever_killed ())
4697a36c
MM
14535 {
14536 info_ptr->lr_save_p = 1;
1de43f85 14537 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14538 }
14539
9ebbca7d 14540 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14541 if (df_regs_ever_live_p (CR2_REGNO)
14542 || df_regs_ever_live_p (CR3_REGNO)
14543 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14544 {
14545 info_ptr->cr_save_p = 1;
178274da 14546 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14547 info_ptr->cr_size = reg_size;
14548 }
14549
83720594
RH
14550 /* If the current function calls __builtin_eh_return, then we need
14551 to allocate stack space for registers that will hold data for
14552 the exception handler. */
e3b5732b 14553 if (crtl->calls_eh_return)
83720594
RH
14554 {
14555 unsigned int i;
14556 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14557 continue;
a3170dc6
AH
14558
14559 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14560 ehrd_size = i * (TARGET_SPE_ABI
14561 && info_ptr->spe_64bit_regs_used != 0
14562 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14563 }
14564 else
14565 ehrd_size = 0;
14566
592696dd 14567 /* Determine various sizes. */
4697a36c
MM
14568 info_ptr->reg_size = reg_size;
14569 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14570 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14571 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14572 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14573 if (FRAME_GROWS_DOWNWARD)
14574 info_ptr->vars_size
5b667039
JJ
14575 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14576 + info_ptr->parm_size,
7d5175e1 14577 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14578 - (info_ptr->fixed_size + info_ptr->vars_size
14579 + info_ptr->parm_size);
00b960c7 14580
c19de7aa 14581 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14582 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14583 else
14584 info_ptr->spe_gp_size = 0;
14585
4d774ff8
HP
14586 if (TARGET_ALTIVEC_ABI)
14587 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14588 else
4d774ff8
HP
14589 info_ptr->vrsave_mask = 0;
14590
14591 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14592 info_ptr->vrsave_size = 4;
14593 else
14594 info_ptr->vrsave_size = 0;
b6c9286a 14595
d62294f5
FJ
14596 compute_save_world_info (info_ptr);
14597
592696dd 14598 /* Calculate the offsets. */
178274da 14599 switch (DEFAULT_ABI)
4697a36c 14600 {
b6c9286a 14601 case ABI_NONE:
24d304eb 14602 default:
37409796 14603 gcc_unreachable ();
b6c9286a
MM
14604
14605 case ABI_AIX:
ee890fe2 14606 case ABI_DARWIN:
b6c9286a
MM
14607 info_ptr->fp_save_offset = - info_ptr->fp_size;
14608 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14609
14610 if (TARGET_ALTIVEC_ABI)
14611 {
14612 info_ptr->vrsave_save_offset
14613 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14614
982afe02 14615 /* Align stack so vector save area is on a quadword boundary.
9278121c 14616 The padding goes above the vectors. */
00b960c7
AH
14617 if (info_ptr->altivec_size != 0)
14618 info_ptr->altivec_padding_size
9278121c 14619 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14620 else
14621 info_ptr->altivec_padding_size = 0;
14622
14623 info_ptr->altivec_save_offset
14624 = info_ptr->vrsave_save_offset
14625 - info_ptr->altivec_padding_size
14626 - info_ptr->altivec_size;
9278121c
GK
14627 gcc_assert (info_ptr->altivec_size == 0
14628 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14629
14630 /* Adjust for AltiVec case. */
14631 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14632 }
14633 else
14634 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14635 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14636 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14637 break;
14638
14639 case ABI_V4:
b6c9286a
MM
14640 info_ptr->fp_save_offset = - info_ptr->fp_size;
14641 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14642 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14643
c19de7aa 14644 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14645 {
14646 /* Align stack so SPE GPR save area is aligned on a
14647 double-word boundary. */
f78c3290 14648 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14649 info_ptr->spe_padding_size
14650 = 8 - (-info_ptr->cr_save_offset % 8);
14651 else
14652 info_ptr->spe_padding_size = 0;
14653
14654 info_ptr->spe_gp_save_offset
14655 = info_ptr->cr_save_offset
14656 - info_ptr->spe_padding_size
14657 - info_ptr->spe_gp_size;
14658
14659 /* Adjust for SPE case. */
022123e6 14660 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14661 }
a3170dc6 14662 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14663 {
14664 info_ptr->vrsave_save_offset
14665 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14666
14667 /* Align stack so vector save area is on a quadword boundary. */
14668 if (info_ptr->altivec_size != 0)
14669 info_ptr->altivec_padding_size
14670 = 16 - (-info_ptr->vrsave_save_offset % 16);
14671 else
14672 info_ptr->altivec_padding_size = 0;
14673
14674 info_ptr->altivec_save_offset
14675 = info_ptr->vrsave_save_offset
14676 - info_ptr->altivec_padding_size
14677 - info_ptr->altivec_size;
14678
14679 /* Adjust for AltiVec case. */
022123e6 14680 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14681 }
14682 else
022123e6
AM
14683 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14684 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14685 info_ptr->lr_save_offset = reg_size;
14686 break;
4697a36c
MM
14687 }
14688
64045029 14689 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14690 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14691 + info_ptr->gp_size
14692 + info_ptr->altivec_size
14693 + info_ptr->altivec_padding_size
a3170dc6
AH
14694 + info_ptr->spe_gp_size
14695 + info_ptr->spe_padding_size
00b960c7
AH
14696 + ehrd_size
14697 + info_ptr->cr_size
022123e6 14698 + info_ptr->vrsave_size,
64045029 14699 save_align);
00b960c7 14700
44688022 14701 non_fixed_size = (info_ptr->vars_size
ff381587 14702 + info_ptr->parm_size
5b667039 14703 + info_ptr->save_size);
ff381587 14704
44688022
AM
14705 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14706 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14707
14708 /* Determine if we need to allocate any stack frame:
14709
a4f6c312
SS
14710 For AIX we need to push the stack if a frame pointer is needed
14711 (because the stack might be dynamically adjusted), if we are
14712 debugging, if we make calls, or if the sum of fp_save, gp_save,
14713 and local variables are more than the space needed to save all
14714 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14715 + 18*8 = 288 (GPR13 reserved).
ff381587 14716
a4f6c312
SS
14717 For V.4 we don't have the stack cushion that AIX uses, but assume
14718 that the debugger can handle stackless frames. */
ff381587
MM
14719
14720 if (info_ptr->calls_p)
14721 info_ptr->push_p = 1;
14722
178274da 14723 else if (DEFAULT_ABI == ABI_V4)
44688022 14724 info_ptr->push_p = non_fixed_size != 0;
ff381587 14725
178274da
AM
14726 else if (frame_pointer_needed)
14727 info_ptr->push_p = 1;
14728
14729 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14730 info_ptr->push_p = 1;
14731
ff381587 14732 else
44688022 14733 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14734
a4f6c312 14735 /* Zero offsets if we're not saving those registers. */
8dda1a21 14736 if (info_ptr->fp_size == 0)
4697a36c
MM
14737 info_ptr->fp_save_offset = 0;
14738
8dda1a21 14739 if (info_ptr->gp_size == 0)
4697a36c
MM
14740 info_ptr->gp_save_offset = 0;
14741
00b960c7
AH
14742 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14743 info_ptr->altivec_save_offset = 0;
14744
14745 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14746 info_ptr->vrsave_save_offset = 0;
14747
c19de7aa
AH
14748 if (! TARGET_SPE_ABI
14749 || info_ptr->spe_64bit_regs_used == 0
14750 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14751 info_ptr->spe_gp_save_offset = 0;
14752
c81fc13e 14753 if (! info_ptr->lr_save_p)
4697a36c
MM
14754 info_ptr->lr_save_offset = 0;
14755
c81fc13e 14756 if (! info_ptr->cr_save_p)
4697a36c
MM
14757 info_ptr->cr_save_offset = 0;
14758
14759 return info_ptr;
14760}
14761
c19de7aa
AH
14762/* Return true if the current function uses any GPRs in 64-bit SIMD
14763 mode. */
14764
14765static bool
863d938c 14766spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14767{
14768 rtx insns, insn;
14769
14770 /* Functions that save and restore all the call-saved registers will
14771 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14772 if (crtl->calls_eh_return
14773 || cfun->calls_setjmp
14774 || crtl->has_nonlocal_goto)
c19de7aa
AH
14775 return true;
14776
14777 insns = get_insns ();
14778
14779 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14780 {
14781 if (INSN_P (insn))
14782 {
14783 rtx i;
14784
b5a5beb9
AH
14785 /* FIXME: This should be implemented with attributes...
14786
14787 (set_attr "spe64" "true")....then,
14788 if (get_spe64(insn)) return true;
14789
14790 It's the only reliable way to do the stuff below. */
14791
c19de7aa 14792 i = PATTERN (insn);
f82f556d
AH
14793 if (GET_CODE (i) == SET)
14794 {
14795 enum machine_mode mode = GET_MODE (SET_SRC (i));
14796
14797 if (SPE_VECTOR_MODE (mode))
14798 return true;
4f011e1e 14799 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14800 return true;
14801 }
c19de7aa
AH
14802 }
14803 }
14804
14805 return false;
14806}
14807
d1d0c603 14808static void
a2369ed3 14809debug_stack_info (rs6000_stack_t *info)
9878760c 14810{
d330fd93 14811 const char *abi_string;
24d304eb 14812
c81fc13e 14813 if (! info)
4697a36c
MM
14814 info = rs6000_stack_info ();
14815
14816 fprintf (stderr, "\nStack information for function %s:\n",
14817 ((current_function_decl && DECL_NAME (current_function_decl))
14818 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14819 : "<unknown>"));
14820
24d304eb
RK
14821 switch (info->abi)
14822 {
b6c9286a
MM
14823 default: abi_string = "Unknown"; break;
14824 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14825 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14826 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14827 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14828 }
14829
14830 fprintf (stderr, "\tABI = %5s\n", abi_string);
14831
00b960c7
AH
14832 if (TARGET_ALTIVEC_ABI)
14833 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14834
a3170dc6
AH
14835 if (TARGET_SPE_ABI)
14836 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14837
4697a36c
MM
14838 if (info->first_gp_reg_save != 32)
14839 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14840
14841 if (info->first_fp_reg_save != 64)
14842 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14843
00b960c7
AH
14844 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14845 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14846 info->first_altivec_reg_save);
14847
4697a36c
MM
14848 if (info->lr_save_p)
14849 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14850
4697a36c
MM
14851 if (info->cr_save_p)
14852 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14853
00b960c7
AH
14854 if (info->vrsave_mask)
14855 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14856
4697a36c
MM
14857 if (info->push_p)
14858 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14859
14860 if (info->calls_p)
14861 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14862
4697a36c
MM
14863 if (info->gp_save_offset)
14864 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14865
14866 if (info->fp_save_offset)
14867 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14868
00b960c7
AH
14869 if (info->altivec_save_offset)
14870 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14871 info->altivec_save_offset);
14872
a3170dc6
AH
14873 if (info->spe_gp_save_offset)
14874 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14875 info->spe_gp_save_offset);
14876
00b960c7
AH
14877 if (info->vrsave_save_offset)
14878 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14879 info->vrsave_save_offset);
14880
4697a36c
MM
14881 if (info->lr_save_offset)
14882 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14883
14884 if (info->cr_save_offset)
14885 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14886
14887 if (info->varargs_save_offset)
14888 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14889
14890 if (info->total_size)
d1d0c603
JJ
14891 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14892 info->total_size);
4697a36c 14893
4697a36c 14894 if (info->vars_size)
d1d0c603
JJ
14895 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14896 info->vars_size);
4697a36c
MM
14897
14898 if (info->parm_size)
14899 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14900
14901 if (info->fixed_size)
14902 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14903
14904 if (info->gp_size)
14905 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14906
a3170dc6
AH
14907 if (info->spe_gp_size)
14908 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14909
4697a36c
MM
14910 if (info->fp_size)
14911 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14912
00b960c7
AH
14913 if (info->altivec_size)
14914 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14915
14916 if (info->vrsave_size)
14917 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14918
14919 if (info->altivec_padding_size)
14920 fprintf (stderr, "\taltivec_padding_size= %5d\n",
14921 info->altivec_padding_size);
14922
a3170dc6
AH
14923 if (info->spe_padding_size)
14924 fprintf (stderr, "\tspe_padding_size = %5d\n",
14925 info->spe_padding_size);
14926
4697a36c
MM
14927 if (info->cr_size)
14928 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14929
14930 if (info->save_size)
14931 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14932
14933 if (info->reg_size != 4)
14934 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14935
14936 fprintf (stderr, "\n");
9878760c 14937}
71f123ca
FS
14938
14939rtx
a2369ed3 14940rs6000_return_addr (int count, rtx frame)
71f123ca 14941{
a4f6c312
SS
14942 /* Currently we don't optimize very well between prolog and body
14943 code and for PIC code the code can be actually quite bad, so
14944 don't try to be too clever here. */
f1384257 14945 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14946 {
14947 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14948
14949 return
14950 gen_rtx_MEM
14951 (Pmode,
14952 memory_address
14953 (Pmode,
14954 plus_constant (copy_to_reg
14955 (gen_rtx_MEM (Pmode,
14956 memory_address (Pmode, frame))),
14957 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14958 }
14959
8c29550d 14960 cfun->machine->ra_need_lr = 1;
1de43f85 14961 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14962}
14963
5e1bf043
DJ
14964/* Say whether a function is a candidate for sibcall handling or not.
14965 We do not allow indirect calls to be optimized into sibling calls.
14966 Also, we can't do it if there are any vector parameters; there's
14967 nowhere to put the VRsave code so it works; note that functions with
14968 vector parameters are required to have a prototype, so the argument
14969 type info must be available here. (The tail recursion case can work
14970 with vector parameters, but there's no way to distinguish here.) */
4977bab6 14971static bool
a2369ed3 14972rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14973{
14974 tree type;
4977bab6 14975 if (decl)
5e1bf043
DJ
14976 {
14977 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14978 {
4977bab6 14979 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14980 type; type = TREE_CHAIN (type))
14981 {
c15b529f 14982 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14983 return false;
5e1bf043 14984 }
c4ad648e 14985 }
5e1bf043 14986 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14987 || ((*targetm.binds_local_p) (decl)
14988 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14989 {
4977bab6 14990 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14991
14992 if (!lookup_attribute ("longcall", attr_list)
14993 || lookup_attribute ("shortcall", attr_list))
4977bab6 14994 return true;
2bcc50d0 14995 }
5e1bf043 14996 }
4977bab6 14997 return false;
5e1bf043
DJ
14998}
14999
e7e64a25
AS
15000/* NULL if INSN insn is valid within a low-overhead loop.
15001 Otherwise return why doloop cannot be applied.
9419649c
DE
15002 PowerPC uses the COUNT register for branch on table instructions. */
15003
e7e64a25 15004static const char *
3101faab 15005rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15006{
15007 if (CALL_P (insn))
e7e64a25 15008 return "Function call in the loop.";
9419649c
DE
15009
15010 if (JUMP_P (insn)
15011 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15012 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15013 return "Computed branch in the loop.";
9419649c 15014
e7e64a25 15015 return NULL;
9419649c
DE
15016}
15017
71f123ca 15018static int
863d938c 15019rs6000_ra_ever_killed (void)
71f123ca
FS
15020{
15021 rtx top;
5e1bf043
DJ
15022 rtx reg;
15023 rtx insn;
71f123ca 15024
e3b5732b 15025 if (crtl->is_thunk)
71f123ca 15026 return 0;
eb0424da 15027
36f7e964
AH
15028 /* regs_ever_live has LR marked as used if any sibcalls are present,
15029 but this should not force saving and restoring in the
15030 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15031 clobbers LR, so that is inappropriate. */
36f7e964 15032
5e1bf043
DJ
15033 /* Also, the prologue can generate a store into LR that
15034 doesn't really count, like this:
36f7e964 15035
5e1bf043
DJ
15036 move LR->R0
15037 bcl to set PIC register
15038 move LR->R31
15039 move R0->LR
36f7e964
AH
15040
15041 When we're called from the epilogue, we need to avoid counting
15042 this as a store. */
f676971a 15043
71f123ca
FS
15044 push_topmost_sequence ();
15045 top = get_insns ();
15046 pop_topmost_sequence ();
1de43f85 15047 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15048
5e1bf043
DJ
15049 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15050 {
15051 if (INSN_P (insn))
15052 {
022123e6
AM
15053 if (CALL_P (insn))
15054 {
15055 if (!SIBLING_CALL_P (insn))
15056 return 1;
15057 }
1de43f85 15058 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15059 return 1;
36f7e964
AH
15060 else if (set_of (reg, insn) != NULL_RTX
15061 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15062 return 1;
15063 }
15064 }
15065 return 0;
71f123ca 15066}
4697a36c 15067\f
9ebbca7d 15068/* Emit instructions needed to load the TOC register.
c7ca610e 15069 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15070 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15071
15072void
a2369ed3 15073rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15074{
6fb5fa3c 15075 rtx dest;
1db02437 15076 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15077
7f970b70 15078 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15079 {
7f970b70 15080 char buf[30];
e65a3857 15081 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15082
15083 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15084 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15085 if (flag_pic == 2)
15086 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15087 else
15088 got = rs6000_got_sym ();
15089 tmp1 = tmp2 = dest;
15090 if (!fromprolog)
15091 {
15092 tmp1 = gen_reg_rtx (Pmode);
15093 tmp2 = gen_reg_rtx (Pmode);
15094 }
6fb5fa3c
DB
15095 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15096 emit_move_insn (tmp1,
1de43f85 15097 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15098 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15099 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15100 }
15101 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15102 {
6fb5fa3c 15103 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15104 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15105 }
15106 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15107 {
15108 char buf[30];
20b71b17
AM
15109 rtx temp0 = (fromprolog
15110 ? gen_rtx_REG (Pmode, 0)
15111 : gen_reg_rtx (Pmode));
20b71b17 15112
20b71b17
AM
15113 if (fromprolog)
15114 {
ccbca5e4 15115 rtx symF, symL;
38c1f2d7 15116
20b71b17
AM
15117 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15118 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15119
20b71b17
AM
15120 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15121 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15122
6fb5fa3c
DB
15123 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15124 emit_move_insn (dest,
1de43f85 15125 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15126 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15127 }
15128 else
20b71b17
AM
15129 {
15130 rtx tocsym;
20b71b17
AM
15131
15132 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15133 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15134 emit_move_insn (dest,
1de43f85 15135 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15136 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15137 }
6fb5fa3c 15138 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15139 }
20b71b17
AM
15140 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15141 {
15142 /* This is for AIX code running in non-PIC ELF32. */
15143 char buf[30];
15144 rtx realsym;
15145 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15146 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15147
6fb5fa3c
DB
15148 emit_insn (gen_elf_high (dest, realsym));
15149 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15150 }
37409796 15151 else
9ebbca7d 15152 {
37409796 15153 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15154
9ebbca7d 15155 if (TARGET_32BIT)
6fb5fa3c 15156 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15157 else
6fb5fa3c 15158 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15159 }
15160}
15161
d1d0c603
JJ
15162/* Emit instructions to restore the link register after determining where
15163 its value has been stored. */
15164
15165void
15166rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15167{
15168 rs6000_stack_t *info = rs6000_stack_info ();
15169 rtx operands[2];
15170
15171 operands[0] = source;
15172 operands[1] = scratch;
15173
15174 if (info->lr_save_p)
15175 {
15176 rtx frame_rtx = stack_pointer_rtx;
15177 HOST_WIDE_INT sp_offset = 0;
15178 rtx tmp;
15179
15180 if (frame_pointer_needed
e3b5732b 15181 || cfun->calls_alloca
d1d0c603
JJ
15182 || info->total_size > 32767)
15183 {
0be76840 15184 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15185 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15186 frame_rtx = operands[1];
15187 }
15188 else if (info->push_p)
15189 sp_offset = info->total_size;
15190
15191 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15192 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15193 emit_move_insn (tmp, operands[0]);
15194 }
15195 else
1de43f85 15196 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15197}
15198
4862826d 15199static GTY(()) alias_set_type set = -1;
f103e34d 15200
4862826d 15201alias_set_type
863d938c 15202get_TOC_alias_set (void)
9ebbca7d 15203{
f103e34d
GK
15204 if (set == -1)
15205 set = new_alias_set ();
15206 return set;
f676971a 15207}
9ebbca7d 15208
c1207243 15209/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15210 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15211 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15212#if TARGET_ELF
3c9eb5f4 15213static int
f676971a 15214uses_TOC (void)
9ebbca7d 15215{
c4501e62 15216 rtx insn;
38c1f2d7 15217
c4501e62
JJ
15218 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15219 if (INSN_P (insn))
15220 {
15221 rtx pat = PATTERN (insn);
15222 int i;
9ebbca7d 15223
f676971a 15224 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15225 for (i = 0; i < XVECLEN (pat, 0); i++)
15226 {
15227 rtx sub = XVECEXP (pat, 0, i);
15228 if (GET_CODE (sub) == USE)
15229 {
15230 sub = XEXP (sub, 0);
15231 if (GET_CODE (sub) == UNSPEC
15232 && XINT (sub, 1) == UNSPEC_TOC)
15233 return 1;
15234 }
15235 }
15236 }
15237 return 0;
9ebbca7d 15238}
c954844a 15239#endif
38c1f2d7 15240
9ebbca7d 15241rtx
f676971a 15242create_TOC_reference (rtx symbol)
9ebbca7d 15243{
b3a13419 15244 if (!can_create_pseudo_p ())
6fb5fa3c 15245 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15246 return gen_rtx_PLUS (Pmode,
a8a05998 15247 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
15248 gen_rtx_CONST (Pmode,
15249 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15250 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15251}
38c1f2d7 15252
fc4767bb
JJ
15253/* If _Unwind_* has been called from within the same module,
15254 toc register is not guaranteed to be saved to 40(1) on function
15255 entry. Save it there in that case. */
c7ca610e 15256
9ebbca7d 15257void
863d938c 15258rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15259{
15260 rtx mem;
15261 rtx stack_top = gen_reg_rtx (Pmode);
15262 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15263 rtx opcode = gen_reg_rtx (SImode);
15264 rtx tocompare = gen_reg_rtx (SImode);
15265 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15266
8308679f 15267 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15268 emit_move_insn (stack_top, mem);
15269
8308679f
DE
15270 mem = gen_frame_mem (Pmode,
15271 gen_rtx_PLUS (Pmode, stack_top,
15272 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15273 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15274 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15275 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15276 : 0xE8410028, SImode));
9ebbca7d 15277
fc4767bb 15278 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15279 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15280 no_toc_save_needed);
9ebbca7d 15281
8308679f
DE
15282 mem = gen_frame_mem (Pmode,
15283 gen_rtx_PLUS (Pmode, stack_top,
15284 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15285 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15286 emit_label (no_toc_save_needed);
9ebbca7d 15287}
38c1f2d7 15288\f
0be76840
DE
15289/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15290 and the change to the stack pointer. */
ba4828e0 15291
9ebbca7d 15292static void
863d938c 15293rs6000_emit_stack_tie (void)
9ebbca7d 15294{
0be76840
DE
15295 rtx mem = gen_frame_mem (BLKmode,
15296 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15297
9ebbca7d
GK
15298 emit_insn (gen_stack_tie (mem));
15299}
38c1f2d7 15300
9ebbca7d
GK
15301/* Emit the correct code for allocating stack space, as insns.
15302 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15303 If COPY_R11, make sure a copy of the old frame is left in r11,
15304 in preference to r12 if COPY_R12.
9ebbca7d
GK
15305 The generated code may use hard register 0 as a temporary. */
15306
15307static void
f78c3290 15308rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15309{
9ebbca7d
GK
15310 rtx insn;
15311 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15312 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15313 rtx todec = gen_int_mode (-size, Pmode);
15314
15315 if (INTVAL (todec) != -size)
15316 {
d4ee4d25 15317 warning (0, "stack frame too large");
61168ff1
RS
15318 emit_insn (gen_trap ());
15319 return;
15320 }
a157febd 15321
e3b5732b 15322 if (crtl->limit_stack)
a157febd
GK
15323 {
15324 if (REG_P (stack_limit_rtx)
f676971a 15325 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15326 && REGNO (stack_limit_rtx) <= 31)
15327 {
5b71a4e7 15328 emit_insn (TARGET_32BIT
9ebbca7d
GK
15329 ? gen_addsi3 (tmp_reg,
15330 stack_limit_rtx,
15331 GEN_INT (size))
15332 : gen_adddi3 (tmp_reg,
15333 stack_limit_rtx,
15334 GEN_INT (size)));
5b71a4e7 15335
9ebbca7d
GK
15336 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15337 const0_rtx));
a157febd
GK
15338 }
15339 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15340 && TARGET_32BIT
f607bc57 15341 && DEFAULT_ABI == ABI_V4)
a157febd 15342 {
9ebbca7d 15343 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15344 gen_rtx_PLUS (Pmode,
15345 stack_limit_rtx,
9ebbca7d 15346 GEN_INT (size)));
5b71a4e7 15347
9ebbca7d
GK
15348 emit_insn (gen_elf_high (tmp_reg, toload));
15349 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15350 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15351 const0_rtx));
a157febd
GK
15352 }
15353 else
d4ee4d25 15354 warning (0, "stack limit expression is not supported");
a157febd
GK
15355 }
15356
f78c3290
NF
15357 if (copy_r12 || copy_r11 || ! TARGET_UPDATE)
15358 emit_move_insn (copy_r11
15359 ? gen_rtx_REG (Pmode, 11)
15360 : gen_rtx_REG (Pmode, 12),
15361 stack_reg);
9ebbca7d 15362
38c1f2d7
MM
15363 if (TARGET_UPDATE)
15364 {
9ebbca7d 15365 if (size > 32767)
38c1f2d7 15366 {
9ebbca7d 15367 /* Need a note here so that try_split doesn't get confused. */
9390387d 15368 if (get_last_insn () == NULL_RTX)
2e040219 15369 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15370 insn = emit_move_insn (tmp_reg, todec);
15371 try_split (PATTERN (insn), insn, 0);
15372 todec = tmp_reg;
38c1f2d7 15373 }
5b71a4e7
DE
15374
15375 insn = emit_insn (TARGET_32BIT
15376 ? gen_movsi_update (stack_reg, stack_reg,
15377 todec, stack_reg)
c4ad648e 15378 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15379 todec, stack_reg));
38c1f2d7
MM
15380 }
15381 else
15382 {
5b71a4e7
DE
15383 insn = emit_insn (TARGET_32BIT
15384 ? gen_addsi3 (stack_reg, stack_reg, todec)
15385 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d 15386 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
f78c3290
NF
15387 copy_r11
15388 ? gen_rtx_REG (Pmode, 11)
15389 : gen_rtx_REG (Pmode, 12));
9ebbca7d 15390 }
f676971a 15391
9ebbca7d 15392 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15393 REG_NOTES (insn) =
9ebbca7d 15394 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15395 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15396 gen_rtx_PLUS (Pmode, stack_reg,
15397 GEN_INT (-size))),
15398 REG_NOTES (insn));
15399}
15400
a4f6c312
SS
15401/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15402 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15403 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15404 deduce these equivalences by itself so it wasn't necessary to hold
15405 its hand so much. */
9ebbca7d
GK
15406
15407static void
f676971a 15408rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15409 rtx reg2, rtx rreg)
9ebbca7d
GK
15410{
15411 rtx real, temp;
15412
e56c4463
JL
15413 /* copy_rtx will not make unique copies of registers, so we need to
15414 ensure we don't have unwanted sharing here. */
15415 if (reg == reg2)
15416 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15417
15418 if (reg == rreg)
15419 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15420
9ebbca7d
GK
15421 real = copy_rtx (PATTERN (insn));
15422
89e7058f
AH
15423 if (reg2 != NULL_RTX)
15424 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15425
15426 real = replace_rtx (real, reg,
9ebbca7d
GK
15427 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15428 STACK_POINTER_REGNUM),
15429 GEN_INT (val)));
f676971a 15430
9ebbca7d
GK
15431 /* We expect that 'real' is either a SET or a PARALLEL containing
15432 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15433 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15434
15435 if (GET_CODE (real) == SET)
15436 {
15437 rtx set = real;
f676971a 15438
9ebbca7d
GK
15439 temp = simplify_rtx (SET_SRC (set));
15440 if (temp)
15441 SET_SRC (set) = temp;
15442 temp = simplify_rtx (SET_DEST (set));
15443 if (temp)
15444 SET_DEST (set) = temp;
15445 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15446 {
9ebbca7d
GK
15447 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15448 if (temp)
15449 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15450 }
38c1f2d7 15451 }
37409796 15452 else
9ebbca7d
GK
15453 {
15454 int i;
37409796
NS
15455
15456 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15457 for (i = 0; i < XVECLEN (real, 0); i++)
15458 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15459 {
15460 rtx set = XVECEXP (real, 0, i);
f676971a 15461
9ebbca7d
GK
15462 temp = simplify_rtx (SET_SRC (set));
15463 if (temp)
15464 SET_SRC (set) = temp;
15465 temp = simplify_rtx (SET_DEST (set));
15466 if (temp)
15467 SET_DEST (set) = temp;
15468 if (GET_CODE (SET_DEST (set)) == MEM)
15469 {
15470 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15471 if (temp)
15472 XEXP (SET_DEST (set), 0) = temp;
15473 }
15474 RTX_FRAME_RELATED_P (set) = 1;
15475 }
15476 }
c19de7aa 15477
9ebbca7d
GK
15478 RTX_FRAME_RELATED_P (insn) = 1;
15479 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15480 real,
15481 REG_NOTES (insn));
38c1f2d7
MM
15482}
15483
00b960c7
AH
15484/* Returns an insn that has a vrsave set operation with the
15485 appropriate CLOBBERs. */
15486
15487static rtx
a2369ed3 15488generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15489{
15490 int nclobs, i;
15491 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15492 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15493
a004eb82
AH
15494 clobs[0]
15495 = gen_rtx_SET (VOIDmode,
15496 vrsave,
15497 gen_rtx_UNSPEC_VOLATILE (SImode,
15498 gen_rtvec (2, reg, vrsave),
3aca4bff 15499 UNSPECV_SET_VRSAVE));
00b960c7
AH
15500
15501 nclobs = 1;
15502
9aa86737
AH
15503 /* We need to clobber the registers in the mask so the scheduler
15504 does not move sets to VRSAVE before sets of AltiVec registers.
15505
15506 However, if the function receives nonlocal gotos, reload will set
15507 all call saved registers live. We will end up with:
15508
15509 (set (reg 999) (mem))
15510 (parallel [ (set (reg vrsave) (unspec blah))
15511 (clobber (reg 999))])
15512
15513 The clobber will cause the store into reg 999 to be dead, and
15514 flow will attempt to delete an epilogue insn. In this case, we
15515 need an unspec use/set of the register. */
00b960c7
AH
15516
15517 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15518 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15519 {
15520 if (!epiloguep || call_used_regs [i])
15521 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15522 gen_rtx_REG (V4SImode, i));
15523 else
15524 {
15525 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15526
15527 clobs[nclobs++]
a004eb82
AH
15528 = gen_rtx_SET (VOIDmode,
15529 reg,
15530 gen_rtx_UNSPEC (V4SImode,
15531 gen_rtvec (1, reg), 27));
9aa86737
AH
15532 }
15533 }
00b960c7
AH
15534
15535 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15536
15537 for (i = 0; i < nclobs; ++i)
15538 XVECEXP (insn, 0, i) = clobs[i];
15539
15540 return insn;
15541}
15542
89e7058f
AH
15543/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15544 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15545
15546static void
f676971a 15547emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15548 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15549{
15550 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15551 rtx replacea, replaceb;
15552
15553 int_rtx = GEN_INT (offset);
15554
15555 /* Some cases that need register indexed addressing. */
15556 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15557 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15558 || (TARGET_SPE_ABI
15559 && SPE_VECTOR_MODE (mode)
15560 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15561 {
15562 /* Whomever calls us must make sure r11 is available in the
c4ad648e 15563 flow path of instructions in the prologue. */
89e7058f
AH
15564 offset_rtx = gen_rtx_REG (Pmode, 11);
15565 emit_move_insn (offset_rtx, int_rtx);
15566
15567 replacea = offset_rtx;
15568 replaceb = int_rtx;
15569 }
15570 else
15571 {
15572 offset_rtx = int_rtx;
15573 replacea = NULL_RTX;
15574 replaceb = NULL_RTX;
15575 }
15576
15577 reg = gen_rtx_REG (mode, regno);
15578 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15579 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15580
15581 insn = emit_move_insn (mem, reg);
15582
15583 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15584}
15585
a3170dc6
AH
15586/* Emit an offset memory reference suitable for a frame store, while
15587 converting to a valid addressing mode. */
15588
15589static rtx
a2369ed3 15590gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15591{
15592 rtx int_rtx, offset_rtx;
15593
15594 int_rtx = GEN_INT (offset);
15595
4d4cbc0e 15596 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15597 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15598 {
15599 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15600 emit_move_insn (offset_rtx, int_rtx);
15601 }
15602 else
15603 offset_rtx = int_rtx;
15604
0be76840 15605 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15606}
15607
6d0a8091
DJ
15608/* Look for user-defined global regs. We should not save and restore these,
15609 and cannot use stmw/lmw if there are any in its range. */
15610
15611static bool
f78c3290 15612no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15613{
15614 int i;
f78c3290
NF
15615 for (i = first; i < gpr ? 32 : 64 ; i++)
15616 if (global_regs[i])
6d0a8091
DJ
15617 return false;
15618 return true;
15619}
15620
699c914a
MS
15621#ifndef TARGET_FIX_AND_CONTINUE
15622#define TARGET_FIX_AND_CONTINUE 0
15623#endif
15624
f78c3290
NF
15625/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15626#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15627#define LAST_SAVRES_REGISTER 31
15628#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15629
15630static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15631
15632/* Return the symbol for an out-of-line register save/restore routine.
15633 We are saving/restoring GPRs if GPR is true. */
15634
15635static rtx
15636rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15637{
15638 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15639 rtx sym;
15640 int select = ((savep ? 1 : 0) << 2
15641 | (gpr
15642 /* On the SPE, we never have any FPRs, but we do have
15643 32/64-bit versions of the routines. */
15644 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15645 : 0) << 1
15646 | (exitp ? 1: 0));
15647
15648 /* Don't generate bogus routine names. */
15649 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15650
15651 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15652
15653 if (sym == NULL)
15654 {
15655 char name[30];
15656 const char *action;
15657 const char *regkind;
15658 const char *exit_suffix;
15659
15660 action = savep ? "save" : "rest";
15661
15662 /* SPE has slightly different names for its routines depending on
15663 whether we are saving 32-bit or 64-bit registers. */
15664 if (TARGET_SPE_ABI)
15665 {
15666 /* No floating point saves on the SPE. */
15667 gcc_assert (gpr);
15668
15669 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15670 }
15671 else
15672 regkind = gpr ? "gpr" : "fpr";
15673
15674 exit_suffix = exitp ? "_x" : "";
15675
15676 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15677
15678 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15679 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15680 }
15681
15682 return sym;
15683}
15684
15685/* Emit a sequence of insns, including a stack tie if needed, for
15686 resetting the stack pointer. If SAVRES is true, then don't reset the
15687 stack pointer, but move the base of the frame into r11 for use by
15688 out-of-line register restore routines. */
15689
15690static void
15691rs6000_emit_stack_reset (rs6000_stack_t *info,
15692 rtx sp_reg_rtx, rtx frame_reg_rtx,
15693 int sp_offset, bool savres)
15694{
15695 /* This blockage is needed so that sched doesn't decide to move
15696 the sp change before the register restores. */
15697 if (frame_reg_rtx != sp_reg_rtx
15698 || (TARGET_SPE_ABI
15699 && info->spe_64bit_regs_used != 0
15700 && info->first_gp_reg_save != 32))
15701 rs6000_emit_stack_tie ();
15702
15703 if (frame_reg_rtx != sp_reg_rtx)
15704 {
15705 rs6000_emit_stack_tie ();
15706 if (sp_offset != 0)
15707 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15708 GEN_INT (sp_offset)));
15709 else if (!savres)
15710 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15711 }
15712 else if (sp_offset != 0)
15713 {
15714 /* If we are restoring registers out-of-line, we will be using the
15715 "exit" variants of the restore routines, which will reset the
15716 stack for us. But we do need to point r11 into the right place
15717 for those routines. */
15718 rtx dest_reg = (savres
15719 ? gen_rtx_REG (Pmode, 11)
15720 : sp_reg_rtx);
15721
15722 emit_insn (TARGET_32BIT
15723 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15724 GEN_INT (sp_offset))
15725 : gen_adddi3 (dest_reg, sp_reg_rtx,
15726 GEN_INT (sp_offset)));
15727 }
15728}
15729
15730/* Construct a parallel rtx describing the effect of a call to an
15731 out-of-line register save/restore routine. */
15732
15733static rtx
15734rs6000_make_savres_rtx (rs6000_stack_t *info,
15735 rtx frame_reg_rtx, int save_area_offset,
15736 enum machine_mode reg_mode,
15737 bool savep, bool gpr, bool exitp)
15738{
15739 int i;
15740 int offset, start_reg, end_reg, n_regs;
15741 int reg_size = GET_MODE_SIZE (reg_mode);
15742 rtx sym;
15743 rtvec p;
15744
15745 offset = 0;
15746 start_reg = (gpr
15747 ? info->first_gp_reg_save
15748 : info->first_fp_reg_save);
15749 end_reg = gpr ? 32 : 64;
15750 n_regs = end_reg - start_reg;
15751 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15752
15753 /* If we're saving registers, then we should never say we're exiting. */
15754 gcc_assert ((savep && !exitp) || !savep);
15755
15756 if (exitp)
15757 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15758
15759 RTVEC_ELT (p, offset++)
15760 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15761
15762 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15763 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15764 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15765
15766 for (i = 0; i < end_reg - start_reg; i++)
15767 {
15768 rtx addr, reg, mem;
15769 reg = gen_rtx_REG (reg_mode, start_reg + i);
15770 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15771 GEN_INT (save_area_offset + reg_size*i));
15772 mem = gen_frame_mem (reg_mode, addr);
15773
15774 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15775 savep ? mem : reg,
15776 savep ? reg : mem);
15777 }
15778
15779 return gen_rtx_PARALLEL (VOIDmode, p);
15780}
15781
52ff33d0
NF
15782/* Determine whether the gp REG is really used. */
15783
15784static bool
15785rs6000_reg_live_or_pic_offset_p (int reg)
15786{
6fb5fa3c 15787 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15788 && (!call_used_regs[reg]
15789 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15790 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15791 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15792 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15793 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15794}
15795
f78c3290
NF
15796enum {
15797 SAVRES_MULTIPLE = 0x1,
15798 SAVRES_INLINE_FPRS = 0x2,
15799 SAVRES_INLINE_GPRS = 0x4
15800};
15801
15802/* Determine the strategy for savings/restoring registers. */
15803
15804static int
15805rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
15806 int using_static_chain_p, int sibcall)
15807{
15808 bool using_multiple_p;
15809 bool common;
15810 bool savres_fprs_inline;
15811 bool savres_gprs_inline;
15812 bool noclobber_global_gprs
15813 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
15814
15815 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
15816 && (!TARGET_SPE_ABI
15817 || info->spe_64bit_regs_used == 0)
15818 && info->first_gp_reg_save < 31
15819 && noclobber_global_gprs);
15820 /* Don't bother to try to save things out-of-line if r11 is occupied
15821 by the static chain. It would require too much fiddling and the
15822 static chain is rarely used anyway. */
15823 common = (using_static_chain_p
15824 || sibcall
15825 || crtl->calls_eh_return
15826 || !info->lr_save_p
15827 || cfun->machine->ra_need_lr
15828 || info->total_size > 32767);
15829 savres_fprs_inline = (common
15830 || info->first_fp_reg_save == 64
15831 || !no_global_regs_above (info->first_fp_reg_save,
15832 /*gpr=*/false)
15833 || FP_SAVE_INLINE (info->first_fp_reg_save));
15834 savres_gprs_inline = (common
15835 /* Saving CR interferes with the exit routines
15836 used on the SPE, so just punt here. */
15837 || (!savep
15838 && TARGET_SPE_ABI
15839 && info->spe_64bit_regs_used != 0
15840 && info->cr_save_p != 0)
15841 || info->first_gp_reg_save == 32
15842 || !noclobber_global_gprs
15843 || GP_SAVE_INLINE (info->first_gp_reg_save));
15844
15845 if (savep)
15846 /* If we are going to use store multiple, then don't even bother
15847 with the out-of-line routines, since the store-multiple instruction
15848 will always be smaller. */
15849 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15850 else
15851 {
15852 /* The situation is more complicated with load multiple. We'd
15853 prefer to use the out-of-line routines for restores, since the
15854 "exit" out-of-line routines can handle the restore of LR and
15855 the frame teardown. But we can only use the out-of-line
15856 routines if we know that we've used store multiple or
15857 out-of-line routines in the prologue, i.e. if we've saved all
15858 the registers from first_gp_reg_save. Otherwise, we risk
15859 loading garbage from the stack. Furthermore, we can only use
15860 the "exit" out-of-line gpr restore if we haven't saved any
15861 fprs. */
15862 bool saved_all = !savres_gprs_inline || using_multiple_p;
15863
15864 if (saved_all && info->first_fp_reg_save != 64)
15865 /* We can't use the exit routine; use load multiple if it's
15866 available. */
15867 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15868 }
15869
15870 return (using_multiple_p
15871 | (savres_fprs_inline << 1)
15872 | (savres_gprs_inline << 2));
15873}
15874
9ebbca7d
GK
15875/* Emit function prologue as insns. */
15876
9878760c 15877void
863d938c 15878rs6000_emit_prologue (void)
9878760c 15879{
4697a36c 15880 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15881 enum machine_mode reg_mode = Pmode;
327e5343 15882 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15883 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15884 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15885 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15886 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 15887 rtx insn;
f78c3290 15888 int strategy;
9ebbca7d 15889 int saving_FPRs_inline;
f78c3290 15890 int saving_GPRs_inline;
9ebbca7d 15891 int using_store_multiple;
f78c3290
NF
15892 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
15893 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
15894 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 15895 HOST_WIDE_INT sp_offset = 0;
f676971a 15896
699c914a
MS
15897 if (TARGET_FIX_AND_CONTINUE)
15898 {
15899 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15900 address by modifying the first 5 instructions of the function
699c914a
MS
15901 to branch to the overriding function. This is necessary to
15902 permit function pointers that point to the old function to
15903 actually forward to the new function. */
15904 emit_insn (gen_nop ());
15905 emit_insn (gen_nop ());
de2ab0ca 15906 emit_insn (gen_nop ());
699c914a
MS
15907 emit_insn (gen_nop ());
15908 emit_insn (gen_nop ());
15909 }
15910
15911 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15912 {
15913 reg_mode = V2SImode;
15914 reg_size = 8;
15915 }
a3170dc6 15916
f78c3290
NF
15917 strategy = rs6000_savres_strategy (info, /*savep=*/true,
15918 /*static_chain_p=*/using_static_chain_p,
15919 /*sibcall=*/0);
15920 using_store_multiple = strategy & SAVRES_MULTIPLE;
15921 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
15922 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
15923
15924 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
15925 if (! WORLD_SAVE_P (info)
15926 && info->push_p
acd0b319 15927 && (DEFAULT_ABI == ABI_V4
e3b5732b 15928 || crtl->calls_eh_return))
9ebbca7d 15929 {
f78c3290
NF
15930 bool need_r11 = (TARGET_SPE
15931 ? (!saving_GPRs_inline
15932 && info->spe_64bit_regs_used == 0)
15933 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
15934 if (info->total_size < 32767)
15935 sp_offset = info->total_size;
15936 else
f78c3290
NF
15937 frame_reg_rtx = (need_r11
15938 ? gen_rtx_REG (Pmode, 11)
15939 : frame_ptr_rtx);
f676971a 15940 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
15941 (frame_reg_rtx != sp_reg_rtx
15942 && (info->cr_save_p
15943 || info->lr_save_p
15944 || info->first_fp_reg_save < 64
15945 || info->first_gp_reg_save < 32
f78c3290
NF
15946 )),
15947 need_r11);
9ebbca7d
GK
15948 if (frame_reg_rtx != sp_reg_rtx)
15949 rs6000_emit_stack_tie ();
15950 }
15951
d62294f5 15952 /* Handle world saves specially here. */
f57fe068 15953 if (WORLD_SAVE_P (info))
d62294f5
FJ
15954 {
15955 int i, j, sz;
15956 rtx treg;
15957 rtvec p;
22fa69da 15958 rtx reg0;
d62294f5
FJ
15959
15960 /* save_world expects lr in r0. */
22fa69da 15961 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 15962 if (info->lr_save_p)
c4ad648e 15963 {
22fa69da 15964 insn = emit_move_insn (reg0,
1de43f85 15965 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
15966 RTX_FRAME_RELATED_P (insn) = 1;
15967 }
d62294f5
FJ
15968
15969 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 15970 assumptions about the offsets of various bits of the stack
992d08b1 15971 frame. */
37409796
NS
15972 gcc_assert (info->gp_save_offset == -220
15973 && info->fp_save_offset == -144
15974 && info->lr_save_offset == 8
15975 && info->cr_save_offset == 4
15976 && info->push_p
15977 && info->lr_save_p
e3b5732b 15978 && (!crtl->calls_eh_return
37409796
NS
15979 || info->ehrd_offset == -432)
15980 && info->vrsave_save_offset == -224
22fa69da 15981 && info->altivec_save_offset == -416);
d62294f5
FJ
15982
15983 treg = gen_rtx_REG (SImode, 11);
15984 emit_move_insn (treg, GEN_INT (-info->total_size));
15985
15986 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 15987 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
15988
15989 /* Preserve CR2 for save_world prologues */
22fa69da 15990 sz = 5;
d62294f5
FJ
15991 sz += 32 - info->first_gp_reg_save;
15992 sz += 64 - info->first_fp_reg_save;
15993 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
15994 p = rtvec_alloc (sz);
15995 j = 0;
15996 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 15997 gen_rtx_REG (SImode,
1de43f85 15998 LR_REGNO));
d62294f5 15999 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16000 gen_rtx_SYMBOL_REF (Pmode,
16001 "*save_world"));
d62294f5 16002 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16003 properly. */
16004 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16005 {
16006 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16007 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16008 GEN_INT (info->fp_save_offset
16009 + sp_offset + 8 * i));
0be76840 16010 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16011
16012 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16013 }
d62294f5 16014 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16015 {
16016 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16017 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16018 GEN_INT (info->altivec_save_offset
16019 + sp_offset + 16 * i));
0be76840 16020 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16021
16022 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16023 }
d62294f5 16024 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16025 {
16026 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16027 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16028 GEN_INT (info->gp_save_offset
16029 + sp_offset + reg_size * i));
0be76840 16030 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16031
16032 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16033 }
16034
16035 {
16036 /* CR register traditionally saved as CR2. */
16037 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16038 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16039 GEN_INT (info->cr_save_offset
16040 + sp_offset));
0be76840 16041 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16042
16043 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16044 }
22fa69da
GK
16045 /* Explain about use of R0. */
16046 if (info->lr_save_p)
16047 {
16048 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16049 GEN_INT (info->lr_save_offset
16050 + sp_offset));
16051 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16052
22fa69da
GK
16053 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16054 }
16055 /* Explain what happens to the stack pointer. */
16056 {
16057 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16058 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16059 }
d62294f5
FJ
16060
16061 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16062 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16063 treg, GEN_INT (-info->total_size));
16064 sp_offset = info->total_size;
d62294f5
FJ
16065 }
16066
9ebbca7d 16067 /* If we use the link register, get it into r0. */
f57fe068 16068 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16069 {
52ff33d0
NF
16070 rtx addr, reg, mem;
16071
f8a57be8 16072 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16073 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16074 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16075
16076 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16077 GEN_INT (info->lr_save_offset + sp_offset));
16078 reg = gen_rtx_REG (Pmode, 0);
16079 mem = gen_rtx_MEM (Pmode, addr);
16080 /* This should not be of rs6000_sr_alias_set, because of
16081 __builtin_return_address. */
16082
16083 insn = emit_move_insn (mem, reg);
16084 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16085 NULL_RTX, NULL_RTX);
f8a57be8 16086 }
9ebbca7d
GK
16087
16088 /* If we need to save CR, put it into r12. */
f57fe068 16089 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16090 {
f8a57be8 16091 rtx set;
f676971a 16092
9ebbca7d 16093 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16094 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16095 RTX_FRAME_RELATED_P (insn) = 1;
16096 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16097 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16098 But that's OK. All we have to do is specify that _one_ condition
16099 code register is saved in this stack slot. The thrower's epilogue
16100 will then restore all the call-saved registers.
16101 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16102 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16103 gen_rtx_REG (SImode, CR2_REGNO));
16104 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16105 set,
16106 REG_NOTES (insn));
9ebbca7d
GK
16107 }
16108
a4f6c312
SS
16109 /* Do any required saving of fpr's. If only one or two to save, do
16110 it ourselves. Otherwise, call function. */
f57fe068 16111 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16112 {
16113 int i;
16114 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16115 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16116 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
16117 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
16118 info->first_fp_reg_save + i,
16119 info->fp_save_offset + sp_offset + 8 * i,
16120 info->total_size);
9ebbca7d 16121 }
f57fe068 16122 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16123 {
16124 rtx par;
16125
16126 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16127 info->fp_save_offset + sp_offset,
16128 DFmode,
16129 /*savep=*/true, /*gpr=*/false,
16130 /*exitp=*/false);
16131 insn = emit_insn (par);
16132 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16133 NULL_RTX, NULL_RTX);
16134 }
16135
16136 /* Save GPRs. This is done as a PARALLEL if we are using
16137 the store-multiple instructions. */
16138 if (!WORLD_SAVE_P (info)
16139 && TARGET_SPE_ABI
16140 && info->spe_64bit_regs_used != 0
16141 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16142 {
16143 int i;
f78c3290
NF
16144 rtx spe_save_area_ptr;
16145
16146 /* Determine whether we can address all of the registers that need
16147 to be saved with an offset from the stack pointer that fits in
16148 the small const field for SPE memory instructions. */
16149 int spe_regs_addressable_via_sp
16150 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16151 + (32 - info->first_gp_reg_save - 1) * reg_size)
16152 && saving_GPRs_inline);
16153 int spe_offset;
16154
16155 if (spe_regs_addressable_via_sp)
16156 {
16157 spe_save_area_ptr = frame_reg_rtx;
16158 spe_offset = info->spe_gp_save_offset + sp_offset;
16159 }
16160 else
16161 {
16162 /* Make r11 point to the start of the SPE save area. We need
16163 to be careful here if r11 is holding the static chain. If
16164 it is, then temporarily save it in r0. We would use r0 as
16165 our base register here, but using r0 as a base register in
16166 loads and stores means something different from what we
16167 would like. */
16168 int ool_adjust = (saving_GPRs_inline
16169 ? 0
16170 : (info->first_gp_reg_save
16171 - (FIRST_SAVRES_REGISTER+1))*8);
16172 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16173 + sp_offset - ool_adjust);
16174
16175 if (using_static_chain_p)
16176 {
16177 rtx r0 = gen_rtx_REG (Pmode, 0);
16178 gcc_assert (info->first_gp_reg_save > 11);
16179
16180 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16181 }
16182
16183 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16184 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16185 frame_reg_rtx,
16186 GEN_INT (offset)));
16187 /* We need to make sure the move to r11 gets noted for
16188 properly outputting unwind information. */
16189 if (!saving_GPRs_inline)
16190 rs6000_frame_related (insn, frame_reg_rtx, offset,
16191 NULL_RTX, NULL_RTX);
16192 spe_offset = 0;
16193 }
16194
16195 if (saving_GPRs_inline)
16196 {
16197 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16198 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16199 {
16200 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16201 rtx offset, addr, mem;
f676971a 16202
f78c3290
NF
16203 /* We're doing all this to ensure that the offset fits into
16204 the immediate offset of 'evstdd'. */
16205 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16206
16207 offset = GEN_INT (reg_size * i + spe_offset);
16208 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16209 mem = gen_rtx_MEM (V2SImode, addr);
16210
16211 insn = emit_move_insn (mem, reg);
16212
16213 rs6000_frame_related (insn, spe_save_area_ptr,
16214 info->spe_gp_save_offset
16215 + sp_offset + reg_size * i,
16216 offset, const0_rtx);
16217 }
16218 }
16219 else
9ebbca7d 16220 {
f78c3290 16221 rtx par;
9ebbca7d 16222
f78c3290
NF
16223 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16224 0, reg_mode,
16225 /*savep=*/true, /*gpr=*/true,
16226 /*exitp=*/false);
16227 insn = emit_insn (par);
16228 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16229 NULL_RTX, NULL_RTX);
9ebbca7d 16230 }
f78c3290
NF
16231
16232
16233 /* Move the static chain pointer back. */
16234 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16235 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16236 }
16237 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16238 {
16239 rtx par;
16240
16241 /* Need to adjust r11 if we saved any FPRs. */
16242 if (info->first_fp_reg_save != 64)
16243 {
16244 rtx r11 = gen_rtx_REG (reg_mode, 11);
16245 rtx offset = GEN_INT (info->total_size
16246 + (-8 * (64-info->first_fp_reg_save)));
16247 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16248 ? sp_reg_rtx : r11);
16249
16250 emit_insn (TARGET_32BIT
16251 ? gen_addsi3 (r11, ptr_reg, offset)
16252 : gen_adddi3 (r11, ptr_reg, offset));
16253 }
16254
16255 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16256 info->gp_save_offset + sp_offset,
16257 reg_mode,
16258 /*savep=*/true, /*gpr=*/true,
16259 /*exitp=*/false);
16260 insn = emit_insn (par);
f676971a 16261 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16262 NULL_RTX, NULL_RTX);
16263 }
f78c3290 16264 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16265 {
308c142a 16266 rtvec p;
9ebbca7d
GK
16267 int i;
16268 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16269 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16270 {
16271 rtx addr, reg, mem;
16272 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16273 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16274 GEN_INT (info->gp_save_offset
16275 + sp_offset
9ebbca7d 16276 + reg_size * i));
0be76840 16277 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16278
16279 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16280 }
16281 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16282 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16283 NULL_RTX, NULL_RTX);
b6c9286a 16284 }
f57fe068 16285 else if (!WORLD_SAVE_P (info))
b6c9286a 16286 {
9ebbca7d
GK
16287 int i;
16288 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16289 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16290 {
16291 rtx addr, reg, mem;
16292 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16293
52ff33d0
NF
16294 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16295 GEN_INT (info->gp_save_offset
16296 + sp_offset
16297 + reg_size * i));
16298 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16299
52ff33d0
NF
16300 insn = emit_move_insn (mem, reg);
16301 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16302 NULL_RTX, NULL_RTX);
16303 }
9ebbca7d
GK
16304 }
16305
83720594
RH
16306 /* ??? There's no need to emit actual instructions here, but it's the
16307 easiest way to get the frame unwind information emitted. */
e3b5732b 16308 if (crtl->calls_eh_return)
83720594 16309 {
78e1b90d
DE
16310 unsigned int i, regno;
16311
fc4767bb
JJ
16312 /* In AIX ABI we need to pretend we save r2 here. */
16313 if (TARGET_AIX)
16314 {
16315 rtx addr, reg, mem;
16316
16317 reg = gen_rtx_REG (reg_mode, 2);
16318 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16319 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16320 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16321
16322 insn = emit_move_insn (mem, reg);
f676971a 16323 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16324 NULL_RTX, NULL_RTX);
16325 PATTERN (insn) = gen_blockage ();
16326 }
16327
83720594
RH
16328 for (i = 0; ; ++i)
16329 {
83720594
RH
16330 regno = EH_RETURN_DATA_REGNO (i);
16331 if (regno == INVALID_REGNUM)
16332 break;
16333
89e7058f
AH
16334 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16335 info->ehrd_offset + sp_offset
16336 + reg_size * (int) i,
16337 info->total_size);
83720594
RH
16338 }
16339 }
16340
9ebbca7d 16341 /* Save CR if we use any that must be preserved. */
f57fe068 16342 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16343 {
16344 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16345 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16346 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16347 /* See the large comment above about why CR2_REGNO is used. */
16348 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16349
9ebbca7d
GK
16350 /* If r12 was used to hold the original sp, copy cr into r0 now
16351 that it's free. */
16352 if (REGNO (frame_reg_rtx) == 12)
16353 {
f8a57be8
GK
16354 rtx set;
16355
9ebbca7d 16356 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16357 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16358 RTX_FRAME_RELATED_P (insn) = 1;
16359 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16360 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16361 set,
16362 REG_NOTES (insn));
f676971a 16363
9ebbca7d
GK
16364 }
16365 insn = emit_move_insn (mem, cr_save_rtx);
16366
f676971a 16367 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16368 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16369 }
16370
f676971a 16371 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16372 for which it was done previously. */
f57fe068 16373 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16374 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16375 {
bcb2d701 16376 if (info->total_size < 32767)
2b2c2fe5 16377 sp_offset = info->total_size;
bcb2d701
EC
16378 else
16379 frame_reg_rtx = frame_ptr_rtx;
16380 rs6000_emit_allocate_stack (info->total_size,
16381 (frame_reg_rtx != sp_reg_rtx
16382 && ((info->altivec_size != 0)
16383 || (info->vrsave_mask != 0)
f78c3290
NF
16384 )),
16385 FALSE);
bcb2d701
EC
16386 if (frame_reg_rtx != sp_reg_rtx)
16387 rs6000_emit_stack_tie ();
2b2c2fe5 16388 }
9ebbca7d
GK
16389
16390 /* Set frame pointer, if needed. */
16391 if (frame_pointer_needed)
16392 {
7d5175e1 16393 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16394 sp_reg_rtx);
16395 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16396 }
9878760c 16397
2b2c2fe5
EC
16398 /* Save AltiVec registers if needed. Save here because the red zone does
16399 not include AltiVec registers. */
16400 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16401 {
16402 int i;
16403
16404 /* There should be a non inline version of this, for when we
16405 are saving lots of vector registers. */
16406 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16407 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16408 {
16409 rtx areg, savereg, mem;
16410 int offset;
16411
16412 offset = info->altivec_save_offset + sp_offset
16413 + 16 * (i - info->first_altivec_reg_save);
16414
16415 savereg = gen_rtx_REG (V4SImode, i);
16416
16417 areg = gen_rtx_REG (Pmode, 0);
16418 emit_move_insn (areg, GEN_INT (offset));
16419
16420 /* AltiVec addressing mode is [reg+reg]. */
16421 mem = gen_frame_mem (V4SImode,
16422 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16423
16424 insn = emit_move_insn (mem, savereg);
16425
16426 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16427 areg, GEN_INT (offset));
16428 }
16429 }
16430
16431 /* VRSAVE is a bit vector representing which AltiVec registers
16432 are used. The OS uses this to determine which vector
16433 registers to save on a context switch. We need to save
16434 VRSAVE on the stack frame, add whatever AltiVec registers we
16435 used in this function, and do the corresponding magic in the
16436 epilogue. */
16437
16438 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16439 && info->vrsave_mask != 0)
16440 {
16441 rtx reg, mem, vrsave;
16442 int offset;
16443
16444 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
16445 as frame_reg_rtx and r11 as the static chain pointer for
16446 nested functions. */
16447 reg = gen_rtx_REG (SImode, 0);
16448 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16449 if (TARGET_MACHO)
16450 emit_insn (gen_get_vrsave_internal (reg));
16451 else
16452 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16453
16454 if (!WORLD_SAVE_P (info))
16455 {
16456 /* Save VRSAVE. */
16457 offset = info->vrsave_save_offset + sp_offset;
16458 mem = gen_frame_mem (SImode,
16459 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16460 GEN_INT (offset)));
16461 insn = emit_move_insn (mem, reg);
16462 }
16463
16464 /* Include the registers in the mask. */
16465 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16466
16467 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16468 }
16469
1db02437 16470 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16471 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16472 || (DEFAULT_ABI == ABI_V4
16473 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16474 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16475 {
16476 /* If emit_load_toc_table will use the link register, we need to save
16477 it. We use R12 for this purpose because emit_load_toc_table
16478 can use register 0. This allows us to use a plain 'blr' to return
16479 from the procedure more often. */
16480 int save_LR_around_toc_setup = (TARGET_ELF
16481 && DEFAULT_ABI != ABI_AIX
16482 && flag_pic
16483 && ! info->lr_save_p
16484 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16485 if (save_LR_around_toc_setup)
16486 {
1de43f85 16487 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16488
c4ad648e 16489 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16490 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16491
c4ad648e 16492 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16493
c4ad648e 16494 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16495 RTX_FRAME_RELATED_P (insn) = 1;
16496 }
16497 else
16498 rs6000_emit_load_toc_table (TRUE);
16499 }
ee890fe2 16500
fcce224d 16501#if TARGET_MACHO
ee890fe2 16502 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16503 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16504 {
1de43f85 16505 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16506 rtx src = machopic_function_base_sym ();
ee890fe2 16507
6d0a8091
DJ
16508 /* Save and restore LR locally around this call (in R0). */
16509 if (!info->lr_save_p)
6fb5fa3c 16510 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16511
6fb5fa3c 16512 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16513
6fb5fa3c
DB
16514 emit_move_insn (gen_rtx_REG (Pmode,
16515 RS6000_PIC_OFFSET_TABLE_REGNUM),
16516 lr);
6d0a8091
DJ
16517
16518 if (!info->lr_save_p)
6fb5fa3c 16519 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16520 }
fcce224d 16521#endif
9ebbca7d
GK
16522}
16523
9ebbca7d 16524/* Write function prologue. */
a4f6c312 16525
08c148a8 16526static void
f676971a 16527rs6000_output_function_prologue (FILE *file,
a2369ed3 16528 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16529{
16530 rs6000_stack_t *info = rs6000_stack_info ();
16531
4697a36c
MM
16532 if (TARGET_DEBUG_STACK)
16533 debug_stack_info (info);
9878760c 16534
a4f6c312
SS
16535 /* Write .extern for any function we will call to save and restore
16536 fp values. */
16537 if (info->first_fp_reg_save < 64
16538 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16539 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16540 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16541 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16542
c764f757
RK
16543 /* Write .extern for AIX common mode routines, if needed. */
16544 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16545 {
f6709c70
JW
16546 fputs ("\t.extern __mulh\n", file);
16547 fputs ("\t.extern __mull\n", file);
16548 fputs ("\t.extern __divss\n", file);
16549 fputs ("\t.extern __divus\n", file);
16550 fputs ("\t.extern __quoss\n", file);
16551 fputs ("\t.extern __quous\n", file);
c764f757
RK
16552 common_mode_defined = 1;
16553 }
9878760c 16554
9ebbca7d 16555 if (! HAVE_prologue)
979721f8 16556 {
9ebbca7d 16557 start_sequence ();
9dda4cc8 16558
a4f6c312
SS
16559 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16560 the "toplevel" insn chain. */
2e040219 16561 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16562 rs6000_emit_prologue ();
2e040219 16563 emit_note (NOTE_INSN_DELETED);
178c3eff 16564
a3c9585f 16565 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16566 {
16567 rtx insn;
16568 unsigned addr = 0;
16569 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16570 {
16571 INSN_ADDRESSES_NEW (insn, addr);
16572 addr += 4;
16573 }
16574 }
9dda4cc8 16575
9ebbca7d 16576 if (TARGET_DEBUG_STACK)
a4f6c312 16577 debug_rtx_list (get_insns (), 100);
c9d691e9 16578 final (get_insns (), file, FALSE);
9ebbca7d 16579 end_sequence ();
979721f8
MM
16580 }
16581
9ebbca7d
GK
16582 rs6000_pic_labelno++;
16583}
f676971a 16584
1c9c5e43
AM
16585/* Non-zero if vmx regs are restored before the frame pop, zero if
16586 we restore after the pop when possible. */
16587#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16588
f78c3290
NF
16589/* Reload CR from REG. */
16590
16591static void
16592rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16593{
16594 int count = 0;
16595 int i;
16596
16597 if (using_mfcr_multiple)
16598 {
16599 for (i = 0; i < 8; i++)
16600 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16601 count++;
16602 gcc_assert (count);
16603 }
16604
16605 if (using_mfcr_multiple && count > 1)
16606 {
16607 rtvec p;
16608 int ndx;
16609
16610 p = rtvec_alloc (count);
16611
16612 ndx = 0;
16613 for (i = 0; i < 8; i++)
16614 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16615 {
16616 rtvec r = rtvec_alloc (2);
16617 RTVEC_ELT (r, 0) = reg;
16618 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16619 RTVEC_ELT (p, ndx) =
16620 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16621 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16622 ndx++;
16623 }
16624 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16625 gcc_assert (ndx == count);
16626 }
16627 else
16628 for (i = 0; i < 8; i++)
16629 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16630 {
16631 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16632 CR0_REGNO+i),
16633 reg));
16634 }
16635}
16636
9ebbca7d 16637/* Emit function epilogue as insns.
9878760c 16638
9ebbca7d
GK
16639 At present, dwarf2out_frame_debug_expr doesn't understand
16640 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16641 anywhere in the epilogue. Most of the insns below would in any case
16642 need special notes to explain where r11 is in relation to the stack. */
9878760c 16643
9ebbca7d 16644void
a2369ed3 16645rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16646{
16647 rs6000_stack_t *info;
f78c3290 16648 int restoring_GPRs_inline;
9ebbca7d
GK
16649 int restoring_FPRs_inline;
16650 int using_load_multiple;
d296e02e 16651 int using_mtcr_multiple;
9ebbca7d 16652 int use_backchain_to_restore_sp;
f78c3290
NF
16653 int restore_lr;
16654 int strategy;
9ebbca7d
GK
16655 int sp_offset = 0;
16656 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16657 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16658 enum machine_mode reg_mode = Pmode;
327e5343 16659 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16660 int i;
16661
c19de7aa
AH
16662 info = rs6000_stack_info ();
16663
16664 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16665 {
16666 reg_mode = V2SImode;
16667 reg_size = 8;
16668 }
16669
f78c3290
NF
16670 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16671 /*static_chain_p=*/0, sibcall);
16672 using_load_multiple = strategy & SAVRES_MULTIPLE;
16673 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16674 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16675 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16676 || rs6000_cpu == PROCESSOR_PPC603
16677 || rs6000_cpu == PROCESSOR_PPC750
16678 || optimize_size);
1c9c5e43
AM
16679 /* Restore via the backchain when we have a large frame, since this
16680 is more efficient than an addis, addi pair. The second condition
16681 here will not trigger at the moment; We don't actually need a
16682 frame pointer for alloca, but the generic parts of the compiler
16683 give us one anyway. */
16684 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16685 || info->total_size
16686 + (info->lr_save_p ? info->lr_save_offset : 0)
16687 > 32767
1c9c5e43
AM
16688 || (cfun->calls_alloca
16689 && !frame_pointer_needed));
f78c3290
NF
16690 restore_lr = (info->lr_save_p
16691 && restoring_GPRs_inline
16692 && restoring_FPRs_inline);
9ebbca7d 16693
f57fe068 16694 if (WORLD_SAVE_P (info))
d62294f5
FJ
16695 {
16696 int i, j;
16697 char rname[30];
16698 const char *alloc_rname;
16699 rtvec p;
16700
16701 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
16702 stack slot (which is not likely to be our caller.)
16703 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16704 rest_world is similar, except any R10 parameter is ignored.
16705 The exception-handling stuff that was here in 2.95 is no
16706 longer necessary. */
d62294f5
FJ
16707
16708 p = rtvec_alloc (9
16709 + 1
f676971a 16710 + 32 - info->first_gp_reg_save
c4ad648e
AM
16711 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16712 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16713
e3b5732b 16714 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16715 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16716 alloc_rname = ggc_strdup (rname);
16717
16718 j = 0;
16719 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16720 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16721 gen_rtx_REG (Pmode,
1de43f85 16722 LR_REGNO));
d62294f5 16723 RTVEC_ELT (p, j++)
c4ad648e 16724 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16725 /* The instruction pattern requires a clobber here;
c4ad648e 16726 it is shared with the restVEC helper. */
d62294f5 16727 RTVEC_ELT (p, j++)
c4ad648e 16728 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16729
16730 {
c4ad648e
AM
16731 /* CR register traditionally saved as CR2. */
16732 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16733 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16734 GEN_INT (info->cr_save_offset));
0be76840 16735 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16736
16737 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16738 }
16739
16740 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16741 {
16742 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16743 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16744 GEN_INT (info->gp_save_offset
16745 + reg_size * i));
0be76840 16746 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16747
16748 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16749 }
d62294f5 16750 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16751 {
16752 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16753 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16754 GEN_INT (info->altivec_save_offset
16755 + 16 * i));
0be76840 16756 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16757
16758 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16759 }
d62294f5 16760 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16761 {
16762 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16763 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16764 GEN_INT (info->fp_save_offset
16765 + 8 * i));
0be76840 16766 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16767
16768 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16769 }
d62294f5 16770 RTVEC_ELT (p, j++)
c4ad648e 16771 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16772 RTVEC_ELT (p, j++)
c4ad648e 16773 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16774 RTVEC_ELT (p, j++)
c4ad648e 16775 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16776 RTVEC_ELT (p, j++)
c4ad648e 16777 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16778 RTVEC_ELT (p, j++)
c4ad648e 16779 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16780 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16781
16782 return;
16783 }
16784
45b194f8
AM
16785 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16786 if (info->push_p)
2b2c2fe5 16787 sp_offset = info->total_size;
f676971a 16788
e6477eaa
AM
16789 /* Restore AltiVec registers if we must do so before adjusting the
16790 stack. */
16791 if (TARGET_ALTIVEC_ABI
16792 && info->altivec_size != 0
1c9c5e43
AM
16793 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16794 || (DEFAULT_ABI != ABI_V4
16795 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
9aa86737
AH
16796 {
16797 int i;
16798
e6477eaa
AM
16799 if (use_backchain_to_restore_sp)
16800 {
16801 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16802 emit_move_insn (frame_reg_rtx,
16803 gen_rtx_MEM (Pmode, sp_reg_rtx));
16804 sp_offset = 0;
16805 }
1c9c5e43
AM
16806 else if (frame_pointer_needed)
16807 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 16808
9aa86737
AH
16809 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16810 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16811 {
16812 rtx addr, areg, mem;
16813
16814 areg = gen_rtx_REG (Pmode, 0);
16815 emit_move_insn
16816 (areg, GEN_INT (info->altivec_save_offset
16817 + sp_offset
16818 + 16 * (i - info->first_altivec_reg_save)));
16819
16820 /* AltiVec addressing mode is [reg+reg]. */
16821 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16822 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16823
16824 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16825 }
16826 }
16827
e6477eaa
AM
16828 /* Restore VRSAVE if we must do so before adjusting the stack. */
16829 if (TARGET_ALTIVEC
16830 && TARGET_ALTIVEC_VRSAVE
16831 && info->vrsave_mask != 0
1c9c5e43
AM
16832 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16833 || (DEFAULT_ABI != ABI_V4
16834 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
e6477eaa
AM
16835 {
16836 rtx addr, mem, reg;
16837
1c9c5e43 16838 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 16839 {
1c9c5e43
AM
16840 if (use_backchain_to_restore_sp)
16841 {
16842 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16843 emit_move_insn (frame_reg_rtx,
16844 gen_rtx_MEM (Pmode, sp_reg_rtx));
16845 sp_offset = 0;
16846 }
16847 else if (frame_pointer_needed)
16848 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
16849 }
16850
16851 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16852 GEN_INT (info->vrsave_save_offset + sp_offset));
16853 mem = gen_frame_mem (SImode, addr);
16854 reg = gen_rtx_REG (SImode, 12);
16855 emit_move_insn (reg, mem);
16856
16857 emit_insn (generate_set_vrsave (reg, info, 1));
16858 }
16859
1c9c5e43
AM
16860 /* If we have a large stack frame, restore the old stack pointer
16861 using the backchain. */
2b2c2fe5
EC
16862 if (use_backchain_to_restore_sp)
16863 {
1c9c5e43 16864 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
16865 {
16866 /* Under V.4, don't reset the stack pointer until after we're done
16867 loading the saved registers. */
16868 if (DEFAULT_ABI == ABI_V4)
16869 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16870
16871 emit_move_insn (frame_reg_rtx,
16872 gen_rtx_MEM (Pmode, sp_reg_rtx));
16873 sp_offset = 0;
16874 }
1c9c5e43
AM
16875 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16876 && DEFAULT_ABI == ABI_V4)
16877 /* frame_reg_rtx has been set up by the altivec restore. */
16878 ;
16879 else
16880 {
16881 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
16882 frame_reg_rtx = sp_reg_rtx;
16883 }
16884 }
16885 /* If we have a frame pointer, we can restore the old stack pointer
16886 from it. */
16887 else if (frame_pointer_needed)
16888 {
16889 frame_reg_rtx = sp_reg_rtx;
16890 if (DEFAULT_ABI == ABI_V4)
16891 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16892
16893 emit_insn (TARGET_32BIT
16894 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16895 GEN_INT (info->total_size))
16896 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16897 GEN_INT (info->total_size)));
16898 sp_offset = 0;
2b2c2fe5 16899 }
45b194f8
AM
16900 else if (info->push_p
16901 && DEFAULT_ABI != ABI_V4
e3b5732b 16902 && !crtl->calls_eh_return)
2b2c2fe5 16903 {
45b194f8
AM
16904 emit_insn (TARGET_32BIT
16905 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16906 GEN_INT (info->total_size))
16907 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16908 GEN_INT (info->total_size)));
16909 sp_offset = 0;
2b2c2fe5
EC
16910 }
16911
e6477eaa 16912 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
16913 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16914 && TARGET_ALTIVEC_ABI
e6477eaa
AM
16915 && info->altivec_size != 0
16916 && (DEFAULT_ABI == ABI_V4
16917 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
16918 {
16919 int i;
16920
16921 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16922 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16923 {
16924 rtx addr, areg, mem;
16925
16926 areg = gen_rtx_REG (Pmode, 0);
16927 emit_move_insn
16928 (areg, GEN_INT (info->altivec_save_offset
16929 + sp_offset
16930 + 16 * (i - info->first_altivec_reg_save)));
16931
16932 /* AltiVec addressing mode is [reg+reg]. */
16933 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
16934 mem = gen_frame_mem (V4SImode, addr);
16935
16936 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16937 }
16938 }
16939
16940 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
16941 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16942 && TARGET_ALTIVEC
e6477eaa
AM
16943 && TARGET_ALTIVEC_VRSAVE
16944 && info->vrsave_mask != 0
16945 && (DEFAULT_ABI == ABI_V4
16946 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
554c2941
AM
16947 {
16948 rtx addr, mem, reg;
16949
16950 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16951 GEN_INT (info->vrsave_save_offset + sp_offset));
16952 mem = gen_frame_mem (SImode, addr);
16953 reg = gen_rtx_REG (SImode, 12);
16954 emit_move_insn (reg, mem);
16955
16956 emit_insn (generate_set_vrsave (reg, info, 1));
16957 }
16958
f78c3290
NF
16959 /* Get the old lr if we saved it. If we are restoring registers
16960 out-of-line, then the out-of-line routines can do this for us. */
16961 if (restore_lr)
b6c9286a 16962 {
a3170dc6
AH
16963 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
16964 info->lr_save_offset + sp_offset);
ba4828e0 16965
9ebbca7d 16966 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 16967 }
f676971a 16968
9ebbca7d
GK
16969 /* Get the old cr if we saved it. */
16970 if (info->cr_save_p)
16971 {
16972 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16973 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16974 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 16975
9ebbca7d
GK
16976 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
16977 }
f676971a 16978
9ebbca7d 16979 /* Set LR here to try to overlap restores below. */
f78c3290 16980 if (restore_lr)
1de43f85 16981 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 16982 gen_rtx_REG (Pmode, 0));
f676971a 16983
83720594 16984 /* Load exception handler data registers, if needed. */
e3b5732b 16985 if (crtl->calls_eh_return)
83720594 16986 {
78e1b90d
DE
16987 unsigned int i, regno;
16988
fc4767bb
JJ
16989 if (TARGET_AIX)
16990 {
16991 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16992 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16993 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16994
16995 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
16996 }
16997
83720594
RH
16998 for (i = 0; ; ++i)
16999 {
a3170dc6 17000 rtx mem;
83720594
RH
17001
17002 regno = EH_RETURN_DATA_REGNO (i);
17003 if (regno == INVALID_REGNUM)
17004 break;
17005
a3170dc6
AH
17006 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17007 info->ehrd_offset + sp_offset
17008 + reg_size * (int) i);
83720594
RH
17009
17010 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17011 }
17012 }
f676971a 17013
9ebbca7d
GK
17014 /* Restore GPRs. This is done as a PARALLEL if we are using
17015 the load-multiple instructions. */
f78c3290
NF
17016 if (TARGET_SPE_ABI
17017 && info->spe_64bit_regs_used != 0
17018 && info->first_gp_reg_save != 32)
52ff33d0 17019 {
52ff33d0
NF
17020 /* Determine whether we can address all of the registers that need
17021 to be saved with an offset from the stack pointer that fits in
17022 the small const field for SPE memory instructions. */
17023 int spe_regs_addressable_via_sp
f78c3290
NF
17024 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17025 + (32 - info->first_gp_reg_save - 1) * reg_size)
17026 && restoring_GPRs_inline);
52ff33d0
NF
17027 int spe_offset;
17028
17029 if (spe_regs_addressable_via_sp)
45b194f8 17030 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17031 else
17032 {
45b194f8 17033 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17034 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17035 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17036 There's no need to worry here because the static chain is passed
17037 anew to every function. */
f78c3290
NF
17038 int ool_adjust = (restoring_GPRs_inline
17039 ? 0
17040 : (info->first_gp_reg_save
17041 - (FIRST_SAVRES_REGISTER+1))*8);
17042
45b194f8
AM
17043 if (frame_reg_rtx == sp_reg_rtx)
17044 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17045 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17046 GEN_INT (info->spe_gp_save_offset
17047 + sp_offset
17048 - ool_adjust)));
45b194f8
AM
17049 /* Keep the invariant that frame_reg_rtx + sp_offset points
17050 at the top of the stack frame. */
17051 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17052
17053 spe_offset = 0;
17054 }
17055
f78c3290
NF
17056 if (restoring_GPRs_inline)
17057 {
17058 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17059 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17060 {
17061 rtx offset, addr, mem;
52ff33d0 17062
f78c3290
NF
17063 /* We're doing all this to ensure that the immediate offset
17064 fits into the immediate field of 'evldd'. */
17065 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17066
f78c3290
NF
17067 offset = GEN_INT (spe_offset + reg_size * i);
17068 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17069 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17070
f78c3290
NF
17071 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17072 mem);
17073 }
17074 }
17075 else
17076 {
17077 rtx par;
17078
17079 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17080 0, reg_mode,
17081 /*savep=*/false, /*gpr=*/true,
17082 /*exitp=*/true);
17083 emit_jump_insn (par);
17084
17085 /* We don't want anybody else emitting things after we jumped
17086 back. */
17087 return;
17088 }
52ff33d0 17089 }
f78c3290
NF
17090 else if (!restoring_GPRs_inline)
17091 {
17092 /* We are jumping to an out-of-line function. */
17093 bool can_use_exit = info->first_fp_reg_save == 64;
17094 rtx par;
17095
17096 /* Emit stack reset code if we need it. */
17097 if (can_use_exit)
17098 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17099 sp_offset, can_use_exit);
17100 else
17101 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17102 sp_reg_rtx,
17103 GEN_INT (sp_offset - info->fp_size)));
17104
17105 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17106 info->gp_save_offset, reg_mode,
17107 /*savep=*/false, /*gpr=*/true,
17108 /*exitp=*/can_use_exit);
17109
17110 if (can_use_exit)
17111 {
17112 if (info->cr_save_p)
17113 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17114 using_mtcr_multiple);
17115
17116 emit_jump_insn (par);
17117
17118 /* We don't want anybody else emitting things after we jumped
17119 back. */
17120 return;
17121 }
17122 else
17123 emit_insn (par);
17124 }
17125 else if (using_load_multiple)
17126 {
17127 rtvec p;
17128 p = rtvec_alloc (32 - info->first_gp_reg_save);
17129 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17130 {
f676971a
EC
17131 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17132 GEN_INT (info->gp_save_offset
17133 + sp_offset
9ebbca7d 17134 + reg_size * i));
0be76840 17135 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17136
f78c3290
NF
17137 RTVEC_ELT (p, i) =
17138 gen_rtx_SET (VOIDmode,
17139 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17140 mem);
9ebbca7d 17141 }
f78c3290
NF
17142 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17143 }
17144 else
17145 {
17146 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17147 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17148 {
17149 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17150 GEN_INT (info->gp_save_offset
17151 + sp_offset
17152 + reg_size * i));
17153 rtx mem = gen_frame_mem (reg_mode, addr);
17154
17155 emit_move_insn (gen_rtx_REG (reg_mode,
17156 info->first_gp_reg_save + i), mem);
17157 }
17158 }
9878760c 17159
9ebbca7d
GK
17160 /* Restore fpr's if we need to do it without calling a function. */
17161 if (restoring_FPRs_inline)
17162 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17163 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17164 && ! call_used_regs[info->first_fp_reg_save+i]))
17165 {
17166 rtx addr, mem;
17167 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17168 GEN_INT (info->fp_save_offset
17169 + sp_offset
a4f6c312 17170 + 8 * i));
0be76840 17171 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17172
f676971a 17173 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
17174 info->first_fp_reg_save + i),
17175 mem);
17176 }
8d30c4ee 17177
9ebbca7d
GK
17178 /* If we saved cr, restore it here. Just those that were used. */
17179 if (info->cr_save_p)
f78c3290 17180 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17181
9ebbca7d 17182 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17183 have been done. */
f78c3290
NF
17184 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17185 sp_offset, !restoring_FPRs_inline);
b6c9286a 17186
e3b5732b 17187 if (crtl->calls_eh_return)
83720594
RH
17188 {
17189 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17190 emit_insn (TARGET_32BIT
83720594
RH
17191 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17192 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17193 }
17194
9ebbca7d
GK
17195 if (!sibcall)
17196 {
17197 rtvec p;
17198 if (! restoring_FPRs_inline)
f78c3290 17199 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17200 else
17201 p = rtvec_alloc (2);
b6c9286a 17202
e35b9579 17203 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17204 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17205 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17206 : gen_rtx_CLOBBER (VOIDmode,
17207 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17208
17209 /* If we have to restore more than two FP registers, branch to the
17210 restore function. It will return to our caller. */
17211 if (! restoring_FPRs_inline)
17212 {
17213 int i;
f78c3290
NF
17214 rtx sym;
17215
17216 sym = rs6000_savres_routine_sym (info,
17217 /*savep=*/false,
17218 /*gpr=*/false,
17219 /*exitp=*/true);
17220 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17221 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17222 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17223 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17224 {
17225 rtx addr, mem;
17226 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17227 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17228 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17229
f78c3290 17230 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17231 gen_rtx_SET (VOIDmode,
17232 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17233 mem);
b6c9286a
MM
17234 }
17235 }
f676971a 17236
9ebbca7d 17237 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17238 }
9878760c
RK
17239}
17240
17241/* Write function epilogue. */
17242
08c148a8 17243static void
f676971a 17244rs6000_output_function_epilogue (FILE *file,
a2369ed3 17245 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17246{
9ebbca7d 17247 if (! HAVE_epilogue)
9878760c 17248 {
9ebbca7d
GK
17249 rtx insn = get_last_insn ();
17250 /* If the last insn was a BARRIER, we don't have to write anything except
17251 the trace table. */
17252 if (GET_CODE (insn) == NOTE)
17253 insn = prev_nonnote_insn (insn);
17254 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17255 {
9ebbca7d
GK
17256 /* This is slightly ugly, but at least we don't have two
17257 copies of the epilogue-emitting code. */
17258 start_sequence ();
17259
17260 /* A NOTE_INSN_DELETED is supposed to be at the start
17261 and end of the "toplevel" insn chain. */
2e040219 17262 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17263 rs6000_emit_epilogue (FALSE);
2e040219 17264 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17265
a3c9585f 17266 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17267 {
17268 rtx insn;
17269 unsigned addr = 0;
17270 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17271 {
17272 INSN_ADDRESSES_NEW (insn, addr);
17273 addr += 4;
17274 }
17275 }
17276
9ebbca7d 17277 if (TARGET_DEBUG_STACK)
a4f6c312 17278 debug_rtx_list (get_insns (), 100);
c9d691e9 17279 final (get_insns (), file, FALSE);
9ebbca7d 17280 end_sequence ();
4697a36c 17281 }
9878760c 17282 }
b4ac57ab 17283
efdba735
SH
17284#if TARGET_MACHO
17285 macho_branch_islands ();
0e5da0be
GK
17286 /* Mach-O doesn't support labels at the end of objects, so if
17287 it looks like we might want one, insert a NOP. */
17288 {
17289 rtx insn = get_last_insn ();
17290 while (insn
17291 && NOTE_P (insn)
a38e7aa5 17292 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17293 insn = PREV_INSN (insn);
f676971a
EC
17294 if (insn
17295 && (LABEL_P (insn)
0e5da0be 17296 || (NOTE_P (insn)
a38e7aa5 17297 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17298 fputs ("\tnop\n", file);
17299 }
17300#endif
17301
9b30bae2 17302 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17303 on its format.
17304
17305 We don't output a traceback table if -finhibit-size-directive was
17306 used. The documentation for -finhibit-size-directive reads
17307 ``don't output a @code{.size} assembler directive, or anything
17308 else that would cause trouble if the function is split in the
17309 middle, and the two halves are placed at locations far apart in
17310 memory.'' The traceback table has this property, since it
17311 includes the offset from the start of the function to the
4d30c363
MM
17312 traceback table itself.
17313
17314 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 17315 different traceback table. */
57ac7be9 17316 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
e3b5732b 17317 && rs6000_traceback != traceback_none && !crtl->is_thunk)
9b30bae2 17318 {
69c75916 17319 const char *fname = NULL;
3ac88239 17320 const char *language_string = lang_hooks.name;
6041bf2f 17321 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17322 int i;
57ac7be9 17323 int optional_tbtab;
8097c268 17324 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17325
17326 if (rs6000_traceback == traceback_full)
17327 optional_tbtab = 1;
17328 else if (rs6000_traceback == traceback_part)
17329 optional_tbtab = 0;
17330 else
17331 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17332
69c75916
AM
17333 if (optional_tbtab)
17334 {
17335 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17336 while (*fname == '.') /* V.4 encodes . in the name */
17337 fname++;
17338
17339 /* Need label immediately before tbtab, so we can compute
17340 its offset from the function start. */
17341 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17342 ASM_OUTPUT_LABEL (file, fname);
17343 }
314fc5a9
ILT
17344
17345 /* The .tbtab pseudo-op can only be used for the first eight
17346 expressions, since it can't handle the possibly variable
17347 length fields that follow. However, if you omit the optional
17348 fields, the assembler outputs zeros for all optional fields
17349 anyways, giving each variable length field is minimum length
17350 (as defined in sys/debug.h). Thus we can not use the .tbtab
17351 pseudo-op at all. */
17352
17353 /* An all-zero word flags the start of the tbtab, for debuggers
17354 that have to find it by searching forward from the entry
17355 point or from the current pc. */
19d2d16f 17356 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17357
17358 /* Tbtab format type. Use format type 0. */
19d2d16f 17359 fputs ("\t.byte 0,", file);
314fc5a9 17360
5fc921c1
DE
17361 /* Language type. Unfortunately, there does not seem to be any
17362 official way to discover the language being compiled, so we
17363 use language_string.
17364 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17365 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17366 a number, so for now use 9. */
5fc921c1 17367 if (! strcmp (language_string, "GNU C"))
314fc5a9 17368 i = 0;
6de9cd9a 17369 else if (! strcmp (language_string, "GNU F77")
7f62878c 17370 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17371 i = 1;
8b83775b 17372 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17373 i = 2;
5fc921c1
DE
17374 else if (! strcmp (language_string, "GNU Ada"))
17375 i = 3;
56438901
AM
17376 else if (! strcmp (language_string, "GNU C++")
17377 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17378 i = 9;
9517ead8
AG
17379 else if (! strcmp (language_string, "GNU Java"))
17380 i = 13;
5fc921c1
DE
17381 else if (! strcmp (language_string, "GNU Objective-C"))
17382 i = 14;
314fc5a9 17383 else
37409796 17384 gcc_unreachable ();
314fc5a9
ILT
17385 fprintf (file, "%d,", i);
17386
17387 /* 8 single bit fields: global linkage (not set for C extern linkage,
17388 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17389 from start of procedure stored in tbtab, internal function, function
17390 has controlled storage, function has no toc, function uses fp,
17391 function logs/aborts fp operations. */
17392 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17393 fprintf (file, "%d,",
17394 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
17395
17396 /* 6 bitfields: function is interrupt handler, name present in
17397 proc table, function calls alloca, on condition directives
17398 (controls stack walks, 3 bits), saves condition reg, saves
17399 link reg. */
17400 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17401 set up as a frame pointer, even when there is no alloca call. */
17402 fprintf (file, "%d,",
6041bf2f
DE
17403 ((optional_tbtab << 6)
17404 | ((optional_tbtab & frame_pointer_needed) << 5)
17405 | (info->cr_save_p << 1)
17406 | (info->lr_save_p)));
314fc5a9 17407
6041bf2f 17408 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17409 (6 bits). */
17410 fprintf (file, "%d,",
4697a36c 17411 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17412
17413 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17414 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17415
6041bf2f
DE
17416 if (optional_tbtab)
17417 {
17418 /* Compute the parameter info from the function decl argument
17419 list. */
17420 tree decl;
17421 int next_parm_info_bit = 31;
314fc5a9 17422
6041bf2f
DE
17423 for (decl = DECL_ARGUMENTS (current_function_decl);
17424 decl; decl = TREE_CHAIN (decl))
17425 {
17426 rtx parameter = DECL_INCOMING_RTL (decl);
17427 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17428
6041bf2f
DE
17429 if (GET_CODE (parameter) == REG)
17430 {
ebb109ad 17431 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17432 {
17433 int bits;
17434
17435 float_parms++;
17436
37409796
NS
17437 switch (mode)
17438 {
17439 case SFmode:
e41b2a33 17440 case SDmode:
37409796
NS
17441 bits = 0x2;
17442 break;
17443
17444 case DFmode:
7393f7f8 17445 case DDmode:
37409796 17446 case TFmode:
7393f7f8 17447 case TDmode:
37409796
NS
17448 bits = 0x3;
17449 break;
17450
17451 default:
17452 gcc_unreachable ();
17453 }
6041bf2f
DE
17454
17455 /* If only one bit will fit, don't or in this entry. */
17456 if (next_parm_info_bit > 0)
17457 parm_info |= (bits << (next_parm_info_bit - 1));
17458 next_parm_info_bit -= 2;
17459 }
17460 else
17461 {
17462 fixed_parms += ((GET_MODE_SIZE (mode)
17463 + (UNITS_PER_WORD - 1))
17464 / UNITS_PER_WORD);
17465 next_parm_info_bit -= 1;
17466 }
17467 }
17468 }
17469 }
314fc5a9
ILT
17470
17471 /* Number of fixed point parameters. */
17472 /* This is actually the number of words of fixed point parameters; thus
17473 an 8 byte struct counts as 2; and thus the maximum value is 8. */
17474 fprintf (file, "%d,", fixed_parms);
17475
17476 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17477 all on stack. */
17478 /* This is actually the number of fp registers that hold parameters;
17479 and thus the maximum value is 13. */
17480 /* Set parameters on stack bit if parameters are not in their original
17481 registers, regardless of whether they are on the stack? Xlc
17482 seems to set the bit when not optimizing. */
17483 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17484
6041bf2f
DE
17485 if (! optional_tbtab)
17486 return;
17487
314fc5a9
ILT
17488 /* Optional fields follow. Some are variable length. */
17489
17490 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
17491 11 double float. */
17492 /* There is an entry for each parameter in a register, in the order that
17493 they occur in the parameter list. Any intervening arguments on the
17494 stack are ignored. If the list overflows a long (max possible length
17495 34 bits) then completely leave off all elements that don't fit. */
17496 /* Only emit this long if there was at least one parameter. */
17497 if (fixed_parms || float_parms)
17498 fprintf (file, "\t.long %d\n", parm_info);
17499
17500 /* Offset from start of code to tb table. */
19d2d16f 17501 fputs ("\t.long ", file);
314fc5a9 17502 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17503 if (TARGET_AIX)
17504 RS6000_OUTPUT_BASENAME (file, fname);
17505 else
17506 assemble_name (file, fname);
17507 putc ('-', file);
17508 rs6000_output_function_entry (file, fname);
19d2d16f 17509 putc ('\n', file);
314fc5a9
ILT
17510
17511 /* Interrupt handler mask. */
17512 /* Omit this long, since we never set the interrupt handler bit
17513 above. */
17514
17515 /* Number of CTL (controlled storage) anchors. */
17516 /* Omit this long, since the has_ctl bit is never set above. */
17517
17518 /* Displacement into stack of each CTL anchor. */
17519 /* Omit this list of longs, because there are no CTL anchors. */
17520
17521 /* Length of function name. */
69c75916
AM
17522 if (*fname == '*')
17523 ++fname;
296b8152 17524 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17525
17526 /* Function name. */
17527 assemble_string (fname, strlen (fname));
17528
17529 /* Register for alloca automatic storage; this is always reg 31.
17530 Only emit this if the alloca bit was set above. */
17531 if (frame_pointer_needed)
19d2d16f 17532 fputs ("\t.byte 31\n", file);
b1765bde
DE
17533
17534 fputs ("\t.align 2\n", file);
9b30bae2 17535 }
9878760c 17536}
17167fd8 17537\f
a4f6c312
SS
17538/* A C compound statement that outputs the assembler code for a thunk
17539 function, used to implement C++ virtual function calls with
17540 multiple inheritance. The thunk acts as a wrapper around a virtual
17541 function, adjusting the implicit object parameter before handing
17542 control off to the real function.
17543
17544 First, emit code to add the integer DELTA to the location that
17545 contains the incoming first argument. Assume that this argument
17546 contains a pointer, and is the one used to pass the `this' pointer
17547 in C++. This is the incoming argument *before* the function
17548 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17549 values of all other incoming arguments.
17167fd8
MM
17550
17551 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17552 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17553 not touch the return address. Hence returning from FUNCTION will
17554 return to whoever called the current `thunk'.
17167fd8 17555
a4f6c312
SS
17556 The effect must be as if FUNCTION had been called directly with the
17557 adjusted first argument. This macro is responsible for emitting
17558 all of the code for a thunk function; output_function_prologue()
17559 and output_function_epilogue() are not invoked.
17167fd8 17560
a4f6c312
SS
17561 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17562 been extracted from it.) It might possibly be useful on some
17563 targets, but probably not.
17167fd8 17564
a4f6c312
SS
17565 If you do not define this macro, the target-independent code in the
17566 C++ frontend will generate a less efficient heavyweight thunk that
17567 calls FUNCTION instead of jumping to it. The generic approach does
17568 not support varargs. */
17167fd8 17569
3961e8fe 17570static void
f676971a
EC
17571rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17572 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17573 tree function)
17167fd8 17574{
5b71a4e7 17575 rtx this, insn, funexp;
17167fd8 17576
5b71a4e7 17577 reload_completed = 1;
fe3ad572 17578 epilogue_completed = 1;
56a7189a 17579
5b71a4e7 17580 /* Mark the end of the (empty) prologue. */
2e040219 17581 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17582
5b71a4e7
DE
17583 /* Find the "this" pointer. If the function returns a structure,
17584 the structure return pointer is in r3. */
61f71b34 17585 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 17586 this = gen_rtx_REG (Pmode, 4);
56a7189a 17587 else
5b71a4e7 17588 this = gen_rtx_REG (Pmode, 3);
17167fd8 17589
5b71a4e7
DE
17590 /* Apply the constant offset, if required. */
17591 if (delta)
17592 {
17593 rtx delta_rtx = GEN_INT (delta);
17594 emit_insn (TARGET_32BIT
17595 ? gen_addsi3 (this, this, delta_rtx)
17596 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
17597 }
17598
5b71a4e7
DE
17599 /* Apply the offset from the vtable, if required. */
17600 if (vcall_offset)
17167fd8 17601 {
5b71a4e7
DE
17602 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17603 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17604
5b71a4e7 17605 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
17606 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17607 {
17608 emit_insn (TARGET_32BIT
17609 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17610 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17611 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17612 }
17613 else
17614 {
17615 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17616
17617 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17618 }
5b71a4e7
DE
17619 emit_insn (TARGET_32BIT
17620 ? gen_addsi3 (this, this, tmp)
17621 : gen_adddi3 (this, this, tmp));
17167fd8
MM
17622 }
17623
5b71a4e7
DE
17624 /* Generate a tail call to the target function. */
17625 if (!TREE_USED (function))
17626 {
17627 assemble_external (function);
17628 TREE_USED (function) = 1;
17629 }
17630 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17631 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17632
17633#if TARGET_MACHO
ab82a49f 17634 if (MACHOPIC_INDIRECT)
5b71a4e7 17635 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17636#endif
5b71a4e7
DE
17637
17638 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 17639 generate sibcall RTL explicitly. */
5b71a4e7
DE
17640 insn = emit_call_insn (
17641 gen_rtx_PARALLEL (VOIDmode,
17642 gen_rtvec (4,
17643 gen_rtx_CALL (VOIDmode,
17644 funexp, const0_rtx),
17645 gen_rtx_USE (VOIDmode, const0_rtx),
17646 gen_rtx_USE (VOIDmode,
17647 gen_rtx_REG (SImode,
1de43f85 17648 LR_REGNO)),
5b71a4e7
DE
17649 gen_rtx_RETURN (VOIDmode))));
17650 SIBLING_CALL_P (insn) = 1;
17651 emit_barrier ();
17652
17653 /* Run just enough of rest_of_compilation to get the insns emitted.
17654 There's not really enough bulk here to make other passes such as
17655 instruction scheduling worth while. Note that use_thunk calls
17656 assemble_start_function and assemble_end_function. */
17657 insn = get_insns ();
55e092c4 17658 insn_locators_alloc ();
5b71a4e7
DE
17659 shorten_branches (insn);
17660 final_start_function (insn, file, 1);
c9d691e9 17661 final (insn, file, 1);
5b71a4e7 17662 final_end_function ();
d7087dd2 17663 free_after_compilation (cfun);
5b71a4e7
DE
17664
17665 reload_completed = 0;
fe3ad572 17666 epilogue_completed = 0;
9ebbca7d 17667}
9ebbca7d
GK
17668\f
17669/* A quick summary of the various types of 'constant-pool tables'
17670 under PowerPC:
17671
f676971a 17672 Target Flags Name One table per
9ebbca7d
GK
17673 AIX (none) AIX TOC object file
17674 AIX -mfull-toc AIX TOC object file
17675 AIX -mminimal-toc AIX minimal TOC translation unit
17676 SVR4/EABI (none) SVR4 SDATA object file
17677 SVR4/EABI -fpic SVR4 pic object file
17678 SVR4/EABI -fPIC SVR4 PIC translation unit
17679 SVR4/EABI -mrelocatable EABI TOC function
17680 SVR4/EABI -maix AIX TOC object file
f676971a 17681 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17682 AIX minimal TOC translation unit
17683
17684 Name Reg. Set by entries contains:
17685 made by addrs? fp? sum?
17686
17687 AIX TOC 2 crt0 as Y option option
17688 AIX minimal TOC 30 prolog gcc Y Y option
17689 SVR4 SDATA 13 crt0 gcc N Y N
17690 SVR4 pic 30 prolog ld Y not yet N
17691 SVR4 PIC 30 prolog gcc Y option option
17692 EABI TOC 30 prolog gcc Y option option
17693
17694*/
17695
9ebbca7d
GK
17696/* Hash functions for the hash table. */
17697
17698static unsigned
a2369ed3 17699rs6000_hash_constant (rtx k)
9ebbca7d 17700{
46b33600
RH
17701 enum rtx_code code = GET_CODE (k);
17702 enum machine_mode mode = GET_MODE (k);
17703 unsigned result = (code << 3) ^ mode;
17704 const char *format;
17705 int flen, fidx;
f676971a 17706
46b33600
RH
17707 format = GET_RTX_FORMAT (code);
17708 flen = strlen (format);
17709 fidx = 0;
9ebbca7d 17710
46b33600
RH
17711 switch (code)
17712 {
17713 case LABEL_REF:
17714 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17715
17716 case CONST_DOUBLE:
17717 if (mode != VOIDmode)
17718 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17719 flen = 2;
17720 break;
17721
17722 case CODE_LABEL:
17723 fidx = 3;
17724 break;
17725
17726 default:
17727 break;
17728 }
9ebbca7d
GK
17729
17730 for (; fidx < flen; fidx++)
17731 switch (format[fidx])
17732 {
17733 case 's':
17734 {
17735 unsigned i, len;
17736 const char *str = XSTR (k, fidx);
17737 len = strlen (str);
17738 result = result * 613 + len;
17739 for (i = 0; i < len; i++)
17740 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17741 break;
17742 }
9ebbca7d
GK
17743 case 'u':
17744 case 'e':
17745 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17746 break;
17747 case 'i':
17748 case 'n':
17749 result = result * 613 + (unsigned) XINT (k, fidx);
17750 break;
17751 case 'w':
17752 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17753 result = result * 613 + (unsigned) XWINT (k, fidx);
17754 else
17755 {
17756 size_t i;
9390387d 17757 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17758 result = result * 613 + (unsigned) (XWINT (k, fidx)
17759 >> CHAR_BIT * i);
17760 }
17761 break;
09501938
DE
17762 case '0':
17763 break;
9ebbca7d 17764 default:
37409796 17765 gcc_unreachable ();
9ebbca7d 17766 }
46b33600 17767
9ebbca7d
GK
17768 return result;
17769}
17770
17771static unsigned
a2369ed3 17772toc_hash_function (const void *hash_entry)
9ebbca7d 17773{
f676971a 17774 const struct toc_hash_struct *thc =
a9098fd0
GK
17775 (const struct toc_hash_struct *) hash_entry;
17776 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17777}
17778
17779/* Compare H1 and H2 for equivalence. */
17780
17781static int
a2369ed3 17782toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17783{
17784 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17785 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17786
a9098fd0
GK
17787 if (((const struct toc_hash_struct *) h1)->key_mode
17788 != ((const struct toc_hash_struct *) h2)->key_mode)
17789 return 0;
17790
5692c7bc 17791 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17792}
17793
28e510bd
MM
17794/* These are the names given by the C++ front-end to vtables, and
17795 vtable-like objects. Ideally, this logic should not be here;
17796 instead, there should be some programmatic way of inquiring as
17797 to whether or not an object is a vtable. */
17798
17799#define VTABLE_NAME_P(NAME) \
9390387d 17800 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17801 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17802 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17803 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17804 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
17805
17806void
a2369ed3 17807rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17808{
17809 /* Currently C++ toc references to vtables can be emitted before it
17810 is decided whether the vtable is public or private. If this is
17811 the case, then the linker will eventually complain that there is
f676971a 17812 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17813 we emit the TOC reference to reference the symbol and not the
17814 section. */
17815 const char *name = XSTR (x, 0);
54ee9799 17816
f676971a 17817 if (VTABLE_NAME_P (name))
54ee9799
DE
17818 {
17819 RS6000_OUTPUT_BASENAME (file, name);
17820 }
17821 else
17822 assemble_name (file, name);
28e510bd
MM
17823}
17824
a4f6c312
SS
17825/* Output a TOC entry. We derive the entry name from what is being
17826 written. */
9878760c
RK
17827
17828void
a2369ed3 17829output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17830{
17831 char buf[256];
3cce094d 17832 const char *name = buf;
ec940faa 17833 const char *real_name;
9878760c 17834 rtx base = x;
16fdeb48 17835 HOST_WIDE_INT offset = 0;
9878760c 17836
37409796 17837 gcc_assert (!TARGET_NO_TOC);
4697a36c 17838
9ebbca7d
GK
17839 /* When the linker won't eliminate them, don't output duplicate
17840 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17841 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17842 CODE_LABELs. */
17843 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17844 {
17845 struct toc_hash_struct *h;
17846 void * * found;
f676971a 17847
17211ab5 17848 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17849 time because GGC is not initialized at that point. */
17211ab5 17850 if (toc_hash_table == NULL)
f676971a 17851 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17852 toc_hash_eq, NULL);
17853
5ead67f6 17854 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 17855 h->key = x;
a9098fd0 17856 h->key_mode = mode;
9ebbca7d 17857 h->labelno = labelno;
f676971a 17858
9ebbca7d
GK
17859 found = htab_find_slot (toc_hash_table, h, 1);
17860 if (*found == NULL)
17861 *found = h;
f676971a 17862 else /* This is indeed a duplicate.
9ebbca7d
GK
17863 Set this label equal to that label. */
17864 {
17865 fputs ("\t.set ", file);
17866 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17867 fprintf (file, "%d,", labelno);
17868 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17869 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17870 found)->labelno));
17871 return;
17872 }
17873 }
17874
17875 /* If we're going to put a double constant in the TOC, make sure it's
17876 aligned properly when strict alignment is on. */
ff1720ed
RK
17877 if (GET_CODE (x) == CONST_DOUBLE
17878 && STRICT_ALIGNMENT
a9098fd0 17879 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17880 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17881 ASM_OUTPUT_ALIGN (file, 3);
17882 }
17883
4977bab6 17884 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17885
37c37a57
RK
17886 /* Handle FP constants specially. Note that if we have a minimal
17887 TOC, things we put here aren't actually in the TOC, so we can allow
17888 FP constants. */
00b79d54
BE
17889 if (GET_CODE (x) == CONST_DOUBLE &&
17890 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17891 {
17892 REAL_VALUE_TYPE rv;
17893 long k[4];
17894
17895 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17896 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17897 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17898 else
17899 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17900
17901 if (TARGET_64BIT)
17902 {
17903 if (TARGET_MINIMAL_TOC)
17904 fputs (DOUBLE_INT_ASM_OP, file);
17905 else
17906 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17907 k[0] & 0xffffffff, k[1] & 0xffffffff,
17908 k[2] & 0xffffffff, k[3] & 0xffffffff);
17909 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17910 k[0] & 0xffffffff, k[1] & 0xffffffff,
17911 k[2] & 0xffffffff, k[3] & 0xffffffff);
17912 return;
17913 }
17914 else
17915 {
17916 if (TARGET_MINIMAL_TOC)
17917 fputs ("\t.long ", file);
17918 else
17919 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17920 k[0] & 0xffffffff, k[1] & 0xffffffff,
17921 k[2] & 0xffffffff, k[3] & 0xffffffff);
17922 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
17923 k[0] & 0xffffffff, k[1] & 0xffffffff,
17924 k[2] & 0xffffffff, k[3] & 0xffffffff);
17925 return;
17926 }
17927 }
00b79d54
BE
17928 else if (GET_CODE (x) == CONST_DOUBLE &&
17929 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 17930 {
042259f2
DE
17931 REAL_VALUE_TYPE rv;
17932 long k[2];
0adc764e 17933
042259f2 17934 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17935
17936 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17937 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
17938 else
17939 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 17940
13ded975
DE
17941 if (TARGET_64BIT)
17942 {
17943 if (TARGET_MINIMAL_TOC)
2bfcf297 17944 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 17945 else
2f0552b6
AM
17946 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17947 k[0] & 0xffffffff, k[1] & 0xffffffff);
17948 fprintf (file, "0x%lx%08lx\n",
17949 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17950 return;
17951 }
1875cc88 17952 else
13ded975
DE
17953 {
17954 if (TARGET_MINIMAL_TOC)
2bfcf297 17955 fputs ("\t.long ", file);
13ded975 17956 else
2f0552b6
AM
17957 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
17958 k[0] & 0xffffffff, k[1] & 0xffffffff);
17959 fprintf (file, "0x%lx,0x%lx\n",
17960 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
17961 return;
17962 }
9878760c 17963 }
00b79d54
BE
17964 else if (GET_CODE (x) == CONST_DOUBLE &&
17965 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 17966 {
042259f2
DE
17967 REAL_VALUE_TYPE rv;
17968 long l;
9878760c 17969
042259f2 17970 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17971 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17972 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
17973 else
17974 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 17975
31bfaa0b
DE
17976 if (TARGET_64BIT)
17977 {
17978 if (TARGET_MINIMAL_TOC)
2bfcf297 17979 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 17980 else
2f0552b6
AM
17981 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17982 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
17983 return;
17984 }
042259f2 17985 else
31bfaa0b
DE
17986 {
17987 if (TARGET_MINIMAL_TOC)
2bfcf297 17988 fputs ("\t.long ", file);
31bfaa0b 17989 else
2f0552b6
AM
17990 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
17991 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
17992 return;
17993 }
042259f2 17994 }
f176e826 17995 else if (GET_MODE (x) == VOIDmode
a9098fd0 17996 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 17997 {
e2c953b6 17998 unsigned HOST_WIDE_INT low;
042259f2
DE
17999 HOST_WIDE_INT high;
18000
18001 if (GET_CODE (x) == CONST_DOUBLE)
18002 {
18003 low = CONST_DOUBLE_LOW (x);
18004 high = CONST_DOUBLE_HIGH (x);
18005 }
18006 else
18007#if HOST_BITS_PER_WIDE_INT == 32
18008 {
18009 low = INTVAL (x);
0858c623 18010 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18011 }
18012#else
18013 {
c4ad648e
AM
18014 low = INTVAL (x) & 0xffffffff;
18015 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18016 }
18017#endif
9878760c 18018
a9098fd0
GK
18019 /* TOC entries are always Pmode-sized, but since this
18020 is a bigendian machine then if we're putting smaller
18021 integer constants in the TOC we have to pad them.
18022 (This is still a win over putting the constants in
18023 a separate constant pool, because then we'd have
02a4ec28
FS
18024 to have both a TOC entry _and_ the actual constant.)
18025
18026 For a 32-bit target, CONST_INT values are loaded and shifted
18027 entirely within `low' and can be stored in one TOC entry. */
18028
37409796
NS
18029 /* It would be easy to make this work, but it doesn't now. */
18030 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18031
18032 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18033 {
18034#if HOST_BITS_PER_WIDE_INT == 32
18035 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18036 POINTER_SIZE, &low, &high, 0);
18037#else
18038 low |= high << 32;
18039 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18040 high = (HOST_WIDE_INT) low >> 32;
18041 low &= 0xffffffff;
18042#endif
18043 }
a9098fd0 18044
13ded975
DE
18045 if (TARGET_64BIT)
18046 {
18047 if (TARGET_MINIMAL_TOC)
2bfcf297 18048 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18049 else
2f0552b6
AM
18050 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18051 (long) high & 0xffffffff, (long) low & 0xffffffff);
18052 fprintf (file, "0x%lx%08lx\n",
18053 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18054 return;
18055 }
1875cc88 18056 else
13ded975 18057 {
02a4ec28
FS
18058 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18059 {
18060 if (TARGET_MINIMAL_TOC)
2bfcf297 18061 fputs ("\t.long ", file);
02a4ec28 18062 else
2bfcf297 18063 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18064 (long) high & 0xffffffff, (long) low & 0xffffffff);
18065 fprintf (file, "0x%lx,0x%lx\n",
18066 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18067 }
13ded975 18068 else
02a4ec28
FS
18069 {
18070 if (TARGET_MINIMAL_TOC)
2bfcf297 18071 fputs ("\t.long ", file);
02a4ec28 18072 else
2f0552b6
AM
18073 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18074 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18075 }
13ded975
DE
18076 return;
18077 }
9878760c
RK
18078 }
18079
18080 if (GET_CODE (x) == CONST)
18081 {
37409796 18082 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18083
9878760c
RK
18084 base = XEXP (XEXP (x, 0), 0);
18085 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18086 }
f676971a 18087
37409796
NS
18088 switch (GET_CODE (base))
18089 {
18090 case SYMBOL_REF:
18091 name = XSTR (base, 0);
18092 break;
18093
18094 case LABEL_REF:
18095 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18096 CODE_LABEL_NUMBER (XEXP (base, 0)));
18097 break;
18098
18099 case CODE_LABEL:
18100 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18101 break;
18102
18103 default:
18104 gcc_unreachable ();
18105 }
9878760c 18106
772c5265 18107 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 18108 if (TARGET_MINIMAL_TOC)
2bfcf297 18109 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18110 else
18111 {
b6c9286a 18112 fprintf (file, "\t.tc %s", real_name);
9878760c 18113
1875cc88 18114 if (offset < 0)
16fdeb48 18115 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18116 else if (offset)
16fdeb48 18117 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18118
19d2d16f 18119 fputs ("[TC],", file);
1875cc88 18120 }
581bc4de
MM
18121
18122 /* Currently C++ toc references to vtables can be emitted before it
18123 is decided whether the vtable is public or private. If this is
18124 the case, then the linker will eventually complain that there is
18125 a TOC reference to an unknown section. Thus, for vtables only,
18126 we emit the TOC reference to reference the symbol and not the
18127 section. */
28e510bd 18128 if (VTABLE_NAME_P (name))
581bc4de 18129 {
54ee9799 18130 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18131 if (offset < 0)
16fdeb48 18132 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18133 else if (offset > 0)
16fdeb48 18134 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18135 }
18136 else
18137 output_addr_const (file, x);
19d2d16f 18138 putc ('\n', file);
9878760c
RK
18139}
18140\f
18141/* Output an assembler pseudo-op to write an ASCII string of N characters
18142 starting at P to FILE.
18143
18144 On the RS/6000, we have to do this using the .byte operation and
18145 write out special characters outside the quoted string.
18146 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18147 so we must artificially break them up early. */
9878760c
RK
18148
18149void
a2369ed3 18150output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18151{
18152 char c;
18153 int i, count_string;
d330fd93
KG
18154 const char *for_string = "\t.byte \"";
18155 const char *for_decimal = "\t.byte ";
18156 const char *to_close = NULL;
9878760c
RK
18157
18158 count_string = 0;
18159 for (i = 0; i < n; i++)
18160 {
18161 c = *p++;
18162 if (c >= ' ' && c < 0177)
18163 {
18164 if (for_string)
18165 fputs (for_string, file);
18166 putc (c, file);
18167
18168 /* Write two quotes to get one. */
18169 if (c == '"')
18170 {
18171 putc (c, file);
18172 ++count_string;
18173 }
18174
18175 for_string = NULL;
18176 for_decimal = "\"\n\t.byte ";
18177 to_close = "\"\n";
18178 ++count_string;
18179
18180 if (count_string >= 512)
18181 {
18182 fputs (to_close, file);
18183
18184 for_string = "\t.byte \"";
18185 for_decimal = "\t.byte ";
18186 to_close = NULL;
18187 count_string = 0;
18188 }
18189 }
18190 else
18191 {
18192 if (for_decimal)
18193 fputs (for_decimal, file);
18194 fprintf (file, "%d", c);
18195
18196 for_string = "\n\t.byte \"";
18197 for_decimal = ", ";
18198 to_close = "\n";
18199 count_string = 0;
18200 }
18201 }
18202
18203 /* Now close the string if we have written one. Then end the line. */
18204 if (to_close)
9ebbca7d 18205 fputs (to_close, file);
9878760c
RK
18206}
18207\f
18208/* Generate a unique section name for FILENAME for a section type
18209 represented by SECTION_DESC. Output goes into BUF.
18210
18211 SECTION_DESC can be any string, as long as it is different for each
18212 possible section type.
18213
18214 We name the section in the same manner as xlc. The name begins with an
18215 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18216 names) with the last period replaced by the string SECTION_DESC. If
18217 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18218 the name. */
9878760c
RK
18219
18220void
f676971a 18221rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18222 const char *section_desc)
9878760c 18223{
9ebbca7d 18224 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18225 char *p;
18226 int len;
9878760c
RK
18227
18228 after_last_slash = filename;
18229 for (q = filename; *q; q++)
11e5fe42
RK
18230 {
18231 if (*q == '/')
18232 after_last_slash = q + 1;
18233 else if (*q == '.')
18234 last_period = q;
18235 }
9878760c 18236
11e5fe42 18237 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18238 *buf = (char *) xmalloc (len);
9878760c
RK
18239
18240 p = *buf;
18241 *p++ = '_';
18242
18243 for (q = after_last_slash; *q; q++)
18244 {
11e5fe42 18245 if (q == last_period)
c4ad648e 18246 {
9878760c
RK
18247 strcpy (p, section_desc);
18248 p += strlen (section_desc);
e3981aab 18249 break;
c4ad648e 18250 }
9878760c 18251
e9a780ec 18252 else if (ISALNUM (*q))
c4ad648e 18253 *p++ = *q;
9878760c
RK
18254 }
18255
11e5fe42 18256 if (last_period == 0)
9878760c
RK
18257 strcpy (p, section_desc);
18258 else
18259 *p = '\0';
18260}
e165f3f0 18261\f
a4f6c312 18262/* Emit profile function. */
411707f4 18263
411707f4 18264void
a2369ed3 18265output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18266{
858081ad
AH
18267 /* Non-standard profiling for kernels, which just saves LR then calls
18268 _mcount without worrying about arg saves. The idea is to change
18269 the function prologue as little as possible as it isn't easy to
18270 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18271 if (TARGET_PROFILE_KERNEL)
18272 return;
18273
8480e480
CC
18274 if (DEFAULT_ABI == ABI_AIX)
18275 {
9739c90c
JJ
18276#ifndef NO_PROFILE_COUNTERS
18277# define NO_PROFILE_COUNTERS 0
18278#endif
f676971a 18279 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
18280 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18281 else
18282 {
18283 char buf[30];
18284 const char *label_name;
18285 rtx fun;
411707f4 18286
9739c90c
JJ
18287 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18288 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18289 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18290
9739c90c
JJ
18291 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18292 fun, Pmode);
18293 }
8480e480 18294 }
ee890fe2
SS
18295 else if (DEFAULT_ABI == ABI_DARWIN)
18296 {
d5fa86ba 18297 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18298 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18299
18300 /* Be conservative and always set this, at least for now. */
e3b5732b 18301 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18302
18303#if TARGET_MACHO
18304 /* For PIC code, set up a stub and collect the caller's address
18305 from r0, which is where the prologue puts it. */
11abc112 18306 if (MACHOPIC_INDIRECT
e3b5732b 18307 && crtl->uses_pic_offset_table)
11abc112 18308 caller_addr_regno = 0;
ee890fe2
SS
18309#endif
18310 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18311 0, VOIDmode, 1,
18312 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18313 }
411707f4
CC
18314}
18315
a4f6c312 18316/* Write function profiler code. */
e165f3f0
RK
18317
18318void
a2369ed3 18319output_function_profiler (FILE *file, int labelno)
e165f3f0 18320{
3daf36a4 18321 char buf[100];
e165f3f0 18322
38c1f2d7 18323 switch (DEFAULT_ABI)
3daf36a4 18324 {
38c1f2d7 18325 default:
37409796 18326 gcc_unreachable ();
38c1f2d7
MM
18327
18328 case ABI_V4:
09eeeacb
AM
18329 if (!TARGET_32BIT)
18330 {
d4ee4d25 18331 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18332 return;
18333 }
ffcfcb5f 18334 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18335 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18336 if (NO_PROFILE_COUNTERS)
18337 {
18338 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18339 reg_names[0], reg_names[1]);
18340 }
18341 else if (TARGET_SECURE_PLT && flag_pic)
18342 {
18343 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18344 reg_names[0], reg_names[1]);
18345 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18346 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18347 reg_names[12], reg_names[12]);
18348 assemble_name (file, buf);
18349 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18350 assemble_name (file, buf);
18351 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18352 }
18353 else if (flag_pic == 1)
38c1f2d7 18354 {
dfdfa60f 18355 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18356 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18357 reg_names[0], reg_names[1]);
17167fd8 18358 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18359 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18360 assemble_name (file, buf);
17167fd8 18361 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18362 }
9ebbca7d 18363 else if (flag_pic > 1)
38c1f2d7 18364 {
71625f3d
AM
18365 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18366 reg_names[0], reg_names[1]);
9ebbca7d 18367 /* Now, we need to get the address of the label. */
71625f3d 18368 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18369 assemble_name (file, buf);
9ebbca7d
GK
18370 fputs ("-.\n1:", file);
18371 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18372 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18373 reg_names[0], reg_names[11]);
18374 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18375 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18376 }
38c1f2d7
MM
18377 else
18378 {
17167fd8 18379 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18380 assemble_name (file, buf);
dfdfa60f 18381 fputs ("@ha\n", file);
71625f3d
AM
18382 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18383 reg_names[0], reg_names[1]);
a260abc9 18384 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18385 assemble_name (file, buf);
17167fd8 18386 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18387 }
18388
50d440bc 18389 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18390 fprintf (file, "\tbl %s%s\n",
18391 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18392 break;
18393
18394 case ABI_AIX:
ee890fe2 18395 case ABI_DARWIN:
ffcfcb5f
AM
18396 if (!TARGET_PROFILE_KERNEL)
18397 {
a3c9585f 18398 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18399 }
18400 else
18401 {
37409796 18402 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18403
18404 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18405 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18406
6de9cd9a 18407 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18408 {
18409 asm_fprintf (file, "\tstd %s,24(%s)\n",
18410 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18411 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18412 asm_fprintf (file, "\tld %s,24(%s)\n",
18413 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18414 }
18415 else
18416 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18417 }
38c1f2d7
MM
18418 break;
18419 }
e165f3f0 18420}
a251ffd0 18421
b54cf83a 18422\f
44cd321e
PS
18423
18424/* The following variable value is the last issued insn. */
18425
18426static rtx last_scheduled_insn;
18427
18428/* The following variable helps to balance issuing of load and
18429 store instructions */
18430
18431static int load_store_pendulum;
18432
b54cf83a
DE
18433/* Power4 load update and store update instructions are cracked into a
18434 load or store and an integer insn which are executed in the same cycle.
18435 Branches have their own dispatch slot which does not count against the
18436 GCC issue rate, but it changes the program flow so there are no other
18437 instructions to issue in this cycle. */
18438
18439static int
f676971a
EC
18440rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18441 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18442 rtx insn, int more)
b54cf83a 18443{
44cd321e 18444 last_scheduled_insn = insn;
b54cf83a
DE
18445 if (GET_CODE (PATTERN (insn)) == USE
18446 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18447 {
18448 cached_can_issue_more = more;
18449 return cached_can_issue_more;
18450 }
18451
18452 if (insn_terminates_group_p (insn, current_group))
18453 {
18454 cached_can_issue_more = 0;
18455 return cached_can_issue_more;
18456 }
b54cf83a 18457
d296e02e
AP
18458 /* If no reservation, but reach here */
18459 if (recog_memoized (insn) < 0)
18460 return more;
18461
ec507f2d 18462 if (rs6000_sched_groups)
b54cf83a 18463 {
cbe26ab8 18464 if (is_microcoded_insn (insn))
44cd321e 18465 cached_can_issue_more = 0;
cbe26ab8 18466 else if (is_cracked_insn (insn))
44cd321e
PS
18467 cached_can_issue_more = more > 2 ? more - 2 : 0;
18468 else
18469 cached_can_issue_more = more - 1;
18470
18471 return cached_can_issue_more;
b54cf83a 18472 }
165b263e 18473
d296e02e
AP
18474 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18475 return 0;
18476
44cd321e
PS
18477 cached_can_issue_more = more - 1;
18478 return cached_can_issue_more;
b54cf83a
DE
18479}
18480
a251ffd0
TG
18481/* Adjust the cost of a scheduling dependency. Return the new cost of
18482 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18483
c237e94a 18484static int
0a4f0294 18485rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18486{
44cd321e 18487 enum attr_type attr_type;
a251ffd0 18488
44cd321e 18489 if (! recog_memoized (insn))
a251ffd0
TG
18490 return 0;
18491
44cd321e 18492 switch (REG_NOTE_KIND (link))
a251ffd0 18493 {
44cd321e
PS
18494 case REG_DEP_TRUE:
18495 {
18496 /* Data dependency; DEP_INSN writes a register that INSN reads
18497 some cycles later. */
18498
18499 /* Separate a load from a narrower, dependent store. */
18500 if (rs6000_sched_groups
18501 && GET_CODE (PATTERN (insn)) == SET
18502 && GET_CODE (PATTERN (dep_insn)) == SET
18503 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18504 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18505 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18506 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18507 return cost + 14;
18508
18509 attr_type = get_attr_type (insn);
18510
18511 switch (attr_type)
18512 {
18513 case TYPE_JMPREG:
18514 /* Tell the first scheduling pass about the latency between
18515 a mtctr and bctr (and mtlr and br/blr). The first
18516 scheduling pass will not know about this latency since
18517 the mtctr instruction, which has the latency associated
18518 to it, will be generated by reload. */
18519 return TARGET_POWER ? 5 : 4;
18520 case TYPE_BRANCH:
18521 /* Leave some extra cycles between a compare and its
18522 dependent branch, to inhibit expensive mispredicts. */
18523 if ((rs6000_cpu_attr == CPU_PPC603
18524 || rs6000_cpu_attr == CPU_PPC604
18525 || rs6000_cpu_attr == CPU_PPC604E
18526 || rs6000_cpu_attr == CPU_PPC620
18527 || rs6000_cpu_attr == CPU_PPC630
18528 || rs6000_cpu_attr == CPU_PPC750
18529 || rs6000_cpu_attr == CPU_PPC7400
18530 || rs6000_cpu_attr == CPU_PPC7450
18531 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18532 || rs6000_cpu_attr == CPU_POWER5
18533 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18534 && recog_memoized (dep_insn)
18535 && (INSN_CODE (dep_insn) >= 0))
982afe02 18536
44cd321e
PS
18537 switch (get_attr_type (dep_insn))
18538 {
18539 case TYPE_CMP:
18540 case TYPE_COMPARE:
18541 case TYPE_DELAYED_COMPARE:
18542 case TYPE_IMUL_COMPARE:
18543 case TYPE_LMUL_COMPARE:
18544 case TYPE_FPCOMPARE:
18545 case TYPE_CR_LOGICAL:
18546 case TYPE_DELAYED_CR:
18547 return cost + 2;
18548 default:
18549 break;
18550 }
18551 break;
18552
18553 case TYPE_STORE:
18554 case TYPE_STORE_U:
18555 case TYPE_STORE_UX:
18556 case TYPE_FPSTORE:
18557 case TYPE_FPSTORE_U:
18558 case TYPE_FPSTORE_UX:
18559 if ((rs6000_cpu == PROCESSOR_POWER6)
18560 && recog_memoized (dep_insn)
18561 && (INSN_CODE (dep_insn) >= 0))
18562 {
18563
18564 if (GET_CODE (PATTERN (insn)) != SET)
18565 /* If this happens, we have to extend this to schedule
18566 optimally. Return default for now. */
18567 return cost;
18568
18569 /* Adjust the cost for the case where the value written
18570 by a fixed point operation is used as the address
18571 gen value on a store. */
18572 switch (get_attr_type (dep_insn))
18573 {
18574 case TYPE_LOAD:
18575 case TYPE_LOAD_U:
18576 case TYPE_LOAD_UX:
18577 case TYPE_CNTLZ:
18578 {
18579 if (! store_data_bypass_p (dep_insn, insn))
18580 return 4;
18581 break;
18582 }
18583 case TYPE_LOAD_EXT:
18584 case TYPE_LOAD_EXT_U:
18585 case TYPE_LOAD_EXT_UX:
18586 case TYPE_VAR_SHIFT_ROTATE:
18587 case TYPE_VAR_DELAYED_COMPARE:
18588 {
18589 if (! store_data_bypass_p (dep_insn, insn))
18590 return 6;
18591 break;
18592 }
18593 case TYPE_INTEGER:
18594 case TYPE_COMPARE:
18595 case TYPE_FAST_COMPARE:
18596 case TYPE_EXTS:
18597 case TYPE_SHIFT:
18598 case TYPE_INSERT_WORD:
18599 case TYPE_INSERT_DWORD:
18600 case TYPE_FPLOAD_U:
18601 case TYPE_FPLOAD_UX:
18602 case TYPE_STORE_U:
18603 case TYPE_STORE_UX:
18604 case TYPE_FPSTORE_U:
18605 case TYPE_FPSTORE_UX:
18606 {
18607 if (! store_data_bypass_p (dep_insn, insn))
18608 return 3;
18609 break;
18610 }
18611 case TYPE_IMUL:
18612 case TYPE_IMUL2:
18613 case TYPE_IMUL3:
18614 case TYPE_LMUL:
18615 case TYPE_IMUL_COMPARE:
18616 case TYPE_LMUL_COMPARE:
18617 {
18618 if (! store_data_bypass_p (dep_insn, insn))
18619 return 17;
18620 break;
18621 }
18622 case TYPE_IDIV:
18623 {
18624 if (! store_data_bypass_p (dep_insn, insn))
18625 return 45;
18626 break;
18627 }
18628 case TYPE_LDIV:
18629 {
18630 if (! store_data_bypass_p (dep_insn, insn))
18631 return 57;
18632 break;
18633 }
18634 default:
18635 break;
18636 }
18637 }
18638 break;
18639
18640 case TYPE_LOAD:
18641 case TYPE_LOAD_U:
18642 case TYPE_LOAD_UX:
18643 case TYPE_LOAD_EXT:
18644 case TYPE_LOAD_EXT_U:
18645 case TYPE_LOAD_EXT_UX:
18646 if ((rs6000_cpu == PROCESSOR_POWER6)
18647 && recog_memoized (dep_insn)
18648 && (INSN_CODE (dep_insn) >= 0))
18649 {
18650
18651 /* Adjust the cost for the case where the value written
18652 by a fixed point instruction is used within the address
18653 gen portion of a subsequent load(u)(x) */
18654 switch (get_attr_type (dep_insn))
18655 {
18656 case TYPE_LOAD:
18657 case TYPE_LOAD_U:
18658 case TYPE_LOAD_UX:
18659 case TYPE_CNTLZ:
18660 {
18661 if (set_to_load_agen (dep_insn, insn))
18662 return 4;
18663 break;
18664 }
18665 case TYPE_LOAD_EXT:
18666 case TYPE_LOAD_EXT_U:
18667 case TYPE_LOAD_EXT_UX:
18668 case TYPE_VAR_SHIFT_ROTATE:
18669 case TYPE_VAR_DELAYED_COMPARE:
18670 {
18671 if (set_to_load_agen (dep_insn, insn))
18672 return 6;
18673 break;
18674 }
18675 case TYPE_INTEGER:
18676 case TYPE_COMPARE:
18677 case TYPE_FAST_COMPARE:
18678 case TYPE_EXTS:
18679 case TYPE_SHIFT:
18680 case TYPE_INSERT_WORD:
18681 case TYPE_INSERT_DWORD:
18682 case TYPE_FPLOAD_U:
18683 case TYPE_FPLOAD_UX:
18684 case TYPE_STORE_U:
18685 case TYPE_STORE_UX:
18686 case TYPE_FPSTORE_U:
18687 case TYPE_FPSTORE_UX:
18688 {
18689 if (set_to_load_agen (dep_insn, insn))
18690 return 3;
18691 break;
18692 }
18693 case TYPE_IMUL:
18694 case TYPE_IMUL2:
18695 case TYPE_IMUL3:
18696 case TYPE_LMUL:
18697 case TYPE_IMUL_COMPARE:
18698 case TYPE_LMUL_COMPARE:
18699 {
18700 if (set_to_load_agen (dep_insn, insn))
18701 return 17;
18702 break;
18703 }
18704 case TYPE_IDIV:
18705 {
18706 if (set_to_load_agen (dep_insn, insn))
18707 return 45;
18708 break;
18709 }
18710 case TYPE_LDIV:
18711 {
18712 if (set_to_load_agen (dep_insn, insn))
18713 return 57;
18714 break;
18715 }
18716 default:
18717 break;
18718 }
18719 }
18720 break;
18721
18722 case TYPE_FPLOAD:
18723 if ((rs6000_cpu == PROCESSOR_POWER6)
18724 && recog_memoized (dep_insn)
18725 && (INSN_CODE (dep_insn) >= 0)
18726 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18727 return 2;
18728
18729 default:
18730 break;
18731 }
c9dbf840 18732
a251ffd0 18733 /* Fall out to return default cost. */
44cd321e
PS
18734 }
18735 break;
18736
18737 case REG_DEP_OUTPUT:
18738 /* Output dependency; DEP_INSN writes a register that INSN writes some
18739 cycles later. */
18740 if ((rs6000_cpu == PROCESSOR_POWER6)
18741 && recog_memoized (dep_insn)
18742 && (INSN_CODE (dep_insn) >= 0))
18743 {
18744 attr_type = get_attr_type (insn);
18745
18746 switch (attr_type)
18747 {
18748 case TYPE_FP:
18749 if (get_attr_type (dep_insn) == TYPE_FP)
18750 return 1;
18751 break;
18752 case TYPE_FPLOAD:
18753 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18754 return 2;
18755 break;
18756 default:
18757 break;
18758 }
18759 }
18760 case REG_DEP_ANTI:
18761 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18762 cycles later. */
18763 return 0;
18764
18765 default:
18766 gcc_unreachable ();
a251ffd0
TG
18767 }
18768
18769 return cost;
18770}
b6c9286a 18771
cbe26ab8 18772/* The function returns a true if INSN is microcoded.
839a4992 18773 Return false otherwise. */
cbe26ab8
DN
18774
18775static bool
18776is_microcoded_insn (rtx insn)
18777{
18778 if (!insn || !INSN_P (insn)
18779 || GET_CODE (PATTERN (insn)) == USE
18780 || GET_CODE (PATTERN (insn)) == CLOBBER)
18781 return false;
18782
d296e02e
AP
18783 if (rs6000_cpu_attr == CPU_CELL)
18784 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18785
ec507f2d 18786 if (rs6000_sched_groups)
cbe26ab8
DN
18787 {
18788 enum attr_type type = get_attr_type (insn);
18789 if (type == TYPE_LOAD_EXT_U
18790 || type == TYPE_LOAD_EXT_UX
18791 || type == TYPE_LOAD_UX
18792 || type == TYPE_STORE_UX
18793 || type == TYPE_MFCR)
c4ad648e 18794 return true;
cbe26ab8
DN
18795 }
18796
18797 return false;
18798}
18799
cbe26ab8
DN
18800/* The function returns true if INSN is cracked into 2 instructions
18801 by the processor (and therefore occupies 2 issue slots). */
18802
18803static bool
18804is_cracked_insn (rtx insn)
18805{
18806 if (!insn || !INSN_P (insn)
18807 || GET_CODE (PATTERN (insn)) == USE
18808 || GET_CODE (PATTERN (insn)) == CLOBBER)
18809 return false;
18810
ec507f2d 18811 if (rs6000_sched_groups)
cbe26ab8
DN
18812 {
18813 enum attr_type type = get_attr_type (insn);
18814 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18815 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18816 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18817 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18818 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18819 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18820 || type == TYPE_IDIV || type == TYPE_LDIV
18821 || type == TYPE_INSERT_WORD)
18822 return true;
cbe26ab8
DN
18823 }
18824
18825 return false;
18826}
18827
18828/* The function returns true if INSN can be issued only from
a3c9585f 18829 the branch slot. */
cbe26ab8
DN
18830
18831static bool
18832is_branch_slot_insn (rtx insn)
18833{
18834 if (!insn || !INSN_P (insn)
18835 || GET_CODE (PATTERN (insn)) == USE
18836 || GET_CODE (PATTERN (insn)) == CLOBBER)
18837 return false;
18838
ec507f2d 18839 if (rs6000_sched_groups)
cbe26ab8
DN
18840 {
18841 enum attr_type type = get_attr_type (insn);
18842 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18843 return true;
cbe26ab8
DN
18844 return false;
18845 }
18846
18847 return false;
18848}
79ae11c4 18849
44cd321e
PS
18850/* The function returns true if out_inst sets a value that is
18851 used in the address generation computation of in_insn */
18852static bool
18853set_to_load_agen (rtx out_insn, rtx in_insn)
18854{
18855 rtx out_set, in_set;
18856
18857 /* For performance reasons, only handle the simple case where
18858 both loads are a single_set. */
18859 out_set = single_set (out_insn);
18860 if (out_set)
18861 {
18862 in_set = single_set (in_insn);
18863 if (in_set)
18864 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18865 }
18866
18867 return false;
18868}
18869
18870/* The function returns true if the target storage location of
18871 out_insn is adjacent to the target storage location of in_insn */
18872/* Return 1 if memory locations are adjacent. */
18873
18874static bool
18875adjacent_mem_locations (rtx insn1, rtx insn2)
18876{
18877
e3a0e200
PB
18878 rtx a = get_store_dest (PATTERN (insn1));
18879 rtx b = get_store_dest (PATTERN (insn2));
18880
44cd321e
PS
18881 if ((GET_CODE (XEXP (a, 0)) == REG
18882 || (GET_CODE (XEXP (a, 0)) == PLUS
18883 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18884 && (GET_CODE (XEXP (b, 0)) == REG
18885 || (GET_CODE (XEXP (b, 0)) == PLUS
18886 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18887 {
f98e8938 18888 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18889 rtx reg0, reg1;
44cd321e
PS
18890
18891 if (GET_CODE (XEXP (a, 0)) == PLUS)
18892 {
18893 reg0 = XEXP (XEXP (a, 0), 0);
18894 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18895 }
18896 else
18897 reg0 = XEXP (a, 0);
18898
18899 if (GET_CODE (XEXP (b, 0)) == PLUS)
18900 {
18901 reg1 = XEXP (XEXP (b, 0), 0);
18902 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18903 }
18904 else
18905 reg1 = XEXP (b, 0);
18906
18907 val_diff = val1 - val0;
18908
18909 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18910 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18911 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18912 }
18913
18914 return false;
18915}
18916
a4f6c312 18917/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18918 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18919 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
18920 define this macro if you do not need to adjust the scheduling
18921 priorities of insns. */
bef84347 18922
c237e94a 18923static int
a2369ed3 18924rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 18925{
a4f6c312
SS
18926 /* On machines (like the 750) which have asymmetric integer units,
18927 where one integer unit can do multiply and divides and the other
18928 can't, reduce the priority of multiply/divide so it is scheduled
18929 before other integer operations. */
bef84347
VM
18930
18931#if 0
2c3c49de 18932 if (! INSN_P (insn))
bef84347
VM
18933 return priority;
18934
18935 if (GET_CODE (PATTERN (insn)) == USE)
18936 return priority;
18937
18938 switch (rs6000_cpu_attr) {
18939 case CPU_PPC750:
18940 switch (get_attr_type (insn))
18941 {
18942 default:
18943 break;
18944
18945 case TYPE_IMUL:
18946 case TYPE_IDIV:
3cb999d8
DE
18947 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
18948 priority, priority);
bef84347
VM
18949 if (priority >= 0 && priority < 0x01000000)
18950 priority >>= 3;
18951 break;
18952 }
18953 }
18954#endif
18955
44cd321e 18956 if (insn_must_be_first_in_group (insn)
79ae11c4 18957 && reload_completed
f676971a 18958 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
18959 && rs6000_sched_restricted_insns_priority)
18960 {
18961
c4ad648e
AM
18962 /* Prioritize insns that can be dispatched only in the first
18963 dispatch slot. */
79ae11c4 18964 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
18965 /* Attach highest priority to insn. This means that in
18966 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 18967 precede 'priority' (critical path) considerations. */
f676971a 18968 return current_sched_info->sched_max_insns_priority;
79ae11c4 18969 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 18970 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
18971 haifa-sched.c:ready_sort(), only 'priority' (critical path)
18972 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
18973 return (priority + 1);
18974 }
79ae11c4 18975
44cd321e
PS
18976 if (rs6000_cpu == PROCESSOR_POWER6
18977 && ((load_store_pendulum == -2 && is_load_insn (insn))
18978 || (load_store_pendulum == 2 && is_store_insn (insn))))
18979 /* Attach highest priority to insn if the scheduler has just issued two
18980 stores and this instruction is a load, or two loads and this instruction
18981 is a store. Power6 wants loads and stores scheduled alternately
18982 when possible */
18983 return current_sched_info->sched_max_insns_priority;
18984
bef84347
VM
18985 return priority;
18986}
18987
d296e02e
AP
18988/* Return true if the instruction is nonpipelined on the Cell. */
18989static bool
18990is_nonpipeline_insn (rtx insn)
18991{
18992 enum attr_type type;
18993 if (!insn || !INSN_P (insn)
18994 || GET_CODE (PATTERN (insn)) == USE
18995 || GET_CODE (PATTERN (insn)) == CLOBBER)
18996 return false;
18997
18998 type = get_attr_type (insn);
18999 if (type == TYPE_IMUL
19000 || type == TYPE_IMUL2
19001 || type == TYPE_IMUL3
19002 || type == TYPE_LMUL
19003 || type == TYPE_IDIV
19004 || type == TYPE_LDIV
19005 || type == TYPE_SDIV
19006 || type == TYPE_DDIV
19007 || type == TYPE_SSQRT
19008 || type == TYPE_DSQRT
19009 || type == TYPE_MFCR
19010 || type == TYPE_MFCRF
19011 || type == TYPE_MFJMPR)
19012 {
19013 return true;
19014 }
19015 return false;
19016}
19017
19018
a4f6c312
SS
19019/* Return how many instructions the machine can issue per cycle. */
19020
c237e94a 19021static int
863d938c 19022rs6000_issue_rate (void)
b6c9286a 19023{
3317bab1
DE
19024 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19025 if (!reload_completed)
19026 return 1;
19027
b6c9286a 19028 switch (rs6000_cpu_attr) {
3cb999d8
DE
19029 case CPU_RIOS1: /* ? */
19030 case CPU_RS64A:
19031 case CPU_PPC601: /* ? */
ed947a96 19032 case CPU_PPC7450:
3cb999d8 19033 return 3;
b54cf83a 19034 case CPU_PPC440:
b6c9286a 19035 case CPU_PPC603:
bef84347 19036 case CPU_PPC750:
ed947a96 19037 case CPU_PPC7400:
be12c2b0 19038 case CPU_PPC8540:
d296e02e 19039 case CPU_CELL:
fa41c305
EW
19040 case CPU_PPCE300C2:
19041 case CPU_PPCE300C3:
edae5fe3 19042 case CPU_PPCE500MC:
f676971a 19043 return 2;
3cb999d8 19044 case CPU_RIOS2:
b6c9286a 19045 case CPU_PPC604:
19684119 19046 case CPU_PPC604E:
b6c9286a 19047 case CPU_PPC620:
3cb999d8 19048 case CPU_PPC630:
b6c9286a 19049 return 4;
cbe26ab8 19050 case CPU_POWER4:
ec507f2d 19051 case CPU_POWER5:
44cd321e 19052 case CPU_POWER6:
cbe26ab8 19053 return 5;
b6c9286a
MM
19054 default:
19055 return 1;
19056 }
19057}
19058
be12c2b0
VM
19059/* Return how many instructions to look ahead for better insn
19060 scheduling. */
19061
19062static int
863d938c 19063rs6000_use_sched_lookahead (void)
be12c2b0
VM
19064{
19065 if (rs6000_cpu_attr == CPU_PPC8540)
19066 return 4;
d296e02e
AP
19067 if (rs6000_cpu_attr == CPU_CELL)
19068 return (reload_completed ? 8 : 0);
be12c2b0
VM
19069 return 0;
19070}
19071
d296e02e
AP
19072/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19073static int
19074rs6000_use_sched_lookahead_guard (rtx insn)
19075{
19076 if (rs6000_cpu_attr != CPU_CELL)
19077 return 1;
19078
19079 if (insn == NULL_RTX || !INSN_P (insn))
19080 abort ();
982afe02 19081
d296e02e
AP
19082 if (!reload_completed
19083 || is_nonpipeline_insn (insn)
19084 || is_microcoded_insn (insn))
19085 return 0;
19086
19087 return 1;
19088}
19089
569fa502
DN
19090/* Determine is PAT refers to memory. */
19091
19092static bool
19093is_mem_ref (rtx pat)
19094{
19095 const char * fmt;
19096 int i, j;
19097 bool ret = false;
19098
1de59bbd
DE
19099 /* stack_tie does not produce any real memory traffic. */
19100 if (GET_CODE (pat) == UNSPEC
19101 && XINT (pat, 1) == UNSPEC_TIE)
19102 return false;
19103
569fa502
DN
19104 if (GET_CODE (pat) == MEM)
19105 return true;
19106
19107 /* Recursively process the pattern. */
19108 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19109
19110 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19111 {
19112 if (fmt[i] == 'e')
19113 ret |= is_mem_ref (XEXP (pat, i));
19114 else if (fmt[i] == 'E')
19115 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19116 ret |= is_mem_ref (XVECEXP (pat, i, j));
19117 }
19118
19119 return ret;
19120}
19121
19122/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19123
569fa502
DN
19124static bool
19125is_load_insn1 (rtx pat)
19126{
19127 if (!pat || pat == NULL_RTX)
19128 return false;
19129
19130 if (GET_CODE (pat) == SET)
19131 return is_mem_ref (SET_SRC (pat));
19132
19133 if (GET_CODE (pat) == PARALLEL)
19134 {
19135 int i;
19136
19137 for (i = 0; i < XVECLEN (pat, 0); i++)
19138 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19139 return true;
19140 }
19141
19142 return false;
19143}
19144
19145/* Determine if INSN loads from memory. */
19146
19147static bool
19148is_load_insn (rtx insn)
19149{
19150 if (!insn || !INSN_P (insn))
19151 return false;
19152
19153 if (GET_CODE (insn) == CALL_INSN)
19154 return false;
19155
19156 return is_load_insn1 (PATTERN (insn));
19157}
19158
19159/* Determine if PAT is a PATTERN of a store insn. */
19160
19161static bool
19162is_store_insn1 (rtx pat)
19163{
19164 if (!pat || pat == NULL_RTX)
19165 return false;
19166
19167 if (GET_CODE (pat) == SET)
19168 return is_mem_ref (SET_DEST (pat));
19169
19170 if (GET_CODE (pat) == PARALLEL)
19171 {
19172 int i;
19173
19174 for (i = 0; i < XVECLEN (pat, 0); i++)
19175 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19176 return true;
19177 }
19178
19179 return false;
19180}
19181
19182/* Determine if INSN stores to memory. */
19183
19184static bool
19185is_store_insn (rtx insn)
19186{
19187 if (!insn || !INSN_P (insn))
19188 return false;
19189
19190 return is_store_insn1 (PATTERN (insn));
19191}
19192
e3a0e200
PB
19193/* Return the dest of a store insn. */
19194
19195static rtx
19196get_store_dest (rtx pat)
19197{
19198 gcc_assert (is_store_insn1 (pat));
19199
19200 if (GET_CODE (pat) == SET)
19201 return SET_DEST (pat);
19202 else if (GET_CODE (pat) == PARALLEL)
19203 {
19204 int i;
19205
19206 for (i = 0; i < XVECLEN (pat, 0); i++)
19207 {
19208 rtx inner_pat = XVECEXP (pat, 0, i);
19209 if (GET_CODE (inner_pat) == SET
19210 && is_mem_ref (SET_DEST (inner_pat)))
19211 return inner_pat;
19212 }
19213 }
19214 /* We shouldn't get here, because we should have either a simple
19215 store insn or a store with update which are covered above. */
19216 gcc_unreachable();
19217}
19218
569fa502
DN
19219/* Returns whether the dependence between INSN and NEXT is considered
19220 costly by the given target. */
19221
19222static bool
b198261f 19223rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19224{
b198261f
MK
19225 rtx insn;
19226 rtx next;
19227
aabcd309 19228 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19229 allow all dependent insns in the same group.
569fa502
DN
19230 This is the most aggressive option. */
19231 if (rs6000_sched_costly_dep == no_dep_costly)
19232 return false;
19233
f676971a 19234 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19235 do not allow dependent instructions in the same group.
19236 This is the most conservative option. */
19237 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19238 return true;
569fa502 19239
b198261f
MK
19240 insn = DEP_PRO (dep);
19241 next = DEP_CON (dep);
19242
f676971a
EC
19243 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19244 && is_load_insn (next)
569fa502
DN
19245 && is_store_insn (insn))
19246 /* Prevent load after store in the same group. */
19247 return true;
19248
19249 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19250 && is_load_insn (next)
569fa502 19251 && is_store_insn (insn)
e2f6ff94 19252 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19253 /* Prevent load after store in the same group if it is a true
19254 dependence. */
569fa502 19255 return true;
f676971a
EC
19256
19257 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19258 and will not be scheduled in the same group. */
19259 if (rs6000_sched_costly_dep <= max_dep_latency
19260 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19261 return true;
19262
19263 return false;
19264}
19265
f676971a 19266/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19267 skipping any "non-active" insns - insns that will not actually occupy
19268 an issue slot. Return NULL_RTX if such an insn is not found. */
19269
19270static rtx
19271get_next_active_insn (rtx insn, rtx tail)
19272{
f489aff8 19273 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19274 return NULL_RTX;
19275
f489aff8 19276 while (1)
cbe26ab8 19277 {
f489aff8
AM
19278 insn = NEXT_INSN (insn);
19279 if (insn == NULL_RTX || insn == tail)
19280 return NULL_RTX;
cbe26ab8 19281
f489aff8
AM
19282 if (CALL_P (insn)
19283 || JUMP_P (insn)
19284 || (NONJUMP_INSN_P (insn)
19285 && GET_CODE (PATTERN (insn)) != USE
19286 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19287 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19288 break;
19289 }
19290 return insn;
cbe26ab8
DN
19291}
19292
44cd321e
PS
19293/* We are about to begin issuing insns for this clock cycle. */
19294
19295static int
19296rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19297 rtx *ready ATTRIBUTE_UNUSED,
19298 int *pn_ready ATTRIBUTE_UNUSED,
19299 int clock_var ATTRIBUTE_UNUSED)
19300{
d296e02e
AP
19301 int n_ready = *pn_ready;
19302
44cd321e
PS
19303 if (sched_verbose)
19304 fprintf (dump, "// rs6000_sched_reorder :\n");
19305
d296e02e
AP
19306 /* Reorder the ready list, if the second to last ready insn
19307 is a nonepipeline insn. */
19308 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19309 {
19310 if (is_nonpipeline_insn (ready[n_ready - 1])
19311 && (recog_memoized (ready[n_ready - 2]) > 0))
19312 /* Simply swap first two insns. */
19313 {
19314 rtx tmp = ready[n_ready - 1];
19315 ready[n_ready - 1] = ready[n_ready - 2];
19316 ready[n_ready - 2] = tmp;
19317 }
19318 }
19319
44cd321e
PS
19320 if (rs6000_cpu == PROCESSOR_POWER6)
19321 load_store_pendulum = 0;
19322
19323 return rs6000_issue_rate ();
19324}
19325
19326/* Like rs6000_sched_reorder, but called after issuing each insn. */
19327
19328static int
19329rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19330 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19331{
19332 if (sched_verbose)
19333 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19334
19335 /* For Power6, we need to handle some special cases to try and keep the
19336 store queue from overflowing and triggering expensive flushes.
19337
19338 This code monitors how load and store instructions are being issued
19339 and skews the ready list one way or the other to increase the likelihood
19340 that a desired instruction is issued at the proper time.
19341
19342 A couple of things are done. First, we maintain a "load_store_pendulum"
19343 to track the current state of load/store issue.
19344
19345 - If the pendulum is at zero, then no loads or stores have been
19346 issued in the current cycle so we do nothing.
19347
19348 - If the pendulum is 1, then a single load has been issued in this
19349 cycle and we attempt to locate another load in the ready list to
19350 issue with it.
19351
2f8e468b 19352 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19353 issued in this cycle, so we increase the priority of the first load
19354 in the ready list to increase it's likelihood of being chosen first
19355 in the next cycle.
19356
19357 - If the pendulum is -1, then a single store has been issued in this
19358 cycle and we attempt to locate another store in the ready list to
19359 issue with it, preferring a store to an adjacent memory location to
19360 facilitate store pairing in the store queue.
19361
19362 - If the pendulum is 2, then two loads have already been
19363 issued in this cycle, so we increase the priority of the first store
19364 in the ready list to increase it's likelihood of being chosen first
19365 in the next cycle.
19366
19367 - If the pendulum < -2 or > 2, then do nothing.
19368
19369 Note: This code covers the most common scenarios. There exist non
19370 load/store instructions which make use of the LSU and which
19371 would need to be accounted for to strictly model the behavior
19372 of the machine. Those instructions are currently unaccounted
19373 for to help minimize compile time overhead of this code.
19374 */
19375 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19376 {
19377 int pos;
19378 int i;
19379 rtx tmp;
19380
19381 if (is_store_insn (last_scheduled_insn))
19382 /* Issuing a store, swing the load_store_pendulum to the left */
19383 load_store_pendulum--;
19384 else if (is_load_insn (last_scheduled_insn))
19385 /* Issuing a load, swing the load_store_pendulum to the right */
19386 load_store_pendulum++;
19387 else
19388 return cached_can_issue_more;
19389
19390 /* If the pendulum is balanced, or there is only one instruction on
19391 the ready list, then all is well, so return. */
19392 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19393 return cached_can_issue_more;
19394
19395 if (load_store_pendulum == 1)
19396 {
19397 /* A load has been issued in this cycle. Scan the ready list
19398 for another load to issue with it */
19399 pos = *pn_ready-1;
19400
19401 while (pos >= 0)
19402 {
19403 if (is_load_insn (ready[pos]))
19404 {
19405 /* Found a load. Move it to the head of the ready list,
19406 and adjust it's priority so that it is more likely to
19407 stay there */
19408 tmp = ready[pos];
19409 for (i=pos; i<*pn_ready-1; i++)
19410 ready[i] = ready[i + 1];
19411 ready[*pn_ready-1] = tmp;
19412 if INSN_PRIORITY_KNOWN (tmp)
19413 INSN_PRIORITY (tmp)++;
19414 break;
19415 }
19416 pos--;
19417 }
19418 }
19419 else if (load_store_pendulum == -2)
19420 {
19421 /* Two stores have been issued in this cycle. Increase the
19422 priority of the first load in the ready list to favor it for
19423 issuing in the next cycle. */
19424 pos = *pn_ready-1;
19425
19426 while (pos >= 0)
19427 {
19428 if (is_load_insn (ready[pos])
19429 && INSN_PRIORITY_KNOWN (ready[pos]))
19430 {
19431 INSN_PRIORITY (ready[pos])++;
19432
19433 /* Adjust the pendulum to account for the fact that a load
19434 was found and increased in priority. This is to prevent
19435 increasing the priority of multiple loads */
19436 load_store_pendulum--;
19437
19438 break;
19439 }
19440 pos--;
19441 }
19442 }
19443 else if (load_store_pendulum == -1)
19444 {
19445 /* A store has been issued in this cycle. Scan the ready list for
19446 another store to issue with it, preferring a store to an adjacent
19447 memory location */
19448 int first_store_pos = -1;
19449
19450 pos = *pn_ready-1;
19451
19452 while (pos >= 0)
19453 {
19454 if (is_store_insn (ready[pos]))
19455 {
19456 /* Maintain the index of the first store found on the
19457 list */
19458 if (first_store_pos == -1)
19459 first_store_pos = pos;
19460
19461 if (is_store_insn (last_scheduled_insn)
19462 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19463 {
19464 /* Found an adjacent store. Move it to the head of the
19465 ready list, and adjust it's priority so that it is
19466 more likely to stay there */
19467 tmp = ready[pos];
19468 for (i=pos; i<*pn_ready-1; i++)
19469 ready[i] = ready[i + 1];
19470 ready[*pn_ready-1] = tmp;
19471 if INSN_PRIORITY_KNOWN (tmp)
19472 INSN_PRIORITY (tmp)++;
19473 first_store_pos = -1;
19474
19475 break;
19476 };
19477 }
19478 pos--;
19479 }
19480
19481 if (first_store_pos >= 0)
19482 {
19483 /* An adjacent store wasn't found, but a non-adjacent store was,
19484 so move the non-adjacent store to the front of the ready
19485 list, and adjust its priority so that it is more likely to
19486 stay there. */
19487 tmp = ready[first_store_pos];
19488 for (i=first_store_pos; i<*pn_ready-1; i++)
19489 ready[i] = ready[i + 1];
19490 ready[*pn_ready-1] = tmp;
19491 if INSN_PRIORITY_KNOWN (tmp)
19492 INSN_PRIORITY (tmp)++;
19493 }
19494 }
19495 else if (load_store_pendulum == 2)
19496 {
19497 /* Two loads have been issued in this cycle. Increase the priority
19498 of the first store in the ready list to favor it for issuing in
19499 the next cycle. */
19500 pos = *pn_ready-1;
19501
19502 while (pos >= 0)
19503 {
19504 if (is_store_insn (ready[pos])
19505 && INSN_PRIORITY_KNOWN (ready[pos]))
19506 {
19507 INSN_PRIORITY (ready[pos])++;
19508
19509 /* Adjust the pendulum to account for the fact that a store
19510 was found and increased in priority. This is to prevent
19511 increasing the priority of multiple stores */
19512 load_store_pendulum++;
19513
19514 break;
19515 }
19516 pos--;
19517 }
19518 }
19519 }
19520
19521 return cached_can_issue_more;
19522}
19523
839a4992 19524/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19525 of group WHICH_GROUP.
19526
19527 If WHICH_GROUP == current_group, this function will return true if INSN
19528 causes the termination of the current group (i.e, the dispatch group to
19529 which INSN belongs). This means that INSN will be the last insn in the
19530 group it belongs to.
19531
19532 If WHICH_GROUP == previous_group, this function will return true if INSN
19533 causes the termination of the previous group (i.e, the dispatch group that
19534 precedes the group to which INSN belongs). This means that INSN will be
19535 the first insn in the group it belongs to). */
19536
19537static bool
19538insn_terminates_group_p (rtx insn, enum group_termination which_group)
19539{
44cd321e 19540 bool first, last;
cbe26ab8
DN
19541
19542 if (! insn)
19543 return false;
569fa502 19544
44cd321e
PS
19545 first = insn_must_be_first_in_group (insn);
19546 last = insn_must_be_last_in_group (insn);
cbe26ab8 19547
44cd321e 19548 if (first && last)
cbe26ab8
DN
19549 return true;
19550
19551 if (which_group == current_group)
44cd321e 19552 return last;
cbe26ab8 19553 else if (which_group == previous_group)
44cd321e
PS
19554 return first;
19555
19556 return false;
19557}
19558
19559
19560static bool
19561insn_must_be_first_in_group (rtx insn)
19562{
19563 enum attr_type type;
19564
19565 if (!insn
19566 || insn == NULL_RTX
19567 || GET_CODE (insn) == NOTE
19568 || GET_CODE (PATTERN (insn)) == USE
19569 || GET_CODE (PATTERN (insn)) == CLOBBER)
19570 return false;
19571
19572 switch (rs6000_cpu)
cbe26ab8 19573 {
44cd321e
PS
19574 case PROCESSOR_POWER5:
19575 if (is_cracked_insn (insn))
19576 return true;
19577 case PROCESSOR_POWER4:
19578 if (is_microcoded_insn (insn))
19579 return true;
19580
19581 if (!rs6000_sched_groups)
19582 return false;
19583
19584 type = get_attr_type (insn);
19585
19586 switch (type)
19587 {
19588 case TYPE_MFCR:
19589 case TYPE_MFCRF:
19590 case TYPE_MTCR:
19591 case TYPE_DELAYED_CR:
19592 case TYPE_CR_LOGICAL:
19593 case TYPE_MTJMPR:
19594 case TYPE_MFJMPR:
19595 case TYPE_IDIV:
19596 case TYPE_LDIV:
19597 case TYPE_LOAD_L:
19598 case TYPE_STORE_C:
19599 case TYPE_ISYNC:
19600 case TYPE_SYNC:
19601 return true;
19602 default:
19603 break;
19604 }
19605 break;
19606 case PROCESSOR_POWER6:
19607 type = get_attr_type (insn);
19608
19609 switch (type)
19610 {
19611 case TYPE_INSERT_DWORD:
19612 case TYPE_EXTS:
19613 case TYPE_CNTLZ:
19614 case TYPE_SHIFT:
19615 case TYPE_VAR_SHIFT_ROTATE:
19616 case TYPE_TRAP:
19617 case TYPE_IMUL:
19618 case TYPE_IMUL2:
19619 case TYPE_IMUL3:
19620 case TYPE_LMUL:
19621 case TYPE_IDIV:
19622 case TYPE_INSERT_WORD:
19623 case TYPE_DELAYED_COMPARE:
19624 case TYPE_IMUL_COMPARE:
19625 case TYPE_LMUL_COMPARE:
19626 case TYPE_FPCOMPARE:
19627 case TYPE_MFCR:
19628 case TYPE_MTCR:
19629 case TYPE_MFJMPR:
19630 case TYPE_MTJMPR:
19631 case TYPE_ISYNC:
19632 case TYPE_SYNC:
19633 case TYPE_LOAD_L:
19634 case TYPE_STORE_C:
19635 case TYPE_LOAD_U:
19636 case TYPE_LOAD_UX:
19637 case TYPE_LOAD_EXT_UX:
19638 case TYPE_STORE_U:
19639 case TYPE_STORE_UX:
19640 case TYPE_FPLOAD_U:
19641 case TYPE_FPLOAD_UX:
19642 case TYPE_FPSTORE_U:
19643 case TYPE_FPSTORE_UX:
19644 return true;
19645 default:
19646 break;
19647 }
19648 break;
19649 default:
19650 break;
19651 }
19652
19653 return false;
19654}
19655
19656static bool
19657insn_must_be_last_in_group (rtx insn)
19658{
19659 enum attr_type type;
19660
19661 if (!insn
19662 || insn == NULL_RTX
19663 || GET_CODE (insn) == NOTE
19664 || GET_CODE (PATTERN (insn)) == USE
19665 || GET_CODE (PATTERN (insn)) == CLOBBER)
19666 return false;
19667
19668 switch (rs6000_cpu) {
19669 case PROCESSOR_POWER4:
19670 case PROCESSOR_POWER5:
19671 if (is_microcoded_insn (insn))
19672 return true;
19673
19674 if (is_branch_slot_insn (insn))
19675 return true;
19676
19677 break;
19678 case PROCESSOR_POWER6:
19679 type = get_attr_type (insn);
19680
19681 switch (type)
19682 {
19683 case TYPE_EXTS:
19684 case TYPE_CNTLZ:
19685 case TYPE_SHIFT:
19686 case TYPE_VAR_SHIFT_ROTATE:
19687 case TYPE_TRAP:
19688 case TYPE_IMUL:
19689 case TYPE_IMUL2:
19690 case TYPE_IMUL3:
19691 case TYPE_LMUL:
19692 case TYPE_IDIV:
19693 case TYPE_DELAYED_COMPARE:
19694 case TYPE_IMUL_COMPARE:
19695 case TYPE_LMUL_COMPARE:
19696 case TYPE_FPCOMPARE:
19697 case TYPE_MFCR:
19698 case TYPE_MTCR:
19699 case TYPE_MFJMPR:
19700 case TYPE_MTJMPR:
19701 case TYPE_ISYNC:
19702 case TYPE_SYNC:
19703 case TYPE_LOAD_L:
19704 case TYPE_STORE_C:
19705 return true;
19706 default:
19707 break;
cbe26ab8 19708 }
44cd321e
PS
19709 break;
19710 default:
19711 break;
19712 }
cbe26ab8
DN
19713
19714 return false;
19715}
19716
839a4992 19717/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19718 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19719
19720static bool
19721is_costly_group (rtx *group_insns, rtx next_insn)
19722{
19723 int i;
cbe26ab8
DN
19724 int issue_rate = rs6000_issue_rate ();
19725
19726 for (i = 0; i < issue_rate; i++)
19727 {
e2f6ff94
MK
19728 sd_iterator_def sd_it;
19729 dep_t dep;
cbe26ab8 19730 rtx insn = group_insns[i];
b198261f 19731
cbe26ab8 19732 if (!insn)
c4ad648e 19733 continue;
b198261f 19734
e2f6ff94 19735 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19736 {
b198261f
MK
19737 rtx next = DEP_CON (dep);
19738
19739 if (next == next_insn
19740 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19741 return true;
c4ad648e 19742 }
cbe26ab8
DN
19743 }
19744
19745 return false;
19746}
19747
f676971a 19748/* Utility of the function redefine_groups.
cbe26ab8
DN
19749 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19750 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19751 to keep it "far" (in a separate group) from GROUP_INSNS, following
19752 one of the following schemes, depending on the value of the flag
19753 -minsert_sched_nops = X:
19754 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19755 in order to force NEXT_INSN into a separate group.
f676971a
EC
19756 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19757 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19758 insertion (has a group just ended, how many vacant issue slots remain in the
19759 last group, and how many dispatch groups were encountered so far). */
19760
f676971a 19761static int
c4ad648e
AM
19762force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19763 rtx next_insn, bool *group_end, int can_issue_more,
19764 int *group_count)
cbe26ab8
DN
19765{
19766 rtx nop;
19767 bool force;
19768 int issue_rate = rs6000_issue_rate ();
19769 bool end = *group_end;
19770 int i;
19771
19772 if (next_insn == NULL_RTX)
19773 return can_issue_more;
19774
19775 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19776 return can_issue_more;
19777
19778 force = is_costly_group (group_insns, next_insn);
19779 if (!force)
19780 return can_issue_more;
19781
19782 if (sched_verbose > 6)
19783 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 19784 *group_count ,can_issue_more);
cbe26ab8
DN
19785
19786 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19787 {
19788 if (*group_end)
c4ad648e 19789 can_issue_more = 0;
cbe26ab8
DN
19790
19791 /* Since only a branch can be issued in the last issue_slot, it is
19792 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19793 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19794 in this case the last nop will start a new group and the branch
19795 will be forced to the new group. */
cbe26ab8 19796 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19797 can_issue_more--;
cbe26ab8
DN
19798
19799 while (can_issue_more > 0)
c4ad648e 19800 {
9390387d 19801 nop = gen_nop ();
c4ad648e
AM
19802 emit_insn_before (nop, next_insn);
19803 can_issue_more--;
19804 }
cbe26ab8
DN
19805
19806 *group_end = true;
19807 return 0;
f676971a 19808 }
cbe26ab8
DN
19809
19810 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19811 {
19812 int n_nops = rs6000_sched_insert_nops;
19813
f676971a 19814 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19815 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19816 if (can_issue_more == 0)
c4ad648e 19817 can_issue_more = issue_rate;
cbe26ab8
DN
19818 can_issue_more--;
19819 if (can_issue_more == 0)
c4ad648e
AM
19820 {
19821 can_issue_more = issue_rate - 1;
19822 (*group_count)++;
19823 end = true;
19824 for (i = 0; i < issue_rate; i++)
19825 {
19826 group_insns[i] = 0;
19827 }
19828 }
cbe26ab8
DN
19829
19830 while (n_nops > 0)
c4ad648e
AM
19831 {
19832 nop = gen_nop ();
19833 emit_insn_before (nop, next_insn);
19834 if (can_issue_more == issue_rate - 1) /* new group begins */
19835 end = false;
19836 can_issue_more--;
19837 if (can_issue_more == 0)
19838 {
19839 can_issue_more = issue_rate - 1;
19840 (*group_count)++;
19841 end = true;
19842 for (i = 0; i < issue_rate; i++)
19843 {
19844 group_insns[i] = 0;
19845 }
19846 }
19847 n_nops--;
19848 }
cbe26ab8
DN
19849
19850 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19851 can_issue_more++;
cbe26ab8 19852
c4ad648e
AM
19853 /* Is next_insn going to start a new group? */
19854 *group_end
19855 = (end
cbe26ab8
DN
19856 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19857 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19858 || (can_issue_more < issue_rate &&
c4ad648e 19859 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19860 if (*group_end && end)
c4ad648e 19861 (*group_count)--;
cbe26ab8
DN
19862
19863 if (sched_verbose > 6)
c4ad648e
AM
19864 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19865 *group_count, can_issue_more);
f676971a
EC
19866 return can_issue_more;
19867 }
cbe26ab8
DN
19868
19869 return can_issue_more;
19870}
19871
19872/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19873 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19874 form in practice. It tries to achieve this synchronization by forcing the
19875 estimated processor grouping on the compiler (as opposed to the function
19876 'pad_goups' which tries to force the scheduler's grouping on the processor).
19877
19878 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19879 examines the (estimated) dispatch groups that will be formed by the processor
19880 dispatcher. It marks these group boundaries to reflect the estimated
19881 processor grouping, overriding the grouping that the scheduler had marked.
19882 Depending on the value of the flag '-minsert-sched-nops' this function can
19883 force certain insns into separate groups or force a certain distance between
19884 them by inserting nops, for example, if there exists a "costly dependence"
19885 between the insns.
19886
19887 The function estimates the group boundaries that the processor will form as
0fa2e4df 19888 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19889 each insn. A subsequent insn will start a new group if one of the following
19890 4 cases applies:
19891 - no more vacant issue slots remain in the current dispatch group.
19892 - only the last issue slot, which is the branch slot, is vacant, but the next
19893 insn is not a branch.
19894 - only the last 2 or less issue slots, including the branch slot, are vacant,
19895 which means that a cracked insn (which occupies two issue slots) can't be
19896 issued in this group.
f676971a 19897 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19898 start a new group. */
19899
19900static int
19901redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19902{
19903 rtx insn, next_insn;
19904 int issue_rate;
19905 int can_issue_more;
19906 int slot, i;
19907 bool group_end;
19908 int group_count = 0;
19909 rtx *group_insns;
19910
19911 /* Initialize. */
19912 issue_rate = rs6000_issue_rate ();
5ead67f6 19913 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 19914 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
19915 {
19916 group_insns[i] = 0;
19917 }
19918 can_issue_more = issue_rate;
19919 slot = 0;
19920 insn = get_next_active_insn (prev_head_insn, tail);
19921 group_end = false;
19922
19923 while (insn != NULL_RTX)
19924 {
19925 slot = (issue_rate - can_issue_more);
19926 group_insns[slot] = insn;
19927 can_issue_more =
c4ad648e 19928 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 19929 if (insn_terminates_group_p (insn, current_group))
c4ad648e 19930 can_issue_more = 0;
cbe26ab8
DN
19931
19932 next_insn = get_next_active_insn (insn, tail);
19933 if (next_insn == NULL_RTX)
c4ad648e 19934 return group_count + 1;
cbe26ab8 19935
c4ad648e
AM
19936 /* Is next_insn going to start a new group? */
19937 group_end
19938 = (can_issue_more == 0
19939 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19940 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19941 || (can_issue_more < issue_rate &&
19942 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19943
f676971a 19944 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
19945 next_insn, &group_end, can_issue_more,
19946 &group_count);
cbe26ab8
DN
19947
19948 if (group_end)
c4ad648e
AM
19949 {
19950 group_count++;
19951 can_issue_more = 0;
19952 for (i = 0; i < issue_rate; i++)
19953 {
19954 group_insns[i] = 0;
19955 }
19956 }
cbe26ab8
DN
19957
19958 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 19959 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 19960 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 19961 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
19962
19963 insn = next_insn;
19964 if (can_issue_more == 0)
c4ad648e
AM
19965 can_issue_more = issue_rate;
19966 } /* while */
cbe26ab8
DN
19967
19968 return group_count;
19969}
19970
19971/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
19972 dispatch group boundaries that the scheduler had marked. Pad with nops
19973 any dispatch groups which have vacant issue slots, in order to force the
19974 scheduler's grouping on the processor dispatcher. The function
19975 returns the number of dispatch groups found. */
19976
19977static int
19978pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19979{
19980 rtx insn, next_insn;
19981 rtx nop;
19982 int issue_rate;
19983 int can_issue_more;
19984 int group_end;
19985 int group_count = 0;
19986
19987 /* Initialize issue_rate. */
19988 issue_rate = rs6000_issue_rate ();
19989 can_issue_more = issue_rate;
19990
19991 insn = get_next_active_insn (prev_head_insn, tail);
19992 next_insn = get_next_active_insn (insn, tail);
19993
19994 while (insn != NULL_RTX)
19995 {
19996 can_issue_more =
19997 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
19998
19999 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20000
20001 if (next_insn == NULL_RTX)
c4ad648e 20002 break;
cbe26ab8
DN
20003
20004 if (group_end)
c4ad648e
AM
20005 {
20006 /* If the scheduler had marked group termination at this location
20007 (between insn and next_indn), and neither insn nor next_insn will
20008 force group termination, pad the group with nops to force group
20009 termination. */
20010 if (can_issue_more
20011 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20012 && !insn_terminates_group_p (insn, current_group)
20013 && !insn_terminates_group_p (next_insn, previous_group))
20014 {
9390387d 20015 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20016 can_issue_more--;
20017
20018 while (can_issue_more)
20019 {
20020 nop = gen_nop ();
20021 emit_insn_before (nop, next_insn);
20022 can_issue_more--;
20023 }
20024 }
20025
20026 can_issue_more = issue_rate;
20027 group_count++;
20028 }
cbe26ab8
DN
20029
20030 insn = next_insn;
20031 next_insn = get_next_active_insn (insn, tail);
20032 }
20033
20034 return group_count;
20035}
20036
44cd321e
PS
20037/* We're beginning a new block. Initialize data structures as necessary. */
20038
20039static void
20040rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20041 int sched_verbose ATTRIBUTE_UNUSED,
20042 int max_ready ATTRIBUTE_UNUSED)
982afe02 20043{
44cd321e
PS
20044 last_scheduled_insn = NULL_RTX;
20045 load_store_pendulum = 0;
20046}
20047
cbe26ab8
DN
20048/* The following function is called at the end of scheduling BB.
20049 After reload, it inserts nops at insn group bundling. */
20050
20051static void
38f391a5 20052rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20053{
20054 int n_groups;
20055
20056 if (sched_verbose)
20057 fprintf (dump, "=== Finishing schedule.\n");
20058
ec507f2d 20059 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
20060 {
20061 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20062 return;
cbe26ab8
DN
20063
20064 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20065 n_groups = pad_groups (dump, sched_verbose,
20066 current_sched_info->prev_head,
20067 current_sched_info->next_tail);
cbe26ab8 20068 else
c4ad648e
AM
20069 n_groups = redefine_groups (dump, sched_verbose,
20070 current_sched_info->prev_head,
20071 current_sched_info->next_tail);
cbe26ab8
DN
20072
20073 if (sched_verbose >= 6)
20074 {
20075 fprintf (dump, "ngroups = %d\n", n_groups);
20076 print_rtl (dump, current_sched_info->prev_head);
20077 fprintf (dump, "Done finish_sched\n");
20078 }
20079 }
20080}
b6c9286a 20081\f
b6c9286a
MM
20082/* Length in units of the trampoline for entering a nested function. */
20083
20084int
863d938c 20085rs6000_trampoline_size (void)
b6c9286a
MM
20086{
20087 int ret = 0;
20088
20089 switch (DEFAULT_ABI)
20090 {
20091 default:
37409796 20092 gcc_unreachable ();
b6c9286a
MM
20093
20094 case ABI_AIX:
8f802bfb 20095 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20096 break;
20097
4dabc42d 20098 case ABI_DARWIN:
b6c9286a 20099 case ABI_V4:
03a7e1a5 20100 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20101 break;
b6c9286a
MM
20102 }
20103
20104 return ret;
20105}
20106
20107/* Emit RTL insns to initialize the variable parts of a trampoline.
20108 FNADDR is an RTX for the address of the function's pure code.
20109 CXT is an RTX for the static chain value for the function. */
20110
20111void
a2369ed3 20112rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20113{
8bd04c56 20114 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20115 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20116
20117 switch (DEFAULT_ABI)
20118 {
20119 default:
37409796 20120 gcc_unreachable ();
b6c9286a 20121
8bd04c56 20122/* Macros to shorten the code expansions below. */
9613eaff 20123#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20124#define MEM_PLUS(addr,offset) \
9613eaff 20125 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20126
b6c9286a
MM
20127 /* Under AIX, just build the 3 word function descriptor */
20128 case ABI_AIX:
8bd04c56 20129 {
9613eaff
SH
20130 rtx fn_reg = gen_reg_rtx (Pmode);
20131 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20132 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20133 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20134 emit_move_insn (MEM_DEREF (addr), fn_reg);
20135 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20136 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20137 }
b6c9286a
MM
20138 break;
20139
4dabc42d
TC
20140 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20141 case ABI_DARWIN:
b6c9286a 20142 case ABI_V4:
9613eaff 20143 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 20144 FALSE, VOIDmode, 4,
9613eaff 20145 addr, Pmode,
eaf1bcf1 20146 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20147 fnaddr, Pmode,
20148 ctx_reg, Pmode);
b6c9286a 20149 break;
b6c9286a
MM
20150 }
20151
20152 return;
20153}
7509c759
MM
20154
20155\f
91d231cb 20156/* Table of valid machine attributes. */
a4f6c312 20157
91d231cb 20158const struct attribute_spec rs6000_attribute_table[] =
7509c759 20159{
91d231cb 20160 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20161 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20162 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20163 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20164 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20165 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20166#ifdef SUBTARGET_ATTRIBUTE_TABLE
20167 SUBTARGET_ATTRIBUTE_TABLE,
20168#endif
a5c76ee6 20169 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20170};
7509c759 20171
8bb418a3
ZL
20172/* Handle the "altivec" attribute. The attribute may have
20173 arguments as follows:
f676971a 20174
8bb418a3
ZL
20175 __attribute__((altivec(vector__)))
20176 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20177 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20178
20179 and may appear more than once (e.g., 'vector bool char') in a
20180 given declaration. */
20181
20182static tree
f90ac3f0
UP
20183rs6000_handle_altivec_attribute (tree *node,
20184 tree name ATTRIBUTE_UNUSED,
20185 tree args,
8bb418a3
ZL
20186 int flags ATTRIBUTE_UNUSED,
20187 bool *no_add_attrs)
20188{
20189 tree type = *node, result = NULL_TREE;
20190 enum machine_mode mode;
20191 int unsigned_p;
20192 char altivec_type
20193 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20194 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20195 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20196 : '?');
8bb418a3
ZL
20197
20198 while (POINTER_TYPE_P (type)
20199 || TREE_CODE (type) == FUNCTION_TYPE
20200 || TREE_CODE (type) == METHOD_TYPE
20201 || TREE_CODE (type) == ARRAY_TYPE)
20202 type = TREE_TYPE (type);
20203
20204 mode = TYPE_MODE (type);
20205
f90ac3f0
UP
20206 /* Check for invalid AltiVec type qualifiers. */
20207 if (type == long_unsigned_type_node || type == long_integer_type_node)
20208 {
20209 if (TARGET_64BIT)
20210 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20211 else if (rs6000_warn_altivec_long)
d4ee4d25 20212 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20213 }
20214 else if (type == long_long_unsigned_type_node
20215 || type == long_long_integer_type_node)
20216 error ("use of %<long long%> in AltiVec types is invalid");
20217 else if (type == double_type_node)
20218 error ("use of %<double%> in AltiVec types is invalid");
20219 else if (type == long_double_type_node)
20220 error ("use of %<long double%> in AltiVec types is invalid");
20221 else if (type == boolean_type_node)
20222 error ("use of boolean types in AltiVec types is invalid");
20223 else if (TREE_CODE (type) == COMPLEX_TYPE)
20224 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20225 else if (DECIMAL_FLOAT_MODE_P (mode))
20226 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20227
20228 switch (altivec_type)
20229 {
20230 case 'v':
8df83eae 20231 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20232 switch (mode)
20233 {
c4ad648e
AM
20234 case SImode:
20235 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20236 break;
20237 case HImode:
20238 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20239 break;
20240 case QImode:
20241 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20242 break;
20243 case SFmode: result = V4SF_type_node; break;
20244 /* If the user says 'vector int bool', we may be handed the 'bool'
20245 attribute _before_ the 'vector' attribute, and so select the
20246 proper type in the 'b' case below. */
20247 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20248 result = type;
20249 default: break;
8bb418a3
ZL
20250 }
20251 break;
20252 case 'b':
20253 switch (mode)
20254 {
c4ad648e
AM
20255 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20256 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20257 case QImode: case V16QImode: result = bool_V16QI_type_node;
20258 default: break;
8bb418a3
ZL
20259 }
20260 break;
20261 case 'p':
20262 switch (mode)
20263 {
c4ad648e
AM
20264 case V8HImode: result = pixel_V8HI_type_node;
20265 default: break;
8bb418a3
ZL
20266 }
20267 default: break;
20268 }
20269
7958a2a6
FJ
20270 if (result && result != type && TYPE_READONLY (type))
20271 result = build_qualified_type (result, TYPE_QUAL_CONST);
20272
8bb418a3
ZL
20273 *no_add_attrs = true; /* No need to hang on to the attribute. */
20274
f90ac3f0 20275 if (result)
5dc11954 20276 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20277
20278 return NULL_TREE;
20279}
20280
f18eca82
ZL
20281/* AltiVec defines four built-in scalar types that serve as vector
20282 elements; we must teach the compiler how to mangle them. */
20283
20284static const char *
3101faab 20285rs6000_mangle_type (const_tree type)
f18eca82 20286{
608063c3
JB
20287 type = TYPE_MAIN_VARIANT (type);
20288
20289 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20290 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20291 return NULL;
20292
f18eca82
ZL
20293 if (type == bool_char_type_node) return "U6__boolc";
20294 if (type == bool_short_type_node) return "U6__bools";
20295 if (type == pixel_type_node) return "u7__pixel";
20296 if (type == bool_int_type_node) return "U6__booli";
20297
337bde91
DE
20298 /* Mangle IBM extended float long double as `g' (__float128) on
20299 powerpc*-linux where long-double-64 previously was the default. */
20300 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20301 && TARGET_ELF
20302 && TARGET_LONG_DOUBLE_128
20303 && !TARGET_IEEEQUAD)
20304 return "g";
20305
f18eca82
ZL
20306 /* For all other types, use normal C++ mangling. */
20307 return NULL;
20308}
20309
a5c76ee6
ZW
20310/* Handle a "longcall" or "shortcall" attribute; arguments as in
20311 struct attribute_spec.handler. */
a4f6c312 20312
91d231cb 20313static tree
f676971a
EC
20314rs6000_handle_longcall_attribute (tree *node, tree name,
20315 tree args ATTRIBUTE_UNUSED,
20316 int flags ATTRIBUTE_UNUSED,
a2369ed3 20317 bool *no_add_attrs)
91d231cb
JM
20318{
20319 if (TREE_CODE (*node) != FUNCTION_TYPE
20320 && TREE_CODE (*node) != FIELD_DECL
20321 && TREE_CODE (*node) != TYPE_DECL)
20322 {
5c498b10 20323 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20324 IDENTIFIER_POINTER (name));
20325 *no_add_attrs = true;
20326 }
6a4cee5f 20327
91d231cb 20328 return NULL_TREE;
7509c759
MM
20329}
20330
a5c76ee6
ZW
20331/* Set longcall attributes on all functions declared when
20332 rs6000_default_long_calls is true. */
20333static void
a2369ed3 20334rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20335{
20336 if (rs6000_default_long_calls
20337 && (TREE_CODE (type) == FUNCTION_TYPE
20338 || TREE_CODE (type) == METHOD_TYPE))
20339 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20340 NULL_TREE,
20341 TYPE_ATTRIBUTES (type));
16d6f994
EC
20342
20343#if TARGET_MACHO
20344 darwin_set_default_type_attributes (type);
20345#endif
a5c76ee6
ZW
20346}
20347
3cb999d8
DE
20348/* Return a reference suitable for calling a function with the
20349 longcall attribute. */
a4f6c312 20350
9390387d 20351rtx
a2369ed3 20352rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20353{
d330fd93 20354 const char *call_name;
6a4cee5f
MM
20355 tree node;
20356
20357 if (GET_CODE (call_ref) != SYMBOL_REF)
20358 return call_ref;
20359
20360 /* System V adds '.' to the internal name, so skip them. */
20361 call_name = XSTR (call_ref, 0);
20362 if (*call_name == '.')
20363 {
20364 while (*call_name == '.')
20365 call_name++;
20366
20367 node = get_identifier (call_name);
39403d82 20368 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20369 }
20370
20371 return force_reg (Pmode, call_ref);
20372}
7509c759 20373\f
77ccdfed
EC
20374#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20375#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20376#endif
20377
20378/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20379 struct attribute_spec.handler. */
20380static tree
20381rs6000_handle_struct_attribute (tree *node, tree name,
20382 tree args ATTRIBUTE_UNUSED,
20383 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20384{
20385 tree *type = NULL;
20386 if (DECL_P (*node))
20387 {
20388 if (TREE_CODE (*node) == TYPE_DECL)
20389 type = &TREE_TYPE (*node);
20390 }
20391 else
20392 type = node;
20393
20394 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20395 || TREE_CODE (*type) == UNION_TYPE)))
20396 {
20397 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20398 *no_add_attrs = true;
20399 }
20400
20401 else if ((is_attribute_p ("ms_struct", name)
20402 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20403 || ((is_attribute_p ("gcc_struct", name)
20404 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20405 {
20406 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20407 IDENTIFIER_POINTER (name));
20408 *no_add_attrs = true;
20409 }
20410
20411 return NULL_TREE;
20412}
20413
20414static bool
3101faab 20415rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20416{
20417 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20418 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20419 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20420}
20421\f
b64a1b53
RH
20422#ifdef USING_ELFOS_H
20423
d6b5193b 20424/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20425
d6b5193b
RS
20426static void
20427rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20428{
20429 if (DEFAULT_ABI == ABI_AIX
20430 && TARGET_MINIMAL_TOC
20431 && !TARGET_RELOCATABLE)
20432 {
20433 if (!toc_initialized)
20434 {
20435 toc_initialized = 1;
20436 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20437 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20438 fprintf (asm_out_file, "\t.tc ");
20439 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20440 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20441 fprintf (asm_out_file, "\n");
20442
20443 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20444 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20445 fprintf (asm_out_file, " = .+32768\n");
20446 }
20447 else
20448 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20449 }
20450 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20451 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20452 else
20453 {
20454 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20455 if (!toc_initialized)
20456 {
20457 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20458 fprintf (asm_out_file, " = .+32768\n");
20459 toc_initialized = 1;
20460 }
20461 }
20462}
20463
20464/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20465
b64a1b53 20466static void
d6b5193b
RS
20467rs6000_elf_asm_init_sections (void)
20468{
20469 toc_section
20470 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20471
20472 sdata2_section
20473 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20474 SDATA2_SECTION_ASM_OP);
20475}
20476
20477/* Implement TARGET_SELECT_RTX_SECTION. */
20478
20479static section *
f676971a 20480rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20481 unsigned HOST_WIDE_INT align)
7509c759 20482{
a9098fd0 20483 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20484 return toc_section;
7509c759 20485 else
d6b5193b 20486 return default_elf_select_rtx_section (mode, x, align);
7509c759 20487}
d9407988 20488\f
d1908feb
JJ
20489/* For a SYMBOL_REF, set generic flags and then perform some
20490 target-specific processing.
20491
d1908feb
JJ
20492 When the AIX ABI is requested on a non-AIX system, replace the
20493 function name with the real name (with a leading .) rather than the
20494 function descriptor name. This saves a lot of overriding code to
20495 read the prefixes. */
d9407988 20496
fb49053f 20497static void
a2369ed3 20498rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20499{
d1908feb 20500 default_encode_section_info (decl, rtl, first);
b2003250 20501
d1908feb
JJ
20502 if (first
20503 && TREE_CODE (decl) == FUNCTION_DECL
20504 && !TARGET_AIX
20505 && DEFAULT_ABI == ABI_AIX)
d9407988 20506 {
c6a2438a 20507 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20508 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20509 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20510 str[0] = '.';
20511 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20512 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20513 }
d9407988
MM
20514}
20515
21d9bb3f
PB
20516static inline bool
20517compare_section_name (const char *section, const char *template)
20518{
20519 int len;
20520
20521 len = strlen (template);
20522 return (strncmp (section, template, len) == 0
20523 && (section[len] == 0 || section[len] == '.'));
20524}
20525
c1b7d95a 20526bool
3101faab 20527rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20528{
20529 if (rs6000_sdata == SDATA_NONE)
20530 return false;
20531
7482ad25
AF
20532 /* We want to merge strings, so we never consider them small data. */
20533 if (TREE_CODE (decl) == STRING_CST)
20534 return false;
20535
20536 /* Functions are never in the small data area. */
20537 if (TREE_CODE (decl) == FUNCTION_DECL)
20538 return false;
20539
0e5dbd9b
DE
20540 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20541 {
20542 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20543 if (compare_section_name (section, ".sdata")
20544 || compare_section_name (section, ".sdata2")
20545 || compare_section_name (section, ".gnu.linkonce.s")
20546 || compare_section_name (section, ".sbss")
20547 || compare_section_name (section, ".sbss2")
20548 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20549 || strcmp (section, ".PPC.EMB.sdata0") == 0
20550 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20551 return true;
20552 }
20553 else
20554 {
20555 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20556
20557 if (size > 0
307b599c 20558 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20559 /* If it's not public, and we're not going to reference it there,
20560 there's no need to put it in the small data section. */
0e5dbd9b
DE
20561 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20562 return true;
20563 }
20564
20565 return false;
20566}
20567
b91da81f 20568#endif /* USING_ELFOS_H */
aacd3885
RS
20569\f
20570/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20571
aacd3885 20572static bool
3101faab 20573rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20574{
20575 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20576}
a6c2a102 20577\f
000034eb 20578/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20579 ADDR can be effectively incremented by incrementing REG.
20580
20581 r0 is special and we must not select it as an address
20582 register by this routine since our caller will try to
20583 increment the returned register via an "la" instruction. */
000034eb 20584
9390387d 20585rtx
a2369ed3 20586find_addr_reg (rtx addr)
000034eb
DE
20587{
20588 while (GET_CODE (addr) == PLUS)
20589 {
02441cd6
JL
20590 if (GET_CODE (XEXP (addr, 0)) == REG
20591 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20592 addr = XEXP (addr, 0);
02441cd6
JL
20593 else if (GET_CODE (XEXP (addr, 1)) == REG
20594 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20595 addr = XEXP (addr, 1);
20596 else if (CONSTANT_P (XEXP (addr, 0)))
20597 addr = XEXP (addr, 1);
20598 else if (CONSTANT_P (XEXP (addr, 1)))
20599 addr = XEXP (addr, 0);
20600 else
37409796 20601 gcc_unreachable ();
000034eb 20602 }
37409796
NS
20603 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20604 return addr;
000034eb
DE
20605}
20606
a6c2a102 20607void
a2369ed3 20608rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20609{
20610 fatal_insn ("bad address", op);
20611}
c8023011 20612
ee890fe2
SS
20613#if TARGET_MACHO
20614
efdba735 20615static tree branch_island_list = 0;
ee890fe2 20616
efdba735
SH
20617/* Remember to generate a branch island for far calls to the given
20618 function. */
ee890fe2 20619
f676971a 20620static void
c4ad648e
AM
20621add_compiler_branch_island (tree label_name, tree function_name,
20622 int line_number)
ee890fe2 20623{
efdba735 20624 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20625 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20626 TREE_CHAIN (branch_island) = branch_island_list;
20627 branch_island_list = branch_island;
ee890fe2
SS
20628}
20629
efdba735
SH
20630#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20631#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20632#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20633 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20634
efdba735
SH
20635/* Generate far-jump branch islands for everything on the
20636 branch_island_list. Invoked immediately after the last instruction
20637 of the epilogue has been emitted; the branch-islands must be
20638 appended to, and contiguous with, the function body. Mach-O stubs
20639 are generated in machopic_output_stub(). */
ee890fe2 20640
efdba735
SH
20641static void
20642macho_branch_islands (void)
20643{
20644 char tmp_buf[512];
20645 tree branch_island;
20646
20647 for (branch_island = branch_island_list;
20648 branch_island;
20649 branch_island = TREE_CHAIN (branch_island))
20650 {
20651 const char *label =
20652 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20653 const char *name =
11abc112 20654 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20655 char name_buf[512];
20656 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20657 if (name[0] == '*' || name[0] == '&')
20658 strcpy (name_buf, name+1);
20659 else
20660 {
20661 name_buf[0] = '_';
20662 strcpy (name_buf+1, name);
20663 }
20664 strcpy (tmp_buf, "\n");
20665 strcat (tmp_buf, label);
ee890fe2 20666#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20667 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20668 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20669#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20670 if (flag_pic)
20671 {
20672 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20673 strcat (tmp_buf, label);
20674 strcat (tmp_buf, "_pic\n");
20675 strcat (tmp_buf, label);
20676 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20677
efdba735
SH
20678 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20679 strcat (tmp_buf, name_buf);
20680 strcat (tmp_buf, " - ");
20681 strcat (tmp_buf, label);
20682 strcat (tmp_buf, "_pic)\n");
f676971a 20683
efdba735 20684 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20685
efdba735
SH
20686 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20687 strcat (tmp_buf, name_buf);
20688 strcat (tmp_buf, " - ");
20689 strcat (tmp_buf, label);
20690 strcat (tmp_buf, "_pic)\n");
f676971a 20691
efdba735
SH
20692 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20693 }
20694 else
20695 {
20696 strcat (tmp_buf, ":\nlis r12,hi16(");
20697 strcat (tmp_buf, name_buf);
20698 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20699 strcat (tmp_buf, name_buf);
20700 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20701 }
20702 output_asm_insn (tmp_buf, 0);
ee890fe2 20703#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20704 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20705 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20706#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20707 }
ee890fe2 20708
efdba735 20709 branch_island_list = 0;
ee890fe2
SS
20710}
20711
20712/* NO_PREVIOUS_DEF checks in the link list whether the function name is
20713 already there or not. */
20714
efdba735 20715static int
a2369ed3 20716no_previous_def (tree function_name)
ee890fe2 20717{
efdba735
SH
20718 tree branch_island;
20719 for (branch_island = branch_island_list;
20720 branch_island;
20721 branch_island = TREE_CHAIN (branch_island))
20722 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20723 return 0;
20724 return 1;
20725}
20726
20727/* GET_PREV_LABEL gets the label name from the previous definition of
20728 the function. */
20729
efdba735 20730static tree
a2369ed3 20731get_prev_label (tree function_name)
ee890fe2 20732{
efdba735
SH
20733 tree branch_island;
20734 for (branch_island = branch_island_list;
20735 branch_island;
20736 branch_island = TREE_CHAIN (branch_island))
20737 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20738 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20739 return 0;
20740}
20741
75b1b789
MS
20742#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20743#define DARWIN_LINKER_GENERATES_ISLANDS 0
20744#endif
20745
20746/* KEXTs still need branch islands. */
20747#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20748 || flag_mkernel || flag_apple_kext)
20749
ee890fe2 20750/* INSN is either a function call or a millicode call. It may have an
f676971a 20751 unconditional jump in its delay slot.
ee890fe2
SS
20752
20753 CALL_DEST is the routine we are calling. */
20754
20755char *
c4ad648e
AM
20756output_call (rtx insn, rtx *operands, int dest_operand_number,
20757 int cookie_operand_number)
ee890fe2
SS
20758{
20759 static char buf[256];
75b1b789
MS
20760 if (DARWIN_GENERATE_ISLANDS
20761 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20762 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20763 {
20764 tree labelname;
efdba735 20765 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20766
ee890fe2
SS
20767 if (no_previous_def (funname))
20768 {
ee890fe2
SS
20769 rtx label_rtx = gen_label_rtx ();
20770 char *label_buf, temp_buf[256];
20771 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20772 CODE_LABEL_NUMBER (label_rtx));
20773 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20774 labelname = get_identifier (label_buf);
a38e7aa5 20775 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20776 }
20777 else
20778 labelname = get_prev_label (funname);
20779
efdba735
SH
20780 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20781 instruction will reach 'foo', otherwise link as 'bl L42'".
20782 "L42" should be a 'branch island', that will do a far jump to
20783 'foo'. Branch islands are generated in
20784 macho_branch_islands(). */
ee890fe2 20785 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20786 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20787 }
20788 else
efdba735
SH
20789 sprintf (buf, "bl %%z%d", dest_operand_number);
20790 return buf;
ee890fe2
SS
20791}
20792
ee890fe2
SS
20793/* Generate PIC and indirect symbol stubs. */
20794
20795void
a2369ed3 20796machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20797{
20798 unsigned int length;
a4f6c312
SS
20799 char *symbol_name, *lazy_ptr_name;
20800 char *local_label_0;
ee890fe2
SS
20801 static int label = 0;
20802
df56a27f 20803 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20804 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20805
ee890fe2 20806
ee890fe2 20807 length = strlen (symb);
5ead67f6 20808 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20809 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20810
5ead67f6 20811 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20812 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20813
ee890fe2 20814 if (flag_pic == 2)
56c779bc 20815 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20816 else
56c779bc 20817 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20818
20819 if (flag_pic == 2)
20820 {
d974312d
DJ
20821 fprintf (file, "\t.align 5\n");
20822
20823 fprintf (file, "%s:\n", stub);
20824 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20825
876455fa 20826 label++;
5ead67f6 20827 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 20828 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20829
ee890fe2
SS
20830 fprintf (file, "\tmflr r0\n");
20831 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20832 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20833 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20834 lazy_ptr_name, local_label_0);
20835 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20836 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20837 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20838 lazy_ptr_name, local_label_0);
20839 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20840 fprintf (file, "\tbctr\n");
20841 }
20842 else
d974312d
DJ
20843 {
20844 fprintf (file, "\t.align 4\n");
20845
20846 fprintf (file, "%s:\n", stub);
20847 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20848
20849 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
20850 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
20851 (TARGET_64BIT ? "ldu" : "lwzu"),
20852 lazy_ptr_name);
d974312d
DJ
20853 fprintf (file, "\tmtctr r12\n");
20854 fprintf (file, "\tbctr\n");
20855 }
f676971a 20856
56c779bc 20857 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
20858 fprintf (file, "%s:\n", lazy_ptr_name);
20859 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
20860 fprintf (file, "%sdyld_stub_binding_helper\n",
20861 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
20862}
20863
20864/* Legitimize PIC addresses. If the address is already
20865 position-independent, we return ORIG. Newly generated
20866 position-independent addresses go into a reg. This is REG if non
20867 zero, otherwise we allocate register(s) as necessary. */
20868
4fbbe694 20869#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
20870
20871rtx
f676971a 20872rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 20873 rtx reg)
ee890fe2
SS
20874{
20875 rtx base, offset;
20876
20877 if (reg == NULL && ! reload_in_progress && ! reload_completed)
20878 reg = gen_reg_rtx (Pmode);
20879
20880 if (GET_CODE (orig) == CONST)
20881 {
37409796
NS
20882 rtx reg_temp;
20883
ee890fe2
SS
20884 if (GET_CODE (XEXP (orig, 0)) == PLUS
20885 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
20886 return orig;
20887
37409796 20888 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 20889
37409796
NS
20890 /* Use a different reg for the intermediate value, as
20891 it will be marked UNCHANGING. */
b3a13419 20892 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
20893 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
20894 Pmode, reg_temp);
20895 offset =
20896 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
20897 Pmode, reg);
bb8df8a6 20898
ee890fe2
SS
20899 if (GET_CODE (offset) == CONST_INT)
20900 {
20901 if (SMALL_INT (offset))
ed8908e7 20902 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
20903 else if (! reload_in_progress && ! reload_completed)
20904 offset = force_reg (Pmode, offset);
20905 else
c859cda6
DJ
20906 {
20907 rtx mem = force_const_mem (Pmode, orig);
20908 return machopic_legitimize_pic_address (mem, Pmode, reg);
20909 }
ee890fe2 20910 }
f1c25d3b 20911 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
20912 }
20913
20914 /* Fall back on generic machopic code. */
20915 return machopic_legitimize_pic_address (orig, mode, reg);
20916}
20917
c4e18b1c
GK
20918/* Output a .machine directive for the Darwin assembler, and call
20919 the generic start_file routine. */
20920
20921static void
20922rs6000_darwin_file_start (void)
20923{
94ff898d 20924 static const struct
c4e18b1c
GK
20925 {
20926 const char *arg;
20927 const char *name;
20928 int if_set;
20929 } mapping[] = {
55dbfb48 20930 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
20931 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
20932 { "power4", "ppc970", 0 },
20933 { "G5", "ppc970", 0 },
20934 { "7450", "ppc7450", 0 },
20935 { "7400", "ppc7400", MASK_ALTIVEC },
20936 { "G4", "ppc7400", 0 },
20937 { "750", "ppc750", 0 },
20938 { "740", "ppc750", 0 },
20939 { "G3", "ppc750", 0 },
20940 { "604e", "ppc604e", 0 },
20941 { "604", "ppc604", 0 },
20942 { "603e", "ppc603", 0 },
20943 { "603", "ppc603", 0 },
20944 { "601", "ppc601", 0 },
20945 { NULL, "ppc", 0 } };
20946 const char *cpu_id = "";
20947 size_t i;
94ff898d 20948
9390387d 20949 rs6000_file_start ();
192d0f89 20950 darwin_file_start ();
c4e18b1c
GK
20951
20952 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
20953 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
20954 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
20955 && rs6000_select[i].string[0] != '\0')
20956 cpu_id = rs6000_select[i].string;
20957
20958 /* Look through the mapping array. Pick the first name that either
20959 matches the argument, has a bit set in IF_SET that is also set
20960 in the target flags, or has a NULL name. */
20961
20962 i = 0;
20963 while (mapping[i].arg != NULL
20964 && strcmp (mapping[i].arg, cpu_id) != 0
20965 && (mapping[i].if_set & target_flags) == 0)
20966 i++;
20967
20968 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
20969}
20970
ee890fe2 20971#endif /* TARGET_MACHO */
7c262518
RH
20972
20973#if TARGET_ELF
9b580a0b
RH
20974static int
20975rs6000_elf_reloc_rw_mask (void)
7c262518 20976{
9b580a0b
RH
20977 if (flag_pic)
20978 return 3;
20979 else if (DEFAULT_ABI == ABI_AIX)
20980 return 2;
20981 else
20982 return 0;
7c262518 20983}
d9f6800d
RH
20984
20985/* Record an element in the table of global constructors. SYMBOL is
20986 a SYMBOL_REF of the function to be called; PRIORITY is a number
20987 between 0 and MAX_INIT_PRIORITY.
20988
20989 This differs from default_named_section_asm_out_constructor in
20990 that we have special handling for -mrelocatable. */
20991
20992static void
a2369ed3 20993rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
20994{
20995 const char *section = ".ctors";
20996 char buf[16];
20997
20998 if (priority != DEFAULT_INIT_PRIORITY)
20999 {
21000 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21001 /* Invert the numbering so the linker puts us in the proper
21002 order; constructors are run from right to left, and the
21003 linker sorts in increasing order. */
21004 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21005 section = buf;
21006 }
21007
d6b5193b 21008 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21009 assemble_align (POINTER_SIZE);
d9f6800d
RH
21010
21011 if (TARGET_RELOCATABLE)
21012 {
21013 fputs ("\t.long (", asm_out_file);
21014 output_addr_const (asm_out_file, symbol);
21015 fputs (")@fixup\n", asm_out_file);
21016 }
21017 else
c8af3574 21018 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21019}
21020
21021static void
a2369ed3 21022rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21023{
21024 const char *section = ".dtors";
21025 char buf[16];
21026
21027 if (priority != DEFAULT_INIT_PRIORITY)
21028 {
21029 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21030 /* Invert the numbering so the linker puts us in the proper
21031 order; constructors are run from right to left, and the
21032 linker sorts in increasing order. */
21033 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21034 section = buf;
21035 }
21036
d6b5193b 21037 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21038 assemble_align (POINTER_SIZE);
d9f6800d
RH
21039
21040 if (TARGET_RELOCATABLE)
21041 {
21042 fputs ("\t.long (", asm_out_file);
21043 output_addr_const (asm_out_file, symbol);
21044 fputs (")@fixup\n", asm_out_file);
21045 }
21046 else
c8af3574 21047 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21048}
9739c90c
JJ
21049
21050void
a2369ed3 21051rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21052{
21053 if (TARGET_64BIT)
21054 {
21055 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21056 ASM_OUTPUT_LABEL (file, name);
21057 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21058 rs6000_output_function_entry (file, name);
21059 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21060 if (DOT_SYMBOLS)
9739c90c 21061 {
85b776df 21062 fputs ("\t.size\t", file);
9739c90c 21063 assemble_name (file, name);
85b776df
AM
21064 fputs (",24\n\t.type\t.", file);
21065 assemble_name (file, name);
21066 fputs (",@function\n", file);
21067 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21068 {
21069 fputs ("\t.globl\t.", file);
21070 assemble_name (file, name);
21071 putc ('\n', file);
21072 }
9739c90c 21073 }
85b776df
AM
21074 else
21075 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21076 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21077 rs6000_output_function_entry (file, name);
21078 fputs (":\n", file);
9739c90c
JJ
21079 return;
21080 }
21081
21082 if (TARGET_RELOCATABLE
7f970b70 21083 && !TARGET_SECURE_PLT
e3b5732b 21084 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21085 && uses_TOC ())
9739c90c
JJ
21086 {
21087 char buf[256];
21088
21089 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21090
21091 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21092 fprintf (file, "\t.long ");
21093 assemble_name (file, buf);
21094 putc ('-', file);
21095 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21096 assemble_name (file, buf);
21097 putc ('\n', file);
21098 }
21099
21100 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21101 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21102
21103 if (DEFAULT_ABI == ABI_AIX)
21104 {
21105 const char *desc_name, *orig_name;
21106
21107 orig_name = (*targetm.strip_name_encoding) (name);
21108 desc_name = orig_name;
21109 while (*desc_name == '.')
21110 desc_name++;
21111
21112 if (TREE_PUBLIC (decl))
21113 fprintf (file, "\t.globl %s\n", desc_name);
21114
21115 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21116 fprintf (file, "%s:\n", desc_name);
21117 fprintf (file, "\t.long %s\n", orig_name);
21118 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21119 if (DEFAULT_ABI == ABI_AIX)
21120 fputs ("\t.long 0\n", file);
21121 fprintf (file, "\t.previous\n");
21122 }
21123 ASM_OUTPUT_LABEL (file, name);
21124}
1334b570
AM
21125
21126static void
21127rs6000_elf_end_indicate_exec_stack (void)
21128{
21129 if (TARGET_32BIT)
21130 file_end_indicate_exec_stack ();
21131}
7c262518
RH
21132#endif
21133
cbaaba19 21134#if TARGET_XCOFF
0d5817b2
DE
21135static void
21136rs6000_xcoff_asm_output_anchor (rtx symbol)
21137{
21138 char buffer[100];
21139
21140 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21141 SYMBOL_REF_BLOCK_OFFSET (symbol));
21142 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21143}
21144
7c262518 21145static void
a2369ed3 21146rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21147{
21148 fputs (GLOBAL_ASM_OP, stream);
21149 RS6000_OUTPUT_BASENAME (stream, name);
21150 putc ('\n', stream);
21151}
21152
d6b5193b
RS
21153/* A get_unnamed_decl callback, used for read-only sections. PTR
21154 points to the section string variable. */
21155
21156static void
21157rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21158{
890f9edf
OH
21159 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21160 *(const char *const *) directive,
21161 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21162}
21163
21164/* Likewise for read-write sections. */
21165
21166static void
21167rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21168{
890f9edf
OH
21169 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21170 *(const char *const *) directive,
21171 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21172}
21173
21174/* A get_unnamed_section callback, used for switching to toc_section. */
21175
21176static void
21177rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21178{
21179 if (TARGET_MINIMAL_TOC)
21180 {
21181 /* toc_section is always selected at least once from
21182 rs6000_xcoff_file_start, so this is guaranteed to
21183 always be defined once and only once in each file. */
21184 if (!toc_initialized)
21185 {
21186 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21187 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21188 toc_initialized = 1;
21189 }
21190 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21191 (TARGET_32BIT ? "" : ",3"));
21192 }
21193 else
21194 fputs ("\t.toc\n", asm_out_file);
21195}
21196
21197/* Implement TARGET_ASM_INIT_SECTIONS. */
21198
21199static void
21200rs6000_xcoff_asm_init_sections (void)
21201{
21202 read_only_data_section
21203 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21204 &xcoff_read_only_section_name);
21205
21206 private_data_section
21207 = get_unnamed_section (SECTION_WRITE,
21208 rs6000_xcoff_output_readwrite_section_asm_op,
21209 &xcoff_private_data_section_name);
21210
21211 read_only_private_data_section
21212 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21213 &xcoff_private_data_section_name);
21214
21215 toc_section
21216 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21217
21218 readonly_data_section = read_only_data_section;
21219 exception_section = data_section;
21220}
21221
9b580a0b
RH
21222static int
21223rs6000_xcoff_reloc_rw_mask (void)
21224{
21225 return 3;
21226}
21227
b275d088 21228static void
c18a5b6c
MM
21229rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21230 tree decl ATTRIBUTE_UNUSED)
7c262518 21231{
0e5dbd9b
DE
21232 int smclass;
21233 static const char * const suffix[3] = { "PR", "RO", "RW" };
21234
21235 if (flags & SECTION_CODE)
21236 smclass = 0;
21237 else if (flags & SECTION_WRITE)
21238 smclass = 2;
21239 else
21240 smclass = 1;
21241
5b5198f7 21242 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21243 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21244 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21245}
ae46c4e0 21246
d6b5193b 21247static section *
f676971a 21248rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21249 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21250{
9b580a0b 21251 if (decl_readonly_section (decl, reloc))
ae46c4e0 21252 {
0e5dbd9b 21253 if (TREE_PUBLIC (decl))
d6b5193b 21254 return read_only_data_section;
ae46c4e0 21255 else
d6b5193b 21256 return read_only_private_data_section;
ae46c4e0
RH
21257 }
21258 else
21259 {
0e5dbd9b 21260 if (TREE_PUBLIC (decl))
d6b5193b 21261 return data_section;
ae46c4e0 21262 else
d6b5193b 21263 return private_data_section;
ae46c4e0
RH
21264 }
21265}
21266
21267static void
a2369ed3 21268rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21269{
21270 const char *name;
ae46c4e0 21271
5b5198f7
DE
21272 /* Use select_section for private and uninitialized data. */
21273 if (!TREE_PUBLIC (decl)
21274 || DECL_COMMON (decl)
0e5dbd9b
DE
21275 || DECL_INITIAL (decl) == NULL_TREE
21276 || DECL_INITIAL (decl) == error_mark_node
21277 || (flag_zero_initialized_in_bss
21278 && initializer_zerop (DECL_INITIAL (decl))))
21279 return;
21280
21281 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21282 name = (*targetm.strip_name_encoding) (name);
21283 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21284}
b64a1b53 21285
fb49053f
RH
21286/* Select section for constant in constant pool.
21287
21288 On RS/6000, all constants are in the private read-only data area.
21289 However, if this is being placed in the TOC it must be output as a
21290 toc entry. */
21291
d6b5193b 21292static section *
f676971a 21293rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21294 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21295{
21296 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21297 return toc_section;
b64a1b53 21298 else
d6b5193b 21299 return read_only_private_data_section;
b64a1b53 21300}
772c5265
RH
21301
21302/* Remove any trailing [DS] or the like from the symbol name. */
21303
21304static const char *
a2369ed3 21305rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21306{
21307 size_t len;
21308 if (*name == '*')
21309 name++;
21310 len = strlen (name);
21311 if (name[len - 1] == ']')
21312 return ggc_alloc_string (name, len - 4);
21313 else
21314 return name;
21315}
21316
5add3202
DE
21317/* Section attributes. AIX is always PIC. */
21318
21319static unsigned int
a2369ed3 21320rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21321{
5b5198f7 21322 unsigned int align;
9b580a0b 21323 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21324
21325 /* Align to at least UNIT size. */
21326 if (flags & SECTION_CODE)
21327 align = MIN_UNITS_PER_WORD;
21328 else
21329 /* Increase alignment of large objects if not already stricter. */
21330 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21331 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21332 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21333
21334 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21335}
a5fe455b 21336
1bc7c5b6
ZW
21337/* Output at beginning of assembler file.
21338
21339 Initialize the section names for the RS/6000 at this point.
21340
21341 Specify filename, including full path, to assembler.
21342
21343 We want to go into the TOC section so at least one .toc will be emitted.
21344 Also, in order to output proper .bs/.es pairs, we need at least one static
21345 [RW] section emitted.
21346
21347 Finally, declare mcount when profiling to make the assembler happy. */
21348
21349static void
863d938c 21350rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21351{
21352 rs6000_gen_section_name (&xcoff_bss_section_name,
21353 main_input_filename, ".bss_");
21354 rs6000_gen_section_name (&xcoff_private_data_section_name,
21355 main_input_filename, ".rw_");
21356 rs6000_gen_section_name (&xcoff_read_only_section_name,
21357 main_input_filename, ".ro_");
21358
21359 fputs ("\t.file\t", asm_out_file);
21360 output_quoted_string (asm_out_file, main_input_filename);
21361 fputc ('\n', asm_out_file);
1bc7c5b6 21362 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21363 switch_to_section (private_data_section);
21364 switch_to_section (text_section);
1bc7c5b6
ZW
21365 if (profile_flag)
21366 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21367 rs6000_file_start ();
21368}
21369
a5fe455b
ZW
21370/* Output at end of assembler file.
21371 On the RS/6000, referencing data should automatically pull in text. */
21372
21373static void
863d938c 21374rs6000_xcoff_file_end (void)
a5fe455b 21375{
d6b5193b 21376 switch_to_section (text_section);
a5fe455b 21377 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21378 switch_to_section (data_section);
a5fe455b
ZW
21379 fputs (TARGET_32BIT
21380 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21381 asm_out_file);
21382}
f1384257 21383#endif /* TARGET_XCOFF */
0e5dbd9b 21384
3c50106f
RH
21385/* Compute a (partial) cost for rtx X. Return true if the complete
21386 cost has been computed, and false if subexpressions should be
21387 scanned. In either case, *TOTAL contains the cost result. */
21388
21389static bool
1494c534 21390rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 21391{
f0517163
RS
21392 enum machine_mode mode = GET_MODE (x);
21393
3c50106f
RH
21394 switch (code)
21395 {
30a555d9 21396 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21397 case CONST_INT:
066cd967
DE
21398 if (((outer_code == SET
21399 || outer_code == PLUS
21400 || outer_code == MINUS)
279bb624
DE
21401 && (satisfies_constraint_I (x)
21402 || satisfies_constraint_L (x)))
066cd967 21403 || (outer_code == AND
279bb624
DE
21404 && (satisfies_constraint_K (x)
21405 || (mode == SImode
21406 ? satisfies_constraint_L (x)
21407 : satisfies_constraint_J (x))
1990cd79
AM
21408 || mask_operand (x, mode)
21409 || (mode == DImode
21410 && mask64_operand (x, DImode))))
22e54023 21411 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21412 && (satisfies_constraint_K (x)
21413 || (mode == SImode
21414 ? satisfies_constraint_L (x)
21415 : satisfies_constraint_J (x))))
066cd967
DE
21416 || outer_code == ASHIFT
21417 || outer_code == ASHIFTRT
21418 || outer_code == LSHIFTRT
21419 || outer_code == ROTATE
21420 || outer_code == ROTATERT
d5861a7a 21421 || outer_code == ZERO_EXTRACT
066cd967 21422 || (outer_code == MULT
279bb624 21423 && satisfies_constraint_I (x))
22e54023
DE
21424 || ((outer_code == DIV || outer_code == UDIV
21425 || outer_code == MOD || outer_code == UMOD)
21426 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21427 || (outer_code == COMPARE
279bb624
DE
21428 && (satisfies_constraint_I (x)
21429 || satisfies_constraint_K (x)))
22e54023 21430 || (outer_code == EQ
279bb624
DE
21431 && (satisfies_constraint_I (x)
21432 || satisfies_constraint_K (x)
21433 || (mode == SImode
21434 ? satisfies_constraint_L (x)
21435 : satisfies_constraint_J (x))))
22e54023 21436 || (outer_code == GTU
279bb624 21437 && satisfies_constraint_I (x))
22e54023 21438 || (outer_code == LTU
279bb624 21439 && satisfies_constraint_P (x)))
066cd967
DE
21440 {
21441 *total = 0;
21442 return true;
21443 }
21444 else if ((outer_code == PLUS
4ae234b0 21445 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21446 || (outer_code == MINUS
4ae234b0 21447 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21448 || ((outer_code == SET
21449 || outer_code == IOR
21450 || outer_code == XOR)
21451 && (INTVAL (x)
21452 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21453 {
21454 *total = COSTS_N_INSNS (1);
21455 return true;
21456 }
21457 /* FALLTHRU */
21458
21459 case CONST_DOUBLE:
f6fe3a22 21460 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21461 {
f6fe3a22
DE
21462 if ((outer_code == IOR || outer_code == XOR)
21463 && CONST_DOUBLE_HIGH (x) == 0
21464 && (CONST_DOUBLE_LOW (x)
21465 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21466 {
21467 *total = 0;
21468 return true;
21469 }
21470 else if ((outer_code == AND && and64_2_operand (x, DImode))
21471 || ((outer_code == SET
21472 || outer_code == IOR
21473 || outer_code == XOR)
21474 && CONST_DOUBLE_HIGH (x) == 0))
21475 {
21476 *total = COSTS_N_INSNS (1);
21477 return true;
21478 }
066cd967
DE
21479 }
21480 /* FALLTHRU */
21481
3c50106f 21482 case CONST:
066cd967 21483 case HIGH:
3c50106f 21484 case SYMBOL_REF:
066cd967
DE
21485 case MEM:
21486 /* When optimizing for size, MEM should be slightly more expensive
21487 than generating address, e.g., (plus (reg) (const)).
c112cf2b 21488 L1 cache latency is about two instructions. */
066cd967 21489 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21490 return true;
21491
30a555d9
DE
21492 case LABEL_REF:
21493 *total = 0;
21494 return true;
21495
3c50106f 21496 case PLUS:
f0517163 21497 if (mode == DFmode)
066cd967
DE
21498 {
21499 if (GET_CODE (XEXP (x, 0)) == MULT)
21500 {
21501 /* FNMA accounted in outer NEG. */
21502 if (outer_code == NEG)
21503 *total = rs6000_cost->dmul - rs6000_cost->fp;
21504 else
21505 *total = rs6000_cost->dmul;
21506 }
21507 else
21508 *total = rs6000_cost->fp;
21509 }
f0517163 21510 else if (mode == SFmode)
066cd967
DE
21511 {
21512 /* FNMA accounted in outer NEG. */
21513 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21514 *total = 0;
21515 else
21516 *total = rs6000_cost->fp;
21517 }
f0517163 21518 else
066cd967
DE
21519 *total = COSTS_N_INSNS (1);
21520 return false;
3c50106f 21521
52190329 21522 case MINUS:
f0517163 21523 if (mode == DFmode)
066cd967 21524 {
762c919f
JM
21525 if (GET_CODE (XEXP (x, 0)) == MULT
21526 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21527 {
21528 /* FNMA accounted in outer NEG. */
21529 if (outer_code == NEG)
762c919f 21530 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21531 else
21532 *total = rs6000_cost->dmul;
21533 }
21534 else
21535 *total = rs6000_cost->fp;
21536 }
f0517163 21537 else if (mode == SFmode)
066cd967
DE
21538 {
21539 /* FNMA accounted in outer NEG. */
21540 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21541 *total = 0;
21542 else
21543 *total = rs6000_cost->fp;
21544 }
f0517163 21545 else
c4ad648e 21546 *total = COSTS_N_INSNS (1);
066cd967 21547 return false;
3c50106f
RH
21548
21549 case MULT:
c9dbf840 21550 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21551 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21552 {
8b897cfa
RS
21553 if (INTVAL (XEXP (x, 1)) >= -256
21554 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21555 *total = rs6000_cost->mulsi_const9;
8b897cfa 21556 else
06a67bdd 21557 *total = rs6000_cost->mulsi_const;
3c50106f 21558 }
066cd967
DE
21559 /* FMA accounted in outer PLUS/MINUS. */
21560 else if ((mode == DFmode || mode == SFmode)
21561 && (outer_code == PLUS || outer_code == MINUS))
21562 *total = 0;
f0517163 21563 else if (mode == DFmode)
06a67bdd 21564 *total = rs6000_cost->dmul;
f0517163 21565 else if (mode == SFmode)
06a67bdd 21566 *total = rs6000_cost->fp;
f0517163 21567 else if (mode == DImode)
06a67bdd 21568 *total = rs6000_cost->muldi;
8b897cfa 21569 else
06a67bdd 21570 *total = rs6000_cost->mulsi;
066cd967 21571 return false;
3c50106f
RH
21572
21573 case DIV:
21574 case MOD:
f0517163
RS
21575 if (FLOAT_MODE_P (mode))
21576 {
06a67bdd
RS
21577 *total = mode == DFmode ? rs6000_cost->ddiv
21578 : rs6000_cost->sdiv;
066cd967 21579 return false;
f0517163 21580 }
5efb1046 21581 /* FALLTHRU */
3c50106f
RH
21582
21583 case UDIV:
21584 case UMOD:
627b6fe2
DJ
21585 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21586 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21587 {
21588 if (code == DIV || code == MOD)
21589 /* Shift, addze */
21590 *total = COSTS_N_INSNS (2);
21591 else
21592 /* Shift */
21593 *total = COSTS_N_INSNS (1);
21594 }
c4ad648e 21595 else
627b6fe2
DJ
21596 {
21597 if (GET_MODE (XEXP (x, 1)) == DImode)
21598 *total = rs6000_cost->divdi;
21599 else
21600 *total = rs6000_cost->divsi;
21601 }
21602 /* Add in shift and subtract for MOD. */
21603 if (code == MOD || code == UMOD)
21604 *total += COSTS_N_INSNS (2);
066cd967 21605 return false;
3c50106f 21606
32f56aad 21607 case CTZ:
3c50106f
RH
21608 case FFS:
21609 *total = COSTS_N_INSNS (4);
066cd967 21610 return false;
3c50106f 21611
32f56aad
DE
21612 case POPCOUNT:
21613 *total = COSTS_N_INSNS (6);
21614 return false;
21615
06a67bdd 21616 case NOT:
066cd967
DE
21617 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21618 {
21619 *total = 0;
21620 return false;
21621 }
21622 /* FALLTHRU */
21623
21624 case AND:
32f56aad 21625 case CLZ:
066cd967
DE
21626 case IOR:
21627 case XOR:
d5861a7a
DE
21628 case ZERO_EXTRACT:
21629 *total = COSTS_N_INSNS (1);
21630 return false;
21631
066cd967
DE
21632 case ASHIFT:
21633 case ASHIFTRT:
21634 case LSHIFTRT:
21635 case ROTATE:
21636 case ROTATERT:
d5861a7a 21637 /* Handle mul_highpart. */
066cd967
DE
21638 if (outer_code == TRUNCATE
21639 && GET_CODE (XEXP (x, 0)) == MULT)
21640 {
21641 if (mode == DImode)
21642 *total = rs6000_cost->muldi;
21643 else
21644 *total = rs6000_cost->mulsi;
21645 return true;
21646 }
d5861a7a
DE
21647 else if (outer_code == AND)
21648 *total = 0;
21649 else
21650 *total = COSTS_N_INSNS (1);
21651 return false;
21652
21653 case SIGN_EXTEND:
21654 case ZERO_EXTEND:
21655 if (GET_CODE (XEXP (x, 0)) == MEM)
21656 *total = 0;
21657 else
21658 *total = COSTS_N_INSNS (1);
066cd967 21659 return false;
06a67bdd 21660
066cd967
DE
21661 case COMPARE:
21662 case NEG:
21663 case ABS:
21664 if (!FLOAT_MODE_P (mode))
21665 {
21666 *total = COSTS_N_INSNS (1);
21667 return false;
21668 }
21669 /* FALLTHRU */
21670
21671 case FLOAT:
21672 case UNSIGNED_FLOAT:
21673 case FIX:
21674 case UNSIGNED_FIX:
06a67bdd
RS
21675 case FLOAT_TRUNCATE:
21676 *total = rs6000_cost->fp;
066cd967 21677 return false;
06a67bdd 21678
a2af5043
DJ
21679 case FLOAT_EXTEND:
21680 if (mode == DFmode)
21681 *total = 0;
21682 else
21683 *total = rs6000_cost->fp;
21684 return false;
21685
06a67bdd
RS
21686 case UNSPEC:
21687 switch (XINT (x, 1))
21688 {
21689 case UNSPEC_FRSP:
21690 *total = rs6000_cost->fp;
21691 return true;
21692
21693 default:
21694 break;
21695 }
21696 break;
21697
21698 case CALL:
21699 case IF_THEN_ELSE:
21700 if (optimize_size)
21701 {
21702 *total = COSTS_N_INSNS (1);
21703 return true;
21704 }
066cd967
DE
21705 else if (FLOAT_MODE_P (mode)
21706 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21707 {
21708 *total = rs6000_cost->fp;
21709 return false;
21710 }
06a67bdd
RS
21711 break;
21712
c0600ecd
DE
21713 case EQ:
21714 case GTU:
21715 case LTU:
22e54023
DE
21716 /* Carry bit requires mode == Pmode.
21717 NEG or PLUS already counted so only add one. */
21718 if (mode == Pmode
21719 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21720 {
22e54023
DE
21721 *total = COSTS_N_INSNS (1);
21722 return true;
21723 }
21724 if (outer_code == SET)
21725 {
21726 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21727 {
22e54023 21728 *total = COSTS_N_INSNS (2);
c0600ecd 21729 return true;
c0600ecd 21730 }
22e54023
DE
21731 else if (mode == Pmode)
21732 {
21733 *total = COSTS_N_INSNS (3);
21734 return false;
21735 }
21736 }
21737 /* FALLTHRU */
21738
21739 case GT:
21740 case LT:
21741 case UNORDERED:
21742 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21743 {
21744 *total = COSTS_N_INSNS (2);
21745 return true;
c0600ecd 21746 }
22e54023
DE
21747 /* CC COMPARE. */
21748 if (outer_code == COMPARE)
21749 {
21750 *total = 0;
21751 return true;
21752 }
21753 break;
c0600ecd 21754
3c50106f 21755 default:
06a67bdd 21756 break;
3c50106f 21757 }
06a67bdd
RS
21758
21759 return false;
3c50106f
RH
21760}
21761
34bb030a
DE
21762/* A C expression returning the cost of moving data from a register of class
21763 CLASS1 to one of CLASS2. */
21764
21765int
f676971a 21766rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21767 enum reg_class from, enum reg_class to)
34bb030a
DE
21768{
21769 /* Moves from/to GENERAL_REGS. */
21770 if (reg_classes_intersect_p (to, GENERAL_REGS)
21771 || reg_classes_intersect_p (from, GENERAL_REGS))
21772 {
21773 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21774 from = to;
21775
21776 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21777 return (rs6000_memory_move_cost (mode, from, 0)
21778 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21779
c4ad648e
AM
21780 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21781 shift. */
34bb030a
DE
21782 else if (from == CR_REGS)
21783 return 4;
21784
aafc759a
PH
21785 /* Power6 has slower LR/CTR moves so make them more expensive than
21786 memory in order to bias spills to memory .*/
21787 else if (rs6000_cpu == PROCESSOR_POWER6
21788 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
21789 return 6 * hard_regno_nregs[0][mode];
21790
34bb030a 21791 else
c4ad648e 21792 /* A move will cost one instruction per GPR moved. */
c8b622ff 21793 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21794 }
21795
c4ad648e 21796 /* Moving between two similar registers is just one instruction. */
34bb030a 21797 else if (reg_classes_intersect_p (to, from))
7393f7f8 21798 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21799
c4ad648e 21800 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21801 else
f676971a 21802 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21803 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21804}
21805
21806/* A C expressions returning the cost of moving data of MODE from a register to
21807 or from memory. */
21808
21809int
f676971a 21810rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 21811 int in ATTRIBUTE_UNUSED)
34bb030a
DE
21812{
21813 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 21814 return 4 * hard_regno_nregs[0][mode];
34bb030a 21815 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 21816 return 4 * hard_regno_nregs[32][mode];
34bb030a 21817 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 21818 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
21819 else
21820 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
21821}
21822
9c78b944
DE
21823/* Returns a code for a target-specific builtin that implements
21824 reciprocal of the function, or NULL_TREE if not available. */
21825
21826static tree
21827rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21828 bool sqrt ATTRIBUTE_UNUSED)
21829{
21830 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21831 && flag_finite_math_only && !flag_trapping_math
21832 && flag_unsafe_math_optimizations))
21833 return NULL_TREE;
21834
21835 if (md_fn)
21836 return NULL_TREE;
21837 else
21838 switch (fn)
21839 {
21840 case BUILT_IN_SQRTF:
21841 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21842
21843 default:
21844 return NULL_TREE;
21845 }
21846}
21847
ef765ea9
DE
21848/* Newton-Raphson approximation of single-precision floating point divide n/d.
21849 Assumes no trapping math and finite arguments. */
21850
21851void
9c78b944 21852rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21853{
21854 rtx x0, e0, e1, y1, u0, v0, one;
21855
21856 x0 = gen_reg_rtx (SFmode);
21857 e0 = gen_reg_rtx (SFmode);
21858 e1 = gen_reg_rtx (SFmode);
21859 y1 = gen_reg_rtx (SFmode);
21860 u0 = gen_reg_rtx (SFmode);
21861 v0 = gen_reg_rtx (SFmode);
21862 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21863
21864 /* x0 = 1./d estimate */
21865 emit_insn (gen_rtx_SET (VOIDmode, x0,
21866 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
21867 UNSPEC_FRES)));
21868 /* e0 = 1. - d * x0 */
21869 emit_insn (gen_rtx_SET (VOIDmode, e0,
21870 gen_rtx_MINUS (SFmode, one,
21871 gen_rtx_MULT (SFmode, d, x0))));
21872 /* e1 = e0 + e0 * e0 */
21873 emit_insn (gen_rtx_SET (VOIDmode, e1,
21874 gen_rtx_PLUS (SFmode,
21875 gen_rtx_MULT (SFmode, e0, e0), e0)));
21876 /* y1 = x0 + e1 * x0 */
21877 emit_insn (gen_rtx_SET (VOIDmode, y1,
21878 gen_rtx_PLUS (SFmode,
21879 gen_rtx_MULT (SFmode, e1, x0), x0)));
21880 /* u0 = n * y1 */
21881 emit_insn (gen_rtx_SET (VOIDmode, u0,
21882 gen_rtx_MULT (SFmode, n, y1)));
21883 /* v0 = n - d * u0 */
21884 emit_insn (gen_rtx_SET (VOIDmode, v0,
21885 gen_rtx_MINUS (SFmode, n,
21886 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
21887 /* dst = u0 + v0 * y1 */
21888 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21889 gen_rtx_PLUS (SFmode,
21890 gen_rtx_MULT (SFmode, v0, y1), u0)));
21891}
21892
21893/* Newton-Raphson approximation of double-precision floating point divide n/d.
21894 Assumes no trapping math and finite arguments. */
21895
21896void
9c78b944 21897rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
21898{
21899 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
21900
21901 x0 = gen_reg_rtx (DFmode);
21902 e0 = gen_reg_rtx (DFmode);
21903 e1 = gen_reg_rtx (DFmode);
21904 e2 = gen_reg_rtx (DFmode);
21905 y1 = gen_reg_rtx (DFmode);
21906 y2 = gen_reg_rtx (DFmode);
21907 y3 = gen_reg_rtx (DFmode);
21908 u0 = gen_reg_rtx (DFmode);
21909 v0 = gen_reg_rtx (DFmode);
21910 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
21911
21912 /* x0 = 1./d estimate */
21913 emit_insn (gen_rtx_SET (VOIDmode, x0,
21914 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
21915 UNSPEC_FRES)));
21916 /* e0 = 1. - d * x0 */
21917 emit_insn (gen_rtx_SET (VOIDmode, e0,
21918 gen_rtx_MINUS (DFmode, one,
21919 gen_rtx_MULT (SFmode, d, x0))));
21920 /* y1 = x0 + e0 * x0 */
21921 emit_insn (gen_rtx_SET (VOIDmode, y1,
21922 gen_rtx_PLUS (DFmode,
21923 gen_rtx_MULT (DFmode, e0, x0), x0)));
21924 /* e1 = e0 * e0 */
21925 emit_insn (gen_rtx_SET (VOIDmode, e1,
21926 gen_rtx_MULT (DFmode, e0, e0)));
21927 /* y2 = y1 + e1 * y1 */
21928 emit_insn (gen_rtx_SET (VOIDmode, y2,
21929 gen_rtx_PLUS (DFmode,
21930 gen_rtx_MULT (DFmode, e1, y1), y1)));
21931 /* e2 = e1 * e1 */
21932 emit_insn (gen_rtx_SET (VOIDmode, e2,
21933 gen_rtx_MULT (DFmode, e1, e1)));
21934 /* y3 = y2 + e2 * y2 */
21935 emit_insn (gen_rtx_SET (VOIDmode, y3,
21936 gen_rtx_PLUS (DFmode,
21937 gen_rtx_MULT (DFmode, e2, y2), y2)));
21938 /* u0 = n * y3 */
21939 emit_insn (gen_rtx_SET (VOIDmode, u0,
21940 gen_rtx_MULT (DFmode, n, y3)));
21941 /* v0 = n - d * u0 */
21942 emit_insn (gen_rtx_SET (VOIDmode, v0,
21943 gen_rtx_MINUS (DFmode, n,
21944 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
21945 /* dst = u0 + v0 * y3 */
21946 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
21947 gen_rtx_PLUS (DFmode,
21948 gen_rtx_MULT (DFmode, v0, y3), u0)));
21949}
21950
565ef4ba 21951
9c78b944
DE
21952/* Newton-Raphson approximation of single-precision floating point rsqrt.
21953 Assumes no trapping math and finite arguments. */
21954
21955void
21956rs6000_emit_swrsqrtsf (rtx dst, rtx src)
21957{
21958 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
21959 half, one, halfthree, c1, cond, label;
21960
21961 x0 = gen_reg_rtx (SFmode);
21962 x1 = gen_reg_rtx (SFmode);
21963 x2 = gen_reg_rtx (SFmode);
21964 y1 = gen_reg_rtx (SFmode);
21965 u0 = gen_reg_rtx (SFmode);
21966 u1 = gen_reg_rtx (SFmode);
21967 u2 = gen_reg_rtx (SFmode);
21968 v0 = gen_reg_rtx (SFmode);
21969 v1 = gen_reg_rtx (SFmode);
21970 v2 = gen_reg_rtx (SFmode);
21971 t0 = gen_reg_rtx (SFmode);
21972 halfthree = gen_reg_rtx (SFmode);
21973 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
21974 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
21975
21976 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
21977 emit_insn (gen_rtx_SET (VOIDmode, t0,
21978 gen_rtx_MULT (SFmode, src, src)));
21979
21980 emit_insn (gen_rtx_SET (VOIDmode, cond,
21981 gen_rtx_COMPARE (CCFPmode, t0, src)));
21982 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
21983 emit_unlikely_jump (c1, label);
21984
21985 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
21986 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
21987
21988 /* halfthree = 1.5 = 1.0 + 0.5 */
21989 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
21990 gen_rtx_PLUS (SFmode, one, half)));
21991
21992 /* x0 = rsqrt estimate */
21993 emit_insn (gen_rtx_SET (VOIDmode, x0,
21994 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
21995 UNSPEC_RSQRT)));
21996
21997 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
21998 emit_insn (gen_rtx_SET (VOIDmode, y1,
21999 gen_rtx_MINUS (SFmode,
22000 gen_rtx_MULT (SFmode, src, halfthree),
22001 src)));
22002
22003 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22004 emit_insn (gen_rtx_SET (VOIDmode, u0,
22005 gen_rtx_MULT (SFmode, x0, x0)));
22006 emit_insn (gen_rtx_SET (VOIDmode, v0,
22007 gen_rtx_MINUS (SFmode,
22008 halfthree,
22009 gen_rtx_MULT (SFmode, y1, u0))));
22010 emit_insn (gen_rtx_SET (VOIDmode, x1,
22011 gen_rtx_MULT (SFmode, x0, v0)));
22012
22013 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22014 emit_insn (gen_rtx_SET (VOIDmode, u1,
22015 gen_rtx_MULT (SFmode, x1, x1)));
22016 emit_insn (gen_rtx_SET (VOIDmode, v1,
22017 gen_rtx_MINUS (SFmode,
22018 halfthree,
22019 gen_rtx_MULT (SFmode, y1, u1))));
22020 emit_insn (gen_rtx_SET (VOIDmode, x2,
22021 gen_rtx_MULT (SFmode, x1, v1)));
22022
22023 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22024 emit_insn (gen_rtx_SET (VOIDmode, u2,
22025 gen_rtx_MULT (SFmode, x2, x2)));
22026 emit_insn (gen_rtx_SET (VOIDmode, v2,
22027 gen_rtx_MINUS (SFmode,
22028 halfthree,
22029 gen_rtx_MULT (SFmode, y1, u2))));
22030 emit_insn (gen_rtx_SET (VOIDmode, dst,
22031 gen_rtx_MULT (SFmode, x2, v2)));
22032
22033 emit_label (XEXP (label, 0));
22034}
22035
565ef4ba
RS
22036/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22037 target, and SRC is the argument operand. */
22038
22039void
22040rs6000_emit_popcount (rtx dst, rtx src)
22041{
22042 enum machine_mode mode = GET_MODE (dst);
22043 rtx tmp1, tmp2;
22044
22045 tmp1 = gen_reg_rtx (mode);
22046
22047 if (mode == SImode)
22048 {
22049 emit_insn (gen_popcntbsi2 (tmp1, src));
22050 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22051 NULL_RTX, 0);
22052 tmp2 = force_reg (SImode, tmp2);
22053 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22054 }
22055 else
22056 {
22057 emit_insn (gen_popcntbdi2 (tmp1, src));
22058 tmp2 = expand_mult (DImode, tmp1,
22059 GEN_INT ((HOST_WIDE_INT)
22060 0x01010101 << 32 | 0x01010101),
22061 NULL_RTX, 0);
22062 tmp2 = force_reg (DImode, tmp2);
22063 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22064 }
22065}
22066
22067
22068/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22069 target, and SRC is the argument operand. */
22070
22071void
22072rs6000_emit_parity (rtx dst, rtx src)
22073{
22074 enum machine_mode mode = GET_MODE (dst);
22075 rtx tmp;
22076
22077 tmp = gen_reg_rtx (mode);
22078 if (mode == SImode)
22079 {
22080 /* Is mult+shift >= shift+xor+shift+xor? */
22081 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22082 {
22083 rtx tmp1, tmp2, tmp3, tmp4;
22084
22085 tmp1 = gen_reg_rtx (SImode);
22086 emit_insn (gen_popcntbsi2 (tmp1, src));
22087
22088 tmp2 = gen_reg_rtx (SImode);
22089 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22090 tmp3 = gen_reg_rtx (SImode);
22091 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22092
22093 tmp4 = gen_reg_rtx (SImode);
22094 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22095 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22096 }
22097 else
22098 rs6000_emit_popcount (tmp, src);
22099 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22100 }
22101 else
22102 {
22103 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22104 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22105 {
22106 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22107
22108 tmp1 = gen_reg_rtx (DImode);
22109 emit_insn (gen_popcntbdi2 (tmp1, src));
22110
22111 tmp2 = gen_reg_rtx (DImode);
22112 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22113 tmp3 = gen_reg_rtx (DImode);
22114 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22115
22116 tmp4 = gen_reg_rtx (DImode);
22117 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22118 tmp5 = gen_reg_rtx (DImode);
22119 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22120
22121 tmp6 = gen_reg_rtx (DImode);
22122 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22123 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22124 }
22125 else
22126 rs6000_emit_popcount (tmp, src);
22127 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22128 }
22129}
22130
ded9bf77
AH
22131/* Return an RTX representing where to find the function value of a
22132 function returning MODE. */
22133static rtx
22134rs6000_complex_function_value (enum machine_mode mode)
22135{
22136 unsigned int regno;
22137 rtx r1, r2;
22138 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22139 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22140
18f63bfa
AH
22141 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22142 regno = FP_ARG_RETURN;
354ed18f
AH
22143 else
22144 {
18f63bfa 22145 regno = GP_ARG_RETURN;
ded9bf77 22146
18f63bfa
AH
22147 /* 32-bit is OK since it'll go in r3/r4. */
22148 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22149 return gen_rtx_REG (mode, regno);
22150 }
22151
18f63bfa
AH
22152 if (inner_bytes >= 8)
22153 return gen_rtx_REG (mode, regno);
22154
ded9bf77
AH
22155 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22156 const0_rtx);
22157 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22158 GEN_INT (inner_bytes));
ded9bf77
AH
22159 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22160}
22161
a6ebc39a
AH
22162/* Define how to find the value returned by a function.
22163 VALTYPE is the data type of the value (as a tree).
22164 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22165 otherwise, FUNC is 0.
22166
22167 On the SPE, both FPs and vectors are returned in r3.
22168
22169 On RS/6000 an integer value is in r3 and a floating-point value is in
22170 fp1, unless -msoft-float. */
22171
22172rtx
586de218 22173rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22174{
22175 enum machine_mode mode;
2a8fa26c 22176 unsigned int regno;
a6ebc39a 22177
594a51fe
SS
22178 /* Special handling for structs in darwin64. */
22179 if (rs6000_darwin64_abi
22180 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22181 && TREE_CODE (valtype) == RECORD_TYPE
22182 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22183 {
22184 CUMULATIVE_ARGS valcum;
22185 rtx valret;
22186
0b5383eb 22187 valcum.words = 0;
594a51fe
SS
22188 valcum.fregno = FP_ARG_MIN_REG;
22189 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22190 /* Do a trial code generation as if this were going to be passed as
22191 an argument; if any part goes in memory, we return NULL. */
22192 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22193 if (valret)
22194 return valret;
22195 /* Otherwise fall through to standard ABI rules. */
22196 }
22197
0e67400a
FJ
22198 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22199 {
22200 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
22201 return gen_rtx_PARALLEL (DImode,
22202 gen_rtvec (2,
22203 gen_rtx_EXPR_LIST (VOIDmode,
22204 gen_rtx_REG (SImode, GP_ARG_RETURN),
22205 const0_rtx),
22206 gen_rtx_EXPR_LIST (VOIDmode,
22207 gen_rtx_REG (SImode,
22208 GP_ARG_RETURN + 1),
22209 GEN_INT (4))));
22210 }
0f086e42
FJ
22211 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22212 {
22213 return gen_rtx_PARALLEL (DCmode,
22214 gen_rtvec (4,
22215 gen_rtx_EXPR_LIST (VOIDmode,
22216 gen_rtx_REG (SImode, GP_ARG_RETURN),
22217 const0_rtx),
22218 gen_rtx_EXPR_LIST (VOIDmode,
22219 gen_rtx_REG (SImode,
22220 GP_ARG_RETURN + 1),
22221 GEN_INT (4)),
22222 gen_rtx_EXPR_LIST (VOIDmode,
22223 gen_rtx_REG (SImode,
22224 GP_ARG_RETURN + 2),
22225 GEN_INT (8)),
22226 gen_rtx_EXPR_LIST (VOIDmode,
22227 gen_rtx_REG (SImode,
22228 GP_ARG_RETURN + 3),
22229 GEN_INT (12))));
22230 }
602ea4d3 22231
7348aa7f
FXC
22232 mode = TYPE_MODE (valtype);
22233 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22234 || POINTER_TYPE_P (valtype))
b78d48dd 22235 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22236
e41b2a33
PB
22237 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22238 /* _Decimal128 must use an even/odd register pair. */
22239 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22240 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 22241 regno = FP_ARG_RETURN;
ded9bf77 22242 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22243 && targetm.calls.split_complex_arg)
ded9bf77 22244 return rs6000_complex_function_value (mode);
44688022 22245 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22246 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22247 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22248 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22249 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22250 && (mode == DFmode || mode == DCmode
22251 || mode == TFmode || mode == TCmode))
18f63bfa 22252 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22253 else
22254 regno = GP_ARG_RETURN;
22255
22256 return gen_rtx_REG (mode, regno);
22257}
22258
ded9bf77
AH
22259/* Define how to find the value returned by a library function
22260 assuming the value has mode MODE. */
22261rtx
22262rs6000_libcall_value (enum machine_mode mode)
22263{
22264 unsigned int regno;
22265
2e6c9641
FJ
22266 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22267 {
22268 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
22269 return gen_rtx_PARALLEL (DImode,
22270 gen_rtvec (2,
22271 gen_rtx_EXPR_LIST (VOIDmode,
22272 gen_rtx_REG (SImode, GP_ARG_RETURN),
22273 const0_rtx),
22274 gen_rtx_EXPR_LIST (VOIDmode,
22275 gen_rtx_REG (SImode,
22276 GP_ARG_RETURN + 1),
22277 GEN_INT (4))));
22278 }
22279
e41b2a33
PB
22280 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22281 /* _Decimal128 must use an even/odd register pair. */
22282 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22283 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
22284 && TARGET_HARD_FLOAT && TARGET_FPRS)
22285 regno = FP_ARG_RETURN;
44688022
AM
22286 else if (ALTIVEC_VECTOR_MODE (mode)
22287 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22288 regno = ALTIVEC_ARG_RETURN;
42ba5130 22289 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22290 return rs6000_complex_function_value (mode);
18f63bfa 22291 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22292 && (mode == DFmode || mode == DCmode
22293 || mode == TFmode || mode == TCmode))
18f63bfa 22294 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22295 else
22296 regno = GP_ARG_RETURN;
22297
22298 return gen_rtx_REG (mode, regno);
22299}
22300
d1d0c603
JJ
22301/* Define the offset between two registers, FROM to be eliminated and its
22302 replacement TO, at the start of a routine. */
22303HOST_WIDE_INT
22304rs6000_initial_elimination_offset (int from, int to)
22305{
22306 rs6000_stack_t *info = rs6000_stack_info ();
22307 HOST_WIDE_INT offset;
22308
7d5175e1 22309 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22310 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22311 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22312 {
22313 offset = info->push_p ? 0 : -info->total_size;
22314 if (FRAME_GROWS_DOWNWARD)
5b667039 22315 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22316 }
22317 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22318 offset = FRAME_GROWS_DOWNWARD
5b667039 22319 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22320 : 0;
22321 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22322 offset = info->total_size;
22323 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22324 offset = info->push_p ? info->total_size : 0;
22325 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22326 offset = 0;
22327 else
37409796 22328 gcc_unreachable ();
d1d0c603
JJ
22329
22330 return offset;
22331}
22332
58646b77 22333/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 22334
c8e4f0e9 22335static bool
3101faab 22336rs6000_is_opaque_type (const_tree type)
62e1dfcf 22337{
58646b77 22338 return (type == opaque_V2SI_type_node
2abe3e28 22339 || type == opaque_V2SF_type_node
58646b77 22340 || type == opaque_V4SI_type_node);
62e1dfcf
NC
22341}
22342
96714395 22343static rtx
a2369ed3 22344rs6000_dwarf_register_span (rtx reg)
96714395
AH
22345{
22346 unsigned regno;
22347
4d4cbc0e
AH
22348 if (TARGET_SPE
22349 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
22350 || (TARGET_E500_DOUBLE
22351 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
22352 ;
22353 else
96714395
AH
22354 return NULL_RTX;
22355
22356 regno = REGNO (reg);
22357
22358 /* The duality of the SPE register size wreaks all kinds of havoc.
22359 This is a way of distinguishing r0 in 32-bits from r0 in
22360 64-bits. */
22361 return
22362 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
22363 BYTES_BIG_ENDIAN
22364 ? gen_rtvec (2,
22365 gen_rtx_REG (SImode, regno + 1200),
22366 gen_rtx_REG (SImode, regno))
22367 : gen_rtvec (2,
22368 gen_rtx_REG (SImode, regno),
22369 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
22370}
22371
37ea0b7e
JM
22372/* Fill in sizes for SPE register high parts in table used by unwinder. */
22373
22374static void
22375rs6000_init_dwarf_reg_sizes_extra (tree address)
22376{
22377 if (TARGET_SPE)
22378 {
22379 int i;
22380 enum machine_mode mode = TYPE_MODE (char_type_node);
22381 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
22382 rtx mem = gen_rtx_MEM (BLKmode, addr);
22383 rtx value = gen_int_mode (4, mode);
22384
22385 for (i = 1201; i < 1232; i++)
22386 {
22387 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22388 HOST_WIDE_INT offset
22389 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22390
22391 emit_move_insn (adjust_address (mem, mode, offset), value);
22392 }
22393 }
22394}
22395
93c9d1ba
AM
22396/* Map internal gcc register numbers to DWARF2 register numbers. */
22397
22398unsigned int
22399rs6000_dbx_register_number (unsigned int regno)
22400{
22401 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22402 return regno;
22403 if (regno == MQ_REGNO)
22404 return 100;
1de43f85 22405 if (regno == LR_REGNO)
93c9d1ba 22406 return 108;
1de43f85 22407 if (regno == CTR_REGNO)
93c9d1ba
AM
22408 return 109;
22409 if (CR_REGNO_P (regno))
22410 return regno - CR0_REGNO + 86;
22411 if (regno == XER_REGNO)
22412 return 101;
22413 if (ALTIVEC_REGNO_P (regno))
22414 return regno - FIRST_ALTIVEC_REGNO + 1124;
22415 if (regno == VRSAVE_REGNO)
22416 return 356;
22417 if (regno == VSCR_REGNO)
22418 return 67;
22419 if (regno == SPE_ACC_REGNO)
22420 return 99;
22421 if (regno == SPEFSCR_REGNO)
22422 return 612;
22423 /* SPE high reg number. We get these values of regno from
22424 rs6000_dwarf_register_span. */
37409796
NS
22425 gcc_assert (regno >= 1200 && regno < 1232);
22426 return regno;
93c9d1ba
AM
22427}
22428
93f90be6 22429/* target hook eh_return_filter_mode */
f676971a 22430static enum machine_mode
93f90be6
FJ
22431rs6000_eh_return_filter_mode (void)
22432{
22433 return TARGET_32BIT ? SImode : word_mode;
22434}
22435
00b79d54
BE
22436/* Target hook for scalar_mode_supported_p. */
22437static bool
22438rs6000_scalar_mode_supported_p (enum machine_mode mode)
22439{
22440 if (DECIMAL_FLOAT_MODE_P (mode))
22441 return true;
22442 else
22443 return default_scalar_mode_supported_p (mode);
22444}
22445
f676971a
EC
22446/* Target hook for vector_mode_supported_p. */
22447static bool
22448rs6000_vector_mode_supported_p (enum machine_mode mode)
22449{
22450
96038623
DE
22451 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22452 return true;
22453
f676971a
EC
22454 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22455 return true;
22456
22457 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22458 return true;
22459
22460 else
22461 return false;
22462}
22463
bb8df8a6
EC
22464/* Target hook for invalid_arg_for_unprototyped_fn. */
22465static const char *
3101faab 22466invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22467{
22468 return (!rs6000_darwin64_abi
22469 && typelist == 0
22470 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22471 && (funcdecl == NULL_TREE
22472 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22473 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22474 ? N_("AltiVec argument passed to unprototyped function")
22475 : NULL;
22476}
22477
3aebbe5f
JJ
22478/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22479 setup by using __stack_chk_fail_local hidden function instead of
22480 calling __stack_chk_fail directly. Otherwise it is better to call
22481 __stack_chk_fail directly. */
22482
22483static tree
22484rs6000_stack_protect_fail (void)
22485{
22486 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22487 ? default_hidden_stack_protect_fail ()
22488 : default_external_stack_protect_fail ();
22489}
22490
17211ab5 22491#include "gt-rs6000.h"