/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int toc_save_p;               /* true if the TOC needs to be saved */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs  */
  int toc_save_offset;          /* offset to save the TOC pointer */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int lr_size;                  /* size to hold LR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  int toc_size;                 /* size to hold TOC if not in save_size */
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

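/* An rs6000_stack_t describing the current function is produced by
   rs6000_stack_info () (declared later in this file); the prologue and
   epilogue emitters are the main consumers of these offsets and sizes.  */
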
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double */
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static GTY(()) int rs6000_sr_alias_set;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi= was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
};

const struct processor_costs *rs6000_cost;

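/* rs6000_override_options () points rs6000_cost at one of the tables
   below (for example, size32_cost/size64_cost when optimizing for size);
   rs6000_rtx_costs () then reads the per-operation entries.  Costs are
   expressed with COSTS_N_INSNS, i.e. as multiples of the cost of a
   single instruction relative to an integer add.  */
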
/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};


static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_maybe_dead (rtx);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
                             int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_indexed_address_p (rtx, int);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_fundamental_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
static void rs6000_elf_unique_section (tree, int);
static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
                                           unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
static bool rs6000_elf_in_small_data_p (tree);
#endif
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
                                             unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static bool is_microcoded_insn (rtx);
static int is_dispatch_slot_restricted (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_use_sched_lookahead (void);
static tree rs6000_builtin_mask_for_load (void);

static void def_builtin (int, const char *, tree, int);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
                                      enum rs6000_builtins,
                                      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
                                             const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
                                                      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
                                                        tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
                                              HOST_WIDE_INT,
                                              rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
                                                tree, HOST_WIDE_INT,
                                                rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree,
                                    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static void add_compiler_branch_island (tree, tree, int);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
                             enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
                                       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
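/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x1, matching the VRSAVE
   layout described above.  */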

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT | MASK_SCHED_PROLOG)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.  */
  if (FP_REGNO_P (regno))
    return
      (GET_MODE_CLASS (mode) == MODE_FLOAT
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general registers and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

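/* For example, on a 32-bit target DImode occupies two GPRs, so
   rs6000_hard_regno_mode_ok (31, DImode) fails the first test above
   (register 32 is not a GPR), while (30, DImode) succeeds.  */
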
/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

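/* The HARD_REGNO_MODE_OK macro (in rs6000.h) is expected to reduce to a
   lookup in rs6000_hard_regno_mode_ok_p[mode][regno], so the full
   register/mode scan above happens only once, at option-override time.  */
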
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;           /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;          /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
         {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
                     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
                     | MASK_MFCRF)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;
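  /* For example, if the user passed an explicit -mno-altivec, MASK_ALTIVEC
     drops out of set_masks here, so a later -mcpu selection from the table
     above cannot silently re-enable AltiVec.  */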

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          for (j = 0; j < ptt_size; j++)
            if (! strcmp (ptr->string, processor_target_table[j].name))
              {
                if (ptr->set_tune_p)
                  rs6000_cpu = processor_target_table[j].processor;

                if (ptr->set_arch_p)
                  {
                    target_flags &= ~set_masks;
                    target_flags |= (processor_target_table[j].target_enable
                                     & set_masks);
                  }
                break;
              }

          if (j == ptt_size)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
        {
          target_flags &= ~MASK_MULTIPLE;
          if ((target_flags_explicit & MASK_MULTIPLE) != 0)
            warning (0, "-mmultiple is not supported on little endian systems");
        }

      if (TARGET_STRING)
        {
          target_flags &= ~MASK_STRING;
          if ((target_flags_explicit & MASK_STRING) != 0)
            warning (0, "-mstring is not supported on little endian systems");
        }
    }

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
        rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
        rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
        rs6000_debug_arg = 1;
      else
        error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
        rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
        rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
        rs6000_traceback = traceback_none;
      else
        error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
               rs6000_traceback_name);
    }

  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
        error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
         MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
        target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
         default, so let's unset them if we manually set another
         CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
        rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
        rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
        rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
        rs6000_isel = 0;
      if (!rs6000_explicit_options.long_double)
        rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
                        && rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
                         || rs6000_cpu == PROCESSOR_POWER5);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
        rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
        rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
        rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
        rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
        rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
        rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      if (rs6000_sched_groups)
        {
          if (align_functions <= 0)
            align_functions = 16;
          if (align_jumps <= 0)
            align_jumps = 16;
          if (align_loops <= 0)
            align_loops = 16;
        }
      if (align_jumps_max_skip <= 0)
        align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
        align_loops_max_skip = 15;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1442
1443 /* Initialize rs6000_cost with the appropriate target costs. */
1444 if (optimize_size)
1445 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1446 else
1447 switch (rs6000_cpu)
1448 {
1449 case PROCESSOR_RIOS1:
1450 rs6000_cost = &rios1_cost;
1451 break;
1452
1453 case PROCESSOR_RIOS2:
1454 rs6000_cost = &rios2_cost;
1455 break;
1456
1457 case PROCESSOR_RS64A:
1458 rs6000_cost = &rs64a_cost;
1459 break;
1460
1461 case PROCESSOR_MPCCORE:
1462 rs6000_cost = &mpccore_cost;
1463 break;
1464
1465 case PROCESSOR_PPC403:
1466 rs6000_cost = &ppc403_cost;
1467 break;
1468
1469 case PROCESSOR_PPC405:
1470 rs6000_cost = &ppc405_cost;
1471 break;
1472
1473 case PROCESSOR_PPC440:
1474 rs6000_cost = &ppc440_cost;
1475 break;
1476
1477 case PROCESSOR_PPC601:
1478 rs6000_cost = &ppc601_cost;
1479 break;
1480
1481 case PROCESSOR_PPC603:
1482 rs6000_cost = &ppc603_cost;
1483 break;
1484
1485 case PROCESSOR_PPC604:
1486 rs6000_cost = &ppc604_cost;
1487 break;
1488
1489 case PROCESSOR_PPC604e:
1490 rs6000_cost = &ppc604e_cost;
1491 break;
1492
1493 case PROCESSOR_PPC620:
8b897cfa
RS
1494 rs6000_cost = &ppc620_cost;
1495 break;
1496
f0517163
RS
1497 case PROCESSOR_PPC630:
1498 rs6000_cost = &ppc630_cost;
1499 break;
1500
8b897cfa
RS
1501 case PROCESSOR_PPC750:
1502 case PROCESSOR_PPC7400:
1503 rs6000_cost = &ppc750_cost;
1504 break;
1505
1506 case PROCESSOR_PPC7450:
1507 rs6000_cost = &ppc7450_cost;
1508 break;
1509
1510 case PROCESSOR_PPC8540:
1511 rs6000_cost = &ppc8540_cost;
1512 break;
1513
1514 case PROCESSOR_POWER4:
1515 case PROCESSOR_POWER5:
1516 rs6000_cost = &power4_cost;
1517 break;
1518
1519 default:
37409796 1520 gcc_unreachable ();
8b897cfa 1521 }
5248c961 1522}
5accd822 1523
7ccf35ed
DN
1524/* Implement targetm.vectorize.builtin_mask_for_load. */
1525static tree
1526rs6000_builtin_mask_for_load (void)
1527{
1528 if (TARGET_ALTIVEC)
1529 return altivec_builtin_mask_for_load;
1530 else
1531 return 0;
1532}
1533
5da702b1
AH
1534/* Handle generic options of the form -mfoo=yes/no.
1535 NAME is the option name.
1536 VALUE is the option value.
1537 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1538 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1539static void
5da702b1 1540rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1541{
5da702b1 1542 if (value == 0)
993f19a8 1543 return;
5da702b1
AH
1544 else if (!strcmp (value, "yes"))
1545 *flag = 1;
1546 else if (!strcmp (value, "no"))
1547 *flag = 0;
08b57fb3 1548 else
5da702b1 1549 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1550}
1551
c4501e62
JJ
1552/* Validate and record the size specified with the -mtls-size option. */
1553
1554static void
863d938c 1555rs6000_parse_tls_size_option (void)
c4501e62
JJ
1556{
1557 if (rs6000_tls_size_string == 0)
1558 return;
1559 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1560 rs6000_tls_size = 16;
1561 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1562 rs6000_tls_size = 32;
1563 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1564 rs6000_tls_size = 64;
1565 else
9e637a26 1566 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1567}
1568
5accd822 1569void
a2369ed3 1570optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1571{
2e3f0db6
DJ
1572 if (DEFAULT_ABI == ABI_DARWIN)
1573 /* The Darwin libraries never set errno, so we might as well
1574 avoid calling them when that's the only reason we would. */
1575 flag_errno_math = 0;
5accd822 1576}
78f5898b
AH
1577
1578/* Implement TARGET_HANDLE_OPTION. */
1579
1580static bool
1581rs6000_handle_option (size_t code, const char *arg, int value)
1582{
1583 switch (code)
1584 {
1585 case OPT_mno_power:
1586 target_flags &= ~(MASK_POWER | MASK_POWER2
1587 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
1588 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1589 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
1590 break;
1591 case OPT_mno_powerpc:
1592 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1593 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
1594 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1595 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
1596 break;
1597 case OPT_mfull_toc:
1598 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1599 | MASK_NO_SUM_IN_TOC);
c2dba4ab
AH
1600 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1601 | MASK_NO_SUM_IN_TOC);
78f5898b
AH
1602#ifdef TARGET_USES_SYSV4_OPT
 1603	 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
1604 just the same as -mminimal-toc. */
1605 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1606 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1607#endif
1608 break;
1609
1610#ifdef TARGET_USES_SYSV4_OPT
1611 case OPT_mtoc:
1612 /* Make -mtoc behave like -mminimal-toc. */
1613 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1614 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1615 break;
1616#endif
1617
1618#ifdef TARGET_USES_AIX64_OPT
1619 case OPT_maix64:
1620#else
1621 case OPT_m64:
1622#endif
1623 target_flags |= MASK_POWERPC64 | MASK_POWERPC | MASK_PPC_GFXOPT;
c2dba4ab
AH
1624 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC
1625 | MASK_PPC_GFXOPT;
78f5898b
AH
1626 break;
1627
1628#ifdef TARGET_USES_AIX64_OPT
1629 case OPT_maix32:
1630#else
1631 case OPT_m32:
1632#endif
1633 target_flags &= ~MASK_POWERPC64;
c2dba4ab 1634 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
1635 break;
1636
1637 case OPT_minsert_sched_nops_:
1638 rs6000_sched_insert_nops_str = arg;
1639 break;
1640
1641 case OPT_mminimal_toc:
1642 if (value == 1)
1643 {
c2dba4ab
AH
1644 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1645 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
78f5898b
AH
1646 }
1647 break;
1648
1649 case OPT_mpower:
1650 if (value == 1)
c2dba4ab
AH
1651 {
1652 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1653 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1654 }
78f5898b
AH
1655 break;
1656
1657 case OPT_mpower2:
1658 if (value == 1)
c2dba4ab
AH
1659 {
1660 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1661 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1662 }
78f5898b
AH
1663 break;
1664
1665 case OPT_mpowerpc_gpopt:
1666 case OPT_mpowerpc_gfxopt:
1667 if (value == 1)
c2dba4ab
AH
1668 {
1669 target_flags |= MASK_POWERPC;
1670 target_flags_explicit |= MASK_POWERPC;
1671 }
78f5898b
AH
1672 break;
1673
df01da37
DE
1674 case OPT_maix_struct_return:
1675 case OPT_msvr4_struct_return:
1676 rs6000_explicit_options.aix_struct_ret = true;
1677 break;
1678
78f5898b
AH
1679 case OPT_mvrsave_:
1680 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1681 break;
78f5898b
AH
1682
1683 case OPT_misel_:
1684 rs6000_explicit_options.isel = true;
1685 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1686 break;
1687
1688 case OPT_mspe_:
1689 rs6000_explicit_options.spe = true;
1690 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1691 /* No SPE means 64-bit long doubles, even if an E500. */
1692 if (!rs6000_spe)
1693 rs6000_long_double_type_size = 64;
1694 break;
1695
1696 case OPT_mdebug_:
1697 rs6000_debug_name = arg;
1698 break;
1699
1700#ifdef TARGET_USES_SYSV4_OPT
1701 case OPT_mcall_:
1702 rs6000_abi_name = arg;
1703 break;
1704
1705 case OPT_msdata_:
1706 rs6000_sdata_name = arg;
1707 break;
1708
1709 case OPT_mtls_size_:
1710 rs6000_tls_size_string = arg;
1711 break;
1712
1713 case OPT_mrelocatable:
1714 if (value == 1)
c2dba4ab
AH
1715 {
1716 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1717 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1718 }
78f5898b
AH
1719 break;
1720
1721 case OPT_mrelocatable_lib:
1722 if (value == 1)
c2dba4ab
AH
1723 {
1724 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1725 | MASK_NO_FP_IN_TOC;
1726 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1727 | MASK_NO_FP_IN_TOC;
1728 }
78f5898b 1729 else
c2dba4ab
AH
1730 {
1731 target_flags &= ~MASK_RELOCATABLE;
1732 target_flags_explicit |= MASK_RELOCATABLE;
1733 }
78f5898b
AH
1734 break;
1735#endif
1736
1737 case OPT_mabi_:
1738 rs6000_explicit_options.abi = true;
1739 if (!strcmp (arg, "altivec"))
1740 {
1741 rs6000_altivec_abi = 1;
1742 rs6000_spe_abi = 0;
1743 }
1744 else if (! strcmp (arg, "no-altivec"))
1745 rs6000_altivec_abi = 0;
1746 else if (! strcmp (arg, "spe"))
1747 {
1748 rs6000_spe_abi = 1;
1749 rs6000_altivec_abi = 0;
1750 if (!TARGET_SPE_ABI)
1751 error ("not configured for ABI: '%s'", arg);
1752 }
1753 else if (! strcmp (arg, "no-spe"))
1754 rs6000_spe_abi = 0;
1755
 1756	 /* These are here for testing during development only; please do not
 1757	 document them in the manual.  */
1758 else if (! strcmp (arg, "d64"))
1759 {
1760 rs6000_darwin64_abi = 1;
 1761	 warning (0, "using darwin64 ABI");
1762 }
1763 else if (! strcmp (arg, "d32"))
1764 {
1765 rs6000_darwin64_abi = 0;
 1766	 warning (0, "using old darwin ABI");
1767 }
1768
1769 else
1770 {
1771 error ("unknown ABI specified: '%s'", arg);
1772 return false;
1773 }
1774 break;
1775
1776 case OPT_mcpu_:
1777 rs6000_select[1].string = arg;
1778 break;
1779
1780 case OPT_mtune_:
1781 rs6000_select[2].string = arg;
1782 break;
1783
1784 case OPT_mtraceback_:
1785 rs6000_traceback_name = arg;
1786 break;
1787
1788 case OPT_mfloat_gprs_:
1789 rs6000_explicit_options.float_gprs = true;
1790 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1791 rs6000_float_gprs = 1;
1792 else if (! strcmp (arg, "double"))
1793 rs6000_float_gprs = 2;
1794 else if (! strcmp (arg, "no"))
1795 rs6000_float_gprs = 0;
1796 else
1797 {
1798 error ("invalid option for -mfloat-gprs: '%s'", arg);
1799 return false;
1800 }
1801 break;
1802
1803 case OPT_mlong_double_:
1804 rs6000_explicit_options.long_double = true;
1805 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1806 if (value != 64 && value != 128)
1807 {
 1808	 error ("unknown switch -mlong-double-%s", arg);
1809 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1810 return false;
1811 }
1812 else
1813 rs6000_long_double_type_size = value;
1814 break;
1815
1816 case OPT_msched_costly_dep_:
1817 rs6000_sched_costly_dep_str = arg;
1818 break;
1819
1820 case OPT_malign_:
1821 rs6000_explicit_options.alignment = true;
1822 if (! strcmp (arg, "power"))
1823 {
1824 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1825 some C library functions, so warn about it. The flag may be
1826 useful for performance studies from time to time though, so
1827 don't disable it entirely. */
1828 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1829 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1830 " it is incompatible with the installed C and C++ libraries");
1831 rs6000_alignment_flags = MASK_ALIGN_POWER;
1832 }
1833 else if (! strcmp (arg, "natural"))
1834 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1835 else
1836 {
1837 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1838 return false;
1839 }
1840 break;
1841 }
1842 return true;
1843}
3cfa4909
MM
1844\f
1845/* Do anything needed at the start of the asm file. */
1846
1bc7c5b6 1847static void
863d938c 1848rs6000_file_start (void)
3cfa4909 1849{
c4d38ccb 1850 size_t i;
3cfa4909 1851 char buffer[80];
d330fd93 1852 const char *start = buffer;
3cfa4909 1853 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
1854 const char *default_cpu = TARGET_CPU_DEFAULT;
1855 FILE *file = asm_out_file;
1856
1857 default_file_start ();
1858
1859#ifdef TARGET_BI_ARCH
1860 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1861 default_cpu = 0;
1862#endif
3cfa4909
MM
1863
1864 if (flag_verbose_asm)
1865 {
1866 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1867 rs6000_select[0].string = default_cpu;
1868
b6a1cbae 1869 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
1870 {
1871 ptr = &rs6000_select[i];
1872 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1873 {
1874 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1875 start = "";
1876 }
1877 }
1878
9c6b4ed9 1879 if (PPC405_ERRATUM77)
b0bfee6e 1880 {
9c6b4ed9 1881 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
1882 start = "";
1883 }
b0bfee6e 1884
b91da81f 1885#ifdef USING_ELFOS_H
3cfa4909
MM
1886 switch (rs6000_sdata)
1887 {
1888 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1889 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1890 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1891 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1892 }
1893
1894 if (rs6000_sdata && g_switch_value)
1895 {
307b599c
MK
1896 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1897 g_switch_value);
3cfa4909
MM
1898 start = "";
1899 }
1900#endif
1901
1902 if (*start == '\0')
949ea356 1903 putc ('\n', file);
3cfa4909 1904 }
b723e82f
JJ
1905
1906 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1907 {
1908 toc_section ();
1909 text_section ();
1910 }
3cfa4909 1911}
c4e18b1c 1912
5248c961 1913\f
a0ab749a 1914/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
1915
1916int
863d938c 1917direct_return (void)
9878760c 1918{
4697a36c
MM
1919 if (reload_completed)
1920 {
1921 rs6000_stack_t *info = rs6000_stack_info ();
1922
1923 if (info->first_gp_reg_save == 32
1924 && info->first_fp_reg_save == 64
00b960c7 1925 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
1926 && ! info->lr_save_p
1927 && ! info->cr_save_p
00b960c7 1928 && info->vrsave_mask == 0
c81fc13e 1929 && ! info->push_p)
4697a36c
MM
1930 return 1;
1931 }
1932
1933 return 0;
9878760c
RK
1934}
1935
4e74d8ec
MM
1936/* Return the number of instructions it takes to form a constant in an
1937 integer register. */
1938
48d72335 1939int
a2369ed3 1940num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
1941{
1942 /* signed constant loadable with {cal|addi} */
5f59ecb7 1943 if (CONST_OK_FOR_LETTER_P (value, 'I'))
0865c631
GK
1944 return 1;
1945
4e74d8ec 1946 /* constant loadable with {cau|addis} */
5f59ecb7 1947 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
4e74d8ec
MM
1948 return 1;
1949
5f59ecb7 1950#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 1951 else if (TARGET_POWERPC64)
4e74d8ec 1952 {
a65c591c
DE
1953 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1954 HOST_WIDE_INT high = value >> 31;
4e74d8ec 1955
a65c591c 1956 if (high == 0 || high == -1)
4e74d8ec
MM
1957 return 2;
1958
a65c591c 1959 high >>= 1;
4e74d8ec 1960
a65c591c 1961 if (low == 0)
4e74d8ec 1962 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
1963 else
1964 return (num_insns_constant_wide (high)
e396202a 1965 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
1966 }
1967#endif
1968
1969 else
1970 return 2;
1971}
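/* A minimal illustrative sketch, not from this file: the 32-bit cases above,
   assuming constraint 'I' accepts a signed 16-bit immediate (addi) and 'L'
   accepts a value whose low 16 bits are zero (lis/addis); anything else
   needs the usual two-instruction lis+ori sequence.  */

static int
example_num_insns_si_constant (int value)
{
  if (value >= -0x8000 && value <= 0x7fff)
    return 1;			/* addi rD,0,value */
  if ((value & 0xffff) == 0)
    return 1;			/* lis rD,hi16 (value) */
  return 2;			/* lis rD,hi16 ; ori rD,rD,lo16 */
}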
1972
1973int
a2369ed3 1974num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 1975{
37409796 1976 HOST_WIDE_INT low, high;
bb8df8a6 1977
37409796 1978 switch (GET_CODE (op))
0d30d435 1979 {
37409796 1980 case CONST_INT:
0d30d435 1981#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 1982 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 1983 && mask64_operand (op, mode))
c4ad648e 1984 return 2;
0d30d435
DE
1985 else
1986#endif
1987 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 1988
37409796
NS
1989 case CONST_DOUBLE:
1990 if (mode == SFmode)
1991 {
1992 long l;
1993 REAL_VALUE_TYPE rv;
bb8df8a6 1994
37409796
NS
1995 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1996 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1997 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1998 }
a260abc9 1999
37409796
NS
2000 if (mode == VOIDmode || mode == DImode)
2001 {
2002 high = CONST_DOUBLE_HIGH (op);
2003 low = CONST_DOUBLE_LOW (op);
2004 }
2005 else
2006 {
2007 long l[2];
2008 REAL_VALUE_TYPE rv;
bb8df8a6 2009
37409796
NS
2010 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2011 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2012 high = l[WORDS_BIG_ENDIAN == 0];
2013 low = l[WORDS_BIG_ENDIAN != 0];
2014 }
47ad8c61 2015
37409796
NS
2016 if (TARGET_32BIT)
2017 return (num_insns_constant_wide (low)
2018 + num_insns_constant_wide (high));
2019 else
2020 {
2021 if ((high == 0 && low >= 0)
2022 || (high == -1 && low < 0))
2023 return num_insns_constant_wide (low);
bb8df8a6 2024
1990cd79 2025 else if (mask64_operand (op, mode))
37409796 2026 return 2;
bb8df8a6 2027
37409796
NS
2028 else if (low == 0)
2029 return num_insns_constant_wide (high) + 1;
bb8df8a6 2030
37409796
NS
2031 else
2032 return (num_insns_constant_wide (high)
2033 + num_insns_constant_wide (low) + 1);
2034 }
bb8df8a6 2035
37409796
NS
2036 default:
2037 gcc_unreachable ();
4e74d8ec 2038 }
4e74d8ec
MM
2039}
2040
effa5d5d 2041/* Return the constant for the splat instruction, if it exists.  */
452a7d36 2042
48d72335 2043int
452a7d36
HP
2044easy_vector_splat_const (int cst, enum machine_mode mode)
2045{
f676971a 2046 switch (mode)
452a7d36
HP
2047 {
2048 case V4SImode:
f676971a
EC
2049 if (EASY_VECTOR_15 (cst)
2050 || EASY_VECTOR_15_ADD_SELF (cst))
452a7d36
HP
2051 return cst;
2052 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2053 break;
2054 cst = cst >> 16;
c4ad648e
AM
2055 /* Fall thru */
2056
452a7d36 2057 case V8HImode:
f676971a
EC
2058 if (EASY_VECTOR_15 (cst)
2059 || EASY_VECTOR_15_ADD_SELF (cst))
452a7d36
HP
2060 return cst;
2061 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2062 break;
2063 cst = cst >> 8;
c4ad648e
AM
2064 /* Fall thru */
2065
452a7d36 2066 case V16QImode:
c4ad648e
AM
2067 if (EASY_VECTOR_15 (cst)
2068 || EASY_VECTOR_15_ADD_SELF (cst))
2069 return cst;
f676971a 2070 default:
452a7d36
HP
2071 break;
2072 }
2073 return 0;
2074}
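/* A minimal illustrative sketch, not from this file, of the V16QImode case
   above.  It assumes EASY_VECTOR_15 accepts the signed 5-bit range -16..15
   used by the vspltisb immediate: a 32-bit element qualifies when all four
   of its bytes are identical and that byte, sign-extended, fits in 5 bits.  */

static int
example_byte_splat_p (int cst)
{
  int byte = cst & 0xff;
  if (((cst >> 8) & 0xff) != byte
      || ((cst >> 16) & 0xff) != byte
      || ((cst >> 24) & 0xff) != byte)
    return 0;
  byte = (byte ^ 0x80) - 0x80;	/* sign-extend the byte */
  return byte >= -16 && byte <= 15;
}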
2075
72ac76be 2076/* Return nonzero if all elements of a vector have the same value. */
69ef87e2 2077
48d72335 2078int
a2369ed3 2079easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
d744e06e
AH
2080{
2081 int units, i, cst;
2082
2083 units = CONST_VECTOR_NUNITS (op);
2084
2085 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2086 for (i = 1; i < units; ++i)
2087 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2088 break;
452a7d36 2089 if (i == units && easy_vector_splat_const (cst, mode))
d744e06e
AH
2090 return 1;
2091 return 0;
2092}
2093
452a7d36 2094/* Generate an easy_vector_constant out of an easy_vector_constant_add_self.  */
d744e06e 2095
f676971a 2096rtx
452a7d36
HP
2097gen_easy_vector_constant_add_self (rtx op)
2098{
2099 int i, units;
2100 rtvec v;
2101 units = GET_MODE_NUNITS (GET_MODE (op));
2102 v = rtvec_alloc (units);
2103
2104 for (i = 0; i < units; i++)
f676971a 2105 RTVEC_ELT (v, i) =
452a7d36
HP
2106 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2107 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
d744e06e
AH
2108}
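/* For illustration only, not from this file: with the ADD_SELF trick, a
   splat of an even constant just outside the 5-bit immediate range would be
   emitted as the "#" template in output_vec_const_move and later split into
   a splat of half the value -- the halving above -- added to itself; e.g. a
   V4SI splat of 24 would become a splat of 12 plus itself.  */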
2109
2110const char *
a2369ed3 2111output_vec_const_move (rtx *operands)
d744e06e
AH
2112{
2113 int cst, cst2;
2114 enum machine_mode mode;
2115 rtx dest, vec;
2116
2117 dest = operands[0];
2118 vec = operands[1];
69ef87e2 2119
d744e06e
AH
2120 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2121 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2122 mode = GET_MODE (dest);
69ef87e2 2123
d744e06e
AH
2124 if (TARGET_ALTIVEC)
2125 {
2126 if (zero_constant (vec, mode))
2127 return "vxor %0,%0,%0";
37409796
NS
2128
2129 gcc_assert (easy_vector_constant (vec, mode));
bb8df8a6 2130
37409796
NS
2131 operands[1] = GEN_INT (cst);
2132 switch (mode)
98ef3137 2133 {
37409796
NS
2134 case V4SImode:
2135 if (EASY_VECTOR_15 (cst))
d744e06e 2136 {
37409796
NS
2137 operands[1] = GEN_INT (cst);
2138 return "vspltisw %0,%1";
2139 }
2140 else if (EASY_VECTOR_15_ADD_SELF (cst))
2141 return "#";
2142 cst = cst >> 16;
2143 /* Fall thru */
c4ad648e 2144
37409796
NS
2145 case V8HImode:
2146 if (EASY_VECTOR_15 (cst))
2147 {
2148 operands[1] = GEN_INT (cst);
2149 return "vspltish %0,%1";
2150 }
2151 else if (EASY_VECTOR_15_ADD_SELF (cst))
2152 return "#";
2153 cst = cst >> 8;
2154 /* Fall thru */
c4ad648e 2155
37409796
NS
2156 case V16QImode:
2157 if (EASY_VECTOR_15 (cst))
2158 {
2159 operands[1] = GEN_INT (cst);
2160 return "vspltisb %0,%1";
d744e06e 2161 }
37409796
NS
2162 else if (EASY_VECTOR_15_ADD_SELF (cst))
2163 return "#";
bb8df8a6 2164
37409796
NS
2165 default:
2166 gcc_unreachable ();
98ef3137 2167 }
69ef87e2
AH
2168 }
2169
37409796 2170 gcc_assert (TARGET_SPE);
bb8df8a6 2171
37409796
NS
2172 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2173 pattern of V1DI, V4HI, and V2SF.
2174
2175 FIXME: We should probably return # and add post reload
2176 splitters for these, but this way is so easy ;-). */
2177 operands[1] = GEN_INT (cst);
2178 operands[2] = GEN_INT (cst2);
2179 if (cst == cst2)
2180 return "li %0,%1\n\tevmergelo %0,%0,%0";
2181 else
2182 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2183}
2184
7a4eca66
DE
2185/* Initialize vector TARGET to VALS. */
2186
2187void
2188rs6000_expand_vector_init (rtx target, rtx vals)
2189{
2190 enum machine_mode mode = GET_MODE (target);
2191 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2192 int n_elts = GET_MODE_NUNITS (mode);
2193 int n_var = 0, one_var = -1;
2194 bool all_same = true, all_const_zero = true;
2195 rtx x, mem;
2196 int i;
2197
2198 for (i = 0; i < n_elts; ++i)
2199 {
2200 x = XVECEXP (vals, 0, i);
2201 if (!CONSTANT_P (x))
2202 ++n_var, one_var = i;
2203 else if (x != CONST0_RTX (inner_mode))
2204 all_const_zero = false;
2205
2206 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2207 all_same = false;
2208 }
2209
2210 if (n_var == 0)
2211 {
2212 if (mode != V4SFmode && all_const_zero)
2213 {
2214 /* Zero register. */
2215 emit_insn (gen_rtx_SET (VOIDmode, target,
2216 gen_rtx_XOR (mode, target, target)));
2217 return;
2218 }
2219 else if (mode != V4SFmode && easy_vector_same (vals, mode))
2220 {
2221 /* Splat immediate. */
2222 x = gen_rtx_VEC_DUPLICATE (mode, CONST_VECTOR_ELT (vals, 0));
2223 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2224 return;
2225 }
2226 else if (all_same)
2227 ; /* Splat vector element. */
2228 else
2229 {
2230 /* Load from constant pool. */
2231 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2232 return;
2233 }
2234 }
2235
2236 /* Store value to stack temp. Load vector element. Splat. */
2237 if (all_same)
2238 {
2239 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2240 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2241 XVECEXP (vals, 0, 0));
2242 x = gen_rtx_UNSPEC (VOIDmode,
2243 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2244 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2245 gen_rtvec (2,
2246 gen_rtx_SET (VOIDmode,
2247 target, mem),
2248 x)));
2249 x = gen_rtx_VEC_SELECT (inner_mode, target,
2250 gen_rtx_PARALLEL (VOIDmode,
2251 gen_rtvec (1, const0_rtx)));
2252 emit_insn (gen_rtx_SET (VOIDmode, target,
2253 gen_rtx_VEC_DUPLICATE (mode, x)));
2254 return;
2255 }
2256
2257 /* One field is non-constant. Load constant then overwrite
2258 varying field. */
2259 if (n_var == 1)
2260 {
2261 rtx copy = copy_rtx (vals);
2262
57b51d4d 2263 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2264 varying element. */
2265 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2266 rs6000_expand_vector_init (target, copy);
2267
2268 /* Insert variable. */
2269 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2270 return;
2271 }
2272
2273 /* Construct the vector in memory one field at a time
2274 and load the whole vector. */
2275 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2276 for (i = 0; i < n_elts; i++)
2277 emit_move_insn (adjust_address_nv (mem, inner_mode,
2278 i * GET_MODE_SIZE (inner_mode)),
2279 XVECEXP (vals, 0, i));
2280 emit_move_insn (target, mem);
2281}
2282
2283/* Set field ELT of TARGET to VAL. */
2284
2285void
2286rs6000_expand_vector_set (rtx target, rtx val, int elt)
2287{
2288 enum machine_mode mode = GET_MODE (target);
2289 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2290 rtx reg = gen_reg_rtx (mode);
2291 rtx mask, mem, x;
2292 int width = GET_MODE_SIZE (inner_mode);
2293 int i;
2294
2295 /* Load single variable value. */
2296 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2297 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2298 x = gen_rtx_UNSPEC (VOIDmode,
2299 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2300 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2301 gen_rtvec (2,
2302 gen_rtx_SET (VOIDmode,
2303 reg, mem),
2304 x)));
2305
2306 /* Linear sequence. */
2307 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2308 for (i = 0; i < 16; ++i)
2309 XVECEXP (mask, 0, i) = GEN_INT (i);
2310
2311 /* Set permute mask to insert element into target. */
2312 for (i = 0; i < width; ++i)
2313 XVECEXP (mask, 0, elt*width + i)
2314 = GEN_INT (i + 0x10);
2315 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2316 x = gen_rtx_UNSPEC (mode,
2317 gen_rtvec (3, target, reg,
2318 force_reg (V16QImode, x)),
2319 UNSPEC_VPERM);
2320 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2321}
2322
2323/* Extract field ELT from VEC into TARGET. */
2324
2325void
2326rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2327{
2328 enum machine_mode mode = GET_MODE (vec);
2329 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2330 rtx mem, x;
2331
2332 /* Allocate mode-sized buffer. */
2333 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2334
2335 /* Add offset to field within buffer matching vector element. */
2336 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2337
2338 /* Store single field into mode-sized buffer. */
2339 x = gen_rtx_UNSPEC (VOIDmode,
2340 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2341 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2342 gen_rtvec (2,
2343 gen_rtx_SET (VOIDmode,
2344 mem, vec),
2345 x)));
2346 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2347}
2348
0ba1b2ff
AM
2349/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2350 implement ANDing by the mask IN. */
2351void
a2369ed3 2352build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2353{
2354#if HOST_BITS_PER_WIDE_INT >= 64
2355 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2356 int shift;
2357
37409796 2358 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2359
2360 c = INTVAL (in);
2361 if (c & 1)
2362 {
2363 /* Assume c initially something like 0x00fff000000fffff. The idea
2364 is to rotate the word so that the middle ^^^^^^ group of zeros
2365 is at the MS end and can be cleared with an rldicl mask. We then
2366 rotate back and clear off the MS ^^ group of zeros with a
2367 second rldicl. */
2368 c = ~c; /* c == 0xff000ffffff00000 */
2369 lsb = c & -c; /* lsb == 0x0000000000100000 */
2370 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2371 c = ~c; /* c == 0x00fff000000fffff */
2372 c &= -lsb; /* c == 0x00fff00000000000 */
2373 lsb = c & -c; /* lsb == 0x0000100000000000 */
2374 c = ~c; /* c == 0xff000fffffffffff */
2375 c &= -lsb; /* c == 0xff00000000000000 */
2376 shift = 0;
2377 while ((lsb >>= 1) != 0)
2378 shift++; /* shift == 44 on exit from loop */
2379 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2380 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2381 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2382 }
2383 else
0ba1b2ff
AM
2384 {
2385 /* Assume c initially something like 0xff000f0000000000. The idea
2386 is to rotate the word so that the ^^^ middle group of zeros
2387 is at the LS end and can be cleared with an rldicr mask. We then
2388 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2389 a second rldicr. */
2390 lsb = c & -c; /* lsb == 0x0000010000000000 */
2391 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2392 c = ~c; /* c == 0x00fff0ffffffffff */
2393 c &= -lsb; /* c == 0x00fff00000000000 */
2394 lsb = c & -c; /* lsb == 0x0000100000000000 */
2395 c = ~c; /* c == 0xff000fffffffffff */
2396 c &= -lsb; /* c == 0xff00000000000000 */
2397 shift = 0;
2398 while ((lsb >>= 1) != 0)
2399 shift++; /* shift == 44 on exit from loop */
2400 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2401 m1 >>= shift; /* m1 == 0x0000000000000fff */
2402 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2403 }
2404
2405 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2406 masks will be all 1's. We are guaranteed more than one transition. */
2407 out[0] = GEN_INT (64 - shift);
2408 out[1] = GEN_INT (m1);
2409 out[2] = GEN_INT (shift);
2410 out[3] = GEN_INT (m2);
2411#else
045572c7
GK
2412 (void)in;
2413 (void)out;
37409796 2414 gcc_unreachable ();
0ba1b2ff 2415#endif
a260abc9
DE
2416}
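/* A minimal host-side sketch, not from this file, checking the identity the
   two rotate-and-mask steps rely on, with the worked example from the
   comment above (IN = 0x00fff000000fffff, so shift == 44,
   m1 == 0x000000ffffffffff, m2 == 0x00ffffffffffffff): rotate left by
   64-shift, mask with m1, rotate back by shift, mask with m2.  */

static unsigned long long
example_rotl64 (unsigned long long v, int n)
{
  n &= 63;
  return n == 0 ? v : (v << n) | (v >> (64 - n));
}

static int
example_check_mask64_2 (unsigned long long x)
{
  const unsigned long long in = 0x00fff000000fffffULL;
  const unsigned long long m1 = 0x000000ffffffffffULL;
  const unsigned long long m2 = 0x00ffffffffffffffULL;
  const int shift = 44;
  unsigned long long r = example_rotl64 (x, 64 - shift) & m1;
  r = example_rotl64 (r, shift) & m2;
  return r == (x & in);		/* expected to hold for every X */
}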
2417
54b695e7 2418/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2419
2420bool
54b695e7
AH
2421invalid_e500_subreg (rtx op, enum machine_mode mode)
2422{
2423 /* Reject (subreg:SI (reg:DF)). */
2424 if (GET_CODE (op) == SUBREG
2425 && mode == SImode
2426 && REG_P (SUBREG_REG (op))
2427 && GET_MODE (SUBREG_REG (op)) == DFmode)
2428 return true;
2429
2430 /* Reject (subreg:DF (reg:DI)). */
2431 if (GET_CODE (op) == SUBREG
2432 && mode == DFmode
2433 && REG_P (SUBREG_REG (op))
2434 && GET_MODE (SUBREG_REG (op)) == DImode)
2435 return true;
2436
2437 return false;
2438}
2439
95727fb8
AP
 2440/* On Darwin and AIX, natural record alignment is increased to doubleword if
 2441 the first field is an FP double, while the FP fields remain word aligned.  */
2442
19d66194 2443unsigned int
95727fb8
AP
2444rs6000_special_round_type_align (tree type, int computed, int specified)
2445{
2446 tree field = TYPE_FIELDS (type);
95727fb8 2447
bb8df8a6 2448 /* Skip all non-field decls.  */
85962ac8 2449 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2450 field = TREE_CHAIN (field);
2451
3ce5437a 2452 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
95727fb8
AP
2453 return MAX (computed, specified);
2454
2455 return MAX (MAX (computed, specified), 64);
2456}
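/* Illustrative example, not from this file: under the rule above, a record
   such as
       struct s { double d; int i; };
   gets at least doubleword (64-bit) alignment on Darwin/AIX because its
   first field is a double, whereas
       struct t { int i; double d; };
   keeps whatever alignment the generic code computed.  */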
2457
a4f6c312 2458/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
2459
2460int
f676971a 2461small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 2462 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 2463{
38c1f2d7 2464#if TARGET_ELF
5f59ecb7 2465 rtx sym_ref;
7509c759 2466
d9407988 2467 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 2468 return 0;
a54d04b7 2469
f607bc57 2470 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
2471 return 0;
2472
88228c4b
MM
2473 if (GET_CODE (op) == SYMBOL_REF)
2474 sym_ref = op;
2475
2476 else if (GET_CODE (op) != CONST
2477 || GET_CODE (XEXP (op, 0)) != PLUS
2478 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2479 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
2480 return 0;
2481
88228c4b 2482 else
dbf55e53
MM
2483 {
2484 rtx sum = XEXP (op, 0);
2485 HOST_WIDE_INT summand;
2486
2487 /* We have to be careful here, because it is the referenced address
c4ad648e 2488 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 2489 summand = INTVAL (XEXP (sum, 1));
307b599c 2490 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 2491 return 0;
dbf55e53
MM
2492
2493 sym_ref = XEXP (sum, 0);
2494 }
88228c4b 2495
20bfcd69 2496 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
2497#else
2498 return 0;
2499#endif
7509c759 2500}
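/* Illustrative example, not from this file: with -G 8 (g_switch_value == 8),
   a reference such as sym+4 still counts as small data, while sym+12 or
   sym-4 does not, because it is the referenced address, not just the symbol,
   that must stay within reach of _SDA_BASE_.  */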
46c07df8 2501
3a1f863f 2502/* Return true if either operand is a general purpose register. */
46c07df8 2503
3a1f863f
DE
2504bool
2505gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 2506{
3a1f863f
DE
2507 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2508 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
2509}
2510
9ebbca7d 2511\f
4d588c14
RH
2512/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2513
f676971a
EC
2514static int
2515constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 2516{
9390387d 2517 switch (GET_CODE (op))
9ebbca7d
GK
2518 {
2519 case SYMBOL_REF:
c4501e62
JJ
2520 if (RS6000_SYMBOL_REF_TLS_P (op))
2521 return 0;
2522 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
2523 {
2524 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2525 {
2526 *have_sym = 1;
2527 return 1;
2528 }
2529 else
2530 return 0;
2531 }
2532 else if (! strcmp (XSTR (op, 0), toc_label_name))
2533 {
2534 *have_toc = 1;
2535 return 1;
2536 }
2537 else
2538 return 0;
9ebbca7d
GK
2539 case PLUS:
2540 case MINUS:
c1f11548
DE
2541 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2542 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 2543 case CONST:
a4f6c312 2544 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 2545 case CONST_INT:
a4f6c312 2546 return 1;
9ebbca7d 2547 default:
a4f6c312 2548 return 0;
9ebbca7d
GK
2549 }
2550}
2551
4d588c14 2552static bool
a2369ed3 2553constant_pool_expr_p (rtx op)
9ebbca7d
GK
2554{
2555 int have_sym = 0;
2556 int have_toc = 0;
2557 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2558}
2559
48d72335 2560bool
a2369ed3 2561toc_relative_expr_p (rtx op)
9ebbca7d 2562{
4d588c14
RH
2563 int have_sym = 0;
2564 int have_toc = 0;
2565 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2566}
2567
4d588c14 2568bool
a2369ed3 2569legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
2570{
2571 return (TARGET_TOC
2572 && GET_CODE (x) == PLUS
2573 && GET_CODE (XEXP (x, 0)) == REG
2574 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2575 && constant_pool_expr_p (XEXP (x, 1)));
2576}
2577
0c380712
AM
2578bool
2579rs6000_legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
2580{
2581 return (DEFAULT_ABI == ABI_V4
2582 && !flag_pic && !TARGET_TOC
2583 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2584 && small_data_operand (x, mode));
2585}
2586
60cdabab
DE
2587/* SPE offset addressing is limited to 5-bits worth of double words. */
2588#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
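/* Illustrative values, not from this file: the test accepts exactly the
   doubleword-aligned offsets 0, 8, 16, ..., 248 -- five bits worth of
   doublewords:
     SPE_CONST_OFFSET_OK (0)   == 1
     SPE_CONST_OFFSET_OK (248) == 1
     SPE_CONST_OFFSET_OK (4)   == 0	(not doubleword aligned)
     SPE_CONST_OFFSET_OK (256) == 0	(out of range)  */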
2589
76d2b81d
DJ
2590bool
2591rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2592{
2593 unsigned HOST_WIDE_INT offset, extra;
2594
2595 if (GET_CODE (x) != PLUS)
2596 return false;
2597 if (GET_CODE (XEXP (x, 0)) != REG)
2598 return false;
2599 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2600 return false;
60cdabab
DE
2601 if (legitimate_constant_pool_address_p (x))
2602 return true;
4d588c14
RH
2603 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2604 return false;
2605
2606 offset = INTVAL (XEXP (x, 1));
2607 extra = 0;
2608 switch (mode)
2609 {
2610 case V16QImode:
2611 case V8HImode:
2612 case V4SFmode:
2613 case V4SImode:
7a4eca66
DE
2614 /* AltiVec vector modes. Only reg+reg addressing is valid and
2615 constant offset zero should not occur due to canonicalization.
2616 Allow any offset when not strict before reload. */
2617 return !strict;
4d588c14
RH
2618
2619 case V4HImode:
2620 case V2SImode:
2621 case V1DImode:
2622 case V2SFmode:
2623 /* SPE vector modes. */
2624 return SPE_CONST_OFFSET_OK (offset);
2625
2626 case DFmode:
4d4cbc0e
AH
2627 if (TARGET_E500_DOUBLE)
2628 return SPE_CONST_OFFSET_OK (offset);
2629
4d588c14 2630 case DImode:
54b695e7
AH
2631 /* On e500v2, we may have:
2632
2633 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2634
2635 Which gets addressed with evldd instructions. */
2636 if (TARGET_E500_DOUBLE)
2637 return SPE_CONST_OFFSET_OK (offset);
2638
3364872d 2639 if (mode == DFmode || !TARGET_POWERPC64)
4d588c14
RH
2640 extra = 4;
2641 else if (offset & 3)
2642 return false;
2643 break;
2644
2645 case TFmode:
2646 case TImode:
3364872d 2647 if (mode == TFmode || !TARGET_POWERPC64)
4d588c14
RH
2648 extra = 12;
2649 else if (offset & 3)
2650 return false;
2651 else
2652 extra = 8;
2653 break;
2654
2655 default:
2656 break;
2657 }
2658
b1917422
AM
2659 offset += 0x8000;
2660 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
2661}
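/* A minimal illustrative sketch, not from this file, of the final range test
   above: OFFSET must be a signed 16-bit displacement and OFFSET + EXTRA must
   be one too, so the last word of a multi-word access stays addressable;
   e.g. with EXTRA == 4 (DImode on 32-bit) the valid range is -32768..32763.  */

static int
example_offset_in_range_p (long offset, unsigned int extra)
{
  return offset >= -0x8000 && offset + (long) extra < 0x8000;
}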
2662
2663static bool
a2369ed3 2664legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
2665{
2666 rtx op0, op1;
2667
2668 if (GET_CODE (x) != PLUS)
2669 return false;
850e8d3d 2670
4d588c14
RH
2671 op0 = XEXP (x, 0);
2672 op1 = XEXP (x, 1);
2673
2674 if (!REG_P (op0) || !REG_P (op1))
2675 return false;
2676
2677 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2678 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2679 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2680 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
9ebbca7d
GK
2681}
2682
48d72335 2683inline bool
a2369ed3 2684legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
2685{
2686 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2687}
2688
48d72335 2689bool
4c81e946
FJ
2690macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2691{
c4ad648e 2692 if (!TARGET_MACHO || !flag_pic
9390387d 2693 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
2694 return false;
2695 x = XEXP (x, 0);
4c81e946
FJ
2696
2697 if (GET_CODE (x) != LO_SUM)
2698 return false;
2699 if (GET_CODE (XEXP (x, 0)) != REG)
2700 return false;
2701 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2702 return false;
2703 x = XEXP (x, 1);
2704
2705 return CONSTANT_P (x);
2706}
2707
4d588c14 2708static bool
a2369ed3 2709legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2710{
2711 if (GET_CODE (x) != LO_SUM)
2712 return false;
2713 if (GET_CODE (XEXP (x, 0)) != REG)
2714 return false;
2715 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2716 return false;
54b695e7
AH
2717 /* Restrict addressing for DI because of our SUBREG hackery. */
2718 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
f82f556d 2719 return false;
4d588c14
RH
2720 x = XEXP (x, 1);
2721
8622e235 2722 if (TARGET_ELF || TARGET_MACHO)
4d588c14 2723 {
a29077da 2724 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
2725 return false;
2726 if (TARGET_TOC)
2727 return false;
2728 if (GET_MODE_NUNITS (mode) != 1)
2729 return false;
5e5f01b9 2730 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
2731 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2732 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
2733 return false;
2734
2735 return CONSTANT_P (x);
2736 }
2737
2738 return false;
2739}
2740
2741
9ebbca7d
GK
2742/* Try machine-dependent ways of modifying an illegitimate address
2743 to be legitimate. If we find one, return the new, valid address.
2744 This is used from only one place: `memory_address' in explow.c.
2745
a4f6c312
SS
2746 OLDX is the address as it was before break_out_memory_refs was
2747 called. In some cases it is useful to look at this to decide what
2748 needs to be done.
9ebbca7d 2749
a4f6c312 2750 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 2751
a4f6c312
SS
2752 It is always safe for this function to do nothing. It exists to
2753 recognize opportunities to optimize the output.
9ebbca7d
GK
2754
2755 On RS/6000, first check for the sum of a register with a constant
2756 integer that is out of range. If so, generate code to add the
2757 constant with the low-order 16 bits masked to the register and force
2758 this result into another register (this can be done with `cau').
2759 Then generate an address of REG+(CONST&0xffff), allowing for the
2760 possibility of bit 16 being a one.
2761
2762 Then check for the sum of a register and something not constant, try to
2763 load the other things into a register and return the sum. */
4d588c14 2764
9ebbca7d 2765rtx
a2369ed3
DJ
2766rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2767 enum machine_mode mode)
0ac081f6 2768{
c4501e62
JJ
2769 if (GET_CODE (x) == SYMBOL_REF)
2770 {
2771 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2772 if (model != 0)
2773 return rs6000_legitimize_tls_address (x, model);
2774 }
2775
f676971a 2776 if (GET_CODE (x) == PLUS
9ebbca7d
GK
2777 && GET_CODE (XEXP (x, 0)) == REG
2778 && GET_CODE (XEXP (x, 1)) == CONST_INT
2779 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 2780 {
9ebbca7d
GK
2781 HOST_WIDE_INT high_int, low_int;
2782 rtx sum;
a65c591c
DE
2783 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2784 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
2785 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2786 GEN_INT (high_int)), 0);
2787 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2788 }
f676971a 2789 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
2790 && GET_CODE (XEXP (x, 0)) == REG
2791 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 2792 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
2793 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2794 || TARGET_POWERPC64
54b695e7
AH
2795 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2796 && mode != TFmode))
9ebbca7d
GK
2797 && (TARGET_POWERPC64 || mode != DImode)
2798 && mode != TImode)
2799 {
2800 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2801 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2802 }
0ac081f6
AH
2803 else if (ALTIVEC_VECTOR_MODE (mode))
2804 {
2805 rtx reg;
2806
2807 /* Make sure both operands are registers. */
2808 if (GET_CODE (x) == PLUS)
9f85ed45 2809 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
2810 force_reg (Pmode, XEXP (x, 1)));
2811
2812 reg = force_reg (Pmode, x);
2813 return reg;
2814 }
4d4cbc0e 2815 else if (SPE_VECTOR_MODE (mode)
54b695e7
AH
2816 || (TARGET_E500_DOUBLE && (mode == DFmode
2817 || mode == DImode)))
a3170dc6 2818 {
54b695e7
AH
2819 if (mode == DImode)
2820 return NULL_RTX;
a3170dc6
AH
2821 /* We accept [reg + reg] and [reg + OFFSET]. */
2822
2823 if (GET_CODE (x) == PLUS)
c4ad648e
AM
2824 {
2825 rtx op1 = XEXP (x, 0);
2826 rtx op2 = XEXP (x, 1);
a3170dc6 2827
c4ad648e 2828 op1 = force_reg (Pmode, op1);
a3170dc6 2829
c4ad648e
AM
2830 if (GET_CODE (op2) != REG
2831 && (GET_CODE (op2) != CONST_INT
2832 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2833 op2 = force_reg (Pmode, op2);
a3170dc6 2834
c4ad648e
AM
2835 return gen_rtx_PLUS (Pmode, op1, op2);
2836 }
a3170dc6
AH
2837
2838 return force_reg (Pmode, x);
2839 }
f1384257
AM
2840 else if (TARGET_ELF
2841 && TARGET_32BIT
2842 && TARGET_NO_TOC
2843 && ! flag_pic
9ebbca7d 2844 && GET_CODE (x) != CONST_INT
f676971a 2845 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 2846 && CONSTANT_P (x)
6ac7bf2c
GK
2847 && GET_MODE_NUNITS (mode) == 1
2848 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 2849 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
2850 {
2851 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
2852 emit_insn (gen_elf_high (reg, x));
2853 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 2854 }
ee890fe2
SS
2855 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2856 && ! flag_pic
ab82a49f
AP
2857#if TARGET_MACHO
2858 && ! MACHO_DYNAMIC_NO_PIC_P
2859#endif
ee890fe2 2860 && GET_CODE (x) != CONST_INT
f676971a 2861 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 2862 && CONSTANT_P (x)
f82f556d 2863 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 2864 && mode != DImode
ee890fe2
SS
2865 && mode != TImode)
2866 {
2867 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
2868 emit_insn (gen_macho_high (reg, x));
2869 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 2870 }
f676971a 2871 else if (TARGET_TOC
4d588c14 2872 && constant_pool_expr_p (x)
a9098fd0 2873 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
2874 {
2875 return create_TOC_reference (x);
2876 }
2877 else
2878 return NULL_RTX;
2879}
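/* Worked example, not from this file, of the high/low split performed above
   for an out-of-range constant displacement: the low 16 bits are taken as a
   signed value and the remainder goes into the high part, so reg + high can
   be formed with addis while the memory access keeps a 16-bit offset.
   For INTVAL == 0x12345:  low = 0x2345, high = 0x10000.
   For INTVAL == 0x1ffff:  low = -1,     high = 0x20000 ("bit 16 being a one").  */

static void
example_split_offset (long val, long *high, long *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;	/* sign-extended low 16 bits */
  *high = val - *low;
}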
258bfae2 2880
fdbe66f2 2881/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
2882 We need to emit DTP-relative relocations. */
2883
fdbe66f2 2884static void
c973d557
JJ
2885rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2886{
2887 switch (size)
2888 {
2889 case 4:
2890 fputs ("\t.long\t", file);
2891 break;
2892 case 8:
2893 fputs (DOUBLE_INT_ASM_OP, file);
2894 break;
2895 default:
37409796 2896 gcc_unreachable ();
c973d557
JJ
2897 }
2898 output_addr_const (file, x);
2899 fputs ("@dtprel+0x8000", file);
2900}
2901
c4501e62
JJ
2902/* Construct the SYMBOL_REF for the tls_get_addr function. */
2903
2904static GTY(()) rtx rs6000_tls_symbol;
2905static rtx
863d938c 2906rs6000_tls_get_addr (void)
c4501e62
JJ
2907{
2908 if (!rs6000_tls_symbol)
2909 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2910
2911 return rs6000_tls_symbol;
2912}
2913
2914/* Construct the SYMBOL_REF for TLS GOT references. */
2915
2916static GTY(()) rtx rs6000_got_symbol;
2917static rtx
863d938c 2918rs6000_got_sym (void)
c4501e62
JJ
2919{
2920 if (!rs6000_got_symbol)
2921 {
2922 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2923 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2924 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 2925 }
c4501e62
JJ
2926
2927 return rs6000_got_symbol;
2928}
2929
2930/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2931 this (thread-local) address. */
2932
2933static rtx
a2369ed3 2934rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
2935{
2936 rtx dest, insn;
2937
2938 dest = gen_reg_rtx (Pmode);
2939 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2940 {
2941 rtx tlsreg;
2942
2943 if (TARGET_64BIT)
2944 {
2945 tlsreg = gen_rtx_REG (Pmode, 13);
2946 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2947 }
2948 else
2949 {
2950 tlsreg = gen_rtx_REG (Pmode, 2);
2951 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2952 }
2953 emit_insn (insn);
2954 }
2955 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2956 {
2957 rtx tlsreg, tmp;
2958
2959 tmp = gen_reg_rtx (Pmode);
2960 if (TARGET_64BIT)
2961 {
2962 tlsreg = gen_rtx_REG (Pmode, 13);
2963 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2964 }
2965 else
2966 {
2967 tlsreg = gen_rtx_REG (Pmode, 2);
2968 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2969 }
2970 emit_insn (insn);
2971 if (TARGET_64BIT)
2972 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2973 else
2974 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2975 emit_insn (insn);
2976 }
2977 else
2978 {
2979 rtx r3, got, tga, tmp1, tmp2, eqv;
2980
2981 if (TARGET_64BIT)
2982 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2983 else
2984 {
2985 if (flag_pic == 1)
2986 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2987 else
2988 {
2989 rtx gsym = rs6000_got_sym ();
2990 got = gen_reg_rtx (Pmode);
2991 if (flag_pic == 0)
2992 rs6000_emit_move (got, gsym, Pmode);
2993 else
2994 {
ccbca5e4 2995 rtx tempLR, tmp3, mem;
c4501e62
JJ
2996 rtx first, last;
2997
c4501e62
JJ
2998 tempLR = gen_reg_rtx (Pmode);
2999 tmp1 = gen_reg_rtx (Pmode);
3000 tmp2 = gen_reg_rtx (Pmode);
3001 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3002 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3003
ccbca5e4 3004 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
c4501e62
JJ
3005 emit_move_insn (tmp1, tempLR);
3006 emit_move_insn (tmp2, mem);
3007 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3008 last = emit_move_insn (got, tmp3);
3009 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3010 REG_NOTES (last));
3011 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3012 REG_NOTES (first));
3013 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3014 REG_NOTES (last));
3015 }
3016 }
3017 }
3018
3019 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3020 {
3021 r3 = gen_rtx_REG (Pmode, 3);
3022 if (TARGET_64BIT)
3023 insn = gen_tls_gd_64 (r3, got, addr);
3024 else
3025 insn = gen_tls_gd_32 (r3, got, addr);
3026 start_sequence ();
3027 emit_insn (insn);
3028 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3029 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3030 insn = emit_call_insn (insn);
3031 CONST_OR_PURE_CALL_P (insn) = 1;
3032 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3033 insn = get_insns ();
3034 end_sequence ();
3035 emit_libcall_block (insn, dest, r3, addr);
3036 }
3037 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3038 {
3039 r3 = gen_rtx_REG (Pmode, 3);
3040 if (TARGET_64BIT)
3041 insn = gen_tls_ld_64 (r3, got);
3042 else
3043 insn = gen_tls_ld_32 (r3, got);
3044 start_sequence ();
3045 emit_insn (insn);
3046 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3047 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3048 insn = emit_call_insn (insn);
3049 CONST_OR_PURE_CALL_P (insn) = 1;
3050 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3051 insn = get_insns ();
3052 end_sequence ();
3053 tmp1 = gen_reg_rtx (Pmode);
3054 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3055 UNSPEC_TLSLD);
3056 emit_libcall_block (insn, tmp1, r3, eqv);
3057 if (rs6000_tls_size == 16)
3058 {
3059 if (TARGET_64BIT)
3060 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3061 else
3062 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3063 }
3064 else if (rs6000_tls_size == 32)
3065 {
3066 tmp2 = gen_reg_rtx (Pmode);
3067 if (TARGET_64BIT)
3068 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3069 else
3070 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3071 emit_insn (insn);
3072 if (TARGET_64BIT)
3073 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3074 else
3075 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3076 }
3077 else
3078 {
3079 tmp2 = gen_reg_rtx (Pmode);
3080 if (TARGET_64BIT)
3081 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3082 else
3083 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3084 emit_insn (insn);
3085 insn = gen_rtx_SET (Pmode, dest,
3086 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3087 }
3088 emit_insn (insn);
3089 }
3090 else
3091 {
3092 /* IE, or 64 bit offset LE. */
3093 tmp2 = gen_reg_rtx (Pmode);
3094 if (TARGET_64BIT)
3095 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3096 else
3097 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3098 emit_insn (insn);
3099 if (TARGET_64BIT)
3100 insn = gen_tls_tls_64 (dest, tmp2, addr);
3101 else
3102 insn = gen_tls_tls_32 (dest, tmp2, addr);
3103 emit_insn (insn);
3104 }
3105 }
3106
3107 return dest;
3108}
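/* Summary, not from this file, of the dispatch above:
   - local-exec with -mtls-size=16: one tprel add off the thread pointer
     (r13 on 64-bit, r2 on 32-bit);
   - local-exec with -mtls-size=32: a tprel high-adjusted/low pair;
   - global-dynamic and local-dynamic: a call to __tls_get_addr through the
     GOT/TOC register, local-dynamic then adding a dtprel offset;
   - otherwise initial-exec (or 64-bit offset local-exec): a got_tprel load
     followed by a tls add.  */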
3109
c4501e62
JJ
3110/* Return 1 if X contains a thread-local symbol. */
3111
3112bool
a2369ed3 3113rs6000_tls_referenced_p (rtx x)
c4501e62 3114{
cd413cab
AP
3115 if (! TARGET_HAVE_TLS)
3116 return false;
3117
c4501e62
JJ
3118 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3119}
3120
3121/* Return 1 if *X is a thread-local symbol. This is the same as
3122 rs6000_tls_symbol_ref except for the type of the unused argument. */
3123
9390387d 3124static int
a2369ed3 3125rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3126{
3127 return RS6000_SYMBOL_REF_TLS_P (*x);
3128}
3129
24ea750e
DJ
3130/* The convention appears to be to define this wherever it is used.
3131 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3132 is now used here. */
3133#ifndef REG_MODE_OK_FOR_BASE_P
3134#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3135#endif
3136
3137/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3138 replace the input X, or the original X if no replacement is called for.
3139 The output parameter *WIN is 1 if the calling macro should goto WIN,
3140 0 if it should not.
3141
3142 For RS/6000, we wish to handle large displacements off a base
 3143 register by splitting the addend across an addi/addis and the mem insn.
3144 This cuts number of extra insns needed from 3 to 1.
3145
3146 On Darwin, we use this to generate code for floating point constants.
3147 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3148 The Darwin code is inside #if TARGET_MACHO because only then is
3149 machopic_function_base_name() defined. */
3150rtx
f676971a 3151rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3152 int opnum, int type,
3153 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3154{
f676971a 3155 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3156 if (GET_CODE (x) == PLUS
3157 && GET_CODE (XEXP (x, 0)) == PLUS
3158 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3159 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3160 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3161 {
3162 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3163 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3164 opnum, (enum reload_type)type);
24ea750e
DJ
3165 *win = 1;
3166 return x;
3167 }
3deb2758 3168
24ea750e
DJ
3169#if TARGET_MACHO
3170 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3171 && GET_CODE (x) == LO_SUM
3172 && GET_CODE (XEXP (x, 0)) == PLUS
3173 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3174 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3175 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3176 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3177 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3178 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3179 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3180 {
3181 /* Result of previous invocation of this function on Darwin
6f317ef3 3182 floating point constant. */
24ea750e 3183 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3184 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3185 opnum, (enum reload_type)type);
24ea750e
DJ
3186 *win = 1;
3187 return x;
3188 }
3189#endif
4937d02d
DE
3190
3191 /* Force ld/std non-word aligned offset into base register by wrapping
3192 in offset 0. */
3193 if (GET_CODE (x) == PLUS
3194 && GET_CODE (XEXP (x, 0)) == REG
3195 && REGNO (XEXP (x, 0)) < 32
3196 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3197 && GET_CODE (XEXP (x, 1)) == CONST_INT
3198 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3199 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3200 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3201 && TARGET_POWERPC64)
3202 {
3203 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3204 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3205 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3206 opnum, (enum reload_type) type);
3207 *win = 1;
3208 return x;
3209 }
3210
24ea750e
DJ
3211 if (GET_CODE (x) == PLUS
3212 && GET_CODE (XEXP (x, 0)) == REG
3213 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3214 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3215 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3216 && !SPE_VECTOR_MODE (mode)
54b695e7
AH
3217 && !(TARGET_E500_DOUBLE && (mode == DFmode
3218 || mode == DImode))
78c875e8 3219 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3220 {
3221 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3222 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3223 HOST_WIDE_INT high
c4ad648e 3224 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3225
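      /* Worked example: val = 0x1234abcd gives low = -0x5433 (the low 16
	 bits sign-extended) and high = 0x12350000, so high + low == val;
	 HIGH is added with a single addis while LOW fits the signed 16-bit
	 displacement of the memory insn.  */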
3226 /* Check for 32-bit overflow. */
3227 if (high + low != val)
c4ad648e 3228 {
24ea750e
DJ
3229 *win = 0;
3230 return x;
3231 }
3232
3233 /* Reload the high part into a base reg; leave the low part
c4ad648e 3234 in the mem directly. */
24ea750e
DJ
3235
3236 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3237 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3238 GEN_INT (high)),
3239 GEN_INT (low));
24ea750e
DJ
3240
3241 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3242 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3243 opnum, (enum reload_type)type);
24ea750e
DJ
3244 *win = 1;
3245 return x;
3246 }
4937d02d 3247
24ea750e 3248 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3249 && !ALTIVEC_VECTOR_MODE (mode)
8308679f
DE
3250#if TARGET_MACHO
3251 && DEFAULT_ABI == ABI_DARWIN
a29077da 3252 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3253#else
3254 && DEFAULT_ABI == ABI_V4
3255 && !flag_pic
3256#endif
0d8c1c97
AM
3257 /* Don't do this for TFmode, since the result isn't offsettable.
3258 The same goes for DImode without 64-bit gprs. */
3259 && mode != TFmode
3260 && (mode != DImode || TARGET_POWERPC64))
24ea750e 3261 {
8308679f 3262#if TARGET_MACHO
a29077da
GK
3263 if (flag_pic)
3264 {
3265 rtx offset = gen_rtx_CONST (Pmode,
3266 gen_rtx_MINUS (Pmode, x,
11abc112 3267 machopic_function_base_sym ()));
a29077da
GK
3268 x = gen_rtx_LO_SUM (GET_MODE (x),
3269 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3270 gen_rtx_HIGH (Pmode, offset)), offset);
3271 }
3272 else
8308679f 3273#endif
a29077da 3274 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3275 gen_rtx_HIGH (Pmode, x), x);
a29077da 3276
24ea750e 3277 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3278 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3279 opnum, (enum reload_type)type);
24ea750e
DJ
3280 *win = 1;
3281 return x;
3282 }
4937d02d 3283
dec1f3aa
DE
3284 /* Reload an offset address wrapped by an AND that represents the
3285 masking of the lower bits. Strip the outer AND and let reload
3286 convert the offset address into an indirect address. */
3287 if (TARGET_ALTIVEC
3288 && ALTIVEC_VECTOR_MODE (mode)
3289 && GET_CODE (x) == AND
3290 && GET_CODE (XEXP (x, 0)) == PLUS
3291 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3292 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3293 && GET_CODE (XEXP (x, 1)) == CONST_INT
3294 && INTVAL (XEXP (x, 1)) == -16)
3295 {
3296 x = XEXP (x, 0);
3297 *win = 1;
3298 return x;
3299 }
3300
24ea750e 3301 if (TARGET_TOC
4d588c14 3302 && constant_pool_expr_p (x)
c1f11548 3303 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e
DJ
3304 {
3305 (x) = create_TOC_reference (x);
3306 *win = 1;
3307 return x;
3308 }
3309 *win = 0;
3310 return x;
f676971a 3311}
24ea750e 3312
258bfae2
FS
3313/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3314 that is a valid memory address for an instruction.
3315 The MODE argument is the machine mode for the MEM expression
3316 that wants to use this address.
3317
 3318 On the RS/6000, there are four valid forms of address: a SYMBOL_REF that
3319 refers to a constant pool entry of an address (or the sum of it
3320 plus a constant), a short (16-bit signed) constant plus a register,
3321 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3322 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3323 we must ensure that both words are addressable or PowerPC64 with offset
3324 word aligned.
3325
3326 For modes spanning multiple registers (DFmode in 32-bit GPRs,
76d2b81d 3327 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
258bfae2
FS
3328 adjacent memory cells are accessed by adding word-sized offsets
3329 during assembly output. */
3330int
a2369ed3 3331rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3332{
850e8d3d
DN
3333 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3334 if (TARGET_ALTIVEC
3335 && ALTIVEC_VECTOR_MODE (mode)
3336 && GET_CODE (x) == AND
3337 && GET_CODE (XEXP (x, 1)) == CONST_INT
3338 && INTVAL (XEXP (x, 1)) == -16)
3339 x = XEXP (x, 0);
3340
c4501e62
JJ
3341 if (RS6000_SYMBOL_REF_TLS_P (x))
3342 return 0;
4d588c14 3343 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3344 return 1;
3345 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3346 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3347 && !SPE_VECTOR_MODE (mode)
54b695e7
AH
3348 /* Restrict addressing for DI because of our SUBREG hackery. */
3349 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
258bfae2 3350 && TARGET_UPDATE
4d588c14 3351 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3352 return 1;
0c380712 3353 if (rs6000_legitimate_small_data_p (mode, x))
258bfae2 3354 return 1;
4d588c14 3355 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3356 return 1;
3357 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3358 if (! reg_ok_strict
3359 && GET_CODE (x) == PLUS
3360 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3361 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3362 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3363 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3364 return 1;
76d2b81d 3365 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3366 return 1;
3367 if (mode != TImode
76d2b81d 3368 && mode != TFmode
a3170dc6
AH
3369 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3370 || TARGET_POWERPC64
4d4cbc0e 3371 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3372 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3373 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3374 return 1;
4d588c14 3375 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3376 return 1;
3377 return 0;
3378}
4d588c14
RH
3379
3380/* Go to LABEL if ADDR (a legitimate address expression)
3381 has an effect that depends on the machine mode it is used for.
3382
 3383 On the RS/6000 this is true of all integral offsets (since AltiVec
 3384 modes don't allow them) and of any pre-increment or pre-decrement.
3385
3386 ??? Except that due to conceptual problems in offsettable_address_p
3387 we can't really report the problems of integral offsets. So leave
f676971a 3388 this assuming that the adjustable offset must be valid for the
4d588c14
RH
3389 sub-words of a TFmode operand, which is what we had before. */
3390
3391bool
a2369ed3 3392rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
3393{
3394 switch (GET_CODE (addr))
3395 {
3396 case PLUS:
3397 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3398 {
3399 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
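	  /* The "+ 12" allows for the last word of a 16-byte (e.g. TFmode)
	     operand: the offset is mode dependent once val + 12 no longer
	     fits a signed 16-bit displacement.  E.g. 0x7ff0 is still safe
	     (0x7ff0 + 12 = 0x7ffc), but 0x7ff8 is not, since its last word
	     would sit at 0x8004.  */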
3400 return val + 12 + 0x8000 >= 0x10000;
3401 }
3402 break;
3403
3404 case LO_SUM:
3405 return true;
3406
3407 case PRE_INC:
3408 case PRE_DEC:
3409 return TARGET_UPDATE;
3410
3411 default:
3412 break;
3413 }
3414
3415 return false;
3416}
d8ecbcdb
AH
3417
3418/* Return number of consecutive hard regs needed starting at reg REGNO
3419 to hold something of mode MODE.
3420 This is ordinarily the length in words of a value of mode MODE
3421 but can be less for certain modes in special long registers.
3422
3423 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3424 scalar instructions. The upper 32 bits are only available to the
3425 SIMD instructions.
3426
3427 POWER and PowerPC GPRs hold 32 bits worth;
 3428 PowerPC64 GPRs and FPRs hold 64 bits worth. */
3429
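/* For example, a DFmode value needs one 64-bit FPR but two 32-bit GPRs,
   and a 16-byte V4SImode value needs one AltiVec register but four GPRs
   on a 32-bit target (two on a 64-bit target).  */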
3430int
3431rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3432{
3433 if (FP_REGNO_P (regno))
3434 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3435
4d4cbc0e
AH
3436 if (TARGET_E500_DOUBLE && mode == DFmode)
3437 return 1;
3438
d8ecbcdb
AH
3439 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3440 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3441
3442 if (ALTIVEC_REGNO_P (regno))
3443 return
3444 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3445
3446 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3447}
2aa4498c
AH
3448
3449/* Change register usage conditional on target flags. */
3450void
3451rs6000_conditional_register_usage (void)
3452{
3453 int i;
3454
3455 /* Set MQ register fixed (already call_used) if not POWER
3456 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3457 be allocated. */
3458 if (! TARGET_POWER)
3459 fixed_regs[64] = 1;
3460
7c9ac5c0 3461 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
3462 if (TARGET_64BIT)
3463 fixed_regs[13] = call_used_regs[13]
3464 = call_really_used_regs[13] = 1;
3465
3466 /* Conditionally disable FPRs. */
3467 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3468 for (i = 32; i < 64; i++)
3469 fixed_regs[i] = call_used_regs[i]
c4ad648e 3470 = call_really_used_regs[i] = 1;
2aa4498c 3471
7c9ac5c0
PH
3472 /* The TOC register is not killed across calls in a way that is
3473 visible to the compiler. */
3474 if (DEFAULT_ABI == ABI_AIX)
3475 call_really_used_regs[2] = 0;
3476
2aa4498c
AH
3477 if (DEFAULT_ABI == ABI_V4
3478 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3479 && flag_pic == 2)
3480 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3481
3482 if (DEFAULT_ABI == ABI_V4
3483 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3484 && flag_pic == 1)
3485 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3486 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3487 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3488
3489 if (DEFAULT_ABI == ABI_DARWIN
3490 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 3491 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
3492 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3493 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3494
b4db40bf
JJ
3495 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3496 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3497 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3498
2aa4498c
AH
3499 if (TARGET_ALTIVEC)
3500 global_regs[VSCR_REGNO] = 1;
3501
3502 if (TARGET_SPE)
3503 {
3504 global_regs[SPEFSCR_REGNO] = 1;
3505 fixed_regs[FIXED_SCRATCH]
c4ad648e 3506 = call_used_regs[FIXED_SCRATCH]
2aa4498c
AH
3507 = call_really_used_regs[FIXED_SCRATCH] = 1;
3508 }
3509
3510 if (! TARGET_ALTIVEC)
3511 {
3512 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3513 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3514 call_really_used_regs[VRSAVE_REGNO] = 1;
3515 }
3516
3517 if (TARGET_ALTIVEC_ABI)
3518 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3519 call_used_regs[i] = call_really_used_regs[i] = 1;
3520}
fb4d4348 3521\f
a4f6c312
SS
3522/* Try to output insns to set TARGET equal to the constant C if it can
3523 be done in less than N insns. Do all computations in MODE.
3524 Returns the place where the output has been placed if it can be
3525 done and the insns have been emitted. If it would take more than N
 3526 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
3527
3528rtx
f676971a 3529rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 3530 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 3531{
af8cb5c5 3532 rtx result, insn, set;
2bfcf297
DB
3533 HOST_WIDE_INT c0, c1;
3534
37409796 3535 switch (mode)
2bfcf297 3536 {
37409796
NS
3537 case QImode:
3538 case HImode:
2bfcf297 3539 if (dest == NULL)
c4ad648e 3540 dest = gen_reg_rtx (mode);
2bfcf297
DB
3541 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3542 return dest;
bb8df8a6 3543
37409796 3544 case SImode:
af8cb5c5 3545 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
bb8df8a6 3546
af8cb5c5
DE
3547 emit_insn (gen_rtx_SET (VOIDmode, result,
3548 GEN_INT (INTVAL (source)
3549 & (~ (HOST_WIDE_INT) 0xffff))));
3550 emit_insn (gen_rtx_SET (VOIDmode, dest,
3551 gen_rtx_IOR (SImode, result,
3552 GEN_INT (INTVAL (source) & 0xffff))));
3553 result = dest;
37409796
NS
3554 break;
3555
3556 case DImode:
3557 switch (GET_CODE (source))
af8cb5c5 3558 {
37409796 3559 case CONST_INT:
af8cb5c5
DE
3560 c0 = INTVAL (source);
3561 c1 = -(c0 < 0);
37409796 3562 break;
bb8df8a6 3563
37409796 3564 case CONST_DOUBLE:
2bfcf297 3565#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
3566 c0 = CONST_DOUBLE_LOW (source);
3567 c1 = -(c0 < 0);
2bfcf297 3568#else
af8cb5c5
DE
3569 c0 = CONST_DOUBLE_LOW (source);
3570 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 3571#endif
37409796
NS
3572 break;
3573
3574 default:
3575 gcc_unreachable ();
af8cb5c5 3576 }
af8cb5c5
DE
3577
3578 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
3579 break;
3580
3581 default:
3582 gcc_unreachable ();
2bfcf297 3583 }
2bfcf297 3584
af8cb5c5
DE
3585 insn = get_last_insn ();
3586 set = single_set (insn);
3587 if (! CONSTANT_P (SET_SRC (set)))
3588 set_unique_reg_note (insn, REG_EQUAL, source);
3589
3590 return result;
2bfcf297
DB
3591}
3592
3593/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 3594 fall back to a straightforward decomposition. We do this to avoid
3595 exponential run times encountered when looking for longer sequences
3596 with rs6000_emit_set_const. */
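/* For instance, on a 64-bit target the constant 0x123456789abcdef0 splits
   into ud4..ud1 = 0x1234, 0x5678, 0x9abc, 0xdef0 and is built as
       lis   rD,0x1234        (rD = 0x12340000)
       ori   rD,rD,0x5678     (rD = 0x12345678)
       sldi  rD,rD,32         (rD = 0x1234567800000000)
       oris  rD,rD,0x9abc
       ori   rD,rD,0xdef0
   i.e. at most five instructions for an arbitrary 64-bit constant.  */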
3597static rtx
a2369ed3 3598rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
3599{
3600 if (!TARGET_POWERPC64)
3601 {
3602 rtx operand1, operand2;
3603
3604 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3605 DImode);
3606 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3607 DImode);
3608 emit_move_insn (operand1, GEN_INT (c1));
3609 emit_move_insn (operand2, GEN_INT (c2));
3610 }
3611 else
3612 {
bc06712d 3613 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 3614
bc06712d 3615 ud1 = c1 & 0xffff;
f921c9c9 3616 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 3617#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 3618 c2 = c1 >> 32;
2bfcf297 3619#endif
bc06712d 3620 ud3 = c2 & 0xffff;
f921c9c9 3621 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 3622
f676971a 3623 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 3624 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 3625 {
bc06712d 3626 if (ud1 & 0x8000)
b78d48dd 3627 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
3628 else
3629 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 3630 }
2bfcf297 3631
f676971a 3632 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 3633 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 3634 {
bc06712d 3635 if (ud2 & 0x8000)
f676971a 3636 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 3637 - 0x80000000));
252b88f7 3638 else
bc06712d
TR
3639 emit_move_insn (dest, GEN_INT (ud2 << 16));
3640 if (ud1 != 0)
3641 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
252b88f7 3642 }
f676971a 3643 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
3644 || (ud4 == 0 && ! (ud3 & 0x8000)))
3645 {
3646 if (ud3 & 0x8000)
f676971a 3647 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
3648 - 0x80000000));
3649 else
3650 emit_move_insn (dest, GEN_INT (ud3 << 16));
3651
3652 if (ud2 != 0)
3653 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3654 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3655 if (ud1 != 0)
3656 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3657 }
f676971a 3658 else
bc06712d
TR
3659 {
3660 if (ud4 & 0x8000)
f676971a 3661 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
3662 - 0x80000000));
3663 else
3664 emit_move_insn (dest, GEN_INT (ud4 << 16));
3665
3666 if (ud3 != 0)
3667 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2bfcf297 3668
bc06712d
TR
3669 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3670 if (ud2 != 0)
f676971a
EC
3671 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3672 GEN_INT (ud2 << 16)));
bc06712d
TR
3673 if (ud1 != 0)
3674 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3675 }
3676 }
2bfcf297
DB
3677 return dest;
3678}
3679
76d2b81d
DJ
3680/* Helper for the following. Get rid of [r+r] memory refs
3681 in cases where it won't work (TImode, TFmode). */
3682
3683static void
3684rs6000_eliminate_indexed_memrefs (rtx operands[2])
3685{
3686 if (GET_CODE (operands[0]) == MEM
3687 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 3688 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
3689 && ! reload_in_progress)
3690 operands[0]
3691 = replace_equiv_address (operands[0],
3692 copy_addr_to_reg (XEXP (operands[0], 0)));
3693
3694 if (GET_CODE (operands[1]) == MEM
3695 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 3696 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
3697 && ! reload_in_progress)
3698 operands[1]
3699 = replace_equiv_address (operands[1],
3700 copy_addr_to_reg (XEXP (operands[1], 0)));
3701}
3702
fb4d4348
GK
3703/* Emit a move from SOURCE to DEST in mode MODE. */
3704void
a2369ed3 3705rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
3706{
3707 rtx operands[2];
3708 operands[0] = dest;
3709 operands[1] = source;
f676971a 3710
fb4d4348
GK
3711 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3712 if (GET_CODE (operands[1]) == CONST_DOUBLE
3713 && ! FLOAT_MODE_P (mode)
3714 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3715 {
3716 /* FIXME. This should never happen. */
3717 /* Since it seems that it does, do the safe thing and convert
3718 to a CONST_INT. */
2496c7bd 3719 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 3720 }
37409796
NS
3721 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3722 || FLOAT_MODE_P (mode)
3723 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3724 || CONST_DOUBLE_LOW (operands[1]) < 0)
3725 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3726 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 3727
c9e8cb32
DD
3728 /* Check if GCC is setting up a block move that will end up using FP
3729 registers as temporaries. We must make sure this is acceptable. */
3730 if (GET_CODE (operands[0]) == MEM
3731 && GET_CODE (operands[1]) == MEM
3732 && mode == DImode
41543739
GK
3733 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3734 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3735 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3736 ? 32 : MEM_ALIGN (operands[0])))
3737 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 3738 ? 32
41543739
GK
3739 : MEM_ALIGN (operands[1]))))
3740 && ! MEM_VOLATILE_P (operands [0])
3741 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 3742 {
41543739
GK
3743 emit_move_insn (adjust_address (operands[0], SImode, 0),
3744 adjust_address (operands[1], SImode, 0));
3745 emit_move_insn (adjust_address (operands[0], SImode, 4),
3746 adjust_address (operands[1], SImode, 4));
c9e8cb32
DD
3747 return;
3748 }
630d42a0 3749
55aa0757 3750 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
c9dbf840 3751 && !gpc_reg_operand (operands[1], mode))
f6219a5e 3752 operands[1] = force_reg (mode, operands[1]);
a9098fd0 3753
a3170dc6
AH
3754 if (mode == SFmode && ! TARGET_POWERPC
3755 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 3756 && GET_CODE (operands[0]) == MEM)
fb4d4348 3757 {
ffc14f31
GK
3758 int regnum;
3759
3760 if (reload_in_progress || reload_completed)
3761 regnum = true_regnum (operands[1]);
3762 else if (GET_CODE (operands[1]) == REG)
3763 regnum = REGNO (operands[1]);
3764 else
3765 regnum = -1;
f676971a 3766
fb4d4348
GK
3767 /* If operands[1] is a register, on POWER it may have
3768 double-precision data in it, so truncate it to single
3769 precision. */
3770 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3771 {
3772 rtx newreg;
3773 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3774 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3775 operands[1] = newreg;
3776 }
3777 }
3778
c4501e62
JJ
3779 /* Recognize the case where operand[1] is a reference to thread-local
3780 data and load its address to a register. */
84f52ebd 3781 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 3782 {
84f52ebd
RH
3783 enum tls_model model;
3784 rtx tmp = operands[1];
3785 rtx addend = NULL;
3786
3787 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3788 {
3789 addend = XEXP (XEXP (tmp, 0), 1);
3790 tmp = XEXP (XEXP (tmp, 0), 0);
3791 }
3792
3793 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3794 model = SYMBOL_REF_TLS_MODEL (tmp);
3795 gcc_assert (model != 0);
3796
3797 tmp = rs6000_legitimize_tls_address (tmp, model);
3798 if (addend)
3799 {
3800 tmp = gen_rtx_PLUS (mode, tmp, addend);
3801 tmp = force_operand (tmp, operands[0]);
3802 }
3803 operands[1] = tmp;
c4501e62
JJ
3804 }
3805
8f4e6caf
RH
3806 /* Handle the case where reload calls us with an invalid address. */
3807 if (reload_in_progress && mode == Pmode
69ef87e2 3808 && (! general_operand (operands[1], mode)
8f4e6caf
RH
3809 || ! nonimmediate_operand (operands[0], mode)))
3810 goto emit_set;
3811
a9baceb1
GK
3812 /* 128-bit constant floating-point values on Darwin should really be
3813 loaded as two parts. */
3814 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3815 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3816 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3817 {
3818 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3819 know how to get a DFmode SUBREG of a TFmode. */
3820 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3821 simplify_gen_subreg (DImode, operands[1], mode, 0),
3822 DImode);
3823 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3824 GET_MODE_SIZE (DImode)),
3825 simplify_gen_subreg (DImode, operands[1], mode,
3826 GET_MODE_SIZE (DImode)),
3827 DImode);
3828 return;
3829 }
3830
fb4d4348
GK
3831 /* FIXME: In the long term, this switch statement should go away
3832 and be replaced by a sequence of tests based on things like
3833 mode == Pmode. */
3834 switch (mode)
3835 {
3836 case HImode:
3837 case QImode:
3838 if (CONSTANT_P (operands[1])
3839 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 3840 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
3841 break;
3842
06f4e019 3843 case TFmode:
76d2b81d
DJ
3844 rs6000_eliminate_indexed_memrefs (operands);
3845 /* fall through */
3846
fb4d4348
GK
3847 case DFmode:
3848 case SFmode:
f676971a 3849 if (CONSTANT_P (operands[1])
fb4d4348 3850 && ! easy_fp_constant (operands[1], mode))
a9098fd0 3851 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 3852 break;
f676971a 3853
0ac081f6
AH
3854 case V16QImode:
3855 case V8HImode:
3856 case V4SFmode:
3857 case V4SImode:
a3170dc6
AH
3858 case V4HImode:
3859 case V2SFmode:
3860 case V2SImode:
00a892b8 3861 case V1DImode:
69ef87e2 3862 if (CONSTANT_P (operands[1])
d744e06e 3863 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
3864 operands[1] = force_const_mem (mode, operands[1]);
3865 break;
f676971a 3866
fb4d4348 3867 case SImode:
a9098fd0 3868 case DImode:
fb4d4348
GK
3869 /* Use default pattern for address of ELF small data */
3870 if (TARGET_ELF
a9098fd0 3871 && mode == Pmode
f607bc57 3872 && DEFAULT_ABI == ABI_V4
f676971a 3873 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
3874 || GET_CODE (operands[1]) == CONST)
3875 && small_data_operand (operands[1], mode))
fb4d4348
GK
3876 {
3877 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3878 return;
3879 }
3880
f607bc57 3881 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
3882 && mode == Pmode && mode == SImode
3883 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
3884 {
3885 emit_insn (gen_movsi_got (operands[0], operands[1]));
3886 return;
3887 }
3888
ee890fe2 3889 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
3890 && TARGET_NO_TOC
3891 && ! flag_pic
a9098fd0 3892 && mode == Pmode
fb4d4348
GK
3893 && CONSTANT_P (operands[1])
3894 && GET_CODE (operands[1]) != HIGH
3895 && GET_CODE (operands[1]) != CONST_INT)
3896 {
a9098fd0 3897 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
fb4d4348
GK
3898
3899 /* If this is a function address on -mcall-aixdesc,
3900 convert it to the address of the descriptor. */
3901 if (DEFAULT_ABI == ABI_AIX
3902 && GET_CODE (operands[1]) == SYMBOL_REF
3903 && XSTR (operands[1], 0)[0] == '.')
3904 {
3905 const char *name = XSTR (operands[1], 0);
3906 rtx new_ref;
3907 while (*name == '.')
3908 name++;
3909 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3910 CONSTANT_POOL_ADDRESS_P (new_ref)
3911 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 3912 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 3913 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
d1908feb 3914 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
fb4d4348
GK
3915 operands[1] = new_ref;
3916 }
7509c759 3917
ee890fe2
SS
3918 if (DEFAULT_ABI == ABI_DARWIN)
3919 {
ab82a49f
AP
3920#if TARGET_MACHO
3921 if (MACHO_DYNAMIC_NO_PIC_P)
3922 {
3923 /* Take care of any required data indirection. */
3924 operands[1] = rs6000_machopic_legitimize_pic_address (
3925 operands[1], mode, operands[0]);
3926 if (operands[0] != operands[1])
3927 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 3928 operands[0], operands[1]));
ab82a49f
AP
3929 return;
3930 }
3931#endif
b8a55285
AP
3932 emit_insn (gen_macho_high (target, operands[1]));
3933 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
3934 return;
3935 }
3936
fb4d4348
GK
3937 emit_insn (gen_elf_high (target, operands[1]));
3938 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3939 return;
3940 }
3941
a9098fd0
GK
3942 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3943 and we have put it in the TOC, we just need to make a TOC-relative
3944 reference to it. */
3945 if (TARGET_TOC
3946 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 3947 && constant_pool_expr_p (operands[1])
a9098fd0
GK
3948 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3949 get_pool_mode (operands[1])))
fb4d4348 3950 {
a9098fd0 3951 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 3952 }
a9098fd0
GK
3953 else if (mode == Pmode
3954 && CONSTANT_P (operands[1])
38886f37
AO
3955 && ((GET_CODE (operands[1]) != CONST_INT
3956 && ! easy_fp_constant (operands[1], mode))
3957 || (GET_CODE (operands[1]) == CONST_INT
3958 && num_insns_constant (operands[1], mode) > 2)
3959 || (GET_CODE (operands[0]) == REG
3960 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 3961 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
3962 && ! legitimate_constant_pool_address_p (operands[1])
3963 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
3964 {
3965 /* Emit a USE operation so that the constant isn't deleted if
3966 expensive optimizations are turned on because nobody
3967 references it. This should only be done for operands that
3968 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3969 This should not be done for operands that contain LABEL_REFs.
3970 For now, we just handle the obvious case. */
3971 if (GET_CODE (operands[1]) != LABEL_REF)
3972 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3973
c859cda6 3974#if TARGET_MACHO
ee890fe2 3975 /* Darwin uses a special PIC legitimizer. */
ab82a49f 3976 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 3977 {
ee890fe2
SS
3978 operands[1] =
3979 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
3980 operands[0]);
3981 if (operands[0] != operands[1])
3982 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
3983 return;
3984 }
c859cda6 3985#endif
ee890fe2 3986
fb4d4348
GK
3987 /* If we are to limit the number of things we put in the TOC and
3988 this is a symbol plus a constant we can add in one insn,
3989 just put the symbol in the TOC and add the constant. Don't do
3990 this if reload is in progress. */
3991 if (GET_CODE (operands[1]) == CONST
3992 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3993 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 3994 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
3995 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3996 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3997 && ! side_effects_p (operands[0]))
3998 {
a4f6c312
SS
3999 rtx sym =
4000 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4001 rtx other = XEXP (XEXP (operands[1], 0), 1);
4002
a9098fd0
GK
4003 sym = force_reg (mode, sym);
4004 if (mode == SImode)
4005 emit_insn (gen_addsi3 (operands[0], sym, other));
4006 else
4007 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4008 return;
4009 }
4010
a9098fd0 4011 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4012
f676971a 4013 if (TARGET_TOC
4d588c14 4014 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4015 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4016 get_pool_constant (XEXP (operands[1], 0)),
4017 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4018 {
ba4828e0 4019 operands[1]
542a8afa 4020 = gen_const_mem (mode,
c4ad648e 4021 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4022 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4023 }
fb4d4348
GK
4024 }
4025 break;
a9098fd0 4026
fb4d4348 4027 case TImode:
76d2b81d
DJ
4028 rs6000_eliminate_indexed_memrefs (operands);
4029
27dc0551
DE
4030 if (TARGET_POWER)
4031 {
4032 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4033 gen_rtvec (2,
4034 gen_rtx_SET (VOIDmode,
4035 operands[0], operands[1]),
4036 gen_rtx_CLOBBER (VOIDmode,
4037 gen_rtx_SCRATCH (SImode)))));
4038 return;
4039 }
fb4d4348
GK
4040 break;
4041
4042 default:
37409796 4043 gcc_unreachable ();
fb4d4348
GK
4044 }
4045
a9098fd0
GK
4046 /* Above, we may have called force_const_mem which may have returned
4047 an invalid address. If we can, fix this up; otherwise, reload will
4048 have to deal with it. */
8f4e6caf
RH
4049 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4050 operands[1] = validize_mem (operands[1]);
a9098fd0 4051
8f4e6caf 4052 emit_set:
fb4d4348
GK
4053 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4054}
4697a36c 4055\f
2858f73a
GK
4056/* Nonzero if we can use a floating-point register to pass this arg. */
4057#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4058 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4059 && (CUM)->fregno <= FP_ARG_MAX_REG \
4060 && TARGET_HARD_FLOAT && TARGET_FPRS)
4061
4062/* Nonzero if we can use an AltiVec register to pass this arg. */
4063#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4064 (ALTIVEC_VECTOR_MODE (MODE) \
4065 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4066 && TARGET_ALTIVEC_ABI \
83953138 4067 && (NAMED))
2858f73a 4068
c6e8c921
GK
 4069/* Return a nonzero value if the function value should be returned in
 4070 memory, just as large structures always are. TYPE will be
4071 the data type of the value, and FNTYPE will be the type of the
4072 function doing the returning, or @code{NULL} for libcalls.
4073
4074 The AIX ABI for the RS/6000 specifies that all structures are
4075 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4076 specifies that structures <= 8 bytes are returned in r3/r4, but a
4077 draft put them in memory, and GCC used to implement the draft
df01da37 4078 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4079 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4080 compatibility can change DRAFT_V4_STRUCT_RET to override the
4081 default, and -m switches get the final word. See
4082 rs6000_override_options for more details.
4083
4084 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4085 long double support is enabled. These values are returned in memory.
4086
4087 int_size_in_bytes returns -1 for variable size objects, which go in
4088 memory always. The cast to unsigned makes -1 > 8. */
4089
4090static bool
4091rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4092{
594a51fe
SS
4093 /* In the darwin64 abi, try to use registers for larger structs
4094 if possible. */
0b5383eb 4095 if (rs6000_darwin64_abi
594a51fe 4096 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4097 && int_size_in_bytes (type) > 0)
4098 {
4099 CUMULATIVE_ARGS valcum;
4100 rtx valret;
4101
4102 valcum.words = 0;
4103 valcum.fregno = FP_ARG_MIN_REG;
4104 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4105 /* Do a trial code generation as if this were going to be passed
4106 as an argument; if any part goes in memory, we return NULL. */
4107 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4108 if (valret)
4109 return false;
4110 /* Otherwise fall through to more conventional ABI rules. */
4111 }
594a51fe 4112
c6e8c921 4113 if (AGGREGATE_TYPE_P (type)
df01da37 4114 && (aix_struct_return
c6e8c921
GK
4115 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4116 return true;
b693336b 4117
bada2eb8
DE
4118 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4119 modes only exist for GCC vector types if -maltivec. */
4120 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4121 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4122 return false;
4123
b693336b
PB
4124 /* Return synthetic vectors in memory. */
4125 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4126 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4127 {
4128 static bool warned_for_return_big_vectors = false;
4129 if (!warned_for_return_big_vectors)
4130 {
d4ee4d25 4131 warning (0, "GCC vector returned by reference: "
b693336b
PB
4132 "non-standard ABI extension with no compatibility guarantee");
4133 warned_for_return_big_vectors = true;
4134 }
4135 return true;
4136 }
4137
c6e8c921
GK
4138 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4139 return true;
ad630bef 4140
c6e8c921
GK
4141 return false;
4142}
4143
4697a36c
MM
4144/* Initialize a variable CUM of type CUMULATIVE_ARGS
4145 for a call to a function whose data type is FNTYPE.
4146 For a library call, FNTYPE is 0.
4147
4148 For incoming args we set the number of arguments in the prototype large
1c20ae99 4149 so we never return a PARALLEL. */
4697a36c
MM
4150
4151void
f676971a 4152init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4153 rtx libname ATTRIBUTE_UNUSED, int incoming,
4154 int libcall, int n_named_args)
4697a36c
MM
4155{
4156 static CUMULATIVE_ARGS zero_cumulative;
4157
4158 *cum = zero_cumulative;
4159 cum->words = 0;
4160 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4161 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4162 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4163 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4164 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4165 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4166 cum->stdarg = fntype
4167 && (TYPE_ARG_TYPES (fntype) != 0
4168 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4169 != void_type_node));
4697a36c 4170
0f6937fe
AM
4171 cum->nargs_prototype = 0;
4172 if (incoming || cum->prototype)
4173 cum->nargs_prototype = n_named_args;
4697a36c 4174
a5c76ee6 4175 /* Check for a longcall attribute. */
3eb4e360
AM
4176 if ((!fntype && rs6000_default_long_calls)
4177 || (fntype
4178 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4179 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4180 cum->call_cookie |= CALL_LONG;
6a4cee5f 4181
4697a36c
MM
4182 if (TARGET_DEBUG_ARG)
4183 {
4184 fprintf (stderr, "\ninit_cumulative_args:");
4185 if (fntype)
4186 {
4187 tree ret_type = TREE_TYPE (fntype);
4188 fprintf (stderr, " ret code = %s,",
4189 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4190 }
4191
6a4cee5f
MM
4192 if (cum->call_cookie & CALL_LONG)
4193 fprintf (stderr, " longcall,");
4194
4697a36c
MM
4195 fprintf (stderr, " proto = %d, nargs = %d\n",
4196 cum->prototype, cum->nargs_prototype);
4197 }
f676971a 4198
c4ad648e
AM
4199 if (fntype
4200 && !TARGET_ALTIVEC
4201 && TARGET_ALTIVEC_ABI
4202 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4203 {
c85ce869 4204 error ("cannot return value in vector register because"
c4ad648e 4205 " altivec instructions are disabled, use -maltivec"
c85ce869 4206 " to enable them");
c4ad648e 4207 }
4697a36c
MM
4208}
4209\f
fe984136
RH
4210/* Return true if TYPE must be passed on the stack and not in registers. */
4211
4212static bool
4213rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4214{
4215 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4216 return must_pass_in_stack_var_size (mode, type);
4217 else
4218 return must_pass_in_stack_var_size_or_pad (mode, type);
4219}
4220
c229cba9
DE
4221/* If defined, a C expression which determines whether, and in which
4222 direction, to pad out an argument with extra space. The value
4223 should be of type `enum direction': either `upward' to pad above
4224 the argument, `downward' to pad below, or `none' to inhibit
4225 padding.
4226
4227 For the AIX ABI structs are always stored left shifted in their
4228 argument slot. */
4229
9ebbca7d 4230enum direction
a2369ed3 4231function_arg_padding (enum machine_mode mode, tree type)
c229cba9 4232{
6e985040
AM
4233#ifndef AGGREGATE_PADDING_FIXED
4234#define AGGREGATE_PADDING_FIXED 0
4235#endif
4236#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4237#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4238#endif
4239
4240 if (!AGGREGATE_PADDING_FIXED)
4241 {
4242 /* GCC used to pass structures of the same size as integer types as
4243 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4244 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4245 passed padded downward, except that -mstrict-align further
4246 muddied the water in that multi-component structures of 2 and 4
4247 bytes in size were passed padded upward.
4248
4249 The following arranges for best compatibility with previous
4250 versions of gcc, but removes the -mstrict-align dependency. */
4251 if (BYTES_BIG_ENDIAN)
4252 {
4253 HOST_WIDE_INT size = 0;
4254
4255 if (mode == BLKmode)
4256 {
4257 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4258 size = int_size_in_bytes (type);
4259 }
4260 else
4261 size = GET_MODE_SIZE (mode);
4262
4263 if (size == 1 || size == 2 || size == 4)
4264 return downward;
4265 }
4266 return upward;
4267 }
4268
4269 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4270 {
4271 if (type != 0 && AGGREGATE_TYPE_P (type))
4272 return upward;
4273 }
c229cba9 4274
d3704c46
KH
4275 /* Fall back to the default. */
4276 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4277}
4278
b6c9286a 4279/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4280 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4281 PARM_BOUNDARY is used for all arguments.
f676971a 4282
b693336b
PB
4283 V.4 wants long longs to be double word aligned.
4284 Doubleword align SPE vectors.
4285 Quadword align Altivec vectors.
4286 Quadword align large synthetic vector types. */
b6c9286a
MM
4287
4288int
b693336b 4289function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4290{
4ed78545
AM
4291 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4292 return 64;
ad630bef
DE
4293 else if (SPE_VECTOR_MODE (mode)
4294 || (type && TREE_CODE (type) == VECTOR_TYPE
4295 && int_size_in_bytes (type) >= 8
4296 && int_size_in_bytes (type) < 16))
e1f83b4d 4297 return 64;
ad630bef
DE
4298 else if (ALTIVEC_VECTOR_MODE (mode)
4299 || (type && TREE_CODE (type) == VECTOR_TYPE
4300 && int_size_in_bytes (type) >= 16))
0ac081f6 4301 return 128;
0b5383eb
DJ
4302 else if (rs6000_darwin64_abi && mode == BLKmode
4303 && type && TYPE_ALIGN (type) > 64)
4304 return 128;
9ebbca7d 4305 else
b6c9286a 4306 return PARM_BOUNDARY;
b6c9286a 4307}
c53bdcf5 4308
294bd182
AM
4309/* For a function parm of MODE and TYPE, return the starting word in
4310 the parameter area. NWORDS of the parameter area are already used. */
4311
4312static unsigned int
4313rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4314{
4315 unsigned int align;
4316 unsigned int parm_offset;
4317
4318 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4319 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
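  /* PARM_OFFSET is the start of the parameter save area in words: 8 bytes
     (2 words) into the frame for SVR4, 24 or 48 bytes (6 words) for AIX
     and Darwin.  Example: an AltiVec argument (128-bit boundary) on
     32-bit AIX with NWORDS = 1 gives ALIGN = 3 and a start word of
     1 + (-(6 + 1) & 3) = 2, i.e. offset 24 + 2*4 = 32, which is 16-byte
     aligned given the usual 16-byte aligned stack.  */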
4320 return nwords + (-(parm_offset + nwords) & align);
4321}
4322
c53bdcf5
AM
4323/* Compute the size (in words) of a function argument. */
4324
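/* E.g. a 10-byte BLKmode argument occupies (10 + 3) >> 2 = 3 four-byte
   words when TARGET_32BIT and (10 + 7) >> 3 = 2 eight-byte words
   otherwise.  */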
4325static unsigned long
4326rs6000_arg_size (enum machine_mode mode, tree type)
4327{
4328 unsigned long size;
4329
4330 if (mode != BLKmode)
4331 size = GET_MODE_SIZE (mode);
4332 else
4333 size = int_size_in_bytes (type);
4334
4335 if (TARGET_32BIT)
4336 return (size + 3) >> 2;
4337 else
4338 return (size + 7) >> 3;
4339}
b6c9286a 4340\f
0b5383eb 4341/* Use this to flush pending int fields. */
594a51fe
SS
4342
4343static void
0b5383eb
DJ
4344rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4345 HOST_WIDE_INT bitpos)
594a51fe 4346{
0b5383eb
DJ
4347 unsigned int startbit, endbit;
4348 int intregs, intoffset;
4349 enum machine_mode mode;
594a51fe 4350
0b5383eb
DJ
4351 if (cum->intoffset == -1)
4352 return;
594a51fe 4353
0b5383eb
DJ
4354 intoffset = cum->intoffset;
4355 cum->intoffset = -1;
4356
4357 if (intoffset % BITS_PER_WORD != 0)
4358 {
4359 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4360 MODE_INT, 0);
4361 if (mode == BLKmode)
594a51fe 4362 {
0b5383eb
DJ
4363 /* We couldn't find an appropriate mode, which happens,
4364 e.g., in packed structs when there are 3 bytes to load.
 4365 Move intoffset back to the beginning of the word in this
4366 case. */
4367 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 4368 }
594a51fe 4369 }
0b5383eb
DJ
4370
4371 startbit = intoffset & -BITS_PER_WORD;
4372 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4373 intregs = (endbit - startbit) / BITS_PER_WORD;
4374 cum->words += intregs;
4375}
4376
4377/* The darwin64 ABI calls for us to recurse down through structs,
4378 looking for elements passed in registers. Unfortunately, we have
4379 to track int register count here also because of misalignments
4380 in powerpc alignment mode. */
4381
4382static void
4383rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4384 tree type,
4385 HOST_WIDE_INT startbitpos)
4386{
4387 tree f;
4388
4389 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4390 if (TREE_CODE (f) == FIELD_DECL)
4391 {
4392 HOST_WIDE_INT bitpos = startbitpos;
4393 tree ftype = TREE_TYPE (f);
4394 enum machine_mode mode = TYPE_MODE (ftype);
4395
4396 if (DECL_SIZE (f) != 0
4397 && host_integerp (bit_position (f), 1))
4398 bitpos += int_bit_position (f);
4399
4400 /* ??? FIXME: else assume zero offset. */
4401
4402 if (TREE_CODE (ftype) == RECORD_TYPE)
4403 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4404 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4405 {
4406 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4407 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4408 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4409 }
4410 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4411 {
4412 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4413 cum->vregno++;
4414 cum->words += 2;
4415 }
4416 else if (cum->intoffset == -1)
4417 cum->intoffset = bitpos;
4418 }
594a51fe
SS
4419}
4420
4697a36c
MM
4421/* Update the data in CUM to advance over an argument
4422 of mode MODE and data type TYPE.
b2d04ecf
AM
4423 (TYPE is null for libcalls where that information may not be available.)
4424
4425 Note that for args passed by reference, function_arg will be called
4426 with MODE and TYPE set to that of the pointer to the arg, not the arg
4427 itself. */
4697a36c
MM
4428
4429void
f676971a 4430function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 4431 tree type, int named, int depth)
4697a36c 4432{
0b5383eb
DJ
4433 int size;
4434
594a51fe
SS
4435 /* Only tick off an argument if we're not recursing. */
4436 if (depth == 0)
4437 cum->nargs_prototype--;
4697a36c 4438
ad630bef
DE
4439 if (TARGET_ALTIVEC_ABI
4440 && (ALTIVEC_VECTOR_MODE (mode)
4441 || (type && TREE_CODE (type) == VECTOR_TYPE
4442 && int_size_in_bytes (type) == 16)))
0ac081f6 4443 {
4ed78545
AM
4444 bool stack = false;
4445
2858f73a 4446 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 4447 {
6d0ef01e
HP
4448 cum->vregno++;
4449 if (!TARGET_ALTIVEC)
c85ce869 4450 error ("cannot pass argument in vector register because"
6d0ef01e 4451 " altivec instructions are disabled, use -maltivec"
c85ce869 4452 " to enable them");
4ed78545
AM
4453
4454 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 4455 even if it is going to be passed in a vector register.
4ed78545
AM
4456 Darwin does the same for variable-argument functions. */
4457 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4458 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4459 stack = true;
6d0ef01e 4460 }
4ed78545
AM
4461 else
4462 stack = true;
4463
4464 if (stack)
c4ad648e 4465 {
a594a19c 4466 int align;
f676971a 4467
2858f73a
GK
4468 /* Vector parameters must be 16-byte aligned. This places
4469 them at 2 mod 4 in terms of words in 32-bit mode, since
4470 the parameter save area starts at offset 24 from the
4471 stack. In 64-bit mode, they just have to start on an
4472 even word, since the parameter save area is 16-byte
4473 aligned. Space for GPRs is reserved even if the argument
4474 will be passed in memory. */
4475 if (TARGET_32BIT)
4ed78545 4476 align = (2 - cum->words) & 3;
2858f73a
GK
4477 else
4478 align = cum->words & 1;
c53bdcf5 4479 cum->words += align + rs6000_arg_size (mode, type);
f676971a 4480
a594a19c
GK
4481 if (TARGET_DEBUG_ARG)
4482 {
f676971a 4483 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
4484 cum->words, align);
4485 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 4486 cum->nargs_prototype, cum->prototype,
2858f73a 4487 GET_MODE_NAME (mode));
a594a19c
GK
4488 }
4489 }
0ac081f6 4490 }
a4b0320c 4491 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
4492 && !cum->stdarg
4493 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 4494 cum->sysv_gregno++;
594a51fe
SS
4495
4496 else if (rs6000_darwin64_abi
4497 && mode == BLKmode
0b5383eb
DJ
4498 && TREE_CODE (type) == RECORD_TYPE
4499 && (size = int_size_in_bytes (type)) > 0)
4500 {
4501 /* Variable sized types have size == -1 and are
4502 treated as if consisting entirely of ints.
4503 Pad to 16 byte boundary if needed. */
4504 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4505 && (cum->words % 2) != 0)
4506 cum->words++;
4507 /* For varargs, we can just go up by the size of the struct. */
4508 if (!named)
4509 cum->words += (size + 7) / 8;
4510 else
4511 {
4512 /* It is tempting to say int register count just goes up by
4513 sizeof(type)/8, but this is wrong in a case such as
4514 { int; double; int; } [powerpc alignment]. We have to
4515 grovel through the fields for these too. */
4516 cum->intoffset = 0;
4517 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 4518 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
4519 size * BITS_PER_UNIT);
4520 }
4521 }
f607bc57 4522 else if (DEFAULT_ABI == ABI_V4)
4697a36c 4523 {
a3170dc6 4524 if (TARGET_HARD_FLOAT && TARGET_FPRS
4cc833b7 4525 && (mode == SFmode || mode == DFmode))
4697a36c 4526 {
4cc833b7
RH
4527 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4528 cum->fregno++;
4529 else
4530 {
4531 if (mode == DFmode)
c4ad648e 4532 cum->words += cum->words & 1;
c53bdcf5 4533 cum->words += rs6000_arg_size (mode, type);
4cc833b7 4534 }
4697a36c 4535 }
4cc833b7
RH
4536 else
4537 {
b2d04ecf 4538 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
4539 int gregno = cum->sysv_gregno;
4540
4ed78545
AM
4541 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4542 (r7,r8) or (r9,r10). As does any other 2 word item such
4543 as complex int due to a historical mistake. */
4544 if (n_words == 2)
4545 gregno += (1 - gregno) & 1;
4cc833b7 4546
4ed78545 4547 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
4548 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4549 {
4ed78545
AM
4550 /* Long long and SPE vectors are aligned on the stack.
4551 So are other 2 word items such as complex int due to
4552 a historical mistake. */
4cc833b7
RH
4553 if (n_words == 2)
4554 cum->words += cum->words & 1;
4555 cum->words += n_words;
4556 }
4697a36c 4557
4cc833b7
RH
4558 /* Note: continuing to accumulate gregno past when we've started
4559 spilling to the stack indicates the fact that we've started
4560 spilling to the stack to expand_builtin_saveregs. */
4561 cum->sysv_gregno = gregno + n_words;
4562 }
4697a36c 4563
4cc833b7
RH
4564 if (TARGET_DEBUG_ARG)
4565 {
4566 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4567 cum->words, cum->fregno);
4568 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4569 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4570 fprintf (stderr, "mode = %4s, named = %d\n",
4571 GET_MODE_NAME (mode), named);
4572 }
4697a36c
MM
4573 }
4574 else
4cc833b7 4575 {
b2d04ecf 4576 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
4577 int start_words = cum->words;
4578 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 4579
294bd182 4580 cum->words = align_words + n_words;
4697a36c 4581
a3170dc6
AH
4582 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4583 && TARGET_HARD_FLOAT && TARGET_FPRS)
c53bdcf5 4584 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
4585
4586 if (TARGET_DEBUG_ARG)
4587 {
4588 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4589 cum->words, cum->fregno);
4590 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4591 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 4592 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 4593 named, align_words - start_words, depth);
4cc833b7
RH
4594 }
4595 }
4697a36c 4596}
a6c9bed4 4597
f82f556d
AH
4598static rtx
4599spe_build_register_parallel (enum machine_mode mode, int gregno)
4600{
54b695e7 4601 rtx r1, r3;
f82f556d 4602
37409796 4603 switch (mode)
f82f556d 4604 {
37409796 4605 case DFmode:
54b695e7
AH
4606 r1 = gen_rtx_REG (DImode, gregno);
4607 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4608 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
4609
4610 case DCmode:
54b695e7
AH
4611 r1 = gen_rtx_REG (DImode, gregno);
4612 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4613 r3 = gen_rtx_REG (DImode, gregno + 2);
4614 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4615 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796
NS
4616
4617 default:
4618 gcc_unreachable ();
f82f556d 4619 }
f82f556d 4620}
b78d48dd 4621
f82f556d 4622/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 4623static rtx
f676971a 4624rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 4625 tree type)
a6c9bed4 4626{
f82f556d
AH
4627 int gregno = cum->sysv_gregno;
4628
4629 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 4630 are passed and returned in a pair of GPRs for ABI compatibility. */
18f63bfa 4631 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
f82f556d 4632 {
b5870bee
AH
4633 int n_words = rs6000_arg_size (mode, type);
4634
f82f556d 4635 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
4636 if (mode == DFmode)
4637 gregno += (1 - gregno) & 1;
f82f556d 4638
b5870bee
AH
4639 /* Multi-reg args are not split between registers and stack. */
4640 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
4641 return NULL_RTX;
4642
4643 return spe_build_register_parallel (mode, gregno);
4644 }
a6c9bed4
AH
4645 if (cum->stdarg)
4646 {
c53bdcf5 4647 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
4648
4649 /* SPE vectors are put in odd registers. */
4650 if (n_words == 2 && (gregno & 1) == 0)
4651 gregno += 1;
4652
4653 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4654 {
4655 rtx r1, r2;
4656 enum machine_mode m = SImode;
4657
4658 r1 = gen_rtx_REG (m, gregno);
4659 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4660 r2 = gen_rtx_REG (m, gregno + 1);
4661 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4662 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4663 }
4664 else
b78d48dd 4665 return NULL_RTX;
a6c9bed4
AH
4666 }
4667 else
4668 {
f82f556d
AH
4669 if (gregno <= GP_ARG_MAX_REG)
4670 return gen_rtx_REG (mode, gregno);
a6c9bed4 4671 else
b78d48dd 4672 return NULL_RTX;
a6c9bed4
AH
4673 }
4674}
4675
0b5383eb
DJ
4676/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4677 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 4678
0b5383eb 4679static void
bb8df8a6 4680rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 4681 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 4682{
0b5383eb
DJ
4683 enum machine_mode mode;
4684 unsigned int regno;
4685 unsigned int startbit, endbit;
4686 int this_regno, intregs, intoffset;
4687 rtx reg;
594a51fe 4688
0b5383eb
DJ
4689 if (cum->intoffset == -1)
4690 return;
4691
4692 intoffset = cum->intoffset;
4693 cum->intoffset = -1;
4694
4695 /* If this is the trailing part of a word, try to only load that
4696 much into the register. Otherwise load the whole register. Note
4697 that in the latter case we may pick up unwanted bits. It's not a
4698 problem at the moment but may wish to revisit. */
4699
4700 if (intoffset % BITS_PER_WORD != 0)
594a51fe 4701 {
0b5383eb
DJ
4702 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4703 MODE_INT, 0);
4704 if (mode == BLKmode)
4705 {
4706 /* We couldn't find an appropriate mode, which happens,
4707 e.g., in packed structs when there are 3 bytes to load.
4708 Back intoffset back to the beginning of the word in this
4709 case. */
4710 intoffset = intoffset & -BITS_PER_WORD;
4711 mode = word_mode;
4712 }
4713 }
4714 else
4715 mode = word_mode;
4716
4717 startbit = intoffset & -BITS_PER_WORD;
4718 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4719 intregs = (endbit - startbit) / BITS_PER_WORD;
4720 this_regno = cum->words + intoffset / BITS_PER_WORD;
4721
4722 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4723 cum->use_stack = 1;
bb8df8a6 4724
0b5383eb
DJ
4725 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4726 if (intregs <= 0)
4727 return;
4728
4729 intoffset /= BITS_PER_UNIT;
4730 do
4731 {
4732 regno = GP_ARG_MIN_REG + this_regno;
4733 reg = gen_rtx_REG (mode, regno);
4734 rvec[(*k)++] =
4735 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4736
4737 this_regno += 1;
4738 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4739 mode = word_mode;
4740 intregs -= 1;
4741 }
4742 while (intregs > 0);
4743}
4744
4745/* Recursive workhorse for the following. */
4746
4747static void
bb8df8a6 4748rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
0b5383eb
DJ
4749 HOST_WIDE_INT startbitpos, rtx rvec[],
4750 int *k)
4751{
4752 tree f;
4753
4754 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4755 if (TREE_CODE (f) == FIELD_DECL)
4756 {
4757 HOST_WIDE_INT bitpos = startbitpos;
4758 tree ftype = TREE_TYPE (f);
4759 enum machine_mode mode = TYPE_MODE (ftype);
4760
4761 if (DECL_SIZE (f) != 0
4762 && host_integerp (bit_position (f), 1))
4763 bitpos += int_bit_position (f);
4764
4765 /* ??? FIXME: else assume zero offset. */
4766
4767 if (TREE_CODE (ftype) == RECORD_TYPE)
4768 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4769 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 4770 {
0b5383eb
DJ
4771#if 0
4772 switch (mode)
594a51fe 4773 {
0b5383eb
DJ
4774 case SCmode: mode = SFmode; break;
4775 case DCmode: mode = DFmode; break;
4776 case TCmode: mode = TFmode; break;
4777 default: break;
594a51fe 4778 }
0b5383eb
DJ
4779#endif
4780 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4781 rvec[(*k)++]
bb8df8a6 4782 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
4783 gen_rtx_REG (mode, cum->fregno++),
4784 GEN_INT (bitpos / BITS_PER_UNIT));
4785 if (mode == TFmode)
4786 cum->fregno++;
594a51fe 4787 }
0b5383eb
DJ
4788 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4789 {
4790 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4791 rvec[(*k)++]
bb8df8a6
EC
4792 = gen_rtx_EXPR_LIST (VOIDmode,
4793 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
4794 GEN_INT (bitpos / BITS_PER_UNIT));
4795 }
4796 else if (cum->intoffset == -1)
4797 cum->intoffset = bitpos;
4798 }
4799}
594a51fe 4800
0b5383eb
DJ
4801/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4802 the register(s) to be used for each field and subfield of a struct
4803 being passed by value, along with the offset of where the
4804 register's value may be found in the block. FP fields go in FP
 4805 registers, vector fields go in vector registers, and everything
bb8df8a6 4806 else goes in int registers, packed as in memory.
8ff40a74 4807
0b5383eb
DJ
4808 This code is also used for function return values. RETVAL indicates
4809 whether this is the case.
8ff40a74 4810
a4d05547 4811 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 4812 calling convention. */
594a51fe 4813
0b5383eb
DJ
4814static rtx
4815rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4816 int named, bool retval)
4817{
4818 rtx rvec[FIRST_PSEUDO_REGISTER];
4819 int k = 1, kbase = 1;
4820 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4821 /* This is a copy; modifications are not visible to our caller. */
4822 CUMULATIVE_ARGS copy_cum = *orig_cum;
4823 CUMULATIVE_ARGS *cum = &copy_cum;
4824
4825 /* Pad to 16 byte boundary if needed. */
4826 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4827 && (cum->words % 2) != 0)
4828 cum->words++;
4829
4830 cum->intoffset = 0;
4831 cum->use_stack = 0;
4832 cum->named = named;
4833
4834 /* Put entries into rvec[] for individual FP and vector fields, and
4835 for the chunks of memory that go in int regs. Note we start at
4836 element 1; 0 is reserved for an indication of using memory, and
4837 may or may not be filled in below. */
4838 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4839 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4840
4841 /* If any part of the struct went on the stack put all of it there.
4842 This hack is because the generic code for
4843 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4844 parts of the struct are not at the beginning. */
4845 if (cum->use_stack)
4846 {
4847 if (retval)
4848 return NULL_RTX; /* doesn't go in registers at all */
4849 kbase = 0;
4850 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4851 }
4852 if (k > 1 || cum->use_stack)
4853 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
4854 else
4855 return NULL_RTX;
4856}
4857
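/* Illustrative sketch of the result (assuming a named argument and
   that nothing has been passed yet, i.e. cum->words == 0 and
   cum->fregno == FP_ARG_MIN_REG): for

     struct { double d; int i; }

   the recursion puts D in the first FP argument register and the
   trailing word holding I in the next free GPR, so the PARALLEL is
   roughly

     (parallel:BLK [(expr_list (reg:DF f1) (const_int 0))
                    (expr_list (reg:DI r4) (const_int 8))])

   i.e. the double lives at byte offset 0 of the block and the word
   containing the int at byte offset 8.  */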
b78d48dd
FJ
4858/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4859
4860static rtx
ec6376ab 4861rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 4862{
ec6376ab
AM
4863 int n_units;
4864 int i, k;
4865 rtx rvec[GP_ARG_NUM_REG + 1];
4866
4867 if (align_words >= GP_ARG_NUM_REG)
4868 return NULL_RTX;
4869
4870 n_units = rs6000_arg_size (mode, type);
4871
4872 /* Optimize the simple case where the arg fits in one gpr, except in
4873 the case of BLKmode due to assign_parms assuming that registers are
4874 BITS_PER_WORD wide. */
4875 if (n_units == 0
4876 || (n_units == 1 && mode != BLKmode))
4877 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4878
4879 k = 0;
4880 if (align_words + n_units > GP_ARG_NUM_REG)
4881 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4882 using a magic NULL_RTX component.
4883 FIXME: This is not strictly correct. Only some of the arg
4884 belongs in memory, not all of it. However, there isn't any way
4885 to do this currently, apart from building rtx descriptions for
4886 the pieces of memory we want stored. Due to bugs in the generic
4887 code we can't use the normal function_arg_partial_nregs scheme
4888 with the PARALLEL arg description we emit here.
4889 In any case, the code to store the whole arg to memory is often
4890 more efficient than code to store pieces, and we know that space
4891 is available in the right place for the whole arg. */
78a52f11
RH
4892 /* FIXME: This should be fixed since the conversion to
4893 TARGET_ARG_PARTIAL_BYTES. */
ec6376ab
AM
4894 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4895
4896 i = 0;
4897 do
36a454e1 4898 {
ec6376ab
AM
4899 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4900 rtx off = GEN_INT (i++ * 4);
4901 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 4902 }
ec6376ab
AM
4903 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4904
4905 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
4906}
4907
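/* Illustrative sketch (32-bit ABI with -mpowerpc64, register names as
   conventionally printed): a DImode argument whose first word falls
   in the last GPR slot has align_words == 7 and n_units == 2, so the
   result is roughly

     (parallel:DI [(expr_list (nil) (const_int 0))
                   (expr_list (reg:SI r10) (const_int 0))])

   where the NULL_RTX element says the argument also goes to memory
   and the second element says its first word is passed in r10.  */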
4697a36c
MM
4908/* Determine where to put an argument to a function.
4909 Value is zero to push the argument on the stack,
4910 or a hard register in which to store the argument.
4911
4912 MODE is the argument's machine mode.
4913 TYPE is the data type of the argument (as a tree).
4914 This is null for libcalls where that information may
4915 not be available.
4916 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
4917 the preceding args and about the function being called. It is
4918 not modified in this routine.
4697a36c
MM
4919 NAMED is nonzero if this argument is a named parameter
4920 (otherwise it is an extra parameter matching an ellipsis).
4921
 4922 On RS/6000 the first eight words of non-FP args are normally in registers
4923 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4924 Under V.4, the first 8 FP args are in registers.
4925
4926 If this is floating-point and no prototype is specified, we use
4927 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 4928 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 4929 so we can pass the FP value just in one register. emit_library_call
b2d04ecf
AM
4930 doesn't support PARALLEL anyway.
4931
4932 Note that for args passed by reference, function_arg will be called
4933 with MODE and TYPE set to that of the pointer to the arg, not the arg
4934 itself. */
4697a36c 4935
9390387d 4936rtx
f676971a 4937function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 4938 tree type, int named)
4697a36c 4939{
4cc833b7 4940 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 4941
a4f6c312
SS
 4942 /* Return a marker indicating whether the bit that V.4 uses to say
 4943 fp args were passed in registers needs to be set or cleared in CR1.
4944 Assume that we don't need the marker for software floating point,
4945 or compiler generated library calls. */
4697a36c
MM
4946 if (mode == VOIDmode)
4947 {
f607bc57 4948 if (abi == ABI_V4
b9599e46 4949 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
4950 && (cum->stdarg
4951 || (cum->nargs_prototype < 0
4952 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 4953 {
a3170dc6
AH
4954 /* For the SPE, we need to crxor CR6 always. */
4955 if (TARGET_SPE_ABI)
4956 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4957 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4958 return GEN_INT (cum->call_cookie
4959 | ((cum->fregno == FP_ARG_MIN_REG)
4960 ? CALL_V4_SET_FP_ARGS
4961 : CALL_V4_CLEAR_FP_ARGS));
7509c759 4962 }
4697a36c 4963
7509c759 4964 return GEN_INT (cum->call_cookie);
4697a36c
MM
4965 }
4966
0b5383eb
DJ
4967 if (rs6000_darwin64_abi && mode == BLKmode
4968 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 4969 {
0b5383eb 4970 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
4971 if (rslt != NULL_RTX)
4972 return rslt;
4973 /* Else fall through to usual handling. */
4974 }
4975
2858f73a 4976 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
4977 if (TARGET_64BIT && ! cum->prototype)
4978 {
c4ad648e
AM
 4979 /* Vector parameters get passed in a vector register
 4980 and also in GPRs or memory, in the absence of a prototype. */
4981 int align_words;
4982 rtx slot;
4983 align_words = (cum->words + 1) & ~1;
4984
4985 if (align_words >= GP_ARG_NUM_REG)
4986 {
4987 slot = NULL_RTX;
4988 }
4989 else
4990 {
4991 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4992 }
4993 return gen_rtx_PARALLEL (mode,
4994 gen_rtvec (2,
4995 gen_rtx_EXPR_LIST (VOIDmode,
4996 slot, const0_rtx),
4997 gen_rtx_EXPR_LIST (VOIDmode,
4998 gen_rtx_REG (mode, cum->vregno),
4999 const0_rtx)));
c72d6c26
HP
5000 }
5001 else
5002 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5003 else if (TARGET_ALTIVEC_ABI
5004 && (ALTIVEC_VECTOR_MODE (mode)
5005 || (type && TREE_CODE (type) == VECTOR_TYPE
5006 && int_size_in_bytes (type) == 16)))
0ac081f6 5007 {
2858f73a 5008 if (named || abi == ABI_V4)
a594a19c 5009 return NULL_RTX;
0ac081f6 5010 else
a594a19c
GK
5011 {
5012 /* Vector parameters to varargs functions under AIX or Darwin
5013 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5014 int align, align_words, n_words;
5015 enum machine_mode part_mode;
a594a19c
GK
5016
5017 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5018 2 mod 4 in terms of words in 32-bit mode, since the parameter
5019 save area starts at offset 24 from the stack. In 64-bit mode,
5020 they just have to start on an even word, since the parameter
5021 save area is 16-byte aligned. */
5022 if (TARGET_32BIT)
4ed78545 5023 align = (2 - cum->words) & 3;
2858f73a
GK
5024 else
5025 align = cum->words & 1;
a594a19c
GK
5026 align_words = cum->words + align;
5027
5028 /* Out of registers? Memory, then. */
5029 if (align_words >= GP_ARG_NUM_REG)
5030 return NULL_RTX;
ec6376ab
AM
5031
5032 if (TARGET_32BIT && TARGET_POWERPC64)
5033 return rs6000_mixed_function_arg (mode, type, align_words);
5034
2858f73a
GK
5035 /* The vector value goes in GPRs. Only the part of the
5036 value in GPRs is reported here. */
ec6376ab
AM
5037 part_mode = mode;
5038 n_words = rs6000_arg_size (mode, type);
5039 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5040 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
5041 is either wholly in GPRs or half in GPRs and half not. */
5042 part_mode = DImode;
ec6376ab
AM
5043
5044 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5045 }
0ac081f6 5046 }
f82f556d
AH
5047 else if (TARGET_SPE_ABI && TARGET_SPE
5048 && (SPE_VECTOR_MODE (mode)
18f63bfa
AH
5049 || (TARGET_E500_DOUBLE && (mode == DFmode
5050 || mode == DCmode))))
a6c9bed4 5051 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5052
f607bc57 5053 else if (abi == ABI_V4)
4697a36c 5054 {
a3170dc6 5055 if (TARGET_HARD_FLOAT && TARGET_FPRS
4cc833b7
RH
5056 && (mode == SFmode || mode == DFmode))
5057 {
5058 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5059 return gen_rtx_REG (mode, cum->fregno);
5060 else
b78d48dd 5061 return NULL_RTX;
4cc833b7
RH
5062 }
5063 else
5064 {
b2d04ecf 5065 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5066 int gregno = cum->sysv_gregno;
5067
4ed78545
AM
5068 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
 5069 (r7,r8) or (r9,r10). As is any other 2-word item, such
 5070 as complex int, due to a historical mistake. */
5071 if (n_words == 2)
5072 gregno += (1 - gregno) & 1;
4cc833b7 5073
4ed78545 5074 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5075 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5076 return NULL_RTX;
ec6376ab
AM
5077
5078 if (TARGET_32BIT && TARGET_POWERPC64)
5079 return rs6000_mixed_function_arg (mode, type,
5080 gregno - GP_ARG_MIN_REG);
5081 return gen_rtx_REG (mode, gregno);
4cc833b7 5082 }
4697a36c 5083 }
4cc833b7
RH
5084 else
5085 {
294bd182 5086 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5087
2858f73a 5088 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5089 {
ec6376ab
AM
5090 rtx rvec[GP_ARG_NUM_REG + 1];
5091 rtx r;
5092 int k;
c53bdcf5
AM
5093 bool needs_psave;
5094 enum machine_mode fmode = mode;
c53bdcf5
AM
5095 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5096
5097 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5098 {
c53bdcf5
AM
5099 /* Currently, we only ever need one reg here because complex
5100 doubles are split. */
37409796 5101 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
ec6376ab
AM
5102
5103 /* Long double split over regs and memory. */
5104 fmode = DFmode;
c53bdcf5 5105 }
c53bdcf5
AM
5106
5107 /* Do we also need to pass this arg in the parameter save
5108 area? */
5109 needs_psave = (type
5110 && (cum->nargs_prototype <= 0
5111 || (DEFAULT_ABI == ABI_AIX
de17c25f 5112 && TARGET_XL_COMPAT
c53bdcf5
AM
5113 && align_words >= GP_ARG_NUM_REG)));
5114
5115 if (!needs_psave && mode == fmode)
ec6376ab 5116 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5117
ec6376ab 5118 k = 0;
c53bdcf5
AM
5119 if (needs_psave)
5120 {
ec6376ab 5121 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5122 This piece must come first, before the fprs. */
c53bdcf5
AM
5123 if (align_words < GP_ARG_NUM_REG)
5124 {
5125 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5126
5127 if (align_words + n_words > GP_ARG_NUM_REG
5128 || (TARGET_32BIT && TARGET_POWERPC64))
5129 {
5130 /* If this is partially on the stack, then we only
5131 include the portion actually in registers here. */
5132 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5133 rtx off;
2e6c9641 5134 int i=0;
c4ad648e
AM
5135 if (align_words + n_words > GP_ARG_NUM_REG
5136 && (TARGET_32BIT && TARGET_POWERPC64))
5137 /* Not all of the arg fits in gprs. Say that it
5138 goes in memory too, using a magic NULL_RTX
5139 component. Also see comment in
5140 rs6000_mixed_function_arg for why the normal
5141 function_arg_partial_nregs scheme doesn't work
5142 in this case. */
5143 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5144 const0_rtx);
ec6376ab
AM
5145 do
5146 {
5147 r = gen_rtx_REG (rmode,
5148 GP_ARG_MIN_REG + align_words);
2e6c9641 5149 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5150 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5151 }
5152 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5153 }
5154 else
5155 {
5156 /* The whole arg fits in gprs. */
5157 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5158 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5159 }
c53bdcf5 5160 }
ec6376ab
AM
5161 else
5162 /* It's entirely in memory. */
5163 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5164 }
5165
ec6376ab
AM
5166 /* Describe where this piece goes in the fprs. */
5167 r = gen_rtx_REG (fmode, cum->fregno);
5168 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5169
5170 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5171 }
5172 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5173 {
ec6376ab
AM
5174 if (TARGET_32BIT && TARGET_POWERPC64)
5175 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5176
4eeca74f
AM
5177 if (mode == BLKmode)
5178 mode = Pmode;
5179
b2d04ecf
AM
5180 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5181 }
4cc833b7
RH
5182 else
5183 return NULL_RTX;
4697a36c 5184 }
4697a36c
MM
5185}
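/* Illustrative sketch of the dispatch above (64-bit AIX ABI, register
   names as conventionally printed): for a prototyped call to
   f (int a, double b, int c), A is passed in r3, B in f1 and C in
   r5; the GPR slot shadowing B is skipped but not reused.  Had the
   call been unprototyped, B would instead be described by a PARALLEL
   naming both the shadow GPR r4 and f1, so the caller loads it into
   both.  */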
5186\f
ec6376ab 5187/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5188 the number of bytes passed in registers. For args passed entirely in
5189 registers or entirely in memory, zero. When an arg is described by a
5190 PARALLEL, perhaps using more than one register type, this function
5191 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5192
78a52f11
RH
5193static int
5194rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5195 tree type, bool named)
4697a36c 5196{
c53bdcf5 5197 int ret = 0;
ec6376ab 5198 int align_words;
c53bdcf5 5199
f607bc57 5200 if (DEFAULT_ABI == ABI_V4)
4697a36c 5201 return 0;
4697a36c 5202
c53bdcf5
AM
5203 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5204 && cum->nargs_prototype >= 0)
5205 return 0;
5206
0b5383eb
DJ
5207 /* In this complicated case we just disable the partial_nregs code. */
5208 if (rs6000_darwin64_abi && mode == BLKmode
5209 && TREE_CODE (type) == RECORD_TYPE
5210 && int_size_in_bytes (type) > 0)
5211 return 0;
5212
294bd182 5213 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab
AM
5214
5215 if (USE_FP_FOR_ARG_P (cum, mode, type)
fb63c729
AM
5216 /* If we are passing this arg in the fixed parameter save area
5217 (gprs or memory) as well as fprs, then this function should
5218 return the number of bytes passed in the parameter save area
bb8df8a6 5219 rather than bytes passed in fprs. */
ec6376ab
AM
5220 && !(type
5221 && (cum->nargs_prototype <= 0
5222 || (DEFAULT_ABI == ABI_AIX
de17c25f 5223 && TARGET_XL_COMPAT
ec6376ab 5224 && align_words >= GP_ARG_NUM_REG))))
4697a36c 5225 {
c53bdcf5 5226 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
ac7e839c 5227 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5228 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5229 return 0;
5230 }
5231
ec6376ab
AM
5232 if (align_words < GP_ARG_NUM_REG
5233 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5234 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5235
c53bdcf5 5236 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5237 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5238
c53bdcf5 5239 return ret;
4697a36c
MM
5240}
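/* Worked example (a sketch, 64-bit AIX ABI): a 16-byte BLKmode struct
   whose first word lands at align_words == 7 has
   rs6000_arg_size () == 2, so GP_ARG_NUM_REG (8) falls strictly
   between 7 and 7 + 2 and the function reports (8 - 7) * 8 == 8
   bytes passed in registers; the same argument in 32-bit mode would
   report (8 - 7) * 4 == 4.  */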
5241\f
 5242/* Nonzero if an argument must be passed by reference.  If so,
 5243 a copy of that argument is
5244 made in memory and a pointer to the argument is passed instead of
5245 the argument itself. The pointer is passed in whatever way is
5246 appropriate for passing a pointer to that type.
5247
b2d04ecf
AM
5248 Under V.4, aggregates and long double are passed by reference.
5249
5250 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5251 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5252
5253 As an extension to all ABIs, variable sized types are passed by
5254 reference. */
4697a36c 5255
8cd5a4e0 5256static bool
f676971a 5257rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
bada2eb8
DE
5258 enum machine_mode mode, tree type,
5259 bool named ATTRIBUTE_UNUSED)
4697a36c 5260{
bada2eb8 5261 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
4697a36c
MM
5262 {
5263 if (TARGET_DEBUG_ARG)
bada2eb8
DE
5264 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5265 return 1;
5266 }
5267
5268 if (!type)
5269 return 0;
4697a36c 5270
bada2eb8
DE
5271 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5272 {
5273 if (TARGET_DEBUG_ARG)
5274 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5275 return 1;
5276 }
5277
5278 if (int_size_in_bytes (type) < 0)
5279 {
5280 if (TARGET_DEBUG_ARG)
5281 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5282 return 1;
5283 }
5284
5285 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5286 modes only exist for GCC vector types if -maltivec. */
5287 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5288 {
5289 if (TARGET_DEBUG_ARG)
5290 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
5291 return 1;
5292 }
b693336b
PB
5293
5294 /* Pass synthetic vectors in memory. */
bada2eb8 5295 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5296 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5297 {
5298 static bool warned_for_pass_big_vectors = false;
5299 if (TARGET_DEBUG_ARG)
5300 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5301 if (!warned_for_pass_big_vectors)
5302 {
d4ee4d25 5303 warning (0, "GCC vector passed by reference: "
b693336b
PB
5304 "non-standard ABI extension with no compatibility guarantee");
5305 warned_for_pass_big_vectors = true;
5306 }
5307 return 1;
5308 }
5309
b2d04ecf 5310 return 0;
4697a36c 5311}
5985c7a6
FJ
5312
5313static void
2d9db8eb 5314rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
5315{
5316 int i;
5317 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5318
5319 if (nregs == 0)
5320 return;
5321
c4ad648e 5322 for (i = 0; i < nregs; i++)
5985c7a6 5323 {
9390387d 5324 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 5325 if (reload_completed)
c4ad648e
AM
5326 {
5327 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5328 tem = NULL_RTX;
5329 else
5330 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 5331 i * GET_MODE_SIZE (reg_mode));
c4ad648e 5332 }
5985c7a6
FJ
5333 else
5334 tem = replace_equiv_address (tem, XEXP (tem, 0));
5335
37409796 5336 gcc_assert (tem);
5985c7a6
FJ
5337
5338 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5339 }
5340}
4697a36c
MM
5341\f
 5342/* Perform any actions needed for a function that is receiving a
f676971a 5343 variable number of arguments.
4697a36c
MM
5344
5345 CUM is as above.
5346
5347 MODE and TYPE are the mode and type of the current parameter.
5348
5349 PRETEND_SIZE is a variable that should be set to the amount of stack
5350 that must be pushed by the prolog to pretend that our caller pushed
5351 it.
5352
5353 Normally, this macro will push all remaining incoming registers on the
5354 stack and set PRETEND_SIZE to the length of the registers pushed. */
5355
c6e8c921 5356static void
f676971a 5357setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
5358 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5359 int no_rtl)
4697a36c 5360{
4cc833b7
RH
5361 CUMULATIVE_ARGS next_cum;
5362 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 5363 rtx save_area = NULL_RTX, mem;
dfafc897 5364 int first_reg_offset, set;
4697a36c 5365
f31bf321 5366 /* Skip the last named argument. */
d34c5b80 5367 next_cum = *cum;
594a51fe 5368 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 5369
f607bc57 5370 if (DEFAULT_ABI == ABI_V4)
d34c5b80 5371 {
5b667039
JJ
5372 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5373
60e2d0ca 5374 if (! no_rtl)
5b667039
JJ
5375 {
5376 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5377 HOST_WIDE_INT offset = 0;
5378
5379 /* Try to optimize the size of the varargs save area.
5380 The ABI requires that ap.reg_save_area is doubleword
5381 aligned, but we don't need to allocate space for all
 5382 the bytes, only those into which we will actually save
5383 anything. */
5384 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5385 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5386 if (TARGET_HARD_FLOAT && TARGET_FPRS
5387 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5388 && cfun->va_list_fpr_size)
5389 {
5390 if (gpr_reg_num)
5391 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5392 * UNITS_PER_FP_WORD;
5393 if (cfun->va_list_fpr_size
5394 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5395 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5396 else
5397 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5398 * UNITS_PER_FP_WORD;
5399 }
5400 if (gpr_reg_num)
5401 {
5402 offset = -((first_reg_offset * reg_size) & ~7);
5403 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5404 {
5405 gpr_reg_num = cfun->va_list_gpr_size;
5406 if (reg_size == 4 && (first_reg_offset & 1))
5407 gpr_reg_num++;
5408 }
5409 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5410 }
5411 else if (fpr_size)
5412 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5413 * UNITS_PER_FP_WORD
5414 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 5415
5b667039
JJ
5416 if (gpr_size + fpr_size)
5417 {
5418 rtx reg_save_area
5419 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5420 gcc_assert (GET_CODE (reg_save_area) == MEM);
5421 reg_save_area = XEXP (reg_save_area, 0);
5422 if (GET_CODE (reg_save_area) == PLUS)
5423 {
5424 gcc_assert (XEXP (reg_save_area, 0)
5425 == virtual_stack_vars_rtx);
5426 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5427 offset += INTVAL (XEXP (reg_save_area, 1));
5428 }
5429 else
5430 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5431 }
5432
5433 cfun->machine->varargs_save_offset = offset;
5434 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5435 }
4697a36c 5436 }
60e2d0ca 5437 else
4697a36c 5438 {
d34c5b80 5439 first_reg_offset = next_cum.words;
4cc833b7 5440 save_area = virtual_incoming_args_rtx;
4697a36c 5441
fe984136 5442 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 5443 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 5444 }
4697a36c 5445
dfafc897 5446 set = get_varargs_alias_set ();
9d30f3c1
JJ
5447 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5448 && cfun->va_list_gpr_size)
4cc833b7 5449 {
9d30f3c1
JJ
5450 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5451
5452 if (va_list_gpr_counter_field)
5453 {
 5454 /* V4 va_list_gpr_size counts the number of registers needed. */
5455 if (nregs > cfun->va_list_gpr_size)
5456 nregs = cfun->va_list_gpr_size;
5457 }
5458 else
5459 {
 5460 /* A char * va_list instead counts the number of bytes needed. */
5461 if (nregs > cfun->va_list_gpr_size / reg_size)
5462 nregs = cfun->va_list_gpr_size / reg_size;
5463 }
5464
dfafc897 5465 mem = gen_rtx_MEM (BLKmode,
c4ad648e 5466 plus_constant (save_area,
13e2e16e
DE
5467 first_reg_offset * reg_size));
5468 MEM_NOTRAP_P (mem) = 1;
ba4828e0 5469 set_mem_alias_set (mem, set);
8ac61af7 5470 set_mem_align (mem, BITS_PER_WORD);
dfafc897 5471
f676971a 5472 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 5473 nregs);
4697a36c
MM
5474 }
5475
4697a36c 5476 /* Save FP registers if needed. */
f607bc57 5477 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
5478 && TARGET_HARD_FLOAT && TARGET_FPRS
5479 && ! no_rtl
9d30f3c1
JJ
5480 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5481 && cfun->va_list_fpr_size)
4697a36c 5482 {
9d30f3c1 5483 int fregno = next_cum.fregno, nregs;
9ebbca7d 5484 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 5485 rtx lab = gen_label_rtx ();
5b667039
JJ
5486 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5487 * UNITS_PER_FP_WORD);
4697a36c 5488
c4ad648e
AM
5489 emit_jump_insn
5490 (gen_rtx_SET (VOIDmode,
5491 pc_rtx,
5492 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 5493 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 5494 const0_rtx),
39403d82 5495 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
5496 pc_rtx)));
5497
9d30f3c1
JJ
5498 for (nregs = 0;
5499 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 5500 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 5501 {
5496b36f 5502 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 5503 MEM_NOTRAP_P (mem) = 1;
c4ad648e 5504 set_mem_alias_set (mem, set);
94ff898d 5505 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 5506 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 5507 }
4cc833b7
RH
5508
5509 emit_label (lab);
4697a36c 5510 }
4697a36c 5511}
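/* For reference: the V.4 register save area laid out above holds at
   most the 8 argument GPRs (r3-r10) followed by the 8 argument FPRs
   (f1-f8), i.e. 32 + 64 bytes in 32-bit mode, and the code above only
   allocates the slice that the recorded va_list_gpr_size and
   va_list_fpr_size say will actually be read.  */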
4697a36c 5512
dfafc897 5513/* Create the va_list data type. */
2c4974b7 5514
c35d187f
RH
5515static tree
5516rs6000_build_builtin_va_list (void)
dfafc897 5517{
64c2816f 5518 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 5519
9ebbca7d
GK
5520 /* For AIX, prefer 'char *' because that's what the system
5521 header files like. */
f607bc57 5522 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 5523 return build_pointer_type (char_type_node);
dfafc897 5524
f1e639b1 5525 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 5526 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 5527
f676971a 5528 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 5529 unsigned_char_type_node);
f676971a 5530 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 5531 unsigned_char_type_node);
64c2816f
DT
5532 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5533 every user file. */
5534 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5535 short_unsigned_type_node);
dfafc897
FS
5536 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5537 ptr_type_node);
5538 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5539 ptr_type_node);
5540
9d30f3c1
JJ
5541 va_list_gpr_counter_field = f_gpr;
5542 va_list_fpr_counter_field = f_fpr;
5543
dfafc897
FS
5544 DECL_FIELD_CONTEXT (f_gpr) = record;
5545 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 5546 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
5547 DECL_FIELD_CONTEXT (f_ovf) = record;
5548 DECL_FIELD_CONTEXT (f_sav) = record;
5549
bab45a51
FS
5550 TREE_CHAIN (record) = type_decl;
5551 TYPE_NAME (record) = type_decl;
dfafc897
FS
5552 TYPE_FIELDS (record) = f_gpr;
5553 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
5554 TREE_CHAIN (f_fpr) = f_res;
5555 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
5556 TREE_CHAIN (f_ovf) = f_sav;
5557
5558 layout_type (record);
5559
5560 /* The correct type is an array type of one element. */
5561 return build_array_type (record, build_index_type (size_zero_node));
5562}
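/* For the V.4 case the record built above corresponds roughly to the
   following C declaration (a sketch; the field names mirror the
   FIELD_DECLs created above):

     typedef struct __va_list_tag
     {
       unsigned char gpr;
       unsigned char fpr;
       unsigned short reserved;
       void *overflow_arg_area;
       void *reg_save_area;
     } __gnuc_va_list[1];

   GPR and FPR count the argument registers consumed so far,
   OVERFLOW_ARG_AREA points at arguments that spilled to the stack,
   and REG_SAVE_AREA at the block filled in by
   setup_incoming_varargs.  */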
5563
5564/* Implement va_start. */
5565
5566void
a2369ed3 5567rs6000_va_start (tree valist, rtx nextarg)
4697a36c 5568{
dfafc897 5569 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 5570 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 5571 tree gpr, fpr, ovf, sav, t;
2c4974b7 5572
dfafc897 5573 /* Only SVR4 needs something special. */
f607bc57 5574 if (DEFAULT_ABI != ABI_V4)
dfafc897 5575 {
e5faf155 5576 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
5577 return;
5578 }
5579
973a648b 5580 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 5581 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
5582 f_res = TREE_CHAIN (f_fpr);
5583 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
5584 f_sav = TREE_CHAIN (f_ovf);
5585
872a65b5 5586 valist = build_va_arg_indirect_ref (valist);
44de5aeb
RK
5587 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5588 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5589 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5590 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
5591
5592 /* Count number of gp and fp argument registers used. */
4cc833b7 5593 words = current_function_args_info.words;
987732e0
DE
5594 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5595 GP_ARG_NUM_REG);
5596 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5597 FP_ARG_NUM_REG);
dfafc897
FS
5598
5599 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
5600 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5601 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5602 words, n_gpr, n_fpr);
dfafc897 5603
9d30f3c1
JJ
5604 if (cfun->va_list_gpr_size)
5605 {
5606 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5607 build_int_cst (NULL_TREE, n_gpr));
5608 TREE_SIDE_EFFECTS (t) = 1;
5609 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5610 }
58c8adc1 5611
9d30f3c1
JJ
5612 if (cfun->va_list_fpr_size)
5613 {
5614 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5615 build_int_cst (NULL_TREE, n_fpr));
5616 TREE_SIDE_EFFECTS (t) = 1;
5617 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5618 }
dfafc897
FS
5619
5620 /* Find the overflow area. */
5621 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5622 if (words != 0)
5623 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
7d60be94 5624 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
dfafc897
FS
5625 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5626 TREE_SIDE_EFFECTS (t) = 1;
5627 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5628
9d30f3c1
JJ
5629 /* If there were no va_arg invocations, don't set up the register
5630 save area. */
5631 if (!cfun->va_list_gpr_size
5632 && !cfun->va_list_fpr_size
5633 && n_gpr < GP_ARG_NUM_REG
5634 && n_fpr < FP_ARG_V4_MAX_REG)
5635 return;
5636
dfafc897
FS
5637 /* Find the register save area. */
5638 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039
JJ
5639 if (cfun->machine->varargs_save_offset)
5640 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5641 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
dfafc897
FS
5642 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5643 TREE_SIDE_EFFECTS (t) = 1;
5644 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5645}
5646
5647/* Implement va_arg. */
5648
23a60a04
JM
5649tree
5650rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 5651{
cd3ce9b4
JM
5652 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5653 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 5654 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
5655 tree lab_false, lab_over, addr;
5656 int align;
5657 tree ptrtype = build_pointer_type (type);
5658
08b0dc1b
RH
5659 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5660 {
5661 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 5662 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
5663 }
5664
cd3ce9b4
JM
5665 if (DEFAULT_ABI != ABI_V4)
5666 {
08b0dc1b 5667 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
5668 {
5669 tree elem_type = TREE_TYPE (type);
5670 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5671 int elem_size = GET_MODE_SIZE (elem_mode);
5672
5673 if (elem_size < UNITS_PER_WORD)
5674 {
23a60a04 5675 tree real_part, imag_part;
cd3ce9b4
JM
5676 tree post = NULL_TREE;
5677
23a60a04
JM
5678 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5679 &post);
5680 /* Copy the value into a temporary, lest the formal temporary
5681 be reused out from under us. */
5682 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
5683 append_to_statement_list (post, pre_p);
5684
23a60a04
JM
5685 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5686 post_p);
cd3ce9b4 5687
23a60a04 5688 return build (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
5689 }
5690 }
5691
23a60a04 5692 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
5693 }
5694
5695 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5696 f_fpr = TREE_CHAIN (f_gpr);
5697 f_res = TREE_CHAIN (f_fpr);
5698 f_ovf = TREE_CHAIN (f_res);
5699 f_sav = TREE_CHAIN (f_ovf);
5700
872a65b5 5701 valist = build_va_arg_indirect_ref (valist);
44de5aeb
RK
5702 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5703 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5704 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5705 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
5706
5707 size = int_size_in_bytes (type);
5708 rsize = (size + 3) / 4;
5709 align = 1;
5710
08b0dc1b
RH
5711 if (TARGET_HARD_FLOAT && TARGET_FPRS
5712 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
cd3ce9b4
JM
5713 {
5714 /* FP args go in FP registers, if present. */
cd3ce9b4
JM
5715 reg = fpr;
5716 n_reg = 1;
5717 sav_ofs = 8*4;
5718 sav_scale = 8;
5719 if (TYPE_MODE (type) == DFmode)
5720 align = 8;
5721 }
5722 else
5723 {
5724 /* Otherwise into GP registers. */
cd3ce9b4
JM
5725 reg = gpr;
5726 n_reg = rsize;
5727 sav_ofs = 0;
5728 sav_scale = 4;
5729 if (n_reg == 2)
5730 align = 8;
5731 }
5732
5733 /* Pull the value out of the saved registers.... */
5734
5735 lab_over = NULL;
5736 addr = create_tmp_var (ptr_type_node, "addr");
5737 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5738
5739 /* AltiVec vectors never go in registers when -mabi=altivec. */
5740 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5741 align = 16;
5742 else
5743 {
5744 lab_false = create_artificial_label ();
5745 lab_over = create_artificial_label ();
5746
5747 /* Long long and SPE vectors are aligned in the registers.
 5748 As is any other 2-gpr item, such as complex int, due to a
5749 historical mistake. */
5750 u = reg;
5751 if (n_reg == 2)
5752 {
5753 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
95674810 5754 size_int (n_reg - 1));
cd3ce9b4
JM
5755 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5756 }
5757
95674810 5758 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
5759 t = build2 (GE_EXPR, boolean_type_node, u, t);
5760 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5761 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5762 gimplify_and_add (t, pre_p);
5763
5764 t = sav;
5765 if (sav_ofs)
95674810 5766 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 5767
95674810 5768 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
cd3ce9b4 5769 u = build1 (CONVERT_EXPR, integer_type_node, u);
95674810 5770 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
cd3ce9b4
JM
5771 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5772
5773 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5774 gimplify_and_add (t, pre_p);
5775
5776 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5777 gimplify_and_add (t, pre_p);
5778
5779 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5780 append_to_statement_list (t, pre_p);
5781
5782 if (n_reg > 2)
5783 {
5784 /* Ensure that we don't find any more args in regs.
5785 Alignment has taken care of the n_reg == 2 case. */
95674810 5786 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
5787 gimplify_and_add (t, pre_p);
5788 }
5789 }
5790
5791 /* ... otherwise out of the overflow area. */
5792
5793 /* Care for on-stack alignment if needed. */
5794 t = ovf;
5795 if (align != 1)
5796 {
95674810 5797 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
4a90aeeb 5798 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
7d60be94 5799 build_int_cst (NULL_TREE, -align));
cd3ce9b4
JM
5800 }
5801 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5802
5803 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5804 gimplify_and_add (u, pre_p);
5805
95674810 5806 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
cd3ce9b4
JM
5807 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5808 gimplify_and_add (t, pre_p);
5809
5810 if (lab_over)
5811 {
5812 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5813 append_to_statement_list (t, pre_p);
5814 }
5815
08b0dc1b 5816 addr = fold_convert (ptrtype, addr);
872a65b5 5817 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
5818}
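/* Ignoring the even/odd register rounding done for 8-byte items, the
   V.4 path above gimplifies va_arg for a non-vector TYPE into roughly
   the following pseudo-C (a sketch; REG is the gpr or fpr counter
   chosen above, SAV and OVF the corresponding va_list fields):

     if (reg + n_reg <= 8)
       addr = sav + sav_ofs + reg * sav_scale, reg += n_reg;
     else
       {
         if (n_reg > 2)
           reg = 8;
         addr = (ovf + align - 1) & -align;
         ovf = addr + size;
       }
     result = *(TYPE *) addr;
   */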
5819
0ac081f6
AH
5820/* Builtins. */
5821
58646b77
PB
5822static void
5823def_builtin (int mask, const char *name, tree type, int code)
5824{
5825 if (mask & target_flags)
5826 {
5827 if (rs6000_builtin_decls[code])
5828 abort ();
5829
5830 rs6000_builtin_decls[code] =
5831 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5832 NULL, NULL_TREE);
5833 }
5834}
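/* For example, the AltiVec initialization code later in this file
   walks the bdesc_* tables below and registers each entry with a call
   along the lines of (a sketch, not a literal quote):

     def_builtin (d->mask, d->name, v4sf_ftype_v4sf_v4sf, d->code);

   where D points at a struct builtin_description and the _ftype_
   node matches that builtin's prototype.  */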
0ac081f6 5835
24408032
AH
5836/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5837
2212663f 5838static const struct builtin_description bdesc_3arg[] =
24408032
AH
5839{
5840 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5841 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5842 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5843 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5844 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5845 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5846 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5847 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5848 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5849 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 5850 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
5851 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5852 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5853 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5854 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5855 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5856 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5857 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5858 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5859 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5860 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5861 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5862 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
5863
5864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 5879};
2212663f 5880
95385cbb
AH
5881/* DST operations: void foo (void *, const int, const char). */
5882
5883static const struct builtin_description bdesc_dst[] =
5884{
5885 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5886 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5887 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
5888 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5889
5890 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5891 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5892 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5893 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
5894};
5895
2212663f 5896/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 5897
a3170dc6 5898static struct builtin_description bdesc_2arg[] =
0ac081f6 5899{
f18c054f
DB
5900 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5901 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5902 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5903 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
5904 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5905 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5906 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5907 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5908 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5909 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5910 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 5911 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 5912 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
5913 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5914 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5915 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5916 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5918 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
5919 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5920 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
5921 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5922 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5923 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5924 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5925 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5926 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5927 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5928 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5929 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5930 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5931 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5932 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5933 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
5934 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5935 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
5936 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5937 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
5938 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5939 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5940 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5941 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5942 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
5943 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5944 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5945 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5946 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5947 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5948 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
5949 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5950 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5951 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5952 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5953 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5954 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5955 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
5956 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5957 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5958 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5959 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5960 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5961 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5962 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5963 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 5964 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 5965 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
5966 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5967 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5968 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5969 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5970 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5971 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5972 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5973 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5974 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5975 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5976 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5977 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5982 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5983 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5984 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
5985 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5986 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
5988 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5989 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5990 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5991 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5992 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5993 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
5994 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5995 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
5996 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5997 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5998 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5999 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6000 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6001 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6002 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6003 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6004 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6005 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6007 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6008 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6009 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6010 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6011 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6012 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6013
58646b77
PB
6014 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6015 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6016 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6017 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6018 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6019 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6020 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6021 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6022 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6023 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6024 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6025 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6026 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6027 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6028 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6029 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6030 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6031 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6032 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6033 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6034 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6040 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6041 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6042 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6043 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6044 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6045 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6046 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6047 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6048 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6049 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6050 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6051 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6052 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6053 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6054 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6055 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6056 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6057 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6058 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6059 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6060 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6061 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6062 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6063 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6064 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6065 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6066 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6067 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6068 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6069 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6070 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6071 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6072 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6073 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6074 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6075 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6076 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6077 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6078 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6079 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6080 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6081 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6082 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6141
6142  /* Place-holder.  Leave as first SPE builtin.  */
6143 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6144 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6145 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6146 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6147 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6148 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6149 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6150 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6151 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6152 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6153 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6154 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6155 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6156 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6157 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6158 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6159 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6160 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6161 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6162 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6163 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6164 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6165 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6166 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6167 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6168 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6169 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6170 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6171 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6172 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6173 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6174 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6175 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6176 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6177 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6178 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6179 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6180 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6181 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6182 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6183 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6184 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6185 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6186 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6187 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6188 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6189 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6190 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6191 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6192 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6193 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6194 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6195 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6196 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6197 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6198 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6199 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6200 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6201 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6202 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6203 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6204 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6205 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6206 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6207 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6208 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6209 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6210 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6211 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6212 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6213 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6214 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6215 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6216 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
6217 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6218 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
6219 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6220 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6221 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6222 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6223 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6224 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6225 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6226 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6227 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6228 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6229 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6230 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6231 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6232 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6233 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6234 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6235 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6236 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6237 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6238 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6239 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6240 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6241 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6242 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6243 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6244 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6245 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6246 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6247 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6248 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6249 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6250 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6251 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6252
6253 /* SPE binary operations expecting a 5-bit unsigned literal. */
6254 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6255
6256 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6257 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6258 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6259 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6260 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6261 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6262 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6263 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6264 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6265 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6266 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6267 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6268 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6269 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6270 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6271 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6272 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6273 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6274 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6275 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6276 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6277 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6278 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6279 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6280 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6281 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6282
6283 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 6284 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
6285};
6286
6287/* AltiVec predicates. */
6288
6289struct builtin_description_predicates
6290{
6291 const unsigned int mask;
6292 const enum insn_code icode;
6293 const char *opcode;
6294 const char *const name;
6295 const enum rs6000_builtins code;
6296};
6297
6298static const struct builtin_description_predicates bdesc_altivec_preds[] =
6299{
6300 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6301 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6302 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6303 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6304 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6305 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6306 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6307 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6308 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6309 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6310 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6311 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
6312 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6313
6314 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6315 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6316 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 6317};
24408032 6318
6319/* SPE predicates. */
6320static struct builtin_description bdesc_spe_predicates[] =
6321{
6322 /* Place-holder. Leave as first. */
6323 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6324 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6325 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6326 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6327 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6328 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6329 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6330 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6331 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6332 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6333 /* Place-holder. Leave as last. */
6334 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6335};
6336
6337/* SPE evsel predicates. */
6338static struct builtin_description bdesc_spe_evsel[] =
6339{
6340 /* Place-holder. Leave as first. */
6341 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6342 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6343 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6344 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6345 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6346 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6347 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6348 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6349 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6350 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6351 /* Place-holder. Leave as last. */
6352 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6353};
6354
b6d08ca1 6355/* ABS* operations. */
6356
6357static const struct builtin_description bdesc_abs[] =
6358{
6359 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6360 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6361 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6362 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6363 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6364 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6365 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6366};
6367
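/* For reference, a minimal user-level sketch of how the ABS entries above are
   reached.  This is illustrative only: it assumes GCC with -maltivec and
   <altivec.h>, and the function name is made up, not part of this file.  */
#include <altivec.h>

vector signed int
abs_v4si_example (vector signed int v)
{
  /* vec_abs on a V4SI operand ends up at __builtin_altivec_abs_v4si, which
     the bdesc_abs entry above ties to the absv4si2 pattern.  */
  return vec_abs (v);
}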
6368/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6369 foo (VECa). */
24408032 6370
a3170dc6 6371static struct builtin_description bdesc_1arg[] =
2212663f 6372{
6373 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6374 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6375 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6376 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6377 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6378 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6379 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6380 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6381 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6382 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6383 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6384 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6385 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6386 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6387 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6388 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6389 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 6390
6391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6410
6411 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6412 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6413 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6414 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6415 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6416 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6417 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6418 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6419 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6420 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6421 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6422 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6423 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6424 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6425 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6426 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6427 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6428 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6429 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6430 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6431 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6432 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6433 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6434 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6435 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 6436 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6437 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6438 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6439 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6440 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6441
6442 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 6443 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
6444};
6445
6446static rtx
a2369ed3 6447rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6448{
6449 rtx pat;
6450 tree arg0 = TREE_VALUE (arglist);
6451 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6452 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6453 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6454
6455 if (icode == CODE_FOR_nothing)
6456 /* Builtin not supported on this processor. */
6457 return 0;
6458
6459   /* If we got invalid arguments, bail out before generating bad rtl.  */
6460 if (arg0 == error_mark_node)
9a171fcd 6461 return const0_rtx;
20e26713 6462
6463 if (icode == CODE_FOR_altivec_vspltisb
6464 || icode == CODE_FOR_altivec_vspltish
6465 || icode == CODE_FOR_altivec_vspltisw
6466 || icode == CODE_FOR_spe_evsplatfi
6467 || icode == CODE_FOR_spe_evsplati)
6468 {
6469 /* Only allow 5-bit *signed* literals. */
b44140e7 6470 if (GET_CODE (op0) != CONST_INT
6471 || INTVAL (op0) > 15
6472 || INTVAL (op0) < -16)
6473 {
6474 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 6475 return const0_rtx;
b44140e7 6476 }
6477 }
6478
c62f2db5 6479 if (target == 0
6480 || GET_MODE (target) != tmode
6481 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6482 target = gen_reg_rtx (tmode);
6483
6484 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6485 op0 = copy_to_mode_reg (mode0, op0);
6486
6487 pat = GEN_FCN (icode) (target, op0);
6488 if (! pat)
6489 return 0;
6490 emit_insn (pat);
0ac081f6 6491
6492 return target;
6493}
ae4b4a02 6494
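/* Illustrative sketch of the 5-bit *signed* literal check above.  Assumes
   GCC with -maltivec and <altivec.h>; the function name is made up.  */
#include <altivec.h>

vector signed char
splat_minus_seven (void)
{
  /* vec_splat_s8 expands through the vspltisb path guarded above, so its
     argument must be a compile-time constant in [-16, 15]; something like
     vec_splat_s8 (99) is rejected with
     "argument 1 must be a 5-bit signed literal".  */
  return vec_splat_s8 (-7);
}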
100c4561 6495static rtx
a2369ed3 6496altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6497{
6498 rtx pat, scratch1, scratch2;
6499 tree arg0 = TREE_VALUE (arglist);
6500 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6501 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6502 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6503
6504 /* If we have invalid arguments, bail out before generating bad rtl. */
6505 if (arg0 == error_mark_node)
9a171fcd 6506 return const0_rtx;
6507
6508 if (target == 0
6509 || GET_MODE (target) != tmode
6510 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6511 target = gen_reg_rtx (tmode);
6512
6513 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6514 op0 = copy_to_mode_reg (mode0, op0);
6515
6516 scratch1 = gen_reg_rtx (mode0);
6517 scratch2 = gen_reg_rtx (mode0);
6518
6519 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6520 if (! pat)
6521 return 0;
6522 emit_insn (pat);
6523
6524 return target;
6525}
6526
0ac081f6 6527static rtx
a2369ed3 6528rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6529{
6530 rtx pat;
6531 tree arg0 = TREE_VALUE (arglist);
6532 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6533 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6534 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6535 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6536 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6537 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6538
6539 if (icode == CODE_FOR_nothing)
6540 /* Builtin not supported on this processor. */
6541 return 0;
6542
6543   /* If we got invalid arguments, bail out before generating bad rtl.  */
6544 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6545 return const0_rtx;
20e26713 6546
6547 if (icode == CODE_FOR_altivec_vcfux
6548 || icode == CODE_FOR_altivec_vcfsx
6549 || icode == CODE_FOR_altivec_vctsxs
6550 || icode == CODE_FOR_altivec_vctuxs
6551 || icode == CODE_FOR_altivec_vspltb
6552 || icode == CODE_FOR_altivec_vsplth
6553 || icode == CODE_FOR_altivec_vspltw
6554 || icode == CODE_FOR_spe_evaddiw
6555 || icode == CODE_FOR_spe_evldd
6556 || icode == CODE_FOR_spe_evldh
6557 || icode == CODE_FOR_spe_evldw
6558 || icode == CODE_FOR_spe_evlhhesplat
6559 || icode == CODE_FOR_spe_evlhhossplat
6560 || icode == CODE_FOR_spe_evlhhousplat
6561 || icode == CODE_FOR_spe_evlwhe
6562 || icode == CODE_FOR_spe_evlwhos
6563 || icode == CODE_FOR_spe_evlwhou
6564 || icode == CODE_FOR_spe_evlwhsplat
6565 || icode == CODE_FOR_spe_evlwwsplat
6566 || icode == CODE_FOR_spe_evrlwi
6567 || icode == CODE_FOR_spe_evslwi
6568 || icode == CODE_FOR_spe_evsrwis
f5119d10 6569 || icode == CODE_FOR_spe_evsubifw
0559cc77 6570 || icode == CODE_FOR_spe_evsrwiu)
6571 {
6572 /* Only allow 5-bit unsigned literals. */
8bb418a3 6573 STRIP_NOPS (arg1);
6574 if (TREE_CODE (arg1) != INTEGER_CST
6575 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6576 {
6577 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 6578 return const0_rtx;
b44140e7 6579 }
6580 }
6581
c62f2db5 6582 if (target == 0
6583 || GET_MODE (target) != tmode
6584 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6585 target = gen_reg_rtx (tmode);
6586
6587 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6588 op0 = copy_to_mode_reg (mode0, op0);
6589 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6590 op1 = copy_to_mode_reg (mode1, op1);
6591
6592 pat = GEN_FCN (icode) (target, op0, op1);
6593 if (! pat)
6594 return 0;
6595 emit_insn (pat);
6596
6597 return target;
6598}
6525c0e7 6599
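/* Illustrative sketch of the 5-bit unsigned literal check above.  Assumes
   GCC with -maltivec and <altivec.h>; names are made up.  */
#include <altivec.h>

vector float
fixed_point_to_float (vector signed int v)
{
  /* vec_ctf on a signed operand goes through CODE_FOR_altivec_vcfsx, so the
     scale operand must be a constant 0..31; a value such as 40 triggers
     "argument 2 must be a 5-bit unsigned literal".  */
  return vec_ctf (v, 2);
}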
ae4b4a02 6600static rtx
f676971a 6601altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
a2369ed3 6602 tree arglist, rtx target)
6603{
6604 rtx pat, scratch;
6605 tree cr6_form = TREE_VALUE (arglist);
6606 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6607 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6608 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6609 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6610 enum machine_mode tmode = SImode;
6611 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6612 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6613 int cr6_form_int;
6614
6615 if (TREE_CODE (cr6_form) != INTEGER_CST)
6616 {
6617 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 6618 return const0_rtx;
6619 }
6620 else
6621 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6622
37409796 6623 gcc_assert (mode0 == mode1);
6624
6625 /* If we have invalid arguments, bail out before generating bad rtl. */
6626 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6627 return const0_rtx;
6628
6629 if (target == 0
6630 || GET_MODE (target) != tmode
6631 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6632 target = gen_reg_rtx (tmode);
6633
6634 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6635 op0 = copy_to_mode_reg (mode0, op0);
6636 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6637 op1 = copy_to_mode_reg (mode1, op1);
6638
6639 scratch = gen_reg_rtx (mode0);
6640
6641 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 6642 gen_rtx_SYMBOL_REF (Pmode, opcode));
6643 if (! pat)
6644 return 0;
6645 emit_insn (pat);
6646
6647 /* The vec_any* and vec_all* predicates use the same opcodes for two
6648 different operations, but the bits in CR6 will be different
6649 depending on what information we want. So we have to play tricks
6650 with CR6 to get the right bits out.
6651
6652 If you think this is disgusting, look at the specs for the
6653 AltiVec predicates. */
6654
6655 switch (cr6_form_int)
6656 {
6657 case 0:
6658 emit_insn (gen_cr6_test_for_zero (target));
6659 break;
6660 case 1:
6661 emit_insn (gen_cr6_test_for_zero_reverse (target));
6662 break;
6663 case 2:
6664 emit_insn (gen_cr6_test_for_lt (target));
6665 break;
6666 case 3:
6667 emit_insn (gen_cr6_test_for_lt_reverse (target));
6668 break;
6669 default:
6670 error ("argument 1 of __builtin_altivec_predicate is out of range");
6671 break;
6672 }
6673
6674 return target;
6675}
6676
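/* User-level sketch of the CR6 handling described above.  Assumes GCC with
   -maltivec and <altivec.h>; the function names are illustrative.  vec_all_*
   and vec_any_* share the same dotted compare instruction and differ only in
   which CR6 test is emitted afterwards.  */
#include <altivec.h>

int
all_positive_p (vector signed int v)
{
  return vec_all_gt (v, vec_splat_s32 (0));
}

int
any_equal_p (vector signed int a, vector signed int b)
{
  return vec_any_eq (a, b);
}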
b4a62fa0 6677static rtx
38f391a5 6678altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6679{
6680 rtx pat, addr;
6681 tree arg0 = TREE_VALUE (arglist);
6682 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6683 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6684 enum machine_mode mode0 = Pmode;
6685 enum machine_mode mode1 = Pmode;
6686 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6687 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6688
6689 if (icode == CODE_FOR_nothing)
6690 /* Builtin not supported on this processor. */
6691 return 0;
6692
6693   /* If we got invalid arguments, bail out before generating bad rtl.  */
6694 if (arg0 == error_mark_node || arg1 == error_mark_node)
6695 return const0_rtx;
6696
6697 if (target == 0
6698 || GET_MODE (target) != tmode
6699 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6700 target = gen_reg_rtx (tmode);
6701
f676971a 6702 op1 = copy_to_mode_reg (mode1, op1);
6703
6704 if (op0 == const0_rtx)
6705 {
6706 addr = gen_rtx_MEM (tmode, op1);
6707 }
6708 else
6709 {
6710 op0 = copy_to_mode_reg (mode0, op0);
6711 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6712 }
6713
6714 pat = GEN_FCN (icode) (target, addr);
6715
6716 if (! pat)
6717 return 0;
6718 emit_insn (pat);
6719
6720 return target;
6721}
6722
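/* Illustrative use of the lvx-style loads expanded above.  Assumes GCC with
   -maltivec and <altivec.h>; the byte offset in the first argument plays the
   role of op0 and is added to the pointer, matching the PLUS address formed
   above when the offset is non-zero.  */
#include <altivec.h>

vector float
load_second_block (const float *p)
{
  return vec_ld (16, p);   /* 16-byte load from p + 16 bytes.  */
}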
6723static rtx
6724spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6725{
6726 tree arg0 = TREE_VALUE (arglist);
6727 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6728 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6729 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6730 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6731 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6732 rtx pat;
6733 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6734 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6735 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6736
6737   /* Invalid arguments.  Bail before doing anything stupid!  */
6738 if (arg0 == error_mark_node
6739 || arg1 == error_mark_node
6740 || arg2 == error_mark_node)
6741 return const0_rtx;
6742
6743 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6744 op0 = copy_to_mode_reg (mode2, op0);
6745 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6746 op1 = copy_to_mode_reg (mode0, op1);
6747 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6748 op2 = copy_to_mode_reg (mode1, op2);
6749
6750 pat = GEN_FCN (icode) (op1, op2, op0);
6751 if (pat)
6752 emit_insn (pat);
6753 return NULL_RTX;
6754}
6755
6525c0e7 6756static rtx
a2369ed3 6757altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6758{
6759 tree arg0 = TREE_VALUE (arglist);
6760 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6761 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6762 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6763 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6764 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6765 rtx pat, addr;
6766 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6767 enum machine_mode mode1 = Pmode;
6768 enum machine_mode mode2 = Pmode;
6769
6770   /* Invalid arguments.  Bail before doing anything stupid!  */
6771 if (arg0 == error_mark_node
6772 || arg1 == error_mark_node
6773 || arg2 == error_mark_node)
9a171fcd 6774 return const0_rtx;
6525c0e7 6775
6776 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6777 op0 = copy_to_mode_reg (tmode, op0);
6778
f676971a 6779 op2 = copy_to_mode_reg (mode2, op2);
6780
6781 if (op1 == const0_rtx)
6782 {
6783 addr = gen_rtx_MEM (tmode, op2);
6784 }
6785 else
6786 {
6787 op1 = copy_to_mode_reg (mode1, op1);
6788 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6789 }
6525c0e7 6790
b4a62fa0 6791 pat = GEN_FCN (icode) (addr, op0);
6792 if (pat)
6793 emit_insn (pat);
6794 return NULL_RTX;
6795}
6796
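/* Companion sketch for the stvx-style stores expanded above, under the same
   assumptions (-maltivec, <altivec.h>, illustrative names).  A zero offset
   stores straight through the pointer; a non-zero offset is added to it.  */
#include <altivec.h>

void
store_block (vector float v, float *p)
{
  vec_st (v, 0, p);
}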
2212663f 6797static rtx
a2369ed3 6798rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6799{
6800 rtx pat;
6801 tree arg0 = TREE_VALUE (arglist);
6802 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6803 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6804 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6805 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6806 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6807 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6808 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6809 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6810 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 6811
6812 if (icode == CODE_FOR_nothing)
6813 /* Builtin not supported on this processor. */
6814 return 0;
6815
6816   /* If we got invalid arguments, bail out before generating bad rtl.  */
6817 if (arg0 == error_mark_node
6818 || arg1 == error_mark_node
6819 || arg2 == error_mark_node)
9a171fcd 6820 return const0_rtx;
20e26713 6821
6822 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6823 || icode == CODE_FOR_altivec_vsldoi_v4si
6824 || icode == CODE_FOR_altivec_vsldoi_v8hi
6825 || icode == CODE_FOR_altivec_vsldoi_v16qi)
6826 {
6827 /* Only allow 4-bit unsigned literals. */
8bb418a3 6828 STRIP_NOPS (arg2);
6829 if (TREE_CODE (arg2) != INTEGER_CST
6830 || TREE_INT_CST_LOW (arg2) & ~0xf)
6831 {
6832 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 6833 return const0_rtx;
b44140e7 6834 }
6835 }
6836
c62f2db5 6837 if (target == 0
6838 || GET_MODE (target) != tmode
6839 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6840 target = gen_reg_rtx (tmode);
6841
6842 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6843 op0 = copy_to_mode_reg (mode0, op0);
6844 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6845 op1 = copy_to_mode_reg (mode1, op1);
6846 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6847 op2 = copy_to_mode_reg (mode2, op2);
6848
6849 pat = GEN_FCN (icode) (target, op0, op1, op2);
6850 if (! pat)
6851 return 0;
6852 emit_insn (pat);
6853
6854 return target;
6855}
92898235 6856
3a9b8c7e 6857/* Expand the lvx builtins. */
0ac081f6 6858static rtx
a2369ed3 6859altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 6860{
6861 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6862 tree arglist = TREE_OPERAND (exp, 1);
0ac081f6 6863 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6864 tree arg0;
6865 enum machine_mode tmode, mode0;
7c3abc73 6866 rtx pat, op0;
3a9b8c7e 6867 enum insn_code icode;
92898235 6868
6869 switch (fcode)
6870 {
f18c054f 6871 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 6872 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 6873 break;
f18c054f 6874 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 6875 icode = CODE_FOR_altivec_lvx_v8hi;
6876 break;
6877 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 6878 icode = CODE_FOR_altivec_lvx_v4si;
6879 break;
6880 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 6881 icode = CODE_FOR_altivec_lvx_v4sf;
6882 break;
6883 default:
6884 *expandedp = false;
6885 return NULL_RTX;
6886 }
0ac081f6 6887
3a9b8c7e 6888 *expandedp = true;
f18c054f 6889
6890 arg0 = TREE_VALUE (arglist);
6891 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6892 tmode = insn_data[icode].operand[0].mode;
6893 mode0 = insn_data[icode].operand[1].mode;
f18c054f 6894
6895 if (target == 0
6896 || GET_MODE (target) != tmode
6897 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6898 target = gen_reg_rtx (tmode);
24408032 6899
6900 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6901 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 6902
6903 pat = GEN_FCN (icode) (target, op0);
6904 if (! pat)
6905 return 0;
6906 emit_insn (pat);
6907 return target;
6908}
f18c054f 6909
6910/* Expand the stvx builtins. */
6911static rtx
f676971a 6912altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 6913 bool *expandedp)
6914{
6915 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6916 tree arglist = TREE_OPERAND (exp, 1);
6917 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6918 tree arg0, arg1;
6919 enum machine_mode mode0, mode1;
7c3abc73 6920 rtx pat, op0, op1;
3a9b8c7e 6921 enum insn_code icode;
f18c054f 6922
6923 switch (fcode)
6924 {
6925 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 6926 icode = CODE_FOR_altivec_stvx_v16qi;
6927 break;
6928 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 6929 icode = CODE_FOR_altivec_stvx_v8hi;
6930 break;
6931 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 6932 icode = CODE_FOR_altivec_stvx_v4si;
6933 break;
6934 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 6935 icode = CODE_FOR_altivec_stvx_v4sf;
6936 break;
6937 default:
6938 *expandedp = false;
6939 return NULL_RTX;
6940 }
24408032 6941
6942 arg0 = TREE_VALUE (arglist);
6943 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6944 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6945 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6946 mode0 = insn_data[icode].operand[0].mode;
6947 mode1 = insn_data[icode].operand[1].mode;
f18c054f 6948
6949 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6950 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6951 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6952 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 6953
6954 pat = GEN_FCN (icode) (op0, op1);
6955 if (pat)
6956 emit_insn (pat);
f18c054f 6957
6958 *expandedp = true;
6959 return NULL_RTX;
6960}
f18c054f 6961
6962/* Expand the dst builtins. */
6963static rtx
f676971a 6964altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 6965 bool *expandedp)
6966{
6967 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6968 tree arglist = TREE_OPERAND (exp, 1);
6969 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6970 tree arg0, arg1, arg2;
6971 enum machine_mode mode0, mode1, mode2;
7c3abc73 6972 rtx pat, op0, op1, op2;
3a9b8c7e 6973 struct builtin_description *d;
a3170dc6 6974 size_t i;
f18c054f 6975
3a9b8c7e 6976 *expandedp = false;
f18c054f 6977
6978 /* Handle DST variants. */
6979 d = (struct builtin_description *) bdesc_dst;
6980 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6981 if (d->code == fcode)
6982 {
6983 arg0 = TREE_VALUE (arglist);
6984 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6985 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6986 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6987 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6988 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6989 mode0 = insn_data[d->icode].operand[0].mode;
6990 mode1 = insn_data[d->icode].operand[1].mode;
6991 mode2 = insn_data[d->icode].operand[2].mode;
24408032 6992
6993 /* Invalid arguments, bail out before generating bad rtl. */
6994 if (arg0 == error_mark_node
6995 || arg1 == error_mark_node
6996 || arg2 == error_mark_node)
6997 return const0_rtx;
f18c054f 6998
86e7df90 6999 *expandedp = true;
8bb418a3 7000 STRIP_NOPS (arg2);
7001 if (TREE_CODE (arg2) != INTEGER_CST
7002 || TREE_INT_CST_LOW (arg2) & ~0x3)
7003 {
9e637a26 7004 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
7005 return const0_rtx;
7006 }
f18c054f 7007
3a9b8c7e 7008 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7009 op0 = copy_to_mode_reg (Pmode, op0);
7010 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7011 op1 = copy_to_mode_reg (mode1, op1);
24408032 7012
7013 pat = GEN_FCN (d->icode) (op0, op1, op2);
7014 if (pat != 0)
7015 emit_insn (pat);
f18c054f 7016
7017 return NULL_RTX;
7018 }
f18c054f 7019
7020 return NULL_RTX;
7021}
24408032 7022
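/* Illustrative sketch of the DST argument check above.  Assumes GCC with
   -maltivec and <altivec.h>; names are made up.  The last argument selects
   one of four data streams, hence the 2-bit literal requirement.  */
#include <altivec.h>

void
start_prefetch (const float *p, int control)
{
  vec_dst (p, control, 0);   /* Stream id must be a literal 0..3.  */
}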
7023/* Expand vec_init builtin. */
7024static rtx
7025altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7026{
7027 enum machine_mode tmode = TYPE_MODE (type);
7028 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7029 int i, n_elt = GET_MODE_NUNITS (tmode);
7030 rtvec v = rtvec_alloc (n_elt);
7031
7032 gcc_assert (VECTOR_MODE_P (tmode));
7033
7034 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7035 {
7036 rtx x = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7037 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7038 }
7039
7040 gcc_assert (arglist == NULL);
7041
7042 if (!target || !register_operand (target, tmode))
7043 target = gen_reg_rtx (tmode);
7044
7045 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7046 return target;
7047}
7048
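/* Hedged sketch of the kind of element-wise construction that presumably
   ends up in rs6000_expand_vector_init, either through the expander above or
   through the generic vec_init patterns; the typedef and function name are
   illustrative, not part of this file.  */
typedef int v4si __attribute__ ((vector_size (16)));

v4si
build_v4si (int a, int b, int c, int d)
{
  v4si v = { a, b, c, d };
  return v;
}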
7049/* Return the integer constant in ARG. Constrain it to be in the range
7050 of the subparts of VEC_TYPE; issue an error if not. */
7051
7052static int
7053get_element_number (tree vec_type, tree arg)
7054{
7055 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7056
7057 if (!host_integerp (arg, 1)
7058 || (elt = tree_low_cst (arg, 1), elt > max))
7059 {
7060 error ("selector must be an integer constant in the range 0..%wi", max);
7061 return 0;
7062 }
7063
7064 return elt;
7065}
7066
7067/* Expand vec_set builtin. */
7068static rtx
7069altivec_expand_vec_set_builtin (tree arglist)
7070{
7071 enum machine_mode tmode, mode1;
7072 tree arg0, arg1, arg2;
7073 int elt;
7074 rtx op0, op1;
7075
7076 arg0 = TREE_VALUE (arglist);
7077 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7078 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7079
7080 tmode = TYPE_MODE (TREE_TYPE (arg0));
7081 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7082 gcc_assert (VECTOR_MODE_P (tmode));
7083
7084 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7085 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7086 elt = get_element_number (TREE_TYPE (arg0), arg2);
7087
7088 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7089 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7090
7091 op0 = force_reg (tmode, op0);
7092 op1 = force_reg (mode1, op1);
7093
7094 rs6000_expand_vector_set (op0, op1, elt);
7095
7096 return op0;
7097}
7098
7099/* Expand vec_ext builtin. */
7100static rtx
7101altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7102{
7103 enum machine_mode tmode, mode0;
7104 tree arg0, arg1;
7105 int elt;
7106 rtx op0;
7107
7108 arg0 = TREE_VALUE (arglist);
7109 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7110
7111 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7112 elt = get_element_number (TREE_TYPE (arg0), arg1);
7113
7114 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7115 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7116 gcc_assert (VECTOR_MODE_P (mode0));
7117
7118 op0 = force_reg (mode0, op0);
7119
7120 if (optimize || !target || !register_operand (target, tmode))
7121 target = gen_reg_rtx (tmode);
7122
7123 rs6000_expand_vector_extract (target, op0, elt);
7124
7125 return target;
7126}
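
A companion sketch for the set/extract builtins registered later in altivec_init_builtins; the element selector must be an integer constant in range, as get_element_number above enforces (illustrative only, requires -maltivec):

/* Illustrative only; not part of rs6000.c.  */
int
set_then_extract (__vector signed int v, int x)
{
  v = __builtin_vec_set_v4si (v, x, 3);   /* write element 3 */
  return __builtin_vec_ext_v4si (v, 0);   /* read element 0 */
}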
7127
3a9b8c7e
AH
7128/* Expand the builtin in EXP and store the result in TARGET. Store
7129 true in *EXPANDEDP if we found a builtin to expand. */
7130static rtx
a2369ed3 7131altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e
AH
7132{
7133 struct builtin_description *d;
7134 struct builtin_description_predicates *dp;
7135 size_t i;
7136 enum insn_code icode;
7137 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7138 tree arglist = TREE_OPERAND (exp, 1);
7c3abc73
AH
7139 tree arg0;
7140 rtx op0, pat;
7141 enum machine_mode tmode, mode0;
3a9b8c7e 7142 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7143
58646b77
PB
7144 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7145 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7146 {
7147 *expandedp = true;
ea40ba9c 7148 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7149 return const0_rtx;
7150 }
7151
3a9b8c7e
AH
7152 target = altivec_expand_ld_builtin (exp, target, expandedp);
7153 if (*expandedp)
7154 return target;
0ac081f6 7155
3a9b8c7e
AH
7156 target = altivec_expand_st_builtin (exp, target, expandedp);
7157 if (*expandedp)
7158 return target;
7159
7160 target = altivec_expand_dst_builtin (exp, target, expandedp);
7161 if (*expandedp)
7162 return target;
7163
7164 *expandedp = true;
95385cbb 7165
3a9b8c7e
AH
7166 switch (fcode)
7167 {
6525c0e7
AH
7168 case ALTIVEC_BUILTIN_STVX:
7169 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7170 case ALTIVEC_BUILTIN_STVEBX:
7171 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7172 case ALTIVEC_BUILTIN_STVEHX:
7173 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7174 case ALTIVEC_BUILTIN_STVEWX:
7175 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7176 case ALTIVEC_BUILTIN_STVXL:
7177 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3a9b8c7e 7178
95385cbb
AH
7179 case ALTIVEC_BUILTIN_MFVSCR:
7180 icode = CODE_FOR_altivec_mfvscr;
7181 tmode = insn_data[icode].operand[0].mode;
7182
7183 if (target == 0
7184 || GET_MODE (target) != tmode
7185 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7186 target = gen_reg_rtx (tmode);
f676971a 7187
95385cbb 7188 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7189 if (! pat)
7190 return 0;
7191 emit_insn (pat);
95385cbb
AH
7192 return target;
7193
7194 case ALTIVEC_BUILTIN_MTVSCR:
7195 icode = CODE_FOR_altivec_mtvscr;
7196 arg0 = TREE_VALUE (arglist);
7197 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7198 mode0 = insn_data[icode].operand[0].mode;
7199
7200	      /* If we got invalid arguments, bail out before generating bad rtl.  */
7201 if (arg0 == error_mark_node)
9a171fcd 7202 return const0_rtx;
95385cbb
AH
7203
7204 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7205 op0 = copy_to_mode_reg (mode0, op0);
7206
7207 pat = GEN_FCN (icode) (op0);
7208 if (pat)
7209 emit_insn (pat);
7210 return NULL_RTX;
3a9b8c7e 7211
95385cbb
AH
7212 case ALTIVEC_BUILTIN_DSSALL:
7213 emit_insn (gen_altivec_dssall ());
7214 return NULL_RTX;
7215
7216 case ALTIVEC_BUILTIN_DSS:
7217 icode = CODE_FOR_altivec_dss;
7218 arg0 = TREE_VALUE (arglist);
8bb418a3 7219 STRIP_NOPS (arg0);
95385cbb
AH
7220 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7221 mode0 = insn_data[icode].operand[0].mode;
7222
7223	      /* If we got invalid arguments, bail out before generating bad rtl.  */
7224 if (arg0 == error_mark_node)
9a171fcd 7225 return const0_rtx;
95385cbb 7226
b44140e7
AH
7227 if (TREE_CODE (arg0) != INTEGER_CST
7228 || TREE_INT_CST_LOW (arg0) & ~0x3)
7229 {
7230 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7231 return const0_rtx;
b44140e7
AH
7232 }
7233
95385cbb
AH
7234 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7235 op0 = copy_to_mode_reg (mode0, op0);
7236
7237 emit_insn (gen_altivec_dss (op0));
0ac081f6 7238 return NULL_RTX;
7a4eca66
DE
7239
7240 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7241 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7242 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7243 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7244 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7245
7246 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7247 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7248 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7249 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7250 return altivec_expand_vec_set_builtin (arglist);
7251
7252 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7253 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7254 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7255 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7256 return altivec_expand_vec_ext_builtin (arglist, target);
7257
7258 default:
7259 break;
7260 /* Fall through. */
0ac081f6 7261 }
24408032 7262
100c4561
AH
7263 /* Expand abs* operations. */
7264 d = (struct builtin_description *) bdesc_abs;
ca7558fc 7265 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561
AH
7266 if (d->code == fcode)
7267 return altivec_expand_abs_builtin (d->icode, arglist, target);
7268
ae4b4a02
AH
7269 /* Expand the AltiVec predicates. */
7270 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
ca7558fc 7271 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 7272 if (dp->code == fcode)
c4ad648e
AM
7273 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7274 arglist, target);
ae4b4a02 7275
6525c0e7
AH
7276	  /* LV* are funky.  They were initialized differently, so expand them by hand here.  */
7277 switch (fcode)
7278 {
7279 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 7280 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
c4ad648e 7281 arglist, target);
6525c0e7 7282 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 7283 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
c4ad648e 7284 arglist, target);
6525c0e7 7285 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 7286 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
c4ad648e 7287 arglist, target);
6525c0e7 7288 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 7289 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
c4ad648e 7290 arglist, target);
6525c0e7 7291 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 7292 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
c4ad648e 7293 arglist, target);
6525c0e7 7294 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 7295 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
c4ad648e 7296 arglist, target);
6525c0e7 7297 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 7298 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
c4ad648e 7299 arglist, target);
6525c0e7
AH
7300 default:
7301 break;
7302 /* Fall through. */
7303 }
95385cbb 7304
92898235 7305 *expandedp = false;
0ac081f6
AH
7306 return NULL_RTX;
7307}
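
For orientation, one user-level call that reaches this function through the bdesc_altivec_preds table above is an AltiVec comparison predicate; a minimal sketch, assuming <altivec.h> and -maltivec (illustrative only):

#include <altivec.h>

/* Illustrative only: vec_all_eq expands through the predicate table
   above and altivec_expand_predicate_builtin.  */
int
vectors_equal (__vector signed int a, __vector signed int b)
{
  return vec_all_eq (a, b);
}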
7308
a3170dc6
AH
7309/* Binops that need to be initialized manually, but can be expanded
7310 automagically by rs6000_expand_binop_builtin. */
7311static struct builtin_description bdesc_2arg_spe[] =
7312{
7313 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7314 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7315 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7316 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7317 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7318 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7319 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7320 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7321 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7322 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7323 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7324 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7325 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7326 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7327 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7328 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7329 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7330 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7331 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7332 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7333 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7334 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7335};
7336
7337/* Expand the builtin in EXP and store the result in TARGET. Store
7338 true in *EXPANDEDP if we found a builtin to expand.
7339
7340 This expands the SPE builtins that are not simple unary and binary
7341 operations. */
7342static rtx
a2369ed3 7343spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6
AH
7344{
7345 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7346 tree arglist = TREE_OPERAND (exp, 1);
7347 tree arg1, arg0;
7348 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7349 enum insn_code icode;
7350 enum machine_mode tmode, mode0;
7351 rtx pat, op0;
7352 struct builtin_description *d;
7353 size_t i;
7354
7355 *expandedp = true;
7356
7357 /* Syntax check for a 5-bit unsigned immediate. */
7358 switch (fcode)
7359 {
7360 case SPE_BUILTIN_EVSTDD:
7361 case SPE_BUILTIN_EVSTDH:
7362 case SPE_BUILTIN_EVSTDW:
7363 case SPE_BUILTIN_EVSTWHE:
7364 case SPE_BUILTIN_EVSTWHO:
7365 case SPE_BUILTIN_EVSTWWE:
7366 case SPE_BUILTIN_EVSTWWO:
7367 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7368 if (TREE_CODE (arg1) != INTEGER_CST
7369 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7370 {
7371 error ("argument 2 must be a 5-bit unsigned literal");
7372 return const0_rtx;
7373 }
7374 break;
7375 default:
7376 break;
7377 }
7378
00332c9f
AH
7379 /* The evsplat*i instructions are not quite generic. */
7380 switch (fcode)
7381 {
7382 case SPE_BUILTIN_EVSPLATFI:
7383 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7384 arglist, target);
7385 case SPE_BUILTIN_EVSPLATI:
7386 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7387 arglist, target);
7388 default:
7389 break;
7390 }
7391
a3170dc6
AH
7392 d = (struct builtin_description *) bdesc_2arg_spe;
7393 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7394 if (d->code == fcode)
7395 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7396
7397 d = (struct builtin_description *) bdesc_spe_predicates;
7398 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7399 if (d->code == fcode)
7400 return spe_expand_predicate_builtin (d->icode, arglist, target);
7401
7402 d = (struct builtin_description *) bdesc_spe_evsel;
7403 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7404 if (d->code == fcode)
7405 return spe_expand_evsel_builtin (d->icode, arglist, target);
7406
7407 switch (fcode)
7408 {
7409 case SPE_BUILTIN_EVSTDDX:
61bea3b0 7410 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
a3170dc6 7411 case SPE_BUILTIN_EVSTDHX:
61bea3b0 7412 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
a3170dc6 7413 case SPE_BUILTIN_EVSTDWX:
61bea3b0 7414 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
a3170dc6 7415 case SPE_BUILTIN_EVSTWHEX:
61bea3b0 7416 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
a3170dc6 7417 case SPE_BUILTIN_EVSTWHOX:
61bea3b0 7418 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
a3170dc6 7419 case SPE_BUILTIN_EVSTWWEX:
61bea3b0 7420 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
a3170dc6 7421 case SPE_BUILTIN_EVSTWWOX:
61bea3b0 7422 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
a3170dc6 7423 case SPE_BUILTIN_EVSTDD:
61bea3b0 7424 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
a3170dc6 7425 case SPE_BUILTIN_EVSTDH:
61bea3b0 7426 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
a3170dc6 7427 case SPE_BUILTIN_EVSTDW:
61bea3b0 7428 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
a3170dc6 7429 case SPE_BUILTIN_EVSTWHE:
61bea3b0 7430 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
a3170dc6 7431 case SPE_BUILTIN_EVSTWHO:
61bea3b0 7432 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
a3170dc6 7433 case SPE_BUILTIN_EVSTWWE:
61bea3b0 7434 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
a3170dc6 7435 case SPE_BUILTIN_EVSTWWO:
61bea3b0 7436 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
a3170dc6
AH
7437 case SPE_BUILTIN_MFSPEFSCR:
7438 icode = CODE_FOR_spe_mfspefscr;
7439 tmode = insn_data[icode].operand[0].mode;
7440
7441 if (target == 0
7442 || GET_MODE (target) != tmode
7443 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7444 target = gen_reg_rtx (tmode);
f676971a 7445
a3170dc6
AH
7446 pat = GEN_FCN (icode) (target);
7447 if (! pat)
7448 return 0;
7449 emit_insn (pat);
7450 return target;
7451 case SPE_BUILTIN_MTSPEFSCR:
7452 icode = CODE_FOR_spe_mtspefscr;
7453 arg0 = TREE_VALUE (arglist);
7454 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7455 mode0 = insn_data[icode].operand[0].mode;
7456
7457 if (arg0 == error_mark_node)
7458 return const0_rtx;
7459
7460 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7461 op0 = copy_to_mode_reg (mode0, op0);
7462
7463 pat = GEN_FCN (icode) (op0);
7464 if (pat)
7465 emit_insn (pat);
7466 return NULL_RTX;
7467 default:
7468 break;
7469 }
7470
7471 *expandedp = false;
7472 return NULL_RTX;
7473}
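
A minimal sketch of one of the stores handled above; the third operand is the offset and must pass the 5-bit literal check at the top of this function. This assumes an e500/SPE target and the __ev64_opaque__ type registered in spe_init_builtins (illustrative only):

/* Illustrative only; not part of rs6000.c.  */
void
store_ev (__ev64_opaque__ v, __ev64_opaque__ *p)
{
  __builtin_spe_evstdd (v, p, 0);   /* offset must be a 5-bit unsigned literal */
}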
7474
7475static rtx
a2369ed3 7476spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7477{
7478 rtx pat, scratch, tmp;
7479 tree form = TREE_VALUE (arglist);
7480 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7481 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7482 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7483 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7484 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7485 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7486 int form_int;
7487 enum rtx_code code;
7488
7489 if (TREE_CODE (form) != INTEGER_CST)
7490 {
7491 error ("argument 1 of __builtin_spe_predicate must be a constant");
7492 return const0_rtx;
7493 }
7494 else
7495 form_int = TREE_INT_CST_LOW (form);
7496
37409796 7497 gcc_assert (mode0 == mode1);
a3170dc6
AH
7498
7499 if (arg0 == error_mark_node || arg1 == error_mark_node)
7500 return const0_rtx;
7501
7502 if (target == 0
7503 || GET_MODE (target) != SImode
7504 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7505 target = gen_reg_rtx (SImode);
7506
7507 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7508 op0 = copy_to_mode_reg (mode0, op0);
7509 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7510 op1 = copy_to_mode_reg (mode1, op1);
7511
7512 scratch = gen_reg_rtx (CCmode);
7513
7514 pat = GEN_FCN (icode) (scratch, op0, op1);
7515 if (! pat)
7516 return const0_rtx;
7517 emit_insn (pat);
7518
7519 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7520 _lower_. We use one compare, but look in different bits of the
7521 CR for each variant.
7522
7523 There are 2 elements in each SPE simd type (upper/lower). The CR
7524 bits are set as follows:
7525
7526	     BIT 0 | BIT 1 | BIT 2 | BIT 3
7527 U | L | (U | L) | (U & L)
7528
7529 So, for an "all" relationship, BIT 3 would be set.
7530 For an "any" relationship, BIT 2 would be set. Etc.
7531
7532 Following traditional nomenclature, these bits map to:
7533
7534	     BIT 0 | BIT 1 | BIT 2 | BIT 3
7535 LT | GT | EQ | OV
7536
7537	     Later, we will generate rtl to look in the OV/EQ/LT/GT bits.
7538 */
7539
7540 switch (form_int)
7541 {
7542 /* All variant. OV bit. */
7543 case 0:
7544 /* We need to get to the OV bit, which is the ORDERED bit. We
7545 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 7546 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
7547 So let's just use another pattern. */
7548 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7549 return target;
7550 /* Any variant. EQ bit. */
7551 case 1:
7552 code = EQ;
7553 break;
7554 /* Upper variant. LT bit. */
7555 case 2:
7556 code = LT;
7557 break;
7558 /* Lower variant. GT bit. */
7559 case 3:
7560 code = GT;
7561 break;
7562 default:
7563 error ("argument 1 of __builtin_spe_predicate is out of range");
7564 return const0_rtx;
7565 }
7566
7567 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7568 emit_move_insn (target, tmp);
7569
7570 return target;
7571}
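
A user-level sketch of the form operand, assuming a predicate builtin named __builtin_spe_evcmpgts (the exact names come from bdesc_spe_predicates, which is outside this excerpt); form 0 selects the "all" variant described in the CR-bit comment above (illustrative only):

/* Illustrative only; assumes an e500/SPE target.  */
int
all_greater (__ev64_opaque__ a, __ev64_opaque__ b)
{
  return __builtin_spe_evcmpgts (0, a, b);   /* 0 = all, 1 = any, 2 = upper, 3 = lower */
}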
7572
7573/* The evsel builtins look like this:
7574
7575 e = __builtin_spe_evsel_OP (a, b, c, d);
7576
7577 and work like this:
7578
7579 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7580 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7581*/
7582
7583static rtx
a2369ed3 7584spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7585{
7586 rtx pat, scratch;
7587 tree arg0 = TREE_VALUE (arglist);
7588 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7589 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7590 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7591 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7592 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7593 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7594 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7595 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7596 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7597
37409796 7598 gcc_assert (mode0 == mode1);
a3170dc6
AH
7599
7600 if (arg0 == error_mark_node || arg1 == error_mark_node
7601 || arg2 == error_mark_node || arg3 == error_mark_node)
7602 return const0_rtx;
7603
7604 if (target == 0
7605 || GET_MODE (target) != mode0
7606 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7607 target = gen_reg_rtx (mode0);
7608
7609 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7610 op0 = copy_to_mode_reg (mode0, op0);
7611 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7612 op1 = copy_to_mode_reg (mode0, op1);
7613 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7614 op2 = copy_to_mode_reg (mode0, op2);
7615 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7616 op3 = copy_to_mode_reg (mode0, op3);
7617
7618 /* Generate the compare. */
7619 scratch = gen_reg_rtx (CCmode);
7620 pat = GEN_FCN (icode) (scratch, op0, op1);
7621 if (! pat)
7622 return const0_rtx;
7623 emit_insn (pat);
7624
7625 if (mode0 == V2SImode)
7626 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7627 else
7628 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7629
7630 return target;
7631}
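
A scalar reference model of the evsel semantics described in the comment above (illustrative only, not GCC code): each 64-bit SPE value holds an upper and a lower 32-bit element, and the two elements are selected independently.

struct ev64_model { int upper, lower; };

/* Models e = __builtin_spe_evsel_OP (a, b, c, d) with OP = signed greater-than.  */
static struct ev64_model
evsel_gts_model (struct ev64_model a, struct ev64_model b,
                 struct ev64_model c, struct ev64_model d)
{
  struct ev64_model e;
  e.upper = (a.upper > b.upper) ? c.upper : d.upper;
  e.lower = (a.lower > b.lower) ? c.lower : d.lower;
  return e;
}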
7632
0ac081f6
AH
7633/* Expand an expression EXP that calls a built-in function,
7634 with result going to TARGET if that's convenient
7635 (and in mode MODE if that's convenient).
7636 SUBTARGET may be used as the target for computing one of EXP's operands.
7637 IGNORE is nonzero if the value is to be ignored. */
7638
7639static rtx
a2369ed3 7640rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
7641 enum machine_mode mode ATTRIBUTE_UNUSED,
7642 int ignore ATTRIBUTE_UNUSED)
0ac081f6 7643{
92898235
AH
7644 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7645 tree arglist = TREE_OPERAND (exp, 1);
7646 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7647 struct builtin_description *d;
7648 size_t i;
7649 rtx ret;
7650 bool success;
f676971a 7651
7ccf35ed
DN
7652 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7653 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7654 {
7655 int icode = (int) CODE_FOR_altivec_lvsr;
7656 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7657 enum machine_mode mode = insn_data[icode].operand[1].mode;
7658 tree arg;
7659 rtx op, addr, pat;
7660
37409796 7661 gcc_assert (TARGET_ALTIVEC);
7ccf35ed
DN
7662
7663 arg = TREE_VALUE (arglist);
37409796 7664 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
7665 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7666 addr = memory_address (mode, op);
7667 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7668 op = addr;
7669 else
7670 {
7671	  /* For the load case we need to negate the address.  */
7672 op = gen_reg_rtx (GET_MODE (addr));
7673 emit_insn (gen_rtx_SET (VOIDmode, op,
7674 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 7675 }
7ccf35ed
DN
7676 op = gen_rtx_MEM (mode, op);
7677
7678 if (target == 0
7679 || GET_MODE (target) != tmode
7680 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7681 target = gen_reg_rtx (tmode);
7682
7683 /*pat = gen_altivec_lvsr (target, op);*/
7684 pat = GEN_FCN (icode) (target, op);
7685 if (!pat)
7686 return 0;
7687 emit_insn (pat);
7688
7689 return target;
7690 }
7691
0ac081f6 7692 if (TARGET_ALTIVEC)
92898235
AH
7693 {
7694 ret = altivec_expand_builtin (exp, target, &success);
7695
a3170dc6
AH
7696 if (success)
7697 return ret;
7698 }
7699 if (TARGET_SPE)
7700 {
7701 ret = spe_expand_builtin (exp, target, &success);
7702
92898235
AH
7703 if (success)
7704 return ret;
7705 }
7706
37409796 7707 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 7708
37409796
NS
7709 /* Handle simple unary operations. */
7710 d = (struct builtin_description *) bdesc_1arg;
7711 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7712 if (d->code == fcode)
7713 return rs6000_expand_unop_builtin (d->icode, arglist, target);
bb8df8a6 7714
37409796
NS
7715 /* Handle simple binary operations. */
7716 d = (struct builtin_description *) bdesc_2arg;
7717 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7718 if (d->code == fcode)
7719 return rs6000_expand_binop_builtin (d->icode, arglist, target);
0ac081f6 7720
37409796
NS
7721 /* Handle simple ternary operations. */
7722 d = (struct builtin_description *) bdesc_3arg;
7723 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7724 if (d->code == fcode)
7725 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
bb8df8a6 7726
37409796 7727 gcc_unreachable ();
0ac081f6
AH
7728}
7729
7c62e993
PB
7730static tree
7731build_opaque_vector_type (tree node, int nunits)
7732{
7733 node = copy_node (node);
7734 TYPE_MAIN_VARIANT (node) = node;
7735 return build_vector_type (node, nunits);
7736}
7737
0ac081f6 7738static void
863d938c 7739rs6000_init_builtins (void)
0ac081f6 7740{
4a5eab38
PB
7741 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7742 V2SF_type_node = build_vector_type (float_type_node, 2);
7743 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7744 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7745 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 7746 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
7747 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7748
7749 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7750 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7751 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7752
7c62e993
PB
7753 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7754 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 7755 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 7756 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 7757
8bb418a3
ZL
7758 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7759 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7760 'vector unsigned short'. */
7761
8dd16ecc
NS
7762 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7763 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7764 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7765 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 7766
58646b77
PB
7767 long_integer_type_internal_node = long_integer_type_node;
7768 long_unsigned_type_internal_node = long_unsigned_type_node;
7769 intQI_type_internal_node = intQI_type_node;
7770 uintQI_type_internal_node = unsigned_intQI_type_node;
7771 intHI_type_internal_node = intHI_type_node;
7772 uintHI_type_internal_node = unsigned_intHI_type_node;
7773 intSI_type_internal_node = intSI_type_node;
7774 uintSI_type_internal_node = unsigned_intSI_type_node;
7775 float_type_internal_node = float_type_node;
7776 void_type_internal_node = void_type_node;
7777
8bb418a3
ZL
7778 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7779 get_identifier ("__bool char"),
7780 bool_char_type_node));
7781 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7782 get_identifier ("__bool short"),
7783 bool_short_type_node));
7784 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7785 get_identifier ("__bool int"),
7786 bool_int_type_node));
7787 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7788 get_identifier ("__pixel"),
7789 pixel_type_node));
7790
4a5eab38
PB
7791 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7792 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7793 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7794 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
7795
7796 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7797 get_identifier ("__vector unsigned char"),
7798 unsigned_V16QI_type_node));
7799 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7800 get_identifier ("__vector signed char"),
7801 V16QI_type_node));
7802 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7803 get_identifier ("__vector __bool char"),
7804 bool_V16QI_type_node));
7805
7806 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7807 get_identifier ("__vector unsigned short"),
7808 unsigned_V8HI_type_node));
7809 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7810 get_identifier ("__vector signed short"),
7811 V8HI_type_node));
7812 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7813 get_identifier ("__vector __bool short"),
7814 bool_V8HI_type_node));
7815
7816 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7817 get_identifier ("__vector unsigned int"),
7818 unsigned_V4SI_type_node));
7819 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7820 get_identifier ("__vector signed int"),
7821 V4SI_type_node));
7822 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7823 get_identifier ("__vector __bool int"),
7824 bool_V4SI_type_node));
7825
7826 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7827 get_identifier ("__vector float"),
7828 V4SF_type_node));
7829 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7830 get_identifier ("__vector __pixel"),
7831 pixel_V8HI_type_node));
7832
a3170dc6 7833 if (TARGET_SPE)
3fdaa45a 7834 spe_init_builtins ();
0ac081f6
AH
7835 if (TARGET_ALTIVEC)
7836 altivec_init_builtins ();
0559cc77
DE
7837 if (TARGET_ALTIVEC || TARGET_SPE)
7838 rs6000_common_init_builtins ();
0ac081f6
AH
7839}
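
Once these pushdecls have run, code compiled with -maltivec can spell the registered type names directly; a minimal sketch of a translation unit using the identifiers pushed above (illustrative only):

/* Illustrative only; not part of rs6000.c.  */
__vector unsigned char vuc;
__vector signed short  vss;
__vector __bool int    vbi;
__vector __pixel       vpx;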
7840
a3170dc6
AH
7841/* Search through a set of builtins and enable the mask bits.
7842 DESC is an array of builtins.
b6d08ca1 7843 SIZE is the total number of builtins.
a3170dc6
AH
7844 START is the builtin enum at which to start.
7845 END is the builtin enum at which to end. */
0ac081f6 7846static void
a2369ed3 7847enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 7848 enum rs6000_builtins start,
a2369ed3 7849 enum rs6000_builtins end)
a3170dc6
AH
7850{
7851 int i;
7852
7853 for (i = 0; i < size; ++i)
7854 if (desc[i].code == start)
7855 break;
7856
7857 if (i == size)
7858 return;
7859
7860 for (; i < size; ++i)
7861 {
7862 /* Flip all the bits on. */
7863 desc[i].mask = target_flags;
7864 if (desc[i].code == end)
7865 break;
7866 }
7867}
7868
7869static void
863d938c 7870spe_init_builtins (void)
0ac081f6 7871{
a3170dc6
AH
7872 tree endlink = void_list_node;
7873 tree puint_type_node = build_pointer_type (unsigned_type_node);
7874 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 7875 struct builtin_description *d;
0ac081f6
AH
7876 size_t i;
7877
a3170dc6
AH
7878 tree v2si_ftype_4_v2si
7879 = build_function_type
3fdaa45a
AH
7880 (opaque_V2SI_type_node,
7881 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7882 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7883 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7884 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7885 endlink)))));
7886
7887 tree v2sf_ftype_4_v2sf
7888 = build_function_type
3fdaa45a
AH
7889 (opaque_V2SF_type_node,
7890 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7891 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7892 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7893 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
7894 endlink)))));
7895
7896 tree int_ftype_int_v2si_v2si
7897 = build_function_type
7898 (integer_type_node,
7899 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
7900 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7901 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7902 endlink))));
7903
7904 tree int_ftype_int_v2sf_v2sf
7905 = build_function_type
7906 (integer_type_node,
7907 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
7908 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7909 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
7910 endlink))));
7911
7912 tree void_ftype_v2si_puint_int
7913 = build_function_type (void_type_node,
3fdaa45a 7914 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7915 tree_cons (NULL_TREE, puint_type_node,
7916 tree_cons (NULL_TREE,
7917 integer_type_node,
7918 endlink))));
7919
7920 tree void_ftype_v2si_puint_char
7921 = build_function_type (void_type_node,
3fdaa45a 7922 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7923 tree_cons (NULL_TREE, puint_type_node,
7924 tree_cons (NULL_TREE,
7925 char_type_node,
7926 endlink))));
7927
7928 tree void_ftype_v2si_pv2si_int
7929 = build_function_type (void_type_node,
3fdaa45a 7930 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 7931 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
7932 tree_cons (NULL_TREE,
7933 integer_type_node,
7934 endlink))));
7935
7936 tree void_ftype_v2si_pv2si_char
7937 = build_function_type (void_type_node,
3fdaa45a 7938 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 7939 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
7940 tree_cons (NULL_TREE,
7941 char_type_node,
7942 endlink))));
7943
7944 tree void_ftype_int
7945 = build_function_type (void_type_node,
7946 tree_cons (NULL_TREE, integer_type_node, endlink));
7947
7948 tree int_ftype_void
36e8d515 7949 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
7950
7951 tree v2si_ftype_pv2si_int
3fdaa45a 7952 = build_function_type (opaque_V2SI_type_node,
6035d635 7953 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
7954 tree_cons (NULL_TREE, integer_type_node,
7955 endlink)));
7956
7957 tree v2si_ftype_puint_int
3fdaa45a 7958 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
7959 tree_cons (NULL_TREE, puint_type_node,
7960 tree_cons (NULL_TREE, integer_type_node,
7961 endlink)));
7962
7963 tree v2si_ftype_pushort_int
3fdaa45a 7964 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
7965 tree_cons (NULL_TREE, pushort_type_node,
7966 tree_cons (NULL_TREE, integer_type_node,
7967 endlink)));
7968
00332c9f
AH
7969 tree v2si_ftype_signed_char
7970 = build_function_type (opaque_V2SI_type_node,
7971 tree_cons (NULL_TREE, signed_char_type_node,
7972 endlink));
7973
a3170dc6
AH
7974 /* The initialization of the simple binary and unary builtins is
7975 done in rs6000_common_init_builtins, but we have to enable the
7976 mask bits here manually because we have run out of `target_flags'
7977 bits. We really need to redesign this mask business. */
7978
7979 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7980 ARRAY_SIZE (bdesc_2arg),
7981 SPE_BUILTIN_EVADDW,
7982 SPE_BUILTIN_EVXOR);
7983 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7984 ARRAY_SIZE (bdesc_1arg),
7985 SPE_BUILTIN_EVABS,
7986 SPE_BUILTIN_EVSUBFUSIAAW);
7987 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7988 ARRAY_SIZE (bdesc_spe_predicates),
7989 SPE_BUILTIN_EVCMPEQ,
7990 SPE_BUILTIN_EVFSTSTLT);
7991 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7992 ARRAY_SIZE (bdesc_spe_evsel),
7993 SPE_BUILTIN_EVSEL_CMPGTS,
7994 SPE_BUILTIN_EVSEL_FSTSTEQ);
7995
36252949
AH
7996 (*lang_hooks.decls.pushdecl)
7997 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7998 opaque_V2SI_type_node));
7999
a3170dc6 8000 /* Initialize irregular SPE builtins. */
f676971a 8001
a3170dc6
AH
8002 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8003 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8004 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8005 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8006 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8007 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8008 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8009 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8010 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8011 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8012 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8013 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8014 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8015 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8016 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8017 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8018 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8019 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8020
8021 /* Loads. */
8022 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8023 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8024 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8025 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8026 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8027 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8028 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8029 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8030 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8031 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8032 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8033 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8034 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8035 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8036 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8037 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8038 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8039 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8040 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8041 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8042 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8043 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8044
8045 /* Predicates. */
8046 d = (struct builtin_description *) bdesc_spe_predicates;
8047 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8048 {
8049 tree type;
8050
8051 switch (insn_data[d->icode].operand[1].mode)
8052 {
8053 case V2SImode:
8054 type = int_ftype_int_v2si_v2si;
8055 break;
8056 case V2SFmode:
8057 type = int_ftype_int_v2sf_v2sf;
8058 break;
8059 default:
37409796 8060 gcc_unreachable ();
a3170dc6
AH
8061 }
8062
8063 def_builtin (d->mask, d->name, type, d->code);
8064 }
8065
8066 /* Evsel predicates. */
8067 d = (struct builtin_description *) bdesc_spe_evsel;
8068 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8069 {
8070 tree type;
8071
8072 switch (insn_data[d->icode].operand[1].mode)
8073 {
8074 case V2SImode:
8075 type = v2si_ftype_4_v2si;
8076 break;
8077 case V2SFmode:
8078 type = v2sf_ftype_4_v2sf;
8079 break;
8080 default:
37409796 8081 gcc_unreachable ();
a3170dc6
AH
8082 }
8083
8084 def_builtin (d->mask, d->name, type, d->code);
8085 }
8086}
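
A minimal sketch of the two SPEFSCR builtins defined above (int_ftype_void and void_ftype_int); the mask used here is only an example value, not a real SPEFSCR field layout (illustrative only):

/* Illustrative only; assumes an e500/SPE target.  */
void
tweak_spefscr (void)
{
  int fscr = __builtin_spe_mfspefscr ();
  __builtin_spe_mtspefscr (fscr & ~0x3f);   /* example mask */
}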
8087
8088static void
863d938c 8089altivec_init_builtins (void)
a3170dc6
AH
8090{
8091 struct builtin_description *d;
8092 struct builtin_description_predicates *dp;
8093 size_t i;
7a4eca66
DE
8094 tree ftype;
8095
a3170dc6
AH
8096 tree pfloat_type_node = build_pointer_type (float_type_node);
8097 tree pint_type_node = build_pointer_type (integer_type_node);
8098 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8099 tree pchar_type_node = build_pointer_type (char_type_node);
8100
8101 tree pvoid_type_node = build_pointer_type (void_type_node);
8102
0dbc3651
ZW
8103 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8104 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8105 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8106 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8107
8108 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8109
58646b77
PB
8110 tree int_ftype_opaque
8111 = build_function_type_list (integer_type_node,
8112 opaque_V4SI_type_node, NULL_TREE);
8113
8114 tree opaque_ftype_opaque_int
8115 = build_function_type_list (opaque_V4SI_type_node,
8116 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8117 tree opaque_ftype_opaque_opaque_int
8118 = build_function_type_list (opaque_V4SI_type_node,
8119 opaque_V4SI_type_node, opaque_V4SI_type_node,
8120 integer_type_node, NULL_TREE);
8121 tree int_ftype_int_opaque_opaque
8122 = build_function_type_list (integer_type_node,
8123 integer_type_node, opaque_V4SI_type_node,
8124 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8125 tree int_ftype_int_v4si_v4si
8126 = build_function_type_list (integer_type_node,
8127 integer_type_node, V4SI_type_node,
8128 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8129 tree v4sf_ftype_pcfloat
8130 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8131 tree void_ftype_pfloat_v4sf
b4de2f7d 8132 = build_function_type_list (void_type_node,
a3170dc6 8133 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8134 tree v4si_ftype_pcint
8135 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8136 tree void_ftype_pint_v4si
b4de2f7d
AH
8137 = build_function_type_list (void_type_node,
8138 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8139 tree v8hi_ftype_pcshort
8140 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8141 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8142 = build_function_type_list (void_type_node,
8143 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8144 tree v16qi_ftype_pcchar
8145 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8146 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8147 = build_function_type_list (void_type_node,
8148 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8149 tree void_ftype_v4si
b4de2f7d 8150 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8151 tree v8hi_ftype_void
8152 = build_function_type (V8HI_type_node, void_list_node);
8153 tree void_ftype_void
8154 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8155 tree void_ftype_int
8156 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8157
58646b77
PB
8158 tree opaque_ftype_long_pcvoid
8159 = build_function_type_list (opaque_V4SI_type_node,
8160 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8161 tree v16qi_ftype_long_pcvoid
a3170dc6 8162 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8163 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8164 tree v8hi_ftype_long_pcvoid
a3170dc6 8165 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8166 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8167 tree v4si_ftype_long_pcvoid
a3170dc6 8168 = build_function_type_list (V4SI_type_node,
b4a62fa0 8169 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8170
58646b77
PB
8171 tree void_ftype_opaque_long_pvoid
8172 = build_function_type_list (void_type_node,
8173 opaque_V4SI_type_node, long_integer_type_node,
8174 pvoid_type_node, NULL_TREE);
b4a62fa0 8175 tree void_ftype_v4si_long_pvoid
b4de2f7d 8176 = build_function_type_list (void_type_node,
b4a62fa0 8177 V4SI_type_node, long_integer_type_node,
b4de2f7d 8178 pvoid_type_node, NULL_TREE);
b4a62fa0 8179 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8180 = build_function_type_list (void_type_node,
b4a62fa0 8181 V16QI_type_node, long_integer_type_node,
b4de2f7d 8182 pvoid_type_node, NULL_TREE);
b4a62fa0 8183 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8184 = build_function_type_list (void_type_node,
b4a62fa0 8185 V8HI_type_node, long_integer_type_node,
b4de2f7d 8186 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8187 tree int_ftype_int_v8hi_v8hi
8188 = build_function_type_list (integer_type_node,
8189 integer_type_node, V8HI_type_node,
8190 V8HI_type_node, NULL_TREE);
8191 tree int_ftype_int_v16qi_v16qi
8192 = build_function_type_list (integer_type_node,
8193 integer_type_node, V16QI_type_node,
8194 V16QI_type_node, NULL_TREE);
8195 tree int_ftype_int_v4sf_v4sf
8196 = build_function_type_list (integer_type_node,
8197 integer_type_node, V4SF_type_node,
8198 V4SF_type_node, NULL_TREE);
8199 tree v4si_ftype_v4si
8200 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8201 tree v8hi_ftype_v8hi
8202 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8203 tree v16qi_ftype_v16qi
8204 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8205 tree v4sf_ftype_v4sf
8206 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8207 tree void_ftype_pcvoid_int_int
a3170dc6 8208 = build_function_type_list (void_type_node,
0dbc3651 8209 pcvoid_type_node, integer_type_node,
8bb418a3 8210 integer_type_node, NULL_TREE);
8bb418a3 8211
0dbc3651
ZW
8212 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8213 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8214 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8215 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8216 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8217 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8218 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8219 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8220 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8221 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8222 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8223 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8224 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8225 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8226 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8227 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
8228 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8229 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8230 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 8231 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
8232 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8233 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8234 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8235 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8236 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8237 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8238 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8239 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8240 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8241 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8242 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8243 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
8244 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8245 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8246 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8247 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8248 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8249 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8250 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8251 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8252 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8253 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8254 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8255 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8256 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8257 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8258
8259 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8260
8261 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8262 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8263 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8264 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8265 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8266 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8267 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8268 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8269 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8270 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 8271
a3170dc6
AH
8272 /* Add the DST variants. */
8273 d = (struct builtin_description *) bdesc_dst;
8274 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 8275 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
8276
8277 /* Initialize the predicates. */
8278 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8279 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8280 {
8281 enum machine_mode mode1;
8282 tree type;
58646b77
PB
8283 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8284 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 8285
58646b77
PB
8286 if (is_overloaded)
8287 mode1 = VOIDmode;
8288 else
8289 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
8290
8291 switch (mode1)
8292 {
58646b77
PB
8293 case VOIDmode:
8294 type = int_ftype_int_opaque_opaque;
8295 break;
a3170dc6
AH
8296 case V4SImode:
8297 type = int_ftype_int_v4si_v4si;
8298 break;
8299 case V8HImode:
8300 type = int_ftype_int_v8hi_v8hi;
8301 break;
8302 case V16QImode:
8303 type = int_ftype_int_v16qi_v16qi;
8304 break;
8305 case V4SFmode:
8306 type = int_ftype_int_v4sf_v4sf;
8307 break;
8308 default:
37409796 8309 gcc_unreachable ();
a3170dc6 8310 }
f676971a 8311
a3170dc6
AH
8312 def_builtin (dp->mask, dp->name, type, dp->code);
8313 }
8314
8315 /* Initialize the abs* operators. */
8316 d = (struct builtin_description *) bdesc_abs;
8317 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8318 {
8319 enum machine_mode mode0;
8320 tree type;
8321
8322 mode0 = insn_data[d->icode].operand[0].mode;
8323
8324 switch (mode0)
8325 {
8326 case V4SImode:
8327 type = v4si_ftype_v4si;
8328 break;
8329 case V8HImode:
8330 type = v8hi_ftype_v8hi;
8331 break;
8332 case V16QImode:
8333 type = v16qi_ftype_v16qi;
8334 break;
8335 case V4SFmode:
8336 type = v4sf_ftype_v4sf;
8337 break;
8338 default:
37409796 8339 gcc_unreachable ();
a3170dc6 8340 }
f676971a 8341
a3170dc6
AH
8342 def_builtin (d->mask, d->name, type, d->code);
8343 }
7ccf35ed 8344
13c62176
DN
8345 if (TARGET_ALTIVEC)
8346 {
8347 tree decl;
8348
8349	      /* Initialize the target builtin that implements
8350 targetm.vectorize.builtin_mask_for_load. */
8351
8352 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8bb46326
DN
8353 v16qi_ftype_long_pcvoid,
8354 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8355 BUILT_IN_MD, NULL,
8356 tree_cons (get_identifier ("const"),
8357 NULL_TREE, NULL_TREE));
13c62176
DN
8358 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8359 altivec_builtin_mask_for_load = decl;
13c62176 8360 }
7a4eca66
DE
8361
8362 /* Access to the vec_init patterns. */
8363 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8364 integer_type_node, integer_type_node,
8365 integer_type_node, NULL_TREE);
8366 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8367 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8368
8369 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8370 short_integer_type_node,
8371 short_integer_type_node,
8372 short_integer_type_node,
8373 short_integer_type_node,
8374 short_integer_type_node,
8375 short_integer_type_node,
8376 short_integer_type_node, NULL_TREE);
8377 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8378 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8379
8380 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8381 char_type_node, char_type_node,
8382 char_type_node, char_type_node,
8383 char_type_node, char_type_node,
8384 char_type_node, char_type_node,
8385 char_type_node, char_type_node,
8386 char_type_node, char_type_node,
8387 char_type_node, char_type_node,
8388 char_type_node, NULL_TREE);
8389 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8390 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8391
8392 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8393 float_type_node, float_type_node,
8394 float_type_node, NULL_TREE);
8395 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8396 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8397
8398 /* Access to the vec_set patterns. */
8399 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8400 intSI_type_node,
8401 integer_type_node, NULL_TREE);
8402 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8403 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8404
8405 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8406 intHI_type_node,
8407 integer_type_node, NULL_TREE);
8408 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8409 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8410
8411 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8412 intQI_type_node,
8413 integer_type_node, NULL_TREE);
8414 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8415 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8416
8417 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8418 float_type_node,
8419 integer_type_node, NULL_TREE);
8420 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8421 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8422
8423 /* Access to the vec_extract patterns. */
8424 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8425 integer_type_node, NULL_TREE);
8426 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8427 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8428
8429 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8430 integer_type_node, NULL_TREE);
8431 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8432 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8433
8434 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8435 integer_type_node, NULL_TREE);
8436 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8437 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8438
8439 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8440 integer_type_node, NULL_TREE);
8441 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8442 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
8443}
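
For orientation, a user-level sketch that exercises the load builtins registered above, assuming <altivec.h>, -maltivec, and 16-byte-aligned data (illustrative only):

#include <altivec.h>

/* Illustrative only: vec_ld resolves to the LVX builtin registered above.  */
__vector signed int
sum_two_quads (const int *p)
{
  __vector signed int a = vec_ld (0, p);
  __vector signed int b = vec_ld (16, p);
  return vec_add (a, b);
}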
8444
8445static void
863d938c 8446rs6000_common_init_builtins (void)
a3170dc6
AH
8447{
8448 struct builtin_description *d;
8449 size_t i;
8450
8451 tree v4sf_ftype_v4sf_v4sf_v16qi
8452 = build_function_type_list (V4SF_type_node,
8453 V4SF_type_node, V4SF_type_node,
8454 V16QI_type_node, NULL_TREE);
8455 tree v4si_ftype_v4si_v4si_v16qi
8456 = build_function_type_list (V4SI_type_node,
8457 V4SI_type_node, V4SI_type_node,
8458 V16QI_type_node, NULL_TREE);
8459 tree v8hi_ftype_v8hi_v8hi_v16qi
8460 = build_function_type_list (V8HI_type_node,
8461 V8HI_type_node, V8HI_type_node,
8462 V16QI_type_node, NULL_TREE);
8463 tree v16qi_ftype_v16qi_v16qi_v16qi
8464 = build_function_type_list (V16QI_type_node,
8465 V16QI_type_node, V16QI_type_node,
8466 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
8467 tree v4si_ftype_int
8468 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8469 tree v8hi_ftype_int
8470 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8471 tree v16qi_ftype_int
8472 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
8473 tree v8hi_ftype_v16qi
8474 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8475 tree v4sf_ftype_v4sf
8476 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8477
8478 tree v2si_ftype_v2si_v2si
2abe3e28
AH
8479 = build_function_type_list (opaque_V2SI_type_node,
8480 opaque_V2SI_type_node,
8481 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8482
8483 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
8484 = build_function_type_list (opaque_V2SF_type_node,
8485 opaque_V2SF_type_node,
8486 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8487
8488 tree v2si_ftype_int_int
2abe3e28 8489 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8490 integer_type_node, integer_type_node,
8491 NULL_TREE);
8492
58646b77
PB
8493 tree opaque_ftype_opaque
8494 = build_function_type_list (opaque_V4SI_type_node,
8495 opaque_V4SI_type_node, NULL_TREE);
8496
a3170dc6 8497 tree v2si_ftype_v2si
2abe3e28
AH
8498 = build_function_type_list (opaque_V2SI_type_node,
8499 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8500
8501 tree v2sf_ftype_v2sf
2abe3e28
AH
8502 = build_function_type_list (opaque_V2SF_type_node,
8503 opaque_V2SF_type_node, NULL_TREE);
f676971a 8504
a3170dc6 8505 tree v2sf_ftype_v2si
2abe3e28
AH
8506 = build_function_type_list (opaque_V2SF_type_node,
8507 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8508
8509 tree v2si_ftype_v2sf
2abe3e28
AH
8510 = build_function_type_list (opaque_V2SI_type_node,
8511 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8512
8513 tree v2si_ftype_v2si_char
2abe3e28
AH
8514 = build_function_type_list (opaque_V2SI_type_node,
8515 opaque_V2SI_type_node,
8516 char_type_node, NULL_TREE);
a3170dc6
AH
8517
8518 tree v2si_ftype_int_char
2abe3e28 8519 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8520 integer_type_node, char_type_node, NULL_TREE);
8521
8522 tree v2si_ftype_char
2abe3e28
AH
8523 = build_function_type_list (opaque_V2SI_type_node,
8524 char_type_node, NULL_TREE);
a3170dc6
AH
8525
8526 tree int_ftype_int_int
8527 = build_function_type_list (integer_type_node,
8528 integer_type_node, integer_type_node,
8529 NULL_TREE);
95385cbb 8530
58646b77
PB
8531 tree opaque_ftype_opaque_opaque
8532 = build_function_type_list (opaque_V4SI_type_node,
8533 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 8534 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
8535 = build_function_type_list (V4SI_type_node,
8536 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 8537 tree v4sf_ftype_v4si_int
b4de2f7d 8538 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
8539 V4SI_type_node, integer_type_node, NULL_TREE);
8540 tree v4si_ftype_v4sf_int
b4de2f7d 8541 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8542 V4SF_type_node, integer_type_node, NULL_TREE);
8543 tree v4si_ftype_v4si_int
b4de2f7d 8544 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8545 V4SI_type_node, integer_type_node, NULL_TREE);
8546 tree v8hi_ftype_v8hi_int
b4de2f7d 8547 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
8548 V8HI_type_node, integer_type_node, NULL_TREE);
8549 tree v16qi_ftype_v16qi_int
b4de2f7d 8550 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
8551 V16QI_type_node, integer_type_node, NULL_TREE);
8552 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
8553 = build_function_type_list (V16QI_type_node,
8554 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
8555 integer_type_node, NULL_TREE);
8556 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
8557 = build_function_type_list (V8HI_type_node,
8558 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
8559 integer_type_node, NULL_TREE);
8560 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
8561 = build_function_type_list (V4SI_type_node,
8562 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
8563 integer_type_node, NULL_TREE);
8564 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
8565 = build_function_type_list (V4SF_type_node,
8566 V4SF_type_node, V4SF_type_node,
b9e4e5d1 8567 integer_type_node, NULL_TREE);
0ac081f6 8568 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
8569 = build_function_type_list (V4SF_type_node,
8570 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
8571 tree opaque_ftype_opaque_opaque_opaque
8572 = build_function_type_list (opaque_V4SI_type_node,
8573 opaque_V4SI_type_node, opaque_V4SI_type_node,
8574 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 8575 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
8576 = build_function_type_list (V4SF_type_node,
8577 V4SF_type_node, V4SF_type_node,
8578 V4SI_type_node, NULL_TREE);
2212663f 8579 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
8580 = build_function_type_list (V4SF_type_node,
8581 V4SF_type_node, V4SF_type_node,
8582 V4SF_type_node, NULL_TREE);
f676971a 8583 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
8584 = build_function_type_list (V4SI_type_node,
8585 V4SI_type_node, V4SI_type_node,
8586 V4SI_type_node, NULL_TREE);
0ac081f6 8587 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
8588 = build_function_type_list (V8HI_type_node,
8589 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 8590 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
8591 = build_function_type_list (V8HI_type_node,
8592 V8HI_type_node, V8HI_type_node,
8593 V8HI_type_node, NULL_TREE);
c4ad648e 8594 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
8595 = build_function_type_list (V4SI_type_node,
8596 V8HI_type_node, V8HI_type_node,
8597 V4SI_type_node, NULL_TREE);
c4ad648e 8598 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
8599 = build_function_type_list (V4SI_type_node,
8600 V16QI_type_node, V16QI_type_node,
8601 V4SI_type_node, NULL_TREE);
0ac081f6 8602 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
8603 = build_function_type_list (V16QI_type_node,
8604 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8605 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
8606 = build_function_type_list (V4SI_type_node,
8607 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 8608 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
8609 = build_function_type_list (V8HI_type_node,
8610 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8611 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
8612 = build_function_type_list (V4SI_type_node,
8613 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8614 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
8615 = build_function_type_list (V8HI_type_node,
8616 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 8617 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
8618 = build_function_type_list (V16QI_type_node,
8619 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8620 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
8621 = build_function_type_list (V4SI_type_node,
8622 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 8623 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
8624 = build_function_type_list (V4SI_type_node,
8625 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8626 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
8627 = build_function_type_list (V4SI_type_node,
8628 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8629 tree v4si_ftype_v8hi
8630 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8631 tree int_ftype_v4si_v4si
8632 = build_function_type_list (integer_type_node,
8633 V4SI_type_node, V4SI_type_node, NULL_TREE);
8634 tree int_ftype_v4sf_v4sf
8635 = build_function_type_list (integer_type_node,
8636 V4SF_type_node, V4SF_type_node, NULL_TREE);
8637 tree int_ftype_v16qi_v16qi
8638 = build_function_type_list (integer_type_node,
8639 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8640 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
8641 = build_function_type_list (integer_type_node,
8642 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8643
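  /* Naming convention for the trees above: the text before "_ftype_" is the
     return type and the parts after it are the argument types, so e.g.
     v4si_ftype_v4si_int describes a builtin taking a V4SI and an int and
     returning a V4SI.  */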
6f317ef3 8644 /* Add the simple ternary operators. */
2212663f 8645 d = (struct builtin_description *) bdesc_3arg;
ca7558fc 8646 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 8647 {
2212663f
DB
8648 enum machine_mode mode0, mode1, mode2, mode3;
8649 tree type;
58646b77
PB
8650 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8651 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 8652
58646b77
PB
8653 if (is_overloaded)
8654 {
8655 mode0 = VOIDmode;
8656 mode1 = VOIDmode;
8657 mode2 = VOIDmode;
8658 mode3 = VOIDmode;
8659 }
8660 else
8661 {
8662 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8663 continue;
f676971a 8664
58646b77
PB
8665 mode0 = insn_data[d->icode].operand[0].mode;
8666 mode1 = insn_data[d->icode].operand[1].mode;
8667 mode2 = insn_data[d->icode].operand[2].mode;
8668 mode3 = insn_data[d->icode].operand[3].mode;
8669 }
bb8df8a6 8670
2212663f
DB
8671 /* When all four are of the same mode. */
8672 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8673 {
8674 switch (mode0)
8675 {
58646b77
PB
8676 case VOIDmode:
8677 type = opaque_ftype_opaque_opaque_opaque;
8678 break;
617e0e1d
DB
8679 case V4SImode:
8680 type = v4si_ftype_v4si_v4si_v4si;
8681 break;
2212663f
DB
8682 case V4SFmode:
8683 type = v4sf_ftype_v4sf_v4sf_v4sf;
8684 break;
8685 case V8HImode:
8686 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 8687 break;
2212663f
DB
8688 case V16QImode:
8689 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8690 break;
2212663f 8691 default:
37409796 8692 gcc_unreachable ();
2212663f
DB
8693 }
8694 }
8695 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 8696 {
2212663f
DB
8697 switch (mode0)
8698 {
8699 case V4SImode:
8700 type = v4si_ftype_v4si_v4si_v16qi;
8701 break;
8702 case V4SFmode:
8703 type = v4sf_ftype_v4sf_v4sf_v16qi;
8704 break;
8705 case V8HImode:
8706 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 8707 break;
2212663f
DB
8708 case V16QImode:
8709 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8710 break;
2212663f 8711 default:
37409796 8712 gcc_unreachable ();
2212663f
DB
8713 }
8714 }
f676971a 8715 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 8716 && mode3 == V4SImode)
24408032 8717 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 8718 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 8719 && mode3 == V4SImode)
24408032 8720 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 8721 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 8722 && mode3 == V4SImode)
24408032
AH
8723 type = v4sf_ftype_v4sf_v4sf_v4si;
8724
8725 /* vchar, vchar, vchar, 4 bit literal. */
8726 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8727 && mode3 == QImode)
b9e4e5d1 8728 type = v16qi_ftype_v16qi_v16qi_int;
24408032
AH
8729
8730 /* vshort, vshort, vshort, 4 bit literal. */
8731 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8732 && mode3 == QImode)
b9e4e5d1 8733 type = v8hi_ftype_v8hi_v8hi_int;
24408032
AH
8734
8735 /* vint, vint, vint, 4 bit literal. */
8736 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8737 && mode3 == QImode)
b9e4e5d1 8738 type = v4si_ftype_v4si_v4si_int;
24408032
AH
8739
8740 /* vfloat, vfloat, vfloat, 4 bit literal. */
8741 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8742 && mode3 == QImode)
b9e4e5d1 8743 type = v4sf_ftype_v4sf_v4sf_int;
24408032 8744
2212663f 8745 else
37409796 8746 gcc_unreachable ();
2212663f
DB
8747
8748 def_builtin (d->mask, d->name, type, d->code);
8749 }
8750
0ac081f6 8751 /* Add the simple binary operators. */
00b960c7 8752 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 8753 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
8754 {
8755 enum machine_mode mode0, mode1, mode2;
8756 tree type;
58646b77
PB
8757 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8758 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 8759
58646b77
PB
8760 if (is_overloaded)
8761 {
8762 mode0 = VOIDmode;
8763 mode1 = VOIDmode;
8764 mode2 = VOIDmode;
8765 }
8766 else
bb8df8a6 8767 {
58646b77
PB
8768 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8769 continue;
f676971a 8770
58646b77
PB
8771 mode0 = insn_data[d->icode].operand[0].mode;
8772 mode1 = insn_data[d->icode].operand[1].mode;
8773 mode2 = insn_data[d->icode].operand[2].mode;
8774 }
0ac081f6
AH
8775
8776 /* When all three operands are of the same mode. */
8777 if (mode0 == mode1 && mode1 == mode2)
8778 {
8779 switch (mode0)
8780 {
58646b77
PB
8781 case VOIDmode:
8782 type = opaque_ftype_opaque_opaque;
8783 break;
0ac081f6
AH
8784 case V4SFmode:
8785 type = v4sf_ftype_v4sf_v4sf;
8786 break;
8787 case V4SImode:
8788 type = v4si_ftype_v4si_v4si;
8789 break;
8790 case V16QImode:
8791 type = v16qi_ftype_v16qi_v16qi;
8792 break;
8793 case V8HImode:
8794 type = v8hi_ftype_v8hi_v8hi;
8795 break;
a3170dc6
AH
8796 case V2SImode:
8797 type = v2si_ftype_v2si_v2si;
8798 break;
8799 case V2SFmode:
8800 type = v2sf_ftype_v2sf_v2sf;
8801 break;
8802 case SImode:
8803 type = int_ftype_int_int;
8804 break;
0ac081f6 8805 default:
37409796 8806 gcc_unreachable ();
0ac081f6
AH
8807 }
8808 }
8809
8810 /* A few other combos we really don't want to do manually. */
8811
8812 /* vint, vfloat, vfloat. */
8813 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8814 type = v4si_ftype_v4sf_v4sf;
8815
8816 /* vshort, vchar, vchar. */
8817 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8818 type = v8hi_ftype_v16qi_v16qi;
8819
8820 /* vint, vshort, vshort. */
8821 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8822 type = v4si_ftype_v8hi_v8hi;
8823
8824 /* vshort, vint, vint. */
8825 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8826 type = v8hi_ftype_v4si_v4si;
8827
8828 /* vchar, vshort, vshort. */
8829 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8830 type = v16qi_ftype_v8hi_v8hi;
8831
8832 /* vint, vchar, vint. */
8833 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8834 type = v4si_ftype_v16qi_v4si;
8835
fa066a23
AH
8836 /* vint, vchar, vchar. */
8837 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8838 type = v4si_ftype_v16qi_v16qi;
8839
0ac081f6
AH
8840 /* vint, vshort, vint. */
8841 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8842 type = v4si_ftype_v8hi_v4si;
f676971a 8843
2212663f
DB
8844 /* vint, vint, 5 bit literal. */
8845 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 8846 type = v4si_ftype_v4si_int;
f676971a 8847
2212663f
DB
8848 /* vshort, vshort, 5 bit literal. */
8849 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 8850 type = v8hi_ftype_v8hi_int;
f676971a 8851
2212663f
DB
8852 /* vchar, vchar, 5 bit literal. */
8853 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 8854 type = v16qi_ftype_v16qi_int;
0ac081f6 8855
617e0e1d
DB
8856 /* vfloat, vint, 5 bit literal. */
8857 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 8858 type = v4sf_ftype_v4si_int;
f676971a 8859
617e0e1d
DB
8860 /* vint, vfloat, 5 bit literal. */
8861 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 8862 type = v4si_ftype_v4sf_int;
617e0e1d 8863
a3170dc6
AH
8864 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8865 type = v2si_ftype_int_int;
8866
8867 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8868 type = v2si_ftype_v2si_char;
8869
8870 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8871 type = v2si_ftype_int_char;
8872
37409796 8873 else
0ac081f6 8874 {
37409796
NS
8875 /* int, x, x. */
8876 gcc_assert (mode0 == SImode);
0ac081f6
AH
8877 switch (mode1)
8878 {
8879 case V4SImode:
8880 type = int_ftype_v4si_v4si;
8881 break;
8882 case V4SFmode:
8883 type = int_ftype_v4sf_v4sf;
8884 break;
8885 case V16QImode:
8886 type = int_ftype_v16qi_v16qi;
8887 break;
8888 case V8HImode:
8889 type = int_ftype_v8hi_v8hi;
8890 break;
8891 default:
37409796 8892 gcc_unreachable ();
0ac081f6
AH
8893 }
8894 }
8895
2212663f
DB
8896 def_builtin (d->mask, d->name, type, d->code);
8897 }
24408032 8898
2212663f
DB
8899 /* Add the simple unary operators. */
8900 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 8901 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
8902 {
8903 enum machine_mode mode0, mode1;
8904 tree type;
58646b77
PB
8905 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8906 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8907
8908 if (is_overloaded)
8909 {
8910 mode0 = VOIDmode;
8911 mode1 = VOIDmode;
8912 }
8913 else
8914 {
8915 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8916 continue;
bb8df8a6 8917
58646b77
PB
8918 mode0 = insn_data[d->icode].operand[0].mode;
8919 mode1 = insn_data[d->icode].operand[1].mode;
8920 }
2212663f
DB
8921
8922 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 8923 type = v4si_ftype_int;
2212663f 8924 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 8925 type = v8hi_ftype_int;
2212663f 8926 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 8927 type = v16qi_ftype_int;
58646b77
PB
8928 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8929 type = opaque_ftype_opaque;
617e0e1d
DB
8930 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8931 type = v4sf_ftype_v4sf;
20e26713
AH
8932 else if (mode0 == V8HImode && mode1 == V16QImode)
8933 type = v8hi_ftype_v16qi;
8934 else if (mode0 == V4SImode && mode1 == V8HImode)
8935 type = v4si_ftype_v8hi;
a3170dc6
AH
8936 else if (mode0 == V2SImode && mode1 == V2SImode)
8937 type = v2si_ftype_v2si;
8938 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8939 type = v2sf_ftype_v2sf;
8940 else if (mode0 == V2SFmode && mode1 == V2SImode)
8941 type = v2sf_ftype_v2si;
8942 else if (mode0 == V2SImode && mode1 == V2SFmode)
8943 type = v2si_ftype_v2sf;
8944 else if (mode0 == V2SImode && mode1 == QImode)
8945 type = v2si_ftype_char;
2212663f 8946 else
37409796 8947 gcc_unreachable ();
2212663f 8948
0ac081f6
AH
8949 def_builtin (d->mask, d->name, type, d->code);
8950 }
8951}
8952
c15c90bb
ZW
8953static void
8954rs6000_init_libfuncs (void)
8955{
8956 if (!TARGET_HARD_FLOAT)
8957 return;
8958
c9034561 8959 if (DEFAULT_ABI != ABI_V4)
c15c90bb 8960 {
c9034561 8961 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
c15c90bb 8962 {
c9034561 8963 /* AIX library routines for float->int conversion. */
85363ca0
ZW
8964 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8965 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
4274207b
DE
8966 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8967 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
c15c90bb
ZW
8968 }
8969
98c41d98
DE
8970 /* AIX/Darwin/64-bit Linux quad floating point routines. */
8971 if (!TARGET_XL_COMPAT)
8972 {
8973 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
8974 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
8975 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
8976 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
8977 }
8978 else
8979 {
8980 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8981 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8982 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8983 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8984 }
c15c90bb 8985 }
c9034561 8986 else
c15c90bb 8987 {
c9034561 8988 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
8989
8990 set_optab_libfunc (add_optab, TFmode, "_q_add");
8991 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8992 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8993 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8994 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
8995 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8996 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8997
c9034561
ZW
8998 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8999 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9000 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9001 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9002 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9003 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9004
85363ca0
ZW
9005 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9006 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9007 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9008 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9009 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9010 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9011 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
c15c90bb
ZW
9012 }
9013}
fba73eb1
DE
9014
9015\f
9016/* Expand a block clear operation, and return 1 if successful. Return 0
9017 if we should let the compiler generate normal code.
9018
9019 operands[0] is the destination
9020 operands[1] is the length
57e84f18 9021 operands[3] is the alignment */
fba73eb1
DE
9022
9023int
9024expand_block_clear (rtx operands[])
9025{
9026 rtx orig_dest = operands[0];
9027 rtx bytes_rtx = operands[1];
57e84f18 9028 rtx align_rtx = operands[3];
5514620a
GK
9029 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9030 HOST_WIDE_INT align;
9031 HOST_WIDE_INT bytes;
fba73eb1
DE
9032 int offset;
9033 int clear_bytes;
5514620a 9034 int clear_step;
fba73eb1
DE
9035
9036 /* If this is not a fixed size clear, just call memset */
9037 if (! constp)
9038 return 0;
9039
37409796
NS
9040 /* This must be a fixed size alignment */
9041 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9042 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9043
9044 /* Anything to clear? */
9045 bytes = INTVAL (bytes_rtx);
9046 if (bytes <= 0)
9047 return 1;
9048
5514620a
GK
9049 /* Use the builtin memset after a point, to avoid huge code bloat.
9050 When optimize_size, avoid any significant code bloat; calling
9051 memset is about 4 instructions, so allow for one instruction to
9052 load zero and three to do clearing. */
9053 if (TARGET_ALTIVEC && align >= 128)
9054 clear_step = 16;
9055 else if (TARGET_POWERPC64 && align >= 32)
9056 clear_step = 8;
9057 else
9058 clear_step = 4;
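  /* With clear_step == 4, for example, the limits below allow at most
     12 bytes to be cleared inline when optimizing for size and 32 bytes
     otherwise; larger blocks fall back to the generic expansion, which
     normally ends up calling memset.  */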
fba73eb1 9059
5514620a
GK
9060 if (optimize_size && bytes > 3 * clear_step)
9061 return 0;
9062 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9063 return 0;
9064
9065 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9066 {
fba73eb1
DE
9067 enum machine_mode mode = BLKmode;
9068 rtx dest;
f676971a 9069
5514620a
GK
9070 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9071 {
9072 clear_bytes = 16;
9073 mode = V4SImode;
9074 }
9075 else if (bytes >= 8 && TARGET_POWERPC64
9076 /* 64-bit loads and stores require word-aligned
9077 displacements. */
9078 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9079 {
9080 clear_bytes = 8;
9081 mode = DImode;
fba73eb1 9082 }
5514620a 9083 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
9084 { /* clear 4 bytes */
9085 clear_bytes = 4;
9086 mode = SImode;
fba73eb1 9087 }
ec53fc93 9088 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
9089 { /* clear 2 bytes */
9090 clear_bytes = 2;
9091 mode = HImode;
fba73eb1
DE
9092 }
9093 else /* clear 1 byte at a time */
9094 {
9095 clear_bytes = 1;
9096 mode = QImode;
fba73eb1 9097 }
f676971a 9098
fba73eb1 9099 dest = adjust_address (orig_dest, mode, offset);
f676971a 9100
5514620a 9101 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9102 }
9103
9104 return 1;
9105}
9106
35aff10b 9107\f
7e69e155
MM
9108/* Expand a block move operation, and return 1 if successful. Return 0
9109 if we should let the compiler generate normal code.
9110
9111 operands[0] is the destination
9112 operands[1] is the source
9113 operands[2] is the length
9114 operands[3] is the alignment */
9115
3933e0e1
MM
9116#define MAX_MOVE_REG 4
9117
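/* Register moves are queued in expand_block_move and flushed MAX_MOVE_REG
   at a time, so each group of loads is emitted before the corresponding
   stores; presumably this is done to give the scheduler more freedom.  */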
7e69e155 9118int
a2369ed3 9119expand_block_move (rtx operands[])
7e69e155 9120{
b6c9286a
MM
9121 rtx orig_dest = operands[0];
9122 rtx orig_src = operands[1];
7e69e155 9123 rtx bytes_rtx = operands[2];
7e69e155 9124 rtx align_rtx = operands[3];
3933e0e1 9125 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9126 int align;
3933e0e1
MM
9127 int bytes;
9128 int offset;
7e69e155 9129 int move_bytes;
cabfd258
GK
9130 rtx stores[MAX_MOVE_REG];
9131 int num_reg = 0;
7e69e155 9132
3933e0e1 9133 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9134 if (! constp)
3933e0e1
MM
9135 return 0;
9136
37409796
NS
9137 /* This must be a fixed size alignment */
9138 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9139 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9140
7e69e155 9141 /* Anything to move? */
3933e0e1
MM
9142 bytes = INTVAL (bytes_rtx);
9143 if (bytes <= 0)
7e69e155
MM
9144 return 1;
9145
ea9982a8 9146 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9147 reg_parm_stack_space. */
ea9982a8 9148 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9149 return 0;
9150
cabfd258 9151 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9152 {
cabfd258 9153 union {
70128ad9 9154 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9155 rtx (*mov) (rtx, rtx);
cabfd258
GK
9156 } gen_func;
9157 enum machine_mode mode = BLKmode;
9158 rtx src, dest;
f676971a 9159
5514620a
GK
9160 /* Altivec first, since it will be faster than a string move
9161 when it applies, and usually not significantly larger. */
9162 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9163 {
9164 move_bytes = 16;
9165 mode = V4SImode;
9166 gen_func.mov = gen_movv4si;
9167 }
9168 else if (TARGET_STRING
cabfd258
GK
9169 && bytes > 24 /* move up to 32 bytes at a time */
9170 && ! fixed_regs[5]
9171 && ! fixed_regs[6]
9172 && ! fixed_regs[7]
9173 && ! fixed_regs[8]
9174 && ! fixed_regs[9]
9175 && ! fixed_regs[10]
9176 && ! fixed_regs[11]
9177 && ! fixed_regs[12])
7e69e155 9178 {
cabfd258 9179 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9180 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9181 }
9182 else if (TARGET_STRING
9183 && bytes > 16 /* move up to 24 bytes at a time */
9184 && ! fixed_regs[5]
9185 && ! fixed_regs[6]
9186 && ! fixed_regs[7]
9187 && ! fixed_regs[8]
9188 && ! fixed_regs[9]
9189 && ! fixed_regs[10])
9190 {
9191 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 9192 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
9193 }
9194 else if (TARGET_STRING
9195 && bytes > 8 /* move up to 16 bytes at a time */
9196 && ! fixed_regs[5]
9197 && ! fixed_regs[6]
9198 && ! fixed_regs[7]
9199 && ! fixed_regs[8])
9200 {
9201 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 9202 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
9203 }
9204 else if (bytes >= 8 && TARGET_POWERPC64
9205 /* 64-bit loads and stores require word-aligned
9206 displacements. */
fba73eb1 9207 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
9208 {
9209 move_bytes = 8;
9210 mode = DImode;
9211 gen_func.mov = gen_movdi;
9212 }
9213 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9214 { /* move up to 8 bytes at a time */
9215 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 9216 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 9217 }
cd7d9ca4 9218 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
9219 { /* move 4 bytes */
9220 move_bytes = 4;
9221 mode = SImode;
9222 gen_func.mov = gen_movsi;
9223 }
ec53fc93 9224 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
9225 { /* move 2 bytes */
9226 move_bytes = 2;
9227 mode = HImode;
9228 gen_func.mov = gen_movhi;
9229 }
9230 else if (TARGET_STRING && bytes > 1)
9231 { /* move up to 4 bytes at a time */
9232 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 9233 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
9234 }
9235 else /* move 1 byte at a time */
9236 {
9237 move_bytes = 1;
9238 mode = QImode;
9239 gen_func.mov = gen_movqi;
9240 }
f676971a 9241
cabfd258
GK
9242 src = adjust_address (orig_src, mode, offset);
9243 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
9244
9245 if (mode != BLKmode)
cabfd258
GK
9246 {
9247 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 9248
cabfd258
GK
9249 emit_insn ((*gen_func.mov) (tmp_reg, src));
9250 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 9251 }
3933e0e1 9252
cabfd258
GK
9253 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9254 {
9255 int i;
9256 for (i = 0; i < num_reg; i++)
9257 emit_insn (stores[i]);
9258 num_reg = 0;
9259 }
35aff10b 9260
cabfd258 9261 if (mode == BLKmode)
7e69e155 9262 {
70128ad9 9263 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
9264 patterns require zero offset. */
9265 if (!REG_P (XEXP (src, 0)))
b6c9286a 9266 {
cabfd258
GK
9267 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9268 src = replace_equiv_address (src, src_reg);
b6c9286a 9269 }
cabfd258 9270 set_mem_size (src, GEN_INT (move_bytes));
f676971a 9271
cabfd258 9272 if (!REG_P (XEXP (dest, 0)))
3933e0e1 9273 {
cabfd258
GK
9274 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9275 dest = replace_equiv_address (dest, dest_reg);
7e69e155 9276 }
cabfd258 9277 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 9278
70128ad9 9279 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
9280 GEN_INT (move_bytes & 31),
9281 align_rtx));
7e69e155 9282 }
7e69e155
MM
9283 }
9284
9285 return 1;
9286}
9287
d62294f5 9288\f
9caa3eb2
DE
9289/* Return a string to perform a load_multiple operation.
9290 operands[0] is the vector.
9291 operands[1] is the source address.
9292 operands[2] is the first destination register. */
9293
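/* If the address register is also one of the destination registers, a plain
   lswi would clobber the address before all words were loaded, so the code
   below loads that register's word separately and last (adjusting the
   address first when it is the first destination).  */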
9294const char *
a2369ed3 9295rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
9296{
9297 /* We have to handle the case where the pseudo used to contain the address
9298 is assigned to one of the output registers. */
9299 int i, j;
9300 int words = XVECLEN (operands[0], 0);
9301 rtx xop[10];
9302
9303 if (XVECLEN (operands[0], 0) == 1)
9304 return "{l|lwz} %2,0(%1)";
9305
9306 for (i = 0; i < words; i++)
9307 if (refers_to_regno_p (REGNO (operands[2]) + i,
9308 REGNO (operands[2]) + i + 1, operands[1], 0))
9309 {
9310 if (i == words-1)
9311 {
9312 xop[0] = GEN_INT (4 * (words-1));
9313 xop[1] = operands[1];
9314 xop[2] = operands[2];
9315 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9316 return "";
9317 }
9318 else if (i == 0)
9319 {
9320 xop[0] = GEN_INT (4 * (words-1));
9321 xop[1] = operands[1];
9322 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9323 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9324 return "";
9325 }
9326 else
9327 {
9328 for (j = 0; j < words; j++)
9329 if (j != i)
9330 {
9331 xop[0] = GEN_INT (j * 4);
9332 xop[1] = operands[1];
9333 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9334 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9335 }
9336 xop[0] = GEN_INT (i * 4);
9337 xop[1] = operands[1];
9338 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9339 return "";
9340 }
9341 }
9342
9343 return "{lsi|lswi} %2,%1,%N0";
9344}
9345
9878760c 9346\f
a4f6c312
SS
9347/* A validation routine: say whether CODE, a condition code, and MODE
9348 match. The other alternatives either don't make sense or should
9349 never be generated. */
39a10a29 9350
48d72335 9351void
a2369ed3 9352validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 9353{
37409796
NS
9354 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9355 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9356 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
9357
9358 /* These don't make sense. */
37409796
NS
9359 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9360 || mode != CCUNSmode);
39a10a29 9361
37409796
NS
9362 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9363 || mode == CCUNSmode);
39a10a29 9364
37409796
NS
9365 gcc_assert (mode == CCFPmode
9366 || (code != ORDERED && code != UNORDERED
9367 && code != UNEQ && code != LTGT
9368 && code != UNGT && code != UNLT
9369 && code != UNGE && code != UNLE));
f676971a
EC
9370
9371 /* These should never be generated except for
bc9ec0e0 9372 flag_finite_math_only. */
37409796
NS
9373 gcc_assert (mode != CCFPmode
9374 || flag_finite_math_only
9375 || (code != LE && code != GE
9376 && code != UNEQ && code != LTGT
9377 && code != UNGT && code != UNLT));
39a10a29
GK
9378
9379 /* These are invalid; the information is not there. */
37409796 9380 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
9381}
9382
9878760c
RK
9383\f
9384/* Return 1 if ANDOP is a mask that has no bits on that are not in the
9385 mask required to convert the result of a rotate insn into a shift
b1765bde 9386 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
9387
9388int
a2369ed3 9389includes_lshift_p (rtx shiftop, rtx andop)
9878760c 9390{
e2c953b6
DE
9391 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9392
9393 shift_mask <<= INTVAL (shiftop);
9878760c 9394
b1765bde 9395 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
9396}
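/* For example, in includes_lshift_p with SHIFTOP == 3 the shift mask is
   0xfffffff8, so an ANDOP of 0x0000fff8 is accepted while 0x0000fffc is
   rejected (bit 2 lies outside the mask).  */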
9397
9398/* Similar, but for right shift. */
9399
9400int
a2369ed3 9401includes_rshift_p (rtx shiftop, rtx andop)
9878760c 9402{
a7653a2c 9403 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
9404
9405 shift_mask >>= INTVAL (shiftop);
9406
b1765bde 9407 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
9408}
9409
c5059423
AM
9410/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9411 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 9412 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
9413
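/* For example, with SHIFTOP == 4 an ANDOP of 0xff0 qualifies (exactly four
   low 0's, a run of 1's, then 0's), while 0xff8 does not, because its low
   group of 0's is only three bits wide.  */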
9414int
a2369ed3 9415includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 9416{
c5059423
AM
9417 if (GET_CODE (andop) == CONST_INT)
9418 {
02071907 9419 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 9420
c5059423 9421 c = INTVAL (andop);
02071907 9422 if (c == 0 || c == ~0)
c5059423 9423 return 0;
e2c953b6 9424
02071907 9425 shift_mask = ~0;
c5059423
AM
9426 shift_mask <<= INTVAL (shiftop);
9427
b6d08ca1 9428 /* Find the least significant one bit. */
c5059423
AM
9429 lsb = c & -c;
9430
9431 /* It must coincide with the LSB of the shift mask. */
9432 if (-lsb != shift_mask)
9433 return 0;
e2c953b6 9434
c5059423
AM
9435 /* Invert to look for the next transition (if any). */
9436 c = ~c;
9437
9438 /* Remove the low group of ones (originally low group of zeros). */
9439 c &= -lsb;
9440
9441 /* Again find the lsb, and check we have all 1's above. */
9442 lsb = c & -c;
9443 return c == -lsb;
9444 }
9445 else if (GET_CODE (andop) == CONST_DOUBLE
9446 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9447 {
02071907
AM
9448 HOST_WIDE_INT low, high, lsb;
9449 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
9450
9451 low = CONST_DOUBLE_LOW (andop);
9452 if (HOST_BITS_PER_WIDE_INT < 64)
9453 high = CONST_DOUBLE_HIGH (andop);
9454
9455 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 9456 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
9457 return 0;
9458
9459 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9460 {
02071907 9461 shift_mask_high = ~0;
c5059423
AM
9462 if (INTVAL (shiftop) > 32)
9463 shift_mask_high <<= INTVAL (shiftop) - 32;
9464
9465 lsb = high & -high;
9466
9467 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9468 return 0;
9469
9470 high = ~high;
9471 high &= -lsb;
9472
9473 lsb = high & -high;
9474 return high == -lsb;
9475 }
9476
02071907 9477 shift_mask_low = ~0;
c5059423
AM
9478 shift_mask_low <<= INTVAL (shiftop);
9479
9480 lsb = low & -low;
9481
9482 if (-lsb != shift_mask_low)
9483 return 0;
9484
9485 if (HOST_BITS_PER_WIDE_INT < 64)
9486 high = ~high;
9487 low = ~low;
9488 low &= -lsb;
9489
9490 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9491 {
9492 lsb = high & -high;
9493 return high == -lsb;
9494 }
9495
9496 lsb = low & -low;
9497 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9498 }
9499 else
9500 return 0;
9501}
e2c953b6 9502
c5059423
AM
9503/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9504 to perform a left shift. It must have SHIFTOP or more least
c1207243 9505 significant 0's, with the remainder of the word 1's. */
e2c953b6 9506
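/* For example, with SHIFTOP == 4 an ANDOP of ~(HOST_WIDE_INT) 0xf (all 1's
   above the four low 0's) qualifies, while 0xff0 does not, because the bits
   above its run of 1's are not all 1's.  */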
c5059423 9507int
a2369ed3 9508includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 9509{
e2c953b6 9510 if (GET_CODE (andop) == CONST_INT)
c5059423 9511 {
02071907 9512 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 9513
02071907 9514 shift_mask = ~0;
c5059423
AM
9515 shift_mask <<= INTVAL (shiftop);
9516 c = INTVAL (andop);
9517
c1207243 9518 /* Find the least significant one bit. */
c5059423
AM
9519 lsb = c & -c;
9520
9521 /* It must be covered by the shift mask.
a4f6c312 9522 This test also rejects c == 0. */
c5059423
AM
9523 if ((lsb & shift_mask) == 0)
9524 return 0;
9525
9526 /* Check we have all 1's above the transition, and reject all 1's. */
9527 return c == -lsb && lsb != 1;
9528 }
9529 else if (GET_CODE (andop) == CONST_DOUBLE
9530 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9531 {
02071907 9532 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
9533
9534 low = CONST_DOUBLE_LOW (andop);
9535
9536 if (HOST_BITS_PER_WIDE_INT < 64)
9537 {
02071907 9538 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
9539
9540 high = CONST_DOUBLE_HIGH (andop);
9541
9542 if (low == 0)
9543 {
02071907 9544 shift_mask_high = ~0;
c5059423
AM
9545 if (INTVAL (shiftop) > 32)
9546 shift_mask_high <<= INTVAL (shiftop) - 32;
9547
9548 lsb = high & -high;
9549
9550 if ((lsb & shift_mask_high) == 0)
9551 return 0;
9552
9553 return high == -lsb;
9554 }
9555 if (high != ~0)
9556 return 0;
9557 }
9558
02071907 9559 shift_mask_low = ~0;
c5059423
AM
9560 shift_mask_low <<= INTVAL (shiftop);
9561
9562 lsb = low & -low;
9563
9564 if ((lsb & shift_mask_low) == 0)
9565 return 0;
9566
9567 return low == -lsb && lsb != 1;
9568 }
e2c953b6 9569 else
c5059423 9570 return 0;
9878760c 9571}
35068b43 9572
11ac38b2
DE
9573/* Return 1 if the operands will generate valid arguments for an rlwimi
9574instruction doing an insert with right shift in 64-bit mode. The mask may
9575not start on the first bit or stop on the last bit because the wrap-around
9576effects of the instruction do not correspond to the semantics of the RTL insn. */
9577
9578int
9579insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9580{
9581 if (INTVAL (startop) < 64
9582 && INTVAL (startop) > 32
9583 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9584 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9585 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9586 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9587 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9588 return 1;
9589
9590 return 0;
9591}
9592
35068b43 9593/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 9594 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
9595
9596int
a2369ed3 9597registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
9598{
9599 /* We might have been passed a SUBREG. */
f676971a 9600 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 9601 return 0;
f676971a 9602
90f81f99
AP
9603 /* We might have been passed non floating point registers. */
9604 if (!FP_REGNO_P (REGNO (reg1))
9605 || !FP_REGNO_P (REGNO (reg2)))
9606 return 0;
35068b43
RK
9607
9608 return (REGNO (reg1) == REGNO (reg2) - 1);
9609}
9610
a4f6c312
SS
9611/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9612 addr1 and addr2 must be in consecutive memory locations
9613 (addr2 == addr1 + 8). */
35068b43
RK
9614
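/* For example, 8(r3) and 16(r3) qualify (same base register, second offset
   exactly 8 bytes past the first), while 8(r3) with 12(r3), or 8(r3) with
   8(r4), do not.  */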
9615int
90f81f99 9616mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 9617{
90f81f99 9618 rtx addr1, addr2;
bb8df8a6
EC
9619 unsigned int reg1, reg2;
9620 int offset1, offset2;
35068b43 9621
90f81f99
AP
9622 /* The mems cannot be volatile. */
9623 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9624 return 0;
f676971a 9625
90f81f99
AP
9626 addr1 = XEXP (mem1, 0);
9627 addr2 = XEXP (mem2, 0);
9628
35068b43
RK
9629 /* Extract an offset (if used) from the first addr. */
9630 if (GET_CODE (addr1) == PLUS)
9631 {
9632 /* If not a REG, return zero. */
9633 if (GET_CODE (XEXP (addr1, 0)) != REG)
9634 return 0;
9635 else
9636 {
c4ad648e 9637 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
9638 /* The offset must be constant! */
9639 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
9640 return 0;
9641 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
9642 }
9643 }
9644 else if (GET_CODE (addr1) != REG)
9645 return 0;
9646 else
9647 {
9648 reg1 = REGNO (addr1);
9649 /* This was a simple (mem (reg)) expression. Offset is 0. */
9650 offset1 = 0;
9651 }
9652
bb8df8a6
EC
9653 /* And now for the second addr. */
9654 if (GET_CODE (addr2) == PLUS)
9655 {
9656 /* If not a REG, return zero. */
9657 if (GET_CODE (XEXP (addr2, 0)) != REG)
9658 return 0;
9659 else
9660 {
9661 reg2 = REGNO (XEXP (addr2, 0));
9662 /* The offset must be constant. */
9663 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9664 return 0;
9665 offset2 = INTVAL (XEXP (addr2, 1));
9666 }
9667 }
9668 else if (GET_CODE (addr2) != REG)
35068b43 9669 return 0;
bb8df8a6
EC
9670 else
9671 {
9672 reg2 = REGNO (addr2);
9673 /* This was a simple (mem (reg)) expression. Offset is 0. */
9674 offset2 = 0;
9675 }
35068b43 9676
bb8df8a6
EC
9677 /* Both of these must have the same base register. */
9678 if (reg1 != reg2)
35068b43
RK
9679 return 0;
9680
9681 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 9682 if (offset2 != offset1 + 8)
35068b43
RK
9683 return 0;
9684
9685 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9686 instructions. */
9687 return 1;
9688}
9878760c
RK
9689\f
9690/* Return the register class of a scratch register needed to copy IN into
9691 or out of a register in CLASS in MODE. If it can be done directly,
9692 NO_REGS is returned. */
9693
9694enum reg_class
f676971a 9695secondary_reload_class (enum reg_class class,
a9baceb1
GK
9696 enum machine_mode mode ATTRIBUTE_UNUSED,
9697 rtx in)
9878760c 9698{
5accd822 9699 int regno;
9878760c 9700
ab82a49f
AP
9701 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9702#if TARGET_MACHO
c4ad648e 9703 && MACHOPIC_INDIRECT
ab82a49f 9704#endif
c4ad648e 9705 ))
46fad5b7
DJ
9706 {
9707 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
9708 other than BASE_REGS for TARGET_ELF. So indicate that a
9709 register from BASE_REGS is needed as an intermediate
9710 register.
f676971a 9711
46fad5b7
DJ
9712 On Darwin, pic addresses require a load from memory, which
9713 needs a base register. */
9714 if (class != BASE_REGS
c4ad648e
AM
9715 && (GET_CODE (in) == SYMBOL_REF
9716 || GET_CODE (in) == HIGH
9717 || GET_CODE (in) == LABEL_REF
9718 || GET_CODE (in) == CONST))
9719 return BASE_REGS;
46fad5b7 9720 }
e7b7998a 9721
5accd822
DE
9722 if (GET_CODE (in) == REG)
9723 {
9724 regno = REGNO (in);
9725 if (regno >= FIRST_PSEUDO_REGISTER)
9726 {
9727 regno = true_regnum (in);
9728 if (regno >= FIRST_PSEUDO_REGISTER)
9729 regno = -1;
9730 }
9731 }
9732 else if (GET_CODE (in) == SUBREG)
9733 {
9734 regno = true_regnum (in);
9735 if (regno >= FIRST_PSEUDO_REGISTER)
9736 regno = -1;
9737 }
9738 else
9739 regno = -1;
9740
9878760c
RK
9741 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9742 into anything. */
9743 if (class == GENERAL_REGS || class == BASE_REGS
9744 || (regno >= 0 && INT_REGNO_P (regno)))
9745 return NO_REGS;
9746
9747 /* Constants, memory, and FP registers can go into FP registers. */
9748 if ((regno == -1 || FP_REGNO_P (regno))
9749 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9750 return NO_REGS;
9751
0ac081f6
AH
9752 /* Memory, and AltiVec registers can go into AltiVec registers. */
9753 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9754 && class == ALTIVEC_REGS)
9755 return NO_REGS;
9756
9878760c
RK
9757 /* We can copy among the CR registers. */
9758 if ((class == CR_REGS || class == CR0_REGS)
9759 && regno >= 0 && CR_REGNO_P (regno))
9760 return NO_REGS;
9761
9762 /* Otherwise, we need GENERAL_REGS. */
9763 return GENERAL_REGS;
9764}
9765\f
9766/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 9767 know this is a valid comparison.
9878760c
RK
9768
9769 SCC_P is 1 if this is for an scc. That means that %D will have been
9770 used instead of %C, so the bits will be in different places.
9771
b4ac57ab 9772 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
9773
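/* For a comparison held in CR field n, base_bit below is 4 * n; an EQ test
   uses bit 4*n + 2 and a GT test bit 4*n + 1, matching the LT/GT/EQ/SO
   layout of each CR field.  */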
9774int
a2369ed3 9775ccr_bit (rtx op, int scc_p)
9878760c
RK
9776{
9777 enum rtx_code code = GET_CODE (op);
9778 enum machine_mode cc_mode;
9779 int cc_regnum;
9780 int base_bit;
9ebbca7d 9781 rtx reg;
9878760c 9782
ec8e098d 9783 if (!COMPARISON_P (op))
9878760c
RK
9784 return -1;
9785
9ebbca7d
GK
9786 reg = XEXP (op, 0);
9787
37409796 9788 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
9789
9790 cc_mode = GET_MODE (reg);
9791 cc_regnum = REGNO (reg);
9792 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 9793
39a10a29 9794 validate_condition_mode (code, cc_mode);
c5defebb 9795
b7053a3f
GK
9796 /* When generating a sCOND operation, only positive conditions are
9797 allowed. */
37409796
NS
9798 gcc_assert (!scc_p
9799 || code == EQ || code == GT || code == LT || code == UNORDERED
9800 || code == GTU || code == LTU);
f676971a 9801
9878760c
RK
9802 switch (code)
9803 {
9804 case NE:
9805 return scc_p ? base_bit + 3 : base_bit + 2;
9806 case EQ:
9807 return base_bit + 2;
1c882ea4 9808 case GT: case GTU: case UNLE:
9878760c 9809 return base_bit + 1;
1c882ea4 9810 case LT: case LTU: case UNGE:
9878760c 9811 return base_bit;
1c882ea4
GK
9812 case ORDERED: case UNORDERED:
9813 return base_bit + 3;
9878760c
RK
9814
9815 case GE: case GEU:
39a10a29 9816 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
9817 unordered position. So test that bit. For integer, this is ! LT
9818 unless this is an scc insn. */
39a10a29 9819 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
9820
9821 case LE: case LEU:
39a10a29 9822 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 9823
9878760c 9824 default:
37409796 9825 gcc_unreachable ();
9878760c
RK
9826 }
9827}
1ff7789b 9828\f
8d30c4ee 9829/* Return the GOT register. */
1ff7789b 9830
9390387d 9831rtx
a2369ed3 9832rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 9833{
a4f6c312
SS
9834 /* The second flow pass currently (June 1999) can't update
9835 regs_ever_live without disturbing other parts of the compiler, so
9836 update it here to make the prolog/epilogue code happy. */
1db02437
FS
9837 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9838 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
1ff7789b 9839
8d30c4ee 9840 current_function_uses_pic_offset_table = 1;
3cb999d8 9841
1ff7789b
MM
9842 return pic_offset_table_rtx;
9843}
a7df97e6 9844\f
e2500fed
GK
9845/* Function to init struct machine_function.
9846 This will be called, via a pointer variable,
9847 from push_function_context. */
a7df97e6 9848
e2500fed 9849static struct machine_function *
863d938c 9850rs6000_init_machine_status (void)
a7df97e6 9851{
e2500fed 9852 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 9853}
9878760c 9854\f
0ba1b2ff
AM
9855/* These macros test for integers and extract the low-order bits. */
9856#define INT_P(X) \
9857((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9858 && GET_MODE (X) == VOIDmode)
9859
9860#define INT_LOWPART(X) \
9861 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9862
9863int
a2369ed3 9864extract_MB (rtx op)
0ba1b2ff
AM
9865{
9866 int i;
9867 unsigned long val = INT_LOWPART (op);
9868
9869 /* If the high bit is zero, the value is the first 1 bit we find
9870 from the left. */
9871 if ((val & 0x80000000) == 0)
9872 {
37409796 9873 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
9874
9875 i = 1;
9876 while (((val <<= 1) & 0x80000000) == 0)
9877 ++i;
9878 return i;
9879 }
9880
9881 /* If the high bit is set and the low bit is not, or the mask is all
9882 1's, the value is zero. */
9883 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9884 return 0;
9885
9886 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9887 from the right. */
9888 i = 31;
9889 while (((val >>= 1) & 1) != 0)
9890 --i;
9891
9892 return i;
9893}
9894
9895int
a2369ed3 9896extract_ME (rtx op)
0ba1b2ff
AM
9897{
9898 int i;
9899 unsigned long val = INT_LOWPART (op);
9900
9901 /* If the low bit is zero, the value is the first 1 bit we find from
9902 the right. */
9903 if ((val & 1) == 0)
9904 {
37409796 9905 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
9906
9907 i = 30;
9908 while (((val >>= 1) & 1) == 0)
9909 --i;
9910
9911 return i;
9912 }
9913
9914 /* If the low bit is set and the high bit is not, or the mask is all
9915 1's, the value is 31. */
9916 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9917 return 31;
9918
9919 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9920 from the left. */
9921 i = 0;
9922 while (((val <<= 1) & 0x80000000) != 0)
9923 ++i;
9924
9925 return i;
9926}
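/* For example, the rlwinm mask 0x00000ff0 gives extract_MB () == 20 and
   extract_ME () == 27 (bits numbered from the most-significant end, as
   rlwinm expects), while the wrap-around mask 0xff0000ff gives MB == 24
   and ME == 7.  */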
9927
c4501e62
JJ
9928/* Locate some local-dynamic symbol still in use by this function
9929 so that we can print its name in some tls_ld pattern. */
9930
9931static const char *
863d938c 9932rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
9933{
9934 rtx insn;
9935
9936 if (cfun->machine->some_ld_name)
9937 return cfun->machine->some_ld_name;
9938
9939 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9940 if (INSN_P (insn)
9941 && for_each_rtx (&PATTERN (insn),
9942 rs6000_get_some_local_dynamic_name_1, 0))
9943 return cfun->machine->some_ld_name;
9944
37409796 9945 gcc_unreachable ();
c4501e62
JJ
9946}
9947
9948/* Helper function for rs6000_get_some_local_dynamic_name. */
9949
9950static int
a2369ed3 9951rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
9952{
9953 rtx x = *px;
9954
9955 if (GET_CODE (x) == SYMBOL_REF)
9956 {
9957 const char *str = XSTR (x, 0);
9958 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9959 {
9960 cfun->machine->some_ld_name = str;
9961 return 1;
9962 }
9963 }
9964
9965 return 0;
9966}
9967
85b776df
AM
9968/* Write out a function code label. */
9969
9970void
9971rs6000_output_function_entry (FILE *file, const char *fname)
9972{
9973 if (fname[0] != '.')
9974 {
9975 switch (DEFAULT_ABI)
9976 {
9977 default:
37409796 9978 gcc_unreachable ();
85b776df
AM
9979
9980 case ABI_AIX:
9981 if (DOT_SYMBOLS)
9982 putc ('.', file);
9983 else
9984 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
9985 break;
9986
9987 case ABI_V4:
9988 case ABI_DARWIN:
9989 break;
9990 }
9991 }
9992 if (TARGET_AIX)
9993 RS6000_OUTPUT_BASENAME (file, fname);
9994 else
9995 assemble_name (file, fname);
9996}
9997
9878760c
RK
9998/* Print an operand. Recognize special options, documented below. */
9999
38c1f2d7 10000#if TARGET_ELF
d9407988 10001#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10002#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10003#else
10004#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10005#define SMALL_DATA_REG 0
ba5e43aa
MM
10006#endif
10007
9878760c 10008void
a2369ed3 10009print_operand (FILE *file, rtx x, int code)
9878760c
RK
10010{
10011 int i;
a260abc9 10012 HOST_WIDE_INT val;
0ba1b2ff 10013 unsigned HOST_WIDE_INT uval;
9878760c
RK
10014
10015 switch (code)
10016 {
a8b3aeda 10017 case '.':
a85d226b
RK
10018 /* Write out an instruction after the call which may be replaced
10019 with glue code by the loader. This depends on the AIX version. */
10020 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10021 return;
10022
81eace42
GK
10023 /* %a is output_address. */
10024
9854d9ed
RK
10025 case 'A':
10026 /* If X is a constant integer whose low-order 5 bits are zero,
10027 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10028 in the AIX assembler where "sri" with a zero shift count
20e26713 10029 writes a trash instruction. */
9854d9ed 10030 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10031 putc ('l', file);
9854d9ed 10032 else
76229ac8 10033 putc ('r', file);
9854d9ed
RK
10034 return;
10035
10036 case 'b':
e2c953b6
DE
10037 /* If constant, low-order 16 bits of constant, unsigned.
10038 Otherwise, write normally. */
10039 if (INT_P (x))
10040 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10041 else
10042 print_operand (file, x, 0);
cad12a8d
RK
10043 return;
10044
a260abc9
DE
10045 case 'B':
10046 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10047 for 64-bit mask direction. */
9390387d 10048 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10049 return;
a260abc9 10050
81eace42
GK
10051 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10052 output_operand. */
10053
423c1189
AH
10054 case 'c':
10055 /* X is a CR register. Print the number of the GT bit of the CR. */
10056 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10057 output_operand_lossage ("invalid %%c value");
10058 else
10059 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10060 return;
10061
10062 case 'D':
6b1fedc3 10063 /* Like 'J' but get to the EQ bit. */
37409796 10064 gcc_assert (GET_CODE (x) == REG);
423c1189 10065
6b1fedc3
AH
10066 /* Bit 2 of the CR field is the EQ bit. */
10067 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
423c1189 10068
64022b5d 10069 fprintf (file, "%d", i);
423c1189
AH
10070 return;
10071
9854d9ed 10072 case 'E':
39a10a29 10073 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
10074 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10075 output_operand_lossage ("invalid %%E value");
78fbdbf7 10076 else
39a10a29 10077 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10078 return;
9854d9ed
RK
10079
10080 case 'f':
10081 /* X is a CR register. Print the shift count needed to move it
10082 to the high-order four bits. */
10083 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10084 output_operand_lossage ("invalid %%f value");
10085 else
9ebbca7d 10086 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10087 return;
10088
10089 case 'F':
10090 /* Similar, but print the count for the rotate in the opposite
10091 direction. */
10092 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10093 output_operand_lossage ("invalid %%F value");
10094 else
9ebbca7d 10095 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10096 return;
10097
10098 case 'G':
10099 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10100 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10101 if (GET_CODE (x) != CONST_INT)
10102 output_operand_lossage ("invalid %%G value");
10103 else if (INTVAL (x) >= 0)
76229ac8 10104 putc ('z', file);
9854d9ed 10105 else
76229ac8 10106 putc ('m', file);
9854d9ed 10107 return;
e2c953b6 10108
9878760c 10109 case 'h':
a4f6c312
SS
10110 /* If constant, output low-order five bits. Otherwise, write
10111 normally. */
9878760c 10112 if (INT_P (x))
5f59ecb7 10113 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10114 else
10115 print_operand (file, x, 0);
10116 return;
10117
64305719 10118 case 'H':
a4f6c312
SS
10119 /* If constant, output low-order six bits. Otherwise, write
10120 normally. */
64305719 10121 if (INT_P (x))
5f59ecb7 10122 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10123 else
10124 print_operand (file, x, 0);
10125 return;
10126
9854d9ed
RK
10127 case 'I':
10128 /* Print `i' if this is a constant, else nothing. */
9878760c 10129 if (INT_P (x))
76229ac8 10130 putc ('i', file);
9878760c
RK
10131 return;
10132
9854d9ed
RK
10133 case 'j':
10134 /* Write the bit number in CCR for jump. */
10135 i = ccr_bit (x, 0);
10136 if (i == -1)
10137 output_operand_lossage ("invalid %%j code");
9878760c 10138 else
9854d9ed 10139 fprintf (file, "%d", i);
9878760c
RK
10140 return;
10141
9854d9ed
RK
10142 case 'J':
10143 /* Similar, but add one for shift count in rlinm for scc and pass
10144 scc flag to `ccr_bit'. */
10145 i = ccr_bit (x, 1);
10146 if (i == -1)
10147 output_operand_lossage ("invalid %%J code");
10148 else
a0466a68
RK
10149 /* If we want bit 31, write a shift count of zero, not 32. */
10150 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10151 return;
10152
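      /* The "+ 1" above turns the CR bit number into the rlwinm rotate
	 count that brings that bit down to the low-order position; rotate
	 amounts are taken modulo 32, which is why bit 31 is printed as a
	 rotate of 0 rather than 32.  */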
9854d9ed
RK
10153 case 'k':
10154 /* X must be a constant. Write the 1's complement of the
10155 constant. */
9878760c 10156 if (! INT_P (x))
9854d9ed 10157 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10158 else
10159 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10160 return;
10161
81eace42 10162 case 'K':
9ebbca7d
GK
10163 /* X must be a symbolic constant on ELF. Write an
10164 expression suitable for an 'addi' that adds in the low 16
10165 bits of the MEM. */
10166 if (GET_CODE (x) != CONST)
10167 {
10168 print_operand_address (file, x);
10169 fputs ("@l", file);
10170 }
10171 else
10172 {
10173 if (GET_CODE (XEXP (x, 0)) != PLUS
10174 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10175 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10176 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10177 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10178 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10179 fputs ("@l", file);
ed8d2920
MM
10180 /* For GNU as, there must be a non-alphanumeric character
10181 between 'l' and the number. The '-' is added by
10182 print_operand() already. */
10183 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10184 fputs ("+", file);
9ebbca7d
GK
10185 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10186 }
81eace42
GK
10187 return;
10188
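      /* For example, given (const (plus (symbol_ref "var") (const_int 8)))
	 the code above prints "var@l+8", i.e. the low 16 bits of var+8;
	 on ELF this is normally paired with a separate @ha high half in an
	 addis/addi sequence.  */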
10189 /* %l is output_asm_label. */
9ebbca7d 10190
9854d9ed
RK
10191 case 'L':
10192 /* Write second word of DImode or DFmode reference. Works on register
10193 or non-indexed memory only. */
10194 if (GET_CODE (x) == REG)
fb5c67a7 10195 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
10196 else if (GET_CODE (x) == MEM)
10197 {
10198 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 10199 we have already done it, we can just use an offset of word. */
9854d9ed
RK
10200 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10201 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
10202 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10203 UNITS_PER_WORD));
9854d9ed 10204 else
d7624dc0
RK
10205 output_address (XEXP (adjust_address_nv (x, SImode,
10206 UNITS_PER_WORD),
10207 0));
ed8908e7 10208
ba5e43aa 10209 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10210 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10211 reg_names[SMALL_DATA_REG]);
9854d9ed 10212 }
9878760c 10213 return;
f676971a 10214
9878760c
RK
10215 case 'm':
10216 /* MB value for a mask operand. */
b1765bde 10217 if (! mask_operand (x, SImode))
9878760c
RK
10218 output_operand_lossage ("invalid %%m value");
10219
0ba1b2ff 10220 fprintf (file, "%d", extract_MB (x));
9878760c
RK
10221 return;
10222
10223 case 'M':
10224 /* ME value for a mask operand. */
b1765bde 10225 if (! mask_operand (x, SImode))
a260abc9 10226 output_operand_lossage ("invalid %%M value");
9878760c 10227
0ba1b2ff 10228 fprintf (file, "%d", extract_ME (x));
9878760c
RK
10229 return;
10230
81eace42
GK
10231 /* %n outputs the negative of its operand. */
10232
9878760c
RK
10233 case 'N':
10234 /* Write the number of elements in the vector times 4. */
10235 if (GET_CODE (x) != PARALLEL)
10236 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
10237 else
10238 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
10239 return;
10240
10241 case 'O':
10242 /* Similar, but subtract 1 first. */
10243 if (GET_CODE (x) != PARALLEL)
1427100a 10244 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
10245 else
10246 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
10247 return;
10248
9854d9ed
RK
10249 case 'p':
10250 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10251 if (! INT_P (x)
2bfcf297 10252 || INT_LOWPART (x) < 0
9854d9ed
RK
10253 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10254 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
10255 else
10256 fprintf (file, "%d", i);
9854d9ed
RK
10257 return;
10258
9878760c
RK
10259 case 'P':
10260 /* The operand must be an indirect memory reference. The result
8bb418a3 10261 is the register name. */
9878760c
RK
10262 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10263 || REGNO (XEXP (x, 0)) >= 32)
10264 output_operand_lossage ("invalid %%P value");
e2c953b6 10265 else
fb5c67a7 10266 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
10267 return;
10268
dfbdccdb
GK
10269 case 'q':
10270 /* This outputs the logical code corresponding to a boolean
10271 expression. The expression may have one or both operands
39a10a29 10272 negated (if one, only the first one). For condition register
c4ad648e
AM
10273 logical operations, it will also treat the negated
10274 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 10275 {
63bc1d05 10276 const char *const *t = 0;
dfbdccdb
GK
10277 const char *s;
10278 enum rtx_code code = GET_CODE (x);
10279 static const char * const tbl[3][3] = {
10280 { "and", "andc", "nor" },
10281 { "or", "orc", "nand" },
10282 { "xor", "eqv", "xor" } };
10283
10284 if (code == AND)
10285 t = tbl[0];
10286 else if (code == IOR)
10287 t = tbl[1];
10288 else if (code == XOR)
10289 t = tbl[2];
10290 else
10291 output_operand_lossage ("invalid %%q value");
10292
10293 if (GET_CODE (XEXP (x, 0)) != NOT)
10294 s = t[0];
10295 else
10296 {
10297 if (GET_CODE (XEXP (x, 1)) == NOT)
10298 s = t[2];
10299 else
10300 s = t[1];
10301 }
f676971a 10302
dfbdccdb
GK
10303 fputs (s, file);
10304 }
10305 return;
10306
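      /* Worked examples of the table above: (and a b) prints "and",
	 (and (not a) b) prints "andc", and (and (not a) (not b)) prints
	 "nor"; the IOR and XOR rows behave the same way.  */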
2c4a9cff
DE
10307 case 'Q':
10308 if (TARGET_MFCRF)
3b6ce0af 10309 fputc (',', file);
5efb1046 10310 /* FALLTHRU */
2c4a9cff
DE
10311 else
10312 return;
10313
9854d9ed
RK
10314 case 'R':
10315 /* X is a CR register. Print the mask for `mtcrf'. */
10316 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10317 output_operand_lossage ("invalid %%R value");
10318 else
9ebbca7d 10319 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 10320 return;
9854d9ed
RK
10321
10322 case 's':
10323 /* Low 5 bits of 32 - value */
10324 if (! INT_P (x))
10325 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
10326 else
10327 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 10328 return;
9854d9ed 10329
a260abc9 10330 case 'S':
0ba1b2ff 10331 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
10332 CONST_INT 32-bit mask is considered sign-extended so any
10333 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 10334 if (! mask64_operand (x, DImode))
a260abc9
DE
10335 output_operand_lossage ("invalid %%S value");
10336
0ba1b2ff 10337 uval = INT_LOWPART (x);
a260abc9 10338
0ba1b2ff 10339 if (uval & 1) /* Clear Left */
a260abc9 10340 {
f099d360
GK
10341#if HOST_BITS_PER_WIDE_INT > 64
10342 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10343#endif
0ba1b2ff 10344 i = 64;
a260abc9 10345 }
0ba1b2ff 10346 else /* Clear Right */
a260abc9 10347 {
0ba1b2ff 10348 uval = ~uval;
f099d360
GK
10349#if HOST_BITS_PER_WIDE_INT > 64
10350 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10351#endif
0ba1b2ff 10352 i = 63;
a260abc9 10353 }
0ba1b2ff
AM
10354 while (uval != 0)
10355 --i, uval >>= 1;
37409796 10356 gcc_assert (i >= 0);
0ba1b2ff
AM
10357 fprintf (file, "%d", i);
10358 return;
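      /* For example, a mask of 0xff (the low eight bits set) takes the
	 "Clear Left" branch, runs the loop eight times from i = 64 and
	 prints 56 -- the position of the first mask bit in MSB-0
	 numbering, since bits 56..63 are the ones kept.  */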
a260abc9 10359
a3170dc6
AH
10360 case 't':
10361 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 10362 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
10363
10364 /* Bit 3 is OV bit. */
10365 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10366
10367 /* If we want bit 31, write a shift count of zero, not 32. */
10368 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10369 return;
10370
cccf3bdc
DE
10371 case 'T':
10372 /* Print the symbolic name of a branch target register. */
10373 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10374 && REGNO (x) != COUNT_REGISTER_REGNUM))
10375 output_operand_lossage ("invalid %%T value");
e2c953b6 10376 else if (REGNO (x) == LINK_REGISTER_REGNUM)
cccf3bdc
DE
10377 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10378 else
10379 fputs ("ctr", file);
10380 return;
10381
9854d9ed 10382 case 'u':
802a0058 10383 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
10384 if (! INT_P (x))
10385 output_operand_lossage ("invalid %%u value");
e2c953b6 10386 else
f676971a 10387 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 10388 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
10389 return;
10390
802a0058
MM
10391 case 'v':
10392 /* High-order 16 bits of constant for use in signed operand. */
10393 if (! INT_P (x))
10394 output_operand_lossage ("invalid %%v value");
e2c953b6 10395 else
134c32f6
DE
10396 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10397 (INT_LOWPART (x) >> 16) & 0xffff);
10398 return;
802a0058 10399
9854d9ed
RK
10400 case 'U':
10401 /* Print `u' if this has an auto-increment or auto-decrement. */
10402 if (GET_CODE (x) == MEM
10403 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10404 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
76229ac8 10405 putc ('u', file);
9854d9ed 10406 return;
9878760c 10407
e0cd0770
JC
10408 case 'V':
10409 /* Print the trap code for this operand. */
10410 switch (GET_CODE (x))
10411 {
10412 case EQ:
10413 fputs ("eq", file); /* 4 */
10414 break;
10415 case NE:
10416 fputs ("ne", file); /* 24 */
10417 break;
10418 case LT:
10419 fputs ("lt", file); /* 16 */
10420 break;
10421 case LE:
10422 fputs ("le", file); /* 20 */
10423 break;
10424 case GT:
10425 fputs ("gt", file); /* 8 */
10426 break;
10427 case GE:
10428 fputs ("ge", file); /* 12 */
10429 break;
10430 case LTU:
10431 fputs ("llt", file); /* 2 */
10432 break;
10433 case LEU:
10434 fputs ("lle", file); /* 6 */
10435 break;
10436 case GTU:
10437 fputs ("lgt", file); /* 1 */
10438 break;
10439 case GEU:
10440 fputs ("lge", file); /* 5 */
10441 break;
10442 default:
37409796 10443 gcc_unreachable ();
e0cd0770
JC
10444 }
10445 break;
10446
9854d9ed
RK
10447 case 'w':
10448 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10449 normally. */
10450 if (INT_P (x))
f676971a 10451 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 10452 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
10453 else
10454 print_operand (file, x, 0);
9878760c
RK
10455 return;
10456
9854d9ed 10457 case 'W':
e2c953b6 10458 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
10459 val = (GET_CODE (x) == CONST_INT
10460 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10461
10462 if (val < 0)
10463 i = -1;
9854d9ed 10464 else
e2c953b6
DE
10465 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10466 if ((val <<= 1) < 0)
10467 break;
10468
10469#if HOST_BITS_PER_WIDE_INT == 32
10470 if (GET_CODE (x) == CONST_INT && i >= 0)
10471 i += 32; /* zero-extend high-part was all 0's */
10472 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10473 {
10474 val = CONST_DOUBLE_LOW (x);
10475
37409796
NS
10476 gcc_assert (val);
10477 if (val < 0)
e2c953b6
DE
10478 --i;
10479 else
10480 for ( ; i < 64; i++)
10481 if ((val <<= 1) < 0)
10482 break;
10483 }
10484#endif
10485
10486 fprintf (file, "%d", i + 1);
9854d9ed 10487 return;
9878760c 10488
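      /* The loop above locates the most significant set bit and prints
	 its position in MSB-0 numbering.  For instance, on a host with a
	 64-bit HOST_WIDE_INT a CONST_INT of 1 goes negative on the shift
	 performed at i == 62, so i + 1 == 63 is printed; a value with the
	 sign bit already set prints 0.  */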
9854d9ed
RK
10489 case 'X':
10490 if (GET_CODE (x) == MEM
4d588c14 10491 && legitimate_indexed_address_p (XEXP (x, 0), 0))
76229ac8 10492 putc ('x', file);
9854d9ed 10493 return;
9878760c 10494
9854d9ed
RK
10495 case 'Y':
10496 /* Like 'L', for third word of TImode */
10497 if (GET_CODE (x) == REG)
fb5c67a7 10498 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 10499 else if (GET_CODE (x) == MEM)
9878760c 10500 {
9854d9ed
RK
10501 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10502 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10503 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 10504 else
d7624dc0 10505 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 10506 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10507 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10508 reg_names[SMALL_DATA_REG]);
9878760c
RK
10509 }
10510 return;
f676971a 10511
9878760c 10512 case 'z':
b4ac57ab
RS
10513 /* X is a SYMBOL_REF. Write out the name preceded by a
10514 period and without any trailing data in brackets. Used for function
4d30c363
MM
10515 names. If we are configured for System V (or the embedded ABI) on
10516 the PowerPC, do not emit the period, since those systems do not use
10517 TOCs and the like. */
37409796 10518 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 10519
c4ad648e
AM
10520 /* Mark the decl as referenced so that cgraph will output the
10521 function. */
9bf6462a 10522 if (SYMBOL_REF_DECL (x))
c4ad648e 10523 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 10524
85b776df 10525 /* For macho, check to see if we need a stub. */
f9da97f0
AP
10526 if (TARGET_MACHO)
10527 {
10528 const char *name = XSTR (x, 0);
a031e781 10529#if TARGET_MACHO
3b48085e 10530 if (MACHOPIC_INDIRECT
11abc112
MM
10531 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10532 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
10533#endif
10534 assemble_name (file, name);
10535 }
85b776df 10536 else if (!DOT_SYMBOLS)
9739c90c 10537 assemble_name (file, XSTR (x, 0));
85b776df
AM
10538 else
10539 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
10540 return;
10541
9854d9ed
RK
10542 case 'Z':
10543 /* Like 'L', for last word of TImode. */
10544 if (GET_CODE (x) == REG)
fb5c67a7 10545 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
10546 else if (GET_CODE (x) == MEM)
10547 {
10548 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10549 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10550 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 10551 else
d7624dc0 10552 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 10553 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10554 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10555 reg_names[SMALL_DATA_REG]);
9854d9ed 10556 }
5c23c401 10557 return;
0ac081f6 10558
a3170dc6 10559 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
10560 case 'y':
10561 {
10562 rtx tmp;
10563
37409796 10564 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
10565
10566 tmp = XEXP (x, 0);
10567
993f19a8 10568 if (TARGET_E500)
a3170dc6
AH
10569 {
10570 /* Handle [reg]. */
10571 if (GET_CODE (tmp) == REG)
10572 {
10573 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10574 break;
10575 }
10576 /* Handle [reg+UIMM]. */
10577 else if (GET_CODE (tmp) == PLUS &&
10578 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10579 {
10580 int x;
10581
37409796 10582 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
10583
10584 x = INTVAL (XEXP (tmp, 1));
10585 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10586 break;
10587 }
10588
10589 /* Fall through. Must be [reg+reg]. */
10590 }
850e8d3d
DN
10591 if (TARGET_ALTIVEC
10592 && GET_CODE (tmp) == AND
10593 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10594 && INTVAL (XEXP (tmp, 1)) == -16)
10595 tmp = XEXP (tmp, 0);
0ac081f6 10596 if (GET_CODE (tmp) == REG)
c62f2db5 10597 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 10598 else
0ac081f6 10599 {
37409796
NS
10600 gcc_assert (GET_CODE (tmp) == PLUS
10601 && GET_CODE (XEXP (tmp, 1)) == REG);
bb8df8a6 10602
0ac081f6
AH
10603 if (REGNO (XEXP (tmp, 0)) == 0)
10604 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10605 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10606 else
10607 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10608 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10609 }
0ac081f6
AH
10610 break;
10611 }
f676971a 10612
9878760c
RK
10613 case 0:
10614 if (GET_CODE (x) == REG)
10615 fprintf (file, "%s", reg_names[REGNO (x)]);
10616 else if (GET_CODE (x) == MEM)
10617 {
10618 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10619 know the width from the mode. */
10620 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
10621 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10622 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10623 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
10624 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10625 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10626 else
a54d04b7 10627 output_address (XEXP (x, 0));
9878760c
RK
10628 }
10629 else
a54d04b7 10630 output_addr_const (file, x);
a85d226b 10631 return;
9878760c 10632
c4501e62
JJ
10633 case '&':
10634 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10635 return;
10636
9878760c
RK
10637 default:
10638 output_operand_lossage ("invalid %%xn code");
10639 }
10640}
10641\f
10642/* Print the address of an operand. */
10643
10644void
a2369ed3 10645print_operand_address (FILE *file, rtx x)
9878760c
RK
10646{
10647 if (GET_CODE (x) == REG)
4697a36c 10648 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
10649 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
10650 || GET_CODE (x) == LABEL_REF)
9878760c
RK
10651 {
10652 output_addr_const (file, x);
ba5e43aa 10653 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10654 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10655 reg_names[SMALL_DATA_REG]);
37409796
NS
10656 else
10657 gcc_assert (!TARGET_TOC);
9878760c
RK
10658 }
10659 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
10660 {
10661 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
10662 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
10663 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 10664 else
4697a36c
MM
10665 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
10666 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
10667 }
10668 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
10669 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
10670 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
10671#if TARGET_ELF
10672 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 10673 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
10674 {
10675 output_addr_const (file, XEXP (x, 1));
10676 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10677 }
c859cda6
DJ
10678#endif
10679#if TARGET_MACHO
10680 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 10681 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
10682 {
10683 fprintf (file, "lo16(");
10684 output_addr_const (file, XEXP (x, 1));
10685 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
10686 }
3cb999d8 10687#endif
4d588c14 10688 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 10689 {
2bfcf297 10690 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 10691 {
2bfcf297
DB
10692 rtx contains_minus = XEXP (x, 1);
10693 rtx minus, symref;
10694 const char *name;
f676971a 10695
9ebbca7d 10696 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 10697 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
10698 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
10699 contains_minus = XEXP (contains_minus, 0);
10700
2bfcf297
DB
10701 minus = XEXP (contains_minus, 0);
10702 symref = XEXP (minus, 0);
10703 XEXP (contains_minus, 0) = symref;
10704 if (TARGET_ELF)
10705 {
10706 char *newname;
10707
10708 name = XSTR (symref, 0);
10709 newname = alloca (strlen (name) + sizeof ("@toc"));
10710 strcpy (newname, name);
10711 strcat (newname, "@toc");
10712 XSTR (symref, 0) = newname;
10713 }
10714 output_addr_const (file, XEXP (x, 1));
10715 if (TARGET_ELF)
10716 XSTR (symref, 0) = name;
9ebbca7d
GK
10717 XEXP (contains_minus, 0) = minus;
10718 }
10719 else
10720 output_addr_const (file, XEXP (x, 1));
10721
10722 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
10723 }
9878760c 10724 else
37409796 10725 gcc_unreachable ();
9878760c
RK
10726}
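/* A quick summary of the address forms handled above: a plain register
   prints as "0(reg)", a reg+reg sum as "reg,reg", a reg+const sum as
   "const(reg)", and on ELF a LO_SUM of a register and a constant prints
   as "sym@l(reg)"; the exact register spelling comes from reg_names and
   therefore depends on the configuration.  */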
10727\f
88cad84b 10728/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
10729 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10730 is defined. It also needs to handle DI-mode objects on 64-bit
10731 targets. */
10732
10733static bool
a2369ed3 10734rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 10735{
f4f4921e 10736#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 10737 /* Special handling for SI values. */
84dcde01 10738 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 10739 {
a2369ed3 10740 extern int in_toc_section (void);
301d03af 10741 static int recurse = 0;
f676971a 10742
301d03af
RS
10743 /* For -mrelocatable, we mark all addresses that need to be fixed up
10744 in the .fixup section. */
10745 if (TARGET_RELOCATABLE
10746 && !in_toc_section ()
10747 && !in_text_section ()
642af3be 10748 && !in_unlikely_text_section ()
301d03af
RS
10749 && !recurse
10750 && GET_CODE (x) != CONST_INT
10751 && GET_CODE (x) != CONST_DOUBLE
10752 && CONSTANT_P (x))
10753 {
10754 char buf[256];
10755
10756 recurse = 1;
10757 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
10758 fixuplabelno++;
10759 ASM_OUTPUT_LABEL (asm_out_file, buf);
10760 fprintf (asm_out_file, "\t.long\t(");
10761 output_addr_const (asm_out_file, x);
10762 fprintf (asm_out_file, ")@fixup\n");
10763 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
10764 ASM_OUTPUT_ALIGN (asm_out_file, 2);
10765 fprintf (asm_out_file, "\t.long\t");
10766 assemble_name (asm_out_file, buf);
10767 fprintf (asm_out_file, "\n\t.previous\n");
10768 recurse = 0;
10769 return true;
10770 }
10771 /* Remove initial .'s to turn a -mcall-aixdesc function
10772 address into the address of the descriptor, not the function
10773 itself. */
10774 else if (GET_CODE (x) == SYMBOL_REF
10775 && XSTR (x, 0)[0] == '.'
10776 && DEFAULT_ABI == ABI_AIX)
10777 {
10778 const char *name = XSTR (x, 0);
10779 while (*name == '.')
10780 name++;
10781
10782 fprintf (asm_out_file, "\t.long\t%s\n", name);
10783 return true;
10784 }
10785 }
f4f4921e 10786#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
10787 return default_assemble_integer (x, size, aligned_p);
10788}
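/* A sketch of what the fixup path above emits for one relocatable word,
   assuming the internal label happens to be spelled .LCP5 (the real name
   comes from ASM_GENERATE_INTERNAL_LABEL):

	.LCP5:
		.long	(sym)@fixup
		.section	".fixup","aw"
		.align 2
		.long	.LCP5
		.previous

   so the .fixup section collects the addresses of all words that must be
   adjusted when -mrelocatable code is moved at run time.  */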
93638d7a
AM
10789
10790#ifdef HAVE_GAS_HIDDEN
10791/* Emit an assembler directive to set symbol visibility for DECL to
10792 VISIBILITY_TYPE. */
10793
5add3202 10794static void
a2369ed3 10795rs6000_assemble_visibility (tree decl, int vis)
93638d7a 10796{
93638d7a
AM
10797 /* Functions need to have their entry point symbol visibility set as
10798 well as their descriptor symbol visibility. */
85b776df
AM
10799 if (DEFAULT_ABI == ABI_AIX
10800 && DOT_SYMBOLS
10801 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 10802 {
25fdb4dc 10803 static const char * const visibility_types[] = {
c4ad648e 10804 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
10805 };
10806
10807 const char *name, *type;
93638d7a
AM
10808
10809 name = ((* targetm.strip_name_encoding)
10810 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 10811 type = visibility_types[vis];
93638d7a 10812
25fdb4dc
RH
10813 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10814 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 10815 }
25fdb4dc
RH
10816 else
10817 default_assemble_visibility (decl, vis);
93638d7a
AM
10818}
10819#endif
301d03af 10820\f
39a10a29 10821enum rtx_code
a2369ed3 10822rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
10823{
10824 /* Reversal of FP compares takes care -- an ordered compare
10825 becomes an unordered compare and vice versa. */
f676971a 10826 if (mode == CCFPmode
bc9ec0e0
GK
10827 && (!flag_finite_math_only
10828 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10829 || code == UNEQ || code == LTGT))
bab6226b 10830 return reverse_condition_maybe_unordered (code);
39a10a29 10831 else
bab6226b 10832 return reverse_condition (code);
39a10a29
GK
10833}
10834
39a10a29
GK
10835/* Generate a compare for CODE. Return a brand-new rtx that
10836 represents the result of the compare. */
a4f6c312 10837
39a10a29 10838static rtx
a2369ed3 10839rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
10840{
10841 enum machine_mode comp_mode;
10842 rtx compare_result;
10843
10844 if (rs6000_compare_fp_p)
10845 comp_mode = CCFPmode;
10846 else if (code == GTU || code == LTU
c4ad648e 10847 || code == GEU || code == LEU)
39a10a29 10848 comp_mode = CCUNSmode;
60934f9c
NS
10849 else if ((code == EQ || code == NE)
10850 && GET_CODE (rs6000_compare_op0) == SUBREG
10851 && GET_CODE (rs6000_compare_op1) == SUBREG
10852 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10853 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10854 /* These are unsigned values, perhaps there will be a later
10855 ordering compare that can be shared with this one.
10856 Unfortunately we cannot detect the signedness of the operands
10857 for non-subregs. */
10858 comp_mode = CCUNSmode;
39a10a29
GK
10859 else
10860 comp_mode = CCmode;
10861
10862 /* First, the compare. */
10863 compare_result = gen_reg_rtx (comp_mode);
a3170dc6
AH
10864
10865 /* SPE FP compare instructions on the GPRs. Yuck! */
993f19a8
AH
10866 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10867 && rs6000_compare_fp_p)
a3170dc6 10868 {
64022b5d 10869 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
10870 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10871
10872 if (op_mode == VOIDmode)
10873 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 10874
423c1189
AH
10875 /* Note: The E500 comparison instructions set the GT bit (x +
10876 1), on success. This explains the mess. */
10877
a3170dc6
AH
10878 switch (code)
10879 {
423c1189 10880 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
10881 switch (op_mode)
10882 {
10883 case SFmode:
10884 cmp = flag_unsafe_math_optimizations
10885 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10886 rs6000_compare_op1)
10887 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10888 rs6000_compare_op1);
10889 break;
10890
10891 case DFmode:
10892 cmp = flag_unsafe_math_optimizations
10893 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10894 rs6000_compare_op1)
10895 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10896 rs6000_compare_op1);
10897 break;
10898
10899 default:
10900 gcc_unreachable ();
10901 }
a3170dc6 10902 break;
bb8df8a6 10903
423c1189 10904 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
10905 switch (op_mode)
10906 {
10907 case SFmode:
10908 cmp = flag_unsafe_math_optimizations
10909 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10910 rs6000_compare_op1)
10911 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10912 rs6000_compare_op1);
10913 break;
bb8df8a6 10914
37409796
NS
10915 case DFmode:
10916 cmp = flag_unsafe_math_optimizations
10917 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10918 rs6000_compare_op1)
10919 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10920 rs6000_compare_op1);
10921 break;
10922
10923 default:
10924 gcc_unreachable ();
10925 }
a3170dc6 10926 break;
bb8df8a6 10927
423c1189 10928 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
10929 switch (op_mode)
10930 {
10931 case SFmode:
10932 cmp = flag_unsafe_math_optimizations
10933 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10934 rs6000_compare_op1)
10935 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10936 rs6000_compare_op1);
10937 break;
bb8df8a6 10938
37409796
NS
10939 case DFmode:
10940 cmp = flag_unsafe_math_optimizations
10941 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
10942 rs6000_compare_op1)
10943 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
10944 rs6000_compare_op1);
10945 break;
10946
10947 default:
10948 gcc_unreachable ();
10949 }
a3170dc6 10950 break;
4d4cbc0e 10951 default:
37409796 10952 gcc_unreachable ();
a3170dc6
AH
10953 }
10954
10955 /* Synthesize LE and GE from LT/GT || EQ. */
10956 if (code == LE || code == GE || code == LEU || code == GEU)
10957 {
a3170dc6
AH
10958 emit_insn (cmp);
10959
10960 switch (code)
10961 {
10962 case LE: code = LT; break;
10963 case GE: code = GT; break;
10964 case LEU: code = LT; break;
10965 case GEU: code = GT; break;
37409796 10966 default: gcc_unreachable ();
a3170dc6
AH
10967 }
10968
a3170dc6
AH
10969 compare_result2 = gen_reg_rtx (CCFPmode);
10970
10971 /* Do the EQ. */
37409796
NS
10972 switch (op_mode)
10973 {
10974 case SFmode:
10975 cmp = flag_unsafe_math_optimizations
10976 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10977 rs6000_compare_op1)
10978 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10979 rs6000_compare_op1);
10980 break;
10981
10982 case DFmode:
10983 cmp = flag_unsafe_math_optimizations
10984 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
10985 rs6000_compare_op1)
10986 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
10987 rs6000_compare_op1);
10988 break;
10989
10990 default:
10991 gcc_unreachable ();
10992 }
a3170dc6
AH
10993 emit_insn (cmp);
10994
a3170dc6 10995 /* OR them together. */
64022b5d
AH
10996 or_result = gen_reg_rtx (CCFPmode);
10997 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
10998 compare_result2);
a3170dc6
AH
10999 compare_result = or_result;
11000 code = EQ;
11001 }
11002 else
11003 {
a3170dc6 11004 if (code == NE || code == LTGT)
a3170dc6 11005 code = NE;
423c1189
AH
11006 else
11007 code = EQ;
a3170dc6
AH
11008 }
11009
11010 emit_insn (cmp);
11011 }
11012 else
de17c25f
DE
11013 {
11014 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11015 CLOBBERs to match cmptf_internal2 pattern. */
11016 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11017 && GET_MODE (rs6000_compare_op0) == TFmode
11018 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
11019 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11020 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11021 gen_rtvec (9,
11022 gen_rtx_SET (VOIDmode,
11023 compare_result,
11024 gen_rtx_COMPARE (comp_mode,
11025 rs6000_compare_op0,
11026 rs6000_compare_op1)),
11027 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11028 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11029 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11030 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11031 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11032 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11033 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11034 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11035 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11036 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11037 {
11038 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11039 comp_mode = CCEQmode;
11040 compare_result = gen_reg_rtx (CCEQmode);
11041 if (TARGET_64BIT)
11042 emit_insn (gen_stack_protect_testdi (compare_result,
11043 rs6000_compare_op0, op1));
11044 else
11045 emit_insn (gen_stack_protect_testsi (compare_result,
11046 rs6000_compare_op0, op1));
11047 }
de17c25f
DE
11048 else
11049 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11050 gen_rtx_COMPARE (comp_mode,
11051 rs6000_compare_op0,
11052 rs6000_compare_op1)));
11053 }
f676971a 11054
ca5adc63 11055 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11056 under flag_finite_math_only we don't bother. */
39a10a29 11057 if (rs6000_compare_fp_p
e7108df9
DE
11058 && !flag_finite_math_only
11059 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
39a10a29
GK
11060 && (code == LE || code == GE
11061 || code == UNEQ || code == LTGT
11062 || code == UNGT || code == UNLT))
11063 {
11064 enum rtx_code or1, or2;
11065 rtx or1_rtx, or2_rtx, compare2_rtx;
11066 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11067
39a10a29
GK
11068 switch (code)
11069 {
11070 case LE: or1 = LT; or2 = EQ; break;
11071 case GE: or1 = GT; or2 = EQ; break;
11072 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11073 case LTGT: or1 = LT; or2 = GT; break;
11074 case UNGT: or1 = UNORDERED; or2 = GT; break;
11075 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11076 default: gcc_unreachable ();
39a10a29
GK
11077 }
11078 validate_condition_mode (or1, comp_mode);
11079 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11080 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11081 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11082 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11083 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11084 const_true_rtx);
11085 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11086
11087 compare_result = or_result;
11088 code = EQ;
11089 }
11090
11091 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11092
1c563bed 11093 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11094}
11095
11096
11097/* Emit the RTL for an sCOND pattern. */
11098
11099void
a2369ed3 11100rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
11101{
11102 rtx condition_rtx;
11103 enum machine_mode op_mode;
b7053a3f 11104 enum rtx_code cond_code;
39a10a29
GK
11105
11106 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
11107 cond_code = GET_CODE (condition_rtx);
11108
423c1189
AH
11109 if (TARGET_E500 && rs6000_compare_fp_p
11110 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11111 {
11112 rtx t;
11113
11114 PUT_MODE (condition_rtx, SImode);
11115 t = XEXP (condition_rtx, 0);
11116
37409796 11117 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
11118
11119 if (cond_code == NE)
64022b5d 11120 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 11121
64022b5d 11122 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
11123 return;
11124 }
11125
b7053a3f
GK
11126 if (cond_code == NE
11127 || cond_code == GE || cond_code == LE
11128 || cond_code == GEU || cond_code == LEU
11129 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11130 {
11131 rtx not_result = gen_reg_rtx (CCEQmode);
11132 rtx not_op, rev_cond_rtx;
11133 enum machine_mode cc_mode;
f676971a 11134
b7053a3f
GK
11135 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11136
1c563bed 11137 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 11138 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
11139 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11140 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11141 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11142 }
39a10a29
GK
11143
11144 op_mode = GET_MODE (rs6000_compare_op0);
11145 if (op_mode == VOIDmode)
11146 op_mode = GET_MODE (rs6000_compare_op1);
11147
11148 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11149 {
11150 PUT_MODE (condition_rtx, DImode);
11151 convert_move (result, condition_rtx, 0);
11152 }
11153 else
11154 {
11155 PUT_MODE (condition_rtx, SImode);
11156 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11157 }
11158}
11159
39a10a29
GK
11160/* Emit a branch of kind CODE to location LOC. */
11161
11162void
a2369ed3 11163rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
11164{
11165 rtx condition_rtx, loc_ref;
11166
11167 condition_rtx = rs6000_generate_compare (code);
11168 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11169 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11170 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11171 loc_ref, pc_rtx)));
11172}
11173
12a4e8c5
GK
11174/* Return the string to output a conditional branch to LABEL, which is
11175 the operand number of the label, or -1 if the branch is really a
f676971a 11176 conditional return.
12a4e8c5
GK
11177
11178 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11179 condition code register and its mode specifies what kind of
11180 comparison we made.
11181
a0ab749a 11182 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
11183
11184 INSN is the insn. */
11185
11186char *
a2369ed3 11187output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
11188{
11189 static char string[64];
11190 enum rtx_code code = GET_CODE (op);
11191 rtx cc_reg = XEXP (op, 0);
11192 enum machine_mode mode = GET_MODE (cc_reg);
11193 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 11194 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
11195 int really_reversed = reversed ^ need_longbranch;
11196 char *s = string;
11197 const char *ccode;
11198 const char *pred;
11199 rtx note;
11200
39a10a29
GK
11201 validate_condition_mode (code, mode);
11202
11203 /* Work out which way this really branches. We could use
11204 reverse_condition_maybe_unordered here always but this
11205 makes the resulting assembler clearer. */
12a4e8c5 11206 if (really_reversed)
de40e1df
DJ
11207 {
11208 /* Reversal of FP compares takes care -- an ordered compare
11209 becomes an unordered compare and vice versa. */
11210 if (mode == CCFPmode)
11211 code = reverse_condition_maybe_unordered (code);
11212 else
11213 code = reverse_condition (code);
11214 }
12a4e8c5 11215
993f19a8 11216 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
11217 {
11218 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11219 to the GT bit. */
37409796
NS
11220 switch (code)
11221 {
11222 case EQ:
11223 /* Opposite of GT. */
11224 code = GT;
11225 break;
11226
11227 case NE:
11228 code = UNLE;
11229 break;
11230
11231 default:
11232 gcc_unreachable ();
11233 }
a3170dc6
AH
11234 }
11235
39a10a29 11236 switch (code)
12a4e8c5
GK
11237 {
11238 /* Not all of these are actually distinct opcodes, but
11239 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
11240 case NE: case LTGT:
11241 ccode = "ne"; break;
11242 case EQ: case UNEQ:
11243 ccode = "eq"; break;
f676971a 11244 case GE: case GEU:
50a0b056 11245 ccode = "ge"; break;
f676971a 11246 case GT: case GTU: case UNGT:
50a0b056 11247 ccode = "gt"; break;
f676971a 11248 case LE: case LEU:
50a0b056 11249 ccode = "le"; break;
f676971a 11250 case LT: case LTU: case UNLT:
50a0b056 11251 ccode = "lt"; break;
12a4e8c5
GK
11252 case UNORDERED: ccode = "un"; break;
11253 case ORDERED: ccode = "nu"; break;
11254 case UNGE: ccode = "nl"; break;
11255 case UNLE: ccode = "ng"; break;
11256 default:
37409796 11257 gcc_unreachable ();
12a4e8c5 11258 }
f676971a
EC
11259
11260 /* Maybe we have a guess as to how likely the branch is.
94a54f47 11261 The old mnemonics don't have a way to specify this information. */
f4857b9b 11262 pred = "";
12a4e8c5
GK
11263 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11264 if (note != NULL_RTX)
11265 {
11266 /* PROB is the difference from 50%. */
11267 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
11268
11269 /* Only hint for highly probable/improbable branches on newer
11270 cpus as static prediction overrides processor dynamic
11271 prediction. For older cpus we may as well always hint, but
11272 assume not taken for branches that are very close to 50% as a
11273 mispredicted taken branch is more expensive than a
f676971a 11274 mispredicted not-taken branch. */
ec507f2d 11275 if (rs6000_always_hint
f4857b9b
AM
11276 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
11277 {
11278 if (abs (prob) > REG_BR_PROB_BASE / 20
11279 && ((prob > 0) ^ need_longbranch))
c4ad648e 11280 pred = "+";
f4857b9b
AM
11281 else
11282 pred = "-";
11283 }
12a4e8c5 11284 }
12a4e8c5
GK
11285
11286 if (label == NULL)
94a54f47 11287 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 11288 else
94a54f47 11289 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 11290
37c67319 11291 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 11292 Assume they'd only be the first character.... */
37c67319
GK
11293 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11294 *s++ = '%';
94a54f47 11295 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
11296
11297 if (label != NULL)
11298 {
11299 /* If the branch distance was too far, we may have to use an
11300 unconditional branch to go the distance. */
11301 if (need_longbranch)
44518ddd 11302 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
11303 else
11304 s += sprintf (s, ",%s", label);
11305 }
11306
11307 return string;
11308}
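/* Putting the pieces above together: a predicted-taken equality branch
   comes out as "{beq|beq+} <crf>,<label>", where the old-mnemonic
   alternative cannot carry a hint and <crf> is the CR field's reg_names
   entry.  When the target is out of conditional-branch range the
   condition has already been reversed above, so the output is the
   inverted conditional branch to "$+8" followed by an unconditional
   "b <label>".  */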
50a0b056 11309
64022b5d 11310/* Return the string to flip the GT bit on a CR. */
423c1189 11311char *
64022b5d 11312output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
11313{
11314 static char string[64];
11315 int a, b;
11316
37409796
NS
11317 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11318 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 11319
64022b5d
AH
11320 /* GT bit. */
11321 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11322 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
11323
11324 sprintf (string, "crnot %d,%d", a, b);
11325 return string;
11326}
11327
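/* For example, flipping the GT bit of cr2 into cr3 prints "crnot 13,9",
   since the GT bit of CR field n is CR bit 4*n + 1.  */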
21213b4c
DP
11328/* Return insn index for the vector compare instruction for given CODE,
11329 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11330 not available. */
11331
11332static int
94ff898d 11333get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
11334 enum machine_mode dest_mode,
11335 enum machine_mode op_mode)
11336{
11337 if (!TARGET_ALTIVEC)
11338 return INSN_NOT_AVAILABLE;
11339
11340 switch (code)
11341 {
11342 case EQ:
11343 if (dest_mode == V16QImode && op_mode == V16QImode)
11344 return UNSPEC_VCMPEQUB;
11345 if (dest_mode == V8HImode && op_mode == V8HImode)
11346 return UNSPEC_VCMPEQUH;
11347 if (dest_mode == V4SImode && op_mode == V4SImode)
11348 return UNSPEC_VCMPEQUW;
11349 if (dest_mode == V4SImode && op_mode == V4SFmode)
11350 return UNSPEC_VCMPEQFP;
11351 break;
11352 case GE:
11353 if (dest_mode == V4SImode && op_mode == V4SFmode)
11354 return UNSPEC_VCMPGEFP;
      /* There is no AltiVec integer vcmpge instruction, so do not fall
	 through to the GT unspecs for integer modes; returning
	 INSN_NOT_AVAILABLE lets the caller synthesize GE as GT OR EQ.  */
      break;
11355 case GT:
11356 if (dest_mode == V16QImode && op_mode == V16QImode)
11357 return UNSPEC_VCMPGTSB;
11358 if (dest_mode == V8HImode && op_mode == V8HImode)
11359 return UNSPEC_VCMPGTSH;
11360 if (dest_mode == V4SImode && op_mode == V4SImode)
11361 return UNSPEC_VCMPGTSW;
11362 if (dest_mode == V4SImode && op_mode == V4SFmode)
11363 return UNSPEC_VCMPGTFP;
11364 break;
11365 case GTU:
11366 if (dest_mode == V16QImode && op_mode == V16QImode)
11367 return UNSPEC_VCMPGTUB;
11368 if (dest_mode == V8HImode && op_mode == V8HImode)
11369 return UNSPEC_VCMPGTUH;
11370 if (dest_mode == V4SImode && op_mode == V4SImode)
11371 return UNSPEC_VCMPGTUW;
11372 break;
11373 default:
11374 break;
11375 }
11376 return INSN_NOT_AVAILABLE;
11377}
11378
11379/* Emit vector compare for operands OP0 and OP1 using code RCODE.
11380 DMODE is expected destination mode. This is a recursive function. */
11381
11382static rtx
11383rs6000_emit_vector_compare (enum rtx_code rcode,
11384 rtx op0, rtx op1,
11385 enum machine_mode dmode)
11386{
11387 int vec_cmp_insn;
11388 rtx mask;
11389 enum machine_mode dest_mode;
11390 enum machine_mode op_mode = GET_MODE (op1);
11391
37409796
NS
11392 gcc_assert (TARGET_ALTIVEC);
11393 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
11394
11395 /* Floating point vector compare instructions use destination V4SImode.
11396 Move destination to appropriate mode later. */
11397 if (dmode == V4SFmode)
11398 dest_mode = V4SImode;
11399 else
11400 dest_mode = dmode;
11401
11402 mask = gen_reg_rtx (dest_mode);
11403 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
11404
11405 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
11406 {
11407 bool swap_operands = false;
11408 bool try_again = false;
11409 switch (rcode)
11410 {
11411 case LT:
11412 rcode = GT;
11413 swap_operands = true;
11414 try_again = true;
11415 break;
11416 case LTU:
11417 rcode = GTU;
11418 swap_operands = true;
11419 try_again = true;
11420 break;
11421 case NE:
11422 /* Treat A != B as ~(A==B). */
11423 {
11424 enum insn_code nor_code;
11425 rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11426 dest_mode);
94ff898d 11427
21213b4c 11428 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
37409796 11429 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
11430 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
11431
11432 if (dmode != dest_mode)
11433 {
11434 rtx temp = gen_reg_rtx (dest_mode);
11435 convert_move (temp, mask, 0);
11436 return temp;
11437 }
11438 return mask;
11439 }
11440 break;
11441 case GE:
11442 case GEU:
11443 case LE:
11444 case LEU:
11445 /* Try GT/GTU/LT/LTU OR EQ */
11446 {
11447 rtx c_rtx, eq_rtx;
11448 enum insn_code ior_code;
11449 enum rtx_code new_code;
11450
37409796
NS
11451 switch (rcode)
11452 {
11453 case GE:
11454 new_code = GT;
11455 break;
11456
11457 case GEU:
11458 new_code = GTU;
11459 break;
11460
11461 case LE:
11462 new_code = LT;
11463 break;
11464
11465 case LEU:
11466 new_code = LTU;
11467 break;
11468
11469 default:
11470 gcc_unreachable ();
11471 }
21213b4c
DP
11472
11473 c_rtx = rs6000_emit_vector_compare (new_code,
11474 op0, op1, dest_mode);
11475 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
11476 dest_mode);
11477
11478 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
37409796 11479 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
11480 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
11481 if (dmode != dest_mode)
11482 {
11483 rtx temp = gen_reg_rtx (dest_mode);
11484 convert_move (temp, mask, 0);
11485 return temp;
11486 }
11487 return mask;
11488 }
11489 break;
11490 default:
37409796 11491 gcc_unreachable ();
21213b4c
DP
11492 }
11493
11494 if (try_again)
11495 {
11496 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
11497 /* You only get two chances. */
11498 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
11499 }
11500
11501 if (swap_operands)
11502 {
11503 rtx tmp;
11504 tmp = op0;
11505 op0 = op1;
11506 op1 = tmp;
11507 }
11508 }
11509
915167f5
GK
11510 emit_insn (gen_rtx_SET (VOIDmode, mask,
11511 gen_rtx_UNSPEC (dest_mode,
11512 gen_rtvec (2, op0, op1),
11513 vec_cmp_insn)));
21213b4c
DP
11514 if (dmode != dest_mode)
11515 {
11516 rtx temp = gen_reg_rtx (dest_mode);
11517 convert_move (temp, mask, 0);
11518 return temp;
11519 }
11520 return mask;
11521}
11522
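/* As an example of the recursion above: a V4SI "a <= b" has no direct
   AltiVec instruction, so it is rewritten as LT, which in turn is emitted
   as a GT compare with the operands swapped; the result is then IORed
   with an EQ compare to complete the LE.  */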
11523 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE if
11524 no valid insn exists for the given mode. */
11525
11526static int
11527get_vsel_insn (enum machine_mode mode)
11528{
11529 switch (mode)
11530 {
11531 case V4SImode:
11532 return UNSPEC_VSEL4SI;
11533 break;
11534 case V4SFmode:
11535 return UNSPEC_VSEL4SF;
11536 break;
11537 case V8HImode:
11538 return UNSPEC_VSEL8HI;
11539 break;
11540 case V16QImode:
11541 return UNSPEC_VSEL16QI;
11542 break;
11543 default:
11544 return INSN_NOT_AVAILABLE;
11545 break;
11546 }
11547 return INSN_NOT_AVAILABLE;
11548}
11549
11550/* Emit vector select insn where DEST is destination using
11551 operands OP1, OP2 and MASK. */
11552
11553static void
11554rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
11555{
11556 rtx t, temp;
11557 enum machine_mode dest_mode = GET_MODE (dest);
11558 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
11559
11560 temp = gen_reg_rtx (dest_mode);
94ff898d 11561
bb8df8a6 11562 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 11563 select op2. */
915167f5
GK
11564 t = gen_rtx_SET (VOIDmode, temp,
11565 gen_rtx_UNSPEC (dest_mode,
11566 gen_rtvec (3, op2, op1, mask),
11567 vsel_insn_index));
21213b4c
DP
11568 emit_insn (t);
11569 emit_move_insn (dest, temp);
11570 return;
11571}
11572
94ff898d 11573/* Emit vector conditional expression.
21213b4c
DP
11574 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11575 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11576
11577int
11578rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
11579 rtx cond, rtx cc_op0, rtx cc_op1)
11580{
11581 enum machine_mode dest_mode = GET_MODE (dest);
11582 enum rtx_code rcode = GET_CODE (cond);
11583 rtx mask;
11584
11585 if (!TARGET_ALTIVEC)
11586 return 0;
11587
11588 /* Get the vector mask for the given relational operations. */
11589 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
11590
11591 rs6000_emit_vector_select (dest, op1, op2, mask);
11592
11593 return 1;
11594}
11595
50a0b056
GK
11596/* Emit a conditional move: move TRUE_COND to DEST if OP of the
11597 operands of the last comparison is nonzero/true, FALSE_COND if it
11598 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 11599
50a0b056 11600int
a2369ed3 11601rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
11602{
11603 enum rtx_code code = GET_CODE (op);
11604 rtx op0 = rs6000_compare_op0;
11605 rtx op1 = rs6000_compare_op1;
11606 REAL_VALUE_TYPE c1;
3148ad6d
DJ
11607 enum machine_mode compare_mode = GET_MODE (op0);
11608 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 11609 rtx temp;
add2402e 11610 bool is_against_zero;
50a0b056 11611
a3c9585f 11612 /* These modes should always match. */
a3170dc6
AH
11613 if (GET_MODE (op1) != compare_mode
11614 /* In the isel case however, we can use a compare immediate, so
11615 op1 may be a small constant. */
11616 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 11617 return 0;
178c3eff 11618 if (GET_MODE (true_cond) != result_mode)
3148ad6d 11619 return 0;
178c3eff 11620 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
11621 return 0;
11622
50a0b056 11623 /* First, work out if the hardware can do this at all, or
a3c9585f 11624 if it's too slow.... */
50a0b056 11625 if (! rs6000_compare_fp_p)
a3170dc6
AH
11626 {
11627 if (TARGET_ISEL)
11628 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11629 return 0;
11630 }
fef98bf2
AH
11631 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
11632 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
11633 return 0;
50a0b056 11634
add2402e 11635 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 11636
add2402e
GK
11637 /* A floating-point subtract might overflow, underflow, or produce
11638 an inexact result, thus changing the floating-point flags, so it
11639 can't be generated if we care about that. It's safe if one side
11640 of the construct is zero, since then no subtract will be
11641 generated. */
11642 if (GET_MODE_CLASS (compare_mode) == MODE_FLOAT
11643 && flag_trapping_math && ! is_against_zero)
11644 return 0;
11645
50a0b056
GK
11646 /* Eliminate half of the comparisons by switching operands, this
11647 makes the remaining code simpler. */
11648 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 11649 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
11650 {
11651 code = reverse_condition_maybe_unordered (code);
11652 temp = true_cond;
11653 true_cond = false_cond;
11654 false_cond = temp;
11655 }
11656
11657 /* UNEQ and LTGT take four instructions for a comparison with zero,
11658 it'll probably be faster to use a branch here too. */
bc9ec0e0 11659 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 11660 return 0;
f676971a 11661
50a0b056
GK
11662 if (GET_CODE (op1) == CONST_DOUBLE)
11663 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 11664
b6d08ca1 11665 /* We're going to try to implement comparisons by performing
50a0b056
GK
11666 a subtract, then comparing against zero. Unfortunately,
11667 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 11668 know that the operand is finite and the comparison
50a0b056 11669 would treat EQ differently from UNORDERED, we can't do it. */
bc9ec0e0 11670 if (HONOR_INFINITIES (compare_mode)
50a0b056 11671 && code != GT && code != UNGE
045572c7 11672 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
11673 /* Constructs of the form (a OP b ? a : b) are safe. */
11674 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 11675 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
11676 && ! rtx_equal_p (op1, true_cond))))
11677 return 0;
add2402e 11678
50a0b056
GK
11679 /* At this point we know we can use fsel. */
11680
11681 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
11682 if (! is_against_zero)
11683 {
11684 temp = gen_reg_rtx (compare_mode);
11685 emit_insn (gen_rtx_SET (VOIDmode, temp,
11686 gen_rtx_MINUS (compare_mode, op0, op1)));
11687 op0 = temp;
11688 op1 = CONST0_RTX (compare_mode);
11689 }
50a0b056
GK
11690
11691 /* If we don't care about NaNs we can reduce some of the comparisons
11692 down to faster ones. */
bc9ec0e0 11693 if (! HONOR_NANS (compare_mode))
50a0b056
GK
11694 switch (code)
11695 {
11696 case GT:
11697 code = LE;
11698 temp = true_cond;
11699 true_cond = false_cond;
11700 false_cond = temp;
11701 break;
11702 case UNGE:
11703 code = GE;
11704 break;
11705 case UNEQ:
11706 code = EQ;
11707 break;
11708 default:
11709 break;
11710 }
11711
11712 /* Now, reduce everything down to a GE. */
11713 switch (code)
11714 {
11715 case GE:
11716 break;
11717
11718 case LE:
3148ad6d
DJ
11719 temp = gen_reg_rtx (compare_mode);
11720 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11721 op0 = temp;
11722 break;
11723
11724 case ORDERED:
3148ad6d
DJ
11725 temp = gen_reg_rtx (compare_mode);
11726 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
11727 op0 = temp;
11728 break;
11729
11730 case EQ:
3148ad6d 11731 temp = gen_reg_rtx (compare_mode);
f676971a 11732 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
11733 gen_rtx_NEG (compare_mode,
11734 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
11735 op0 = temp;
11736 break;
11737
11738 case UNGE:
bc9ec0e0 11739 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 11740 temp = gen_reg_rtx (result_mode);
50a0b056 11741 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 11742 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11743 gen_rtx_GE (VOIDmode,
11744 op0, op1),
11745 true_cond, false_cond)));
bc9ec0e0
GK
11746 false_cond = true_cond;
11747 true_cond = temp;
50a0b056 11748
3148ad6d
DJ
11749 temp = gen_reg_rtx (compare_mode);
11750 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11751 op0 = temp;
11752 break;
11753
11754 case GT:
bc9ec0e0 11755 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 11756 temp = gen_reg_rtx (result_mode);
50a0b056 11757 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 11758 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11759 gen_rtx_GE (VOIDmode,
11760 op0, op1),
11761 true_cond, false_cond)));
bc9ec0e0
GK
11762 true_cond = false_cond;
11763 false_cond = temp;
50a0b056 11764
3148ad6d
DJ
11765 temp = gen_reg_rtx (compare_mode);
11766 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11767 op0 = temp;
11768 break;
11769
11770 default:
37409796 11771 gcc_unreachable ();
50a0b056
GK
11772 }
11773
11774 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 11775 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11776 gen_rtx_GE (VOIDmode,
11777 op0, op1),
11778 true_cond, false_cond)));
11779 return 1;
11780}
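/* A small illustration of the reduction above: "a >= 0.0 ? x : y" already
   matches the final GE-against-zero form and maps onto a single fsel,
   while "a <= 0.0 ? x : y" is handled by negating a first so that the
   same GE test can be used.  */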
11781
a3170dc6
AH
11782/* Same as above, but for ints (isel). */
11783
11784static int
a2369ed3 11785rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
11786{
11787 rtx condition_rtx, cr;
11788
11789 /* All isel implementations thus far are 32-bit. */
11790 if (GET_MODE (rs6000_compare_op0) != SImode)
11791 return 0;
11792
11793 /* We still have to do the compare, because isel doesn't do a
11794 compare, it just looks at the CRx bits set by a previous compare
11795 instruction. */
11796 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11797 cr = XEXP (condition_rtx, 0);
11798
11799 if (GET_MODE (cr) == CCmode)
11800 emit_insn (gen_isel_signed (dest, condition_rtx,
11801 true_cond, false_cond, cr));
11802 else
11803 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11804 true_cond, false_cond, cr));
11805
11806 return 1;
11807}
11808
11809const char *
a2369ed3 11810output_isel (rtx *operands)
a3170dc6
AH
11811{
11812 enum rtx_code code;
11813
11814 code = GET_CODE (operands[1]);
11815 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11816 {
11817 PUT_CODE (operands[1], reverse_condition (code));
11818 return "isel %0,%3,%2,%j1";
11819 }
11820 else
11821 return "isel %0,%2,%3,%j1";
11822}
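/* For example, for a GE condition the code above rewrites it to its
   reverse (LT) and swaps operands 2 and 3 in the template, because isel
   can only test a single CR bit being set; %j1 then prints that bit's
   number, as in the 'j' case of print_operand above.  */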
11823
50a0b056 11824void
a2369ed3 11825rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
11826{
11827 enum machine_mode mode = GET_MODE (op0);
5dc8d536 11828 enum rtx_code c;
50a0b056 11829 rtx target;
5dc8d536
AH
11830
11831 if (code == SMAX || code == SMIN)
11832 c = GE;
11833 else
11834 c = GEU;
11835
50a0b056 11836 if (code == SMAX || code == UMAX)
f676971a 11837 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
11838 op0, op1, mode, 0);
11839 else
f676971a 11840 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 11841 op1, op0, mode, 0);
37409796 11842 gcc_assert (target);
50a0b056
GK
11843 if (target != dest)
11844 emit_move_insn (dest, target);
11845}
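/* Note: the min/max above is open-coded as a conditional move on a GE/GEU
   comparison, so emit_conditional_move can presumably fall back on the
   fsel- and isel-based cmove expanders earlier in this file and avoid a
   branch when those are available.  (A sketch of the intent, not a claim
   about every target variant.)  */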
46c07df8 11846
915167f5
GK
11847/* Emit instructions to perform a load-reserved/store-conditional operation.
11848 The operation performed is an atomic
11849 (set M (CODE:MODE M OP))
11850 If not NULL, BEFORE is atomically set to M before the operation, and
11851 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 11852 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
11853 Either OP or M may be wrapped in a NOT operation. */
11854
11855void
11856rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11857 rtx m, rtx op, rtx before_param, rtx after_param,
11858 bool sync_p)
11859{
11860 enum machine_mode used_mode;
11861 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11862 rtx used_m;
11863 rtvec vec;
11864 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11865 rtx shift = NULL_RTX;
bb8df8a6 11866
915167f5
GK
11867 if (sync_p)
11868 emit_insn (gen_memory_barrier ());
bb8df8a6 11869
915167f5
GK
11870 if (GET_CODE (m) == NOT)
11871 used_m = XEXP (m, 0);
11872 else
11873 used_m = m;
11874
11875 /* If this is smaller than SImode, we'll have to use SImode with
11876 adjustments. */
11877 if (mode == QImode || mode == HImode)
11878 {
11879 rtx newop, oldop;
11880
11881 if (MEM_ALIGN (used_m) >= 32)
11882 {
11883 int ishift = 0;
11884 if (BYTES_BIG_ENDIAN)
11885 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 11886
915167f5
GK
11887 shift = GEN_INT (ishift);
11888 }
11889 else
11890 {
11891 rtx addrSI, aligned_addr;
a9c9d3fa 11892 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 11893
915167f5
GK
11894 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11895 XEXP (used_m, 0)));
11896 shift = gen_reg_rtx (SImode);
11897
11898 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
11899 GEN_INT (shift_mask)));
11900 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
11901
11902 aligned_addr = expand_binop (Pmode, and_optab,
11903 XEXP (used_m, 0),
11904 GEN_INT (-4), NULL_RTX,
11905 1, OPTAB_LIB_WIDEN);
11906 used_m = change_address (used_m, SImode, aligned_addr);
11907 set_mem_align (used_m, 32);
11908 /* It's safe to keep the old alias set of USED_M, because
11909 the operation is atomic and only affects the original
11910 USED_M. */
11911 if (GET_CODE (m) == NOT)
11912 m = gen_rtx_NOT (SImode, used_m);
11913 else
11914 m = used_m;
11915 }
11916
11917 if (GET_CODE (op) == NOT)
11918 {
11919 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11920 oldop = gen_rtx_NOT (SImode, oldop);
11921 }
11922 else
11923 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 11924
915167f5
GK
11925 switch (code)
11926 {
11927 case IOR:
11928 case XOR:
11929 newop = expand_binop (SImode, and_optab,
11930 oldop, GEN_INT (imask), NULL_RTX,
11931 1, OPTAB_LIB_WIDEN);
11932 emit_insn (gen_ashlsi3 (newop, newop, shift));
11933 break;
11934
11935 case AND:
11936 newop = expand_binop (SImode, ior_optab,
11937 oldop, GEN_INT (~imask), NULL_RTX,
11938 1, OPTAB_LIB_WIDEN);
a9c9d3fa 11939 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
11940 break;
11941
11942 case PLUS:
9f0076e5 11943 case MINUS:
915167f5
GK
11944 {
11945 rtx mask;
bb8df8a6 11946
915167f5
GK
11947 newop = expand_binop (SImode, and_optab,
11948 oldop, GEN_INT (imask), NULL_RTX,
11949 1, OPTAB_LIB_WIDEN);
11950 emit_insn (gen_ashlsi3 (newop, newop, shift));
11951
11952 mask = gen_reg_rtx (SImode);
11953 emit_move_insn (mask, GEN_INT (imask));
11954 emit_insn (gen_ashlsi3 (mask, mask, shift));
11955
9f0076e5
DE
11956 if (code == PLUS)
11957 newop = gen_rtx_PLUS (SImode, m, newop);
11958 else
11959 newop = gen_rtx_MINUS (SImode, m, newop);
11960 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
11961 newop = gen_rtx_IOR (SImode, newop,
11962 gen_rtx_AND (SImode,
11963 gen_rtx_NOT (SImode, mask),
11964 m));
11965 break;
11966 }
11967
11968 default:
11969 gcc_unreachable ();
11970 }
11971
a9c9d3fa
GK
11972 if (GET_CODE (m) == NOT)
11973 {
11974 rtx mask, xorm;
11975
11976 mask = gen_reg_rtx (SImode);
11977 emit_move_insn (mask, GEN_INT (imask));
11978 emit_insn (gen_ashlsi3 (mask, mask, shift));
11979
11980 xorm = gen_rtx_XOR (SImode, used_m, mask);
11981 /* Depending on the value of 'op', the XOR or the operation may
11982 be simplified away. */
11983 newop = simplify_gen_binary (code, SImode, xorm, newop);
11984 }
915167f5
GK
11985 op = newop;
11986 used_mode = SImode;
11987 before = gen_reg_rtx (used_mode);
11988 after = gen_reg_rtx (used_mode);
11989 }
11990 else
11991 {
11992 used_mode = mode;
11993 before = before_param;
11994 after = after_param;
11995
11996 if (before == NULL_RTX)
11997 before = gen_reg_rtx (used_mode);
11998 if (after == NULL_RTX)
11999 after = gen_reg_rtx (used_mode);
12000 }
bb8df8a6 12001
9f0076e5
DE
12002 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12003 && used_mode != mode)
915167f5
GK
12004 the_op = op; /* Computed above. */
12005 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12006 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12007 else
12008 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12009
12010 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12011 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12012 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12013 gen_rtx_UNSPEC (used_mode,
12014 gen_rtvec (1, the_op),
12015 UNSPEC_SYNC_OP));
915167f5
GK
12016 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12017
9f0076e5 12018 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12019 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12020 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12021 else
12022 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12023 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12024
12025 /* Shift and mask the return values properly. */
12026 if (used_mode != mode && before_param)
12027 {
12028 emit_insn (gen_lshrsi3 (before, before, shift));
12029 convert_move (before_param, before, 1);
12030 }
12031
12032 if (used_mode != mode && after_param)
12033 {
12034 emit_insn (gen_lshrsi3 (after, after, shift));
12035 convert_move (after_param, after, 1);
12036 }
12037
12038 /* The previous sequence will end with a branch that's dependent on
12039 the conditional store, so placing an isync will ensure that no
12040 other instructions (especially, no load or store instructions)
12041 can start before the atomic operation completes. */
12042 if (sync_p)
12043 emit_insn (gen_isync ());
12044}
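/* Summary of the narrow-mode path above: for QImode/HImode the operation is
   widened to SImode.  The containing word is addressed at (addr & -4),
   SHIFT positions the narrow value within that word, and the mask logic
   preserves the bytes of the word that the operation must not touch, so the
   load-reserved/store-conditional sequence still operates on a full,
   naturally aligned word.  */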
12045
b52110d4
DE
12046/* A subroutine of the atomic operation splitters. Jump to LABEL if
12047 COND is true. Mark the jump as unlikely to be taken. */
12048
12049static void
12050emit_unlikely_jump (rtx cond, rtx label)
12051{
12052 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12053 rtx x;
12054
12055 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12056 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12057 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12058}
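/* REG_BR_PROB_BASE is 10000, so REG_BR_PROB_BASE / 100 - 1 annotates the
   jump as taken with just under a 1% probability.  */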
12059
12060/* A subroutine of the atomic operation splitters. Emit a load-locked
12061 instruction in MODE. */
12062
12063static void
12064emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12065{
12066 rtx (*fn) (rtx, rtx) = NULL;
12067 if (mode == SImode)
12068 fn = gen_load_locked_si;
12069 else if (mode == DImode)
12070 fn = gen_load_locked_di;
12071 emit_insn (fn (reg, mem));
12072}
12073
12074/* A subroutine of the atomic operation splitters. Emit a store-conditional
12075 instruction in MODE. */
12076
12077static void
12078emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12079{
12080 rtx (*fn) (rtx, rtx, rtx) = NULL;
12081 if (mode == SImode)
12082 fn = gen_store_conditional_si;
12083 else if (mode == DImode)
12084 fn = gen_store_conditional_di;
12085
9f0076e5 12086 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12087 if (PPC405_ERRATUM77)
12088 emit_insn (gen_memory_barrier ());
12089
12090 emit_insn (fn (res, mem, val));
12091}
12092
9f0076e5 12093/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 12094 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12095 operand of the binary operator. BEFORE and AFTER are optional locations to
12096 return the value of MEM either before or after the operation. SCRATCH is
12097 a scratch register. */
12098
12099void
12100rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12101 rtx before, rtx after, rtx scratch)
12102{
12103 enum machine_mode mode = GET_MODE (mem);
12104 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12105
12106 emit_insn (gen_memory_barrier ());
12107
12108 label = gen_label_rtx ();
12109 emit_label (label);
12110 label = gen_rtx_LABEL_REF (VOIDmode, label);
12111
12112 if (before == NULL_RTX)
12113 before = scratch;
12114 emit_load_locked (mode, before, mem);
12115
12116 if (code == NOT)
12117 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12118 else if (code == AND)
12119 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12120 else
12121 x = gen_rtx_fmt_ee (code, mode, before, val);
12122
12123 if (after != NULL_RTX)
12124 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12125 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12126
12127 emit_store_conditional (mode, cond, mem, scratch);
12128
12129 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12130 emit_unlikely_jump (x, label);
12131
12132 emit_insn (gen_isync ());
12133}
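/* For reference, the SImode sequence produced by the splitter above is
   roughly (a sketch, not the literal assembler output):

	sync
     1:	lwarx	scratch,0,mem
	<op>	scratch,scratch,val
	stwcx.	scratch,0,mem
	bne-	1b
	isync

   i.e. the retry loop backing GCC's __sync_fetch_and_<op> style builtins
   on this port; DImode uses ldarx/stdcx. instead.  */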
12134
b52110d4
DE
12135/* Expand an atomic compare and swap operation. MEM is the memory on which
12136 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12137 value to be stored. SCRATCH is a scratch GPR. */
12138
12139void
12140rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12141 rtx scratch)
12142{
12143 enum machine_mode mode = GET_MODE (mem);
12144 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12145
12146 emit_insn (gen_memory_barrier ());
12147
12148 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12149 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12150 emit_label (XEXP (label1, 0));
12151
12152 emit_load_locked (mode, retval, mem);
12153
12154 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12155 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12156
12157 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12158 emit_unlikely_jump (x, label2);
12159
12160 emit_move_insn (scratch, newval);
12161 emit_store_conditional (mode, cond, mem, scratch);
12162
12163 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12164 emit_unlikely_jump (x, label1);
12165
12166 emit_insn (gen_isync ());
12167 emit_label (XEXP (label2, 0));
12168}
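/* Likewise, the compare-and-swap splitter above expands to roughly:

	sync
     1:	lwarx	retval,0,mem
	cmpw	retval,oldval
	bne-	2f
	stwcx.	newval,0,mem
	bne-	1b
	isync
     2:

   (a sketch; the store actually goes through SCRATCH, and DImode uses
   ldarx/stdcx.).  */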
12169
12170/* Expand an atomic test and set operation. MEM is the memory on which
12171 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12172
12173void
12174rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12175{
12176 enum machine_mode mode = GET_MODE (mem);
12177 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12178
12179 emit_insn (gen_memory_barrier ());
12180
12181 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12182 emit_label (XEXP (label, 0));
12183
12184 emit_load_locked (mode, retval, mem);
12185 emit_move_insn (scratch, val);
12186 emit_store_conditional (mode, cond, mem, scratch);
12187
12188 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12189 emit_unlikely_jump (x, label);
12190
12191 emit_insn (gen_isync ());
12192}
12193
12194 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
12195 multi-register moves. It will emit at most one instruction for
12196 each register that is accessed; that is, it won't emit li/lis pairs
12197 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12198 register. */
46c07df8 12199
46c07df8 12200void
a9baceb1 12201rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 12202{
a9baceb1
GK
12203 /* The register number of the first register being moved. */
12204 int reg;
12205 /* The mode that is to be moved. */
12206 enum machine_mode mode;
12207 /* The mode that the move is being done in, and its size. */
12208 enum machine_mode reg_mode;
12209 int reg_mode_size;
12210 /* The number of registers that will be moved. */
12211 int nregs;
12212
12213 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12214 mode = GET_MODE (dst);
c8b622ff 12215 nregs = hard_regno_nregs[reg][mode];
a9baceb1
GK
12216 if (FP_REGNO_P (reg))
12217 reg_mode = DFmode;
12218 else if (ALTIVEC_REGNO_P (reg))
12219 reg_mode = V16QImode;
12220 else
12221 reg_mode = word_mode;
12222 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 12223
37409796 12224 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 12225
a9baceb1
GK
12226 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12227 {
12228 /* Move register range backwards, if we might have destructive
12229 overlap. */
12230 int i;
12231 for (i = nregs - 1; i >= 0; i--)
f676971a 12232 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
12233 simplify_gen_subreg (reg_mode, dst, mode,
12234 i * reg_mode_size),
12235 simplify_gen_subreg (reg_mode, src, mode,
12236 i * reg_mode_size)));
12237 }
46c07df8
HP
12238 else
12239 {
a9baceb1
GK
12240 int i;
12241 int j = -1;
12242 bool used_update = false;
46c07df8 12243
c1e55850 12244 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
12245 {
12246 rtx breg;
3a1f863f 12247
a9baceb1
GK
12248 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12249 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
12250 {
12251 rtx delta_rtx;
a9baceb1 12252 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
12253 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12254 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12255 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
12256 emit_insn (TARGET_32BIT
12257 ? gen_addsi3 (breg, breg, delta_rtx)
12258 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12259 src = replace_equiv_address (src, breg);
3a1f863f 12260 }
c1e55850
GK
12261 else if (! offsettable_memref_p (src))
12262 {
13e2e16e 12263 rtx basereg;
c1e55850
GK
12264 basereg = gen_rtx_REG (Pmode, reg);
12265 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 12266 src = replace_equiv_address (src, basereg);
c1e55850 12267 }
3a1f863f 12268
0423421f
AM
12269 breg = XEXP (src, 0);
12270 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12271 breg = XEXP (breg, 0);
12272
12273 /* If the base register we are using to address memory is
12274 also a destination reg, then change that register last. */
12275 if (REG_P (breg)
12276 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
12277 && REGNO (breg) < REGNO (dst) + nregs)
12278 j = REGNO (breg) - REGNO (dst);
c4ad648e 12279 }
46c07df8 12280
a9baceb1 12281 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
12282 {
12283 rtx breg;
12284
a9baceb1
GK
12285 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12286 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
12287 {
12288 rtx delta_rtx;
a9baceb1 12289 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
12290 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12291 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12292 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
12293
12294 /* We have to update the breg before doing the store.
12295 Use store with update, if available. */
12296
12297 if (TARGET_UPDATE)
12298 {
a9baceb1 12299 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
12300 emit_insn (TARGET_32BIT
12301 ? (TARGET_POWERPC64
12302 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12303 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12304 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 12305 used_update = true;
3a1f863f
DE
12306 }
12307 else
a9baceb1
GK
12308 emit_insn (TARGET_32BIT
12309 ? gen_addsi3 (breg, breg, delta_rtx)
12310 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12311 dst = replace_equiv_address (dst, breg);
3a1f863f 12312 }
37409796
NS
12313 else
12314 gcc_assert (offsettable_memref_p (dst));
3a1f863f
DE
12315 }
12316
46c07df8 12317 for (i = 0; i < nregs; i++)
f676971a 12318 {
3a1f863f
DE
12319 /* Calculate index to next subword. */
12320 ++j;
f676971a 12321 if (j == nregs)
3a1f863f 12322 j = 0;
46c07df8 12323
112cdef5 12324 /* If the compiler has already emitted the move of the first word by
a9baceb1 12325 store with update, there is nothing more to do. */
3a1f863f 12326 if (j == 0 && used_update)
a9baceb1 12327 continue;
f676971a 12328
a9baceb1
GK
12329 emit_insn (gen_rtx_SET (VOIDmode,
12330 simplify_gen_subreg (reg_mode, dst, mode,
12331 j * reg_mode_size),
12332 simplify_gen_subreg (reg_mode, src, mode,
12333 j * reg_mode_size)));
3a1f863f 12334 }
46c07df8
HP
12335 }
12336}
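/* Example of the overlap handling above: a DImode value held in two 32-bit
   GPRs is split into two SImode register moves; when the source and
   destination ranges overlap (REGNO (src) < REGNO (dst)) the subwords are
   copied from the last register down to the first, so no source register is
   clobbered before it has been read.  */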
12337
12a4e8c5 12338\f
a4f6c312
SS
12339/* This page contains routines that are used to determine what the
12340 function prologue and epilogue code will do and write them out. */
9878760c 12341
a4f6c312
SS
12342/* Return the first fixed-point register that is required to be
12343 saved. 32 if none. */
9878760c
RK
12344
12345int
863d938c 12346first_reg_to_save (void)
9878760c
RK
12347{
12348 int first_reg;
12349
12350 /* Find lowest numbered live register. */
12351 for (first_reg = 13; first_reg <= 31; first_reg++)
f676971a 12352 if (regs_ever_live[first_reg]
a38d360d 12353 && (! call_used_regs[first_reg]
1db02437 12354 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 12355 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
12356 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12357 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
12358 break;
12359
ee890fe2 12360#if TARGET_MACHO
93638d7a
AM
12361 if (flag_pic
12362 && current_function_uses_pic_offset_table
12363 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 12364 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
12365#endif
12366
9878760c
RK
12367 return first_reg;
12368}
12369
12370/* Similar, for FP regs. */
12371
12372int
863d938c 12373first_fp_reg_to_save (void)
9878760c
RK
12374{
12375 int first_reg;
12376
12377 /* Find lowest numbered live register. */
12378 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12379 if (regs_ever_live[first_reg])
12380 break;
12381
12382 return first_reg;
12383}
00b960c7
AH
12384
12385/* Similar, for AltiVec regs. */
12386
12387static int
863d938c 12388first_altivec_reg_to_save (void)
00b960c7
AH
12389{
12390 int i;
12391
12392 /* Stack frame remains as is unless we are in AltiVec ABI. */
12393 if (! TARGET_ALTIVEC_ABI)
12394 return LAST_ALTIVEC_REGNO + 1;
12395
12396 /* Find lowest numbered live register. */
12397 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12398 if (regs_ever_live[i])
12399 break;
12400
12401 return i;
12402}
12403
12404/* Return a 32-bit mask of the AltiVec registers we need to set in
12405 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12406 the 32-bit word is 0. */
12407
12408static unsigned int
863d938c 12409compute_vrsave_mask (void)
00b960c7
AH
12410{
12411 unsigned int i, mask = 0;
12412
12413 /* First, find out if we use _any_ altivec registers. */
12414 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12415 if (regs_ever_live[i])
12416 mask |= ALTIVEC_REG_BIT (i);
12417
12418 if (mask == 0)
12419 return mask;
12420
00b960c7
AH
12421 /* Next, remove the argument registers from the set. These must
12422 be in the VRSAVE mask set by the caller, so we don't need to add
12423 them in again. More importantly, the mask we compute here is
12424 used to generate CLOBBERs in the set_vrsave insn, and we do not
12425 wish the argument registers to die. */
a6cf80f2 12426 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
12427 mask &= ~ALTIVEC_REG_BIT (i);
12428
12429 /* Similarly, remove the return value from the set. */
12430 {
12431 bool yes = false;
12432 diddle_return_value (is_altivec_return_reg, &yes);
12433 if (yes)
12434 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12435 }
12436
12437 return mask;
12438}
12439
d62294f5 12440/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
12441 size of prologues/epilogues by calling our own save/restore-the-world
12442 routines. */
d62294f5
FJ
12443
12444static void
f57fe068
AM
12445compute_save_world_info (rs6000_stack_t *info_ptr)
12446{
12447 info_ptr->world_save_p = 1;
12448 info_ptr->world_save_p
12449 = (WORLD_SAVE_P (info_ptr)
12450 && DEFAULT_ABI == ABI_DARWIN
12451 && ! (current_function_calls_setjmp && flag_exceptions)
12452 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12453 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12454 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12455 && info_ptr->cr_save_p);
f676971a 12456
d62294f5
FJ
12457 /* This will not work in conjunction with sibcalls. Make sure there
12458 are none. (This check is expensive, but seldom executed.) */
f57fe068 12459 if (WORLD_SAVE_P (info_ptr))
f676971a 12460 {
d62294f5
FJ
12461 rtx insn;
12462 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
12463 if ( GET_CODE (insn) == CALL_INSN
12464 && SIBLING_CALL_P (insn))
12465 {
12466 info_ptr->world_save_p = 0;
12467 break;
12468 }
d62294f5 12469 }
f676971a 12470
f57fe068 12471 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
12472 {
12473 /* Even if we're not touching VRsave, make sure there's room on the
12474 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 12475 will attempt to save it. */
d62294f5
FJ
12476 info_ptr->vrsave_size = 4;
12477
12478 /* "Save" the VRsave register too if we're saving the world. */
12479 if (info_ptr->vrsave_mask == 0)
c4ad648e 12480 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
12481
12482 /* Because the Darwin register save/restore routines only handle
c4ad648e 12483 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 12484 check. */
37409796
NS
12485 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12486 && (info_ptr->first_altivec_reg_save
12487 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 12488 }
f676971a 12489 return;
d62294f5
FJ
12490}
12491
12492
00b960c7 12493static void
a2369ed3 12494is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
12495{
12496 bool *yes = (bool *) xyes;
12497 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12498 *yes = true;
12499}
12500
4697a36c
MM
12501\f
12502/* Calculate the stack information for the current function. This is
12503 complicated by having two separate calling sequences, the AIX calling
12504 sequence and the V.4 calling sequence.
12505
592696dd 12506 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 12507 32-bit 64-bit
4697a36c 12508 SP----> +---------------------------------------+
a260abc9 12509 | back chain to caller | 0 0
4697a36c 12510 +---------------------------------------+
a260abc9 12511 | saved CR | 4 8 (8-11)
4697a36c 12512 +---------------------------------------+
a260abc9 12513 | saved LR | 8 16
4697a36c 12514 +---------------------------------------+
a260abc9 12515 | reserved for compilers | 12 24
4697a36c 12516 +---------------------------------------+
a260abc9 12517 | reserved for binders | 16 32
4697a36c 12518 +---------------------------------------+
a260abc9 12519 | saved TOC pointer | 20 40
4697a36c 12520 +---------------------------------------+
a260abc9 12521 | Parameter save area (P) | 24 48
4697a36c 12522 +---------------------------------------+
a260abc9 12523 | Alloca space (A) | 24+P etc.
802a0058 12524 +---------------------------------------+
a7df97e6 12525 | Local variable space (L) | 24+P+A
4697a36c 12526 +---------------------------------------+
a7df97e6 12527 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 12528 +---------------------------------------+
00b960c7
AH
12529 | Save area for AltiVec registers (W) | 24+P+A+L+X
12530 +---------------------------------------+
12531 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12532 +---------------------------------------+
12533 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 12534 +---------------------------------------+
00b960c7
AH
12535 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
12536 +---------------------------------------+
12537 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
12538 +---------------------------------------+
12539 old SP->| back chain to caller's caller |
12540 +---------------------------------------+
12541
5376a30c
KR
12542 The required alignment for AIX configurations is two words (i.e., 8
12543 or 16 bytes).
12544
12545
4697a36c
MM
12546 V.4 stack frames look like:
12547
12548 SP----> +---------------------------------------+
12549 | back chain to caller | 0
12550 +---------------------------------------+
5eb387b8 12551 | caller's saved LR | 4
4697a36c
MM
12552 +---------------------------------------+
12553 | Parameter save area (P) | 8
12554 +---------------------------------------+
a7df97e6 12555 | Alloca space (A) | 8+P
f676971a 12556 +---------------------------------------+
a7df97e6 12557 | Varargs save area (V) | 8+P+A
f676971a 12558 +---------------------------------------+
a7df97e6 12559 | Local variable space (L) | 8+P+A+V
f676971a 12560 +---------------------------------------+
a7df97e6 12561 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 12562 +---------------------------------------+
00b960c7
AH
12563 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12564 +---------------------------------------+
12565 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12566 +---------------------------------------+
12567 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12568 +---------------------------------------+
c4ad648e
AM
12569 | SPE: area for 64-bit GP registers |
12570 +---------------------------------------+
12571 | SPE alignment padding |
12572 +---------------------------------------+
00b960c7 12573 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 12574 +---------------------------------------+
00b960c7 12575 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 12576 +---------------------------------------+
00b960c7 12577 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
12578 +---------------------------------------+
12579 old SP->| back chain to caller's caller |
12580 +---------------------------------------+
b6c9286a 12581
5376a30c
KR
12582 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12583 given. (But note below and in sysv4.h that we require only 8 and
12584 may round up the size of our stack frame anyway. The historical
12585 reason is early versions of powerpc-linux which didn't properly
12586 align the stack at program startup. A happy side-effect is that
12587 -mno-eabi libraries can be used with -meabi programs.)
12588
50d440bc 12589 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
12590 the stack alignment requirements may differ. If -mno-eabi is not
12591 given, the required stack alignment is 8 bytes; if -mno-eabi is
12592 given, the required alignment is 16 bytes. (But see V.4 comment
12593 above.) */
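/* A small worked example of the AIX numbers above (illustrative only): on
   32-bit AIX the fixed header is 24 bytes, so with the usual 32-byte
   minimum parameter save area (P = 32) and no alloca (A = 0), alloca space
   and the local variable area both begin at offset 56 from the new SP,
   while the register save areas sit just below the old SP at the negative
   offsets computed in rs6000_stack_info below.  */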
4697a36c 12594
61b2fbe7
MM
12595#ifndef ABI_STACK_BOUNDARY
12596#define ABI_STACK_BOUNDARY STACK_BOUNDARY
12597#endif
12598
d1d0c603 12599static rs6000_stack_t *
863d938c 12600rs6000_stack_info (void)
4697a36c
MM
12601{
12602 static rs6000_stack_t info, zero_info;
12603 rs6000_stack_t *info_ptr = &info;
327e5343 12604 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 12605 int ehrd_size;
64045029 12606 int save_align;
44688022 12607 HOST_WIDE_INT non_fixed_size;
4697a36c 12608
a4f6c312 12609 /* Zero all fields portably. */
4697a36c
MM
12610 info = zero_info;
12611
c19de7aa
AH
12612 if (TARGET_SPE)
12613 {
12614 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 12615 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
12616 cfun->machine->insn_chain_scanned_p
12617 = spe_func_has_64bit_regs_p () + 1;
12618 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
12619 }
12620
a4f6c312 12621 /* Select which calling sequence. */
178274da 12622 info_ptr->abi = DEFAULT_ABI;
9878760c 12623
a4f6c312 12624 /* Calculate which registers need to be saved & save area size. */
4697a36c 12625 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 12626 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 12627 even if it currently looks like we won't. */
2bfcf297 12628 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
12629 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12630 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
12631 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12632 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
12633 else
12634 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 12635
a3170dc6
AH
12636 /* For the SPE, we have an additional upper 32-bits on each GPR.
12637 Ideally we should save the entire 64-bits only when the upper
12638 half is used in SIMD instructions. Since we only record
12639 registers live (not the size they are used in), this proves
12640 difficult because we'd have to traverse the instruction chain at
12641 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
12642 so we opt to save the GPRs in 64 bits whenever even one register
12643 gets used in 64 bits. Otherwise, all the registers in the frame
12644 get saved in 32 bits.
a3170dc6 12645
c19de7aa 12646 So, when we save all GPRs (except the SP) in 64 bits, the
a3170dc6 12647 traditional GP save area will be empty. */
c19de7aa 12648 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
12649 info_ptr->gp_size = 0;
12650
4697a36c
MM
12651 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12652 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12653
00b960c7
AH
12654 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12655 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12656 - info_ptr->first_altivec_reg_save);
12657
592696dd 12658 /* Does this function call anything? */
71f123ca
FS
12659 info_ptr->calls_p = (! current_function_is_leaf
12660 || cfun->machine->ra_needs_full_frame);
b6c9286a 12661
a4f6c312 12662 /* Determine if we need to save the link register. */
71f123ca 12663 if (rs6000_ra_ever_killed ()
ffcfcb5f
AM
12664 || (DEFAULT_ABI == ABI_AIX
12665 && current_function_profile
12666 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
12667#ifdef TARGET_RELOCATABLE
12668 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12669#endif
12670 || (info_ptr->first_fp_reg_save != 64
12671 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 12672 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 12673 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
4697a36c
MM
12674 || info_ptr->calls_p)
12675 {
12676 info_ptr->lr_save_p = 1;
9ebbca7d 12677 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
4697a36c
MM
12678 }
12679
9ebbca7d 12680 /* Determine if we need to save the condition code registers. */
f676971a 12681 if (regs_ever_live[CR2_REGNO]
9ebbca7d
GK
12682 || regs_ever_live[CR3_REGNO]
12683 || regs_ever_live[CR4_REGNO])
4697a36c
MM
12684 {
12685 info_ptr->cr_save_p = 1;
178274da 12686 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
12687 info_ptr->cr_size = reg_size;
12688 }
12689
83720594
RH
12690 /* If the current function calls __builtin_eh_return, then we need
12691 to allocate stack space for registers that will hold data for
12692 the exception handler. */
12693 if (current_function_calls_eh_return)
12694 {
12695 unsigned int i;
12696 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12697 continue;
a3170dc6
AH
12698
12699 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
12700 ehrd_size = i * (TARGET_SPE_ABI
12701 && info_ptr->spe_64bit_regs_used != 0
12702 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
12703 }
12704 else
12705 ehrd_size = 0;
12706
592696dd 12707 /* Determine various sizes. */
4697a36c
MM
12708 info_ptr->reg_size = reg_size;
12709 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 12710 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 12711 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 12712 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
12713 if (FRAME_GROWS_DOWNWARD)
12714 info_ptr->vars_size
5b667039
JJ
12715 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
12716 + info_ptr->parm_size,
7d5175e1 12717 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
12718 - (info_ptr->fixed_size + info_ptr->vars_size
12719 + info_ptr->parm_size);
00b960c7 12720
c19de7aa 12721 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
12722 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12723 else
12724 info_ptr->spe_gp_size = 0;
12725
4d774ff8
HP
12726 if (TARGET_ALTIVEC_ABI)
12727 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 12728 else
4d774ff8
HP
12729 info_ptr->vrsave_mask = 0;
12730
12731 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12732 info_ptr->vrsave_size = 4;
12733 else
12734 info_ptr->vrsave_size = 0;
b6c9286a 12735
d62294f5
FJ
12736 compute_save_world_info (info_ptr);
12737
592696dd 12738 /* Calculate the offsets. */
178274da 12739 switch (DEFAULT_ABI)
4697a36c 12740 {
b6c9286a 12741 case ABI_NONE:
24d304eb 12742 default:
37409796 12743 gcc_unreachable ();
b6c9286a
MM
12744
12745 case ABI_AIX:
ee890fe2 12746 case ABI_DARWIN:
b6c9286a
MM
12747 info_ptr->fp_save_offset = - info_ptr->fp_size;
12748 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
12749
12750 if (TARGET_ALTIVEC_ABI)
12751 {
12752 info_ptr->vrsave_save_offset
12753 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12754
12755 /* Align stack so vector save area is on a quadword boundary. */
12756 if (info_ptr->altivec_size != 0)
12757 info_ptr->altivec_padding_size
12758 = 16 - (-info_ptr->vrsave_save_offset % 16);
12759 else
12760 info_ptr->altivec_padding_size = 0;
12761
12762 info_ptr->altivec_save_offset
12763 = info_ptr->vrsave_save_offset
12764 - info_ptr->altivec_padding_size
12765 - info_ptr->altivec_size;
12766
12767 /* Adjust for AltiVec case. */
12768 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12769 }
12770 else
12771 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
12772 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12773 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
12774 break;
12775
12776 case ABI_V4:
b6c9286a
MM
12777 info_ptr->fp_save_offset = - info_ptr->fp_size;
12778 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 12779 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 12780
c19de7aa 12781 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
12782 {
12783 /* Align stack so SPE GPR save area is aligned on a
12784 double-word boundary. */
12785 if (info_ptr->spe_gp_size != 0)
12786 info_ptr->spe_padding_size
12787 = 8 - (-info_ptr->cr_save_offset % 8);
12788 else
12789 info_ptr->spe_padding_size = 0;
12790
12791 info_ptr->spe_gp_save_offset
12792 = info_ptr->cr_save_offset
12793 - info_ptr->spe_padding_size
12794 - info_ptr->spe_gp_size;
12795
12796 /* Adjust for SPE case. */
12797 info_ptr->toc_save_offset
12798 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12799 }
a3170dc6 12800 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
12801 {
12802 info_ptr->vrsave_save_offset
12803 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12804
12805 /* Align stack so vector save area is on a quadword boundary. */
12806 if (info_ptr->altivec_size != 0)
12807 info_ptr->altivec_padding_size
12808 = 16 - (-info_ptr->vrsave_save_offset % 16);
12809 else
12810 info_ptr->altivec_padding_size = 0;
12811
12812 info_ptr->altivec_save_offset
12813 = info_ptr->vrsave_save_offset
12814 - info_ptr->altivec_padding_size
12815 - info_ptr->altivec_size;
12816
12817 /* Adjust for AltiVec case. */
12818 info_ptr->toc_save_offset
12819 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12820 }
12821 else
12822 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
83720594 12823 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
b6c9286a
MM
12824 info_ptr->lr_save_offset = reg_size;
12825 break;
4697a36c
MM
12826 }
12827
64045029 12828 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
12829 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12830 + info_ptr->gp_size
12831 + info_ptr->altivec_size
12832 + info_ptr->altivec_padding_size
a3170dc6
AH
12833 + info_ptr->spe_gp_size
12834 + info_ptr->spe_padding_size
00b960c7
AH
12835 + ehrd_size
12836 + info_ptr->cr_size
12837 + info_ptr->lr_size
12838 + info_ptr->vrsave_size
12839 + info_ptr->toc_size,
64045029 12840 save_align);
00b960c7 12841
44688022 12842 non_fixed_size = (info_ptr->vars_size
ff381587 12843 + info_ptr->parm_size
5b667039 12844 + info_ptr->save_size);
ff381587 12845
44688022
AM
12846 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12847 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
12848
12849 /* Determine if we need to allocate any stack frame:
12850
a4f6c312
SS
12851 For AIX we need to push the stack if a frame pointer is needed
12852 (because the stack might be dynamically adjusted), if we are
12853 debugging, if we make calls, or if the sum of fp_save, gp_save,
12854 and local variables are more than the space needed to save all
12855 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12856 + 18*8 = 288 (GPR13 reserved).
ff381587 12857
a4f6c312
SS
12858 For V.4 we don't have the stack cushion that AIX uses, but assume
12859 that the debugger can handle stackless frames. */
ff381587
MM
12860
12861 if (info_ptr->calls_p)
12862 info_ptr->push_p = 1;
12863
178274da 12864 else if (DEFAULT_ABI == ABI_V4)
44688022 12865 info_ptr->push_p = non_fixed_size != 0;
ff381587 12866
178274da
AM
12867 else if (frame_pointer_needed)
12868 info_ptr->push_p = 1;
12869
12870 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12871 info_ptr->push_p = 1;
12872
ff381587 12873 else
44688022 12874 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 12875
a4f6c312 12876 /* Zero offsets if we're not saving those registers. */
8dda1a21 12877 if (info_ptr->fp_size == 0)
4697a36c
MM
12878 info_ptr->fp_save_offset = 0;
12879
8dda1a21 12880 if (info_ptr->gp_size == 0)
4697a36c
MM
12881 info_ptr->gp_save_offset = 0;
12882
00b960c7
AH
12883 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12884 info_ptr->altivec_save_offset = 0;
12885
12886 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12887 info_ptr->vrsave_save_offset = 0;
12888
c19de7aa
AH
12889 if (! TARGET_SPE_ABI
12890 || info_ptr->spe_64bit_regs_used == 0
12891 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
12892 info_ptr->spe_gp_save_offset = 0;
12893
c81fc13e 12894 if (! info_ptr->lr_save_p)
4697a36c
MM
12895 info_ptr->lr_save_offset = 0;
12896
c81fc13e 12897 if (! info_ptr->cr_save_p)
4697a36c
MM
12898 info_ptr->cr_save_offset = 0;
12899
c81fc13e 12900 if (! info_ptr->toc_save_p)
b6c9286a
MM
12901 info_ptr->toc_save_offset = 0;
12902
4697a36c
MM
12903 return info_ptr;
12904}
12905
c19de7aa
AH
12906/* Return true if the current function uses any GPRs in 64-bit SIMD
12907 mode. */
12908
12909static bool
863d938c 12910spe_func_has_64bit_regs_p (void)
c19de7aa
AH
12911{
12912 rtx insns, insn;
12913
12914 /* Functions that save and restore all the call-saved registers will
12915 need to save/restore the registers in 64-bits. */
12916 if (current_function_calls_eh_return
12917 || current_function_calls_setjmp
12918 || current_function_has_nonlocal_goto)
12919 return true;
12920
12921 insns = get_insns ();
12922
12923 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12924 {
12925 if (INSN_P (insn))
12926 {
12927 rtx i;
12928
b5a5beb9
AH
12929 /* FIXME: This should be implemented with attributes...
12930
12931 (set_attr "spe64" "true")....then,
12932 if (get_spe64(insn)) return true;
12933
12934 It's the only reliable way to do the stuff below. */
12935
c19de7aa 12936 i = PATTERN (insn);
f82f556d
AH
12937 if (GET_CODE (i) == SET)
12938 {
12939 enum machine_mode mode = GET_MODE (SET_SRC (i));
12940
12941 if (SPE_VECTOR_MODE (mode))
12942 return true;
12943 if (TARGET_E500_DOUBLE && mode == DFmode)
12944 return true;
12945 }
c19de7aa
AH
12946 }
12947 }
12948
12949 return false;
12950}
12951
d1d0c603 12952static void
a2369ed3 12953debug_stack_info (rs6000_stack_t *info)
9878760c 12954{
d330fd93 12955 const char *abi_string;
24d304eb 12956
c81fc13e 12957 if (! info)
4697a36c
MM
12958 info = rs6000_stack_info ();
12959
12960 fprintf (stderr, "\nStack information for function %s:\n",
12961 ((current_function_decl && DECL_NAME (current_function_decl))
12962 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
12963 : "<unknown>"));
12964
24d304eb
RK
12965 switch (info->abi)
12966 {
b6c9286a
MM
12967 default: abi_string = "Unknown"; break;
12968 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 12969 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 12970 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 12971 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
12972 }
12973
12974 fprintf (stderr, "\tABI = %5s\n", abi_string);
12975
00b960c7
AH
12976 if (TARGET_ALTIVEC_ABI)
12977 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
12978
a3170dc6
AH
12979 if (TARGET_SPE_ABI)
12980 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
12981
4697a36c
MM
12982 if (info->first_gp_reg_save != 32)
12983 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
12984
12985 if (info->first_fp_reg_save != 64)
12986 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 12987
00b960c7
AH
12988 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
12989 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
12990 info->first_altivec_reg_save);
12991
4697a36c
MM
12992 if (info->lr_save_p)
12993 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 12994
4697a36c
MM
12995 if (info->cr_save_p)
12996 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
12997
b6c9286a
MM
12998 if (info->toc_save_p)
12999 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
13000
00b960c7
AH
13001 if (info->vrsave_mask)
13002 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13003
4697a36c
MM
13004 if (info->push_p)
13005 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13006
13007 if (info->calls_p)
13008 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13009
4697a36c
MM
13010 if (info->gp_save_offset)
13011 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13012
13013 if (info->fp_save_offset)
13014 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13015
00b960c7
AH
13016 if (info->altivec_save_offset)
13017 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13018 info->altivec_save_offset);
13019
a3170dc6
AH
13020 if (info->spe_gp_save_offset)
13021 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13022 info->spe_gp_save_offset);
13023
00b960c7
AH
13024 if (info->vrsave_save_offset)
13025 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13026 info->vrsave_save_offset);
13027
4697a36c
MM
13028 if (info->lr_save_offset)
13029 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13030
13031 if (info->cr_save_offset)
13032 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13033
b6c9286a
MM
13034 if (info->toc_save_offset)
13035 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
13036
4697a36c
MM
13037 if (info->varargs_save_offset)
13038 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13039
13040 if (info->total_size)
d1d0c603
JJ
13041 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13042 info->total_size);
4697a36c 13043
4697a36c 13044 if (info->vars_size)
d1d0c603
JJ
13045 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13046 info->vars_size);
4697a36c
MM
13047
13048 if (info->parm_size)
13049 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13050
13051 if (info->fixed_size)
13052 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13053
13054 if (info->gp_size)
13055 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13056
a3170dc6
AH
13057 if (info->spe_gp_size)
13058 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13059
4697a36c
MM
13060 if (info->fp_size)
13061 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13062
00b960c7
AH
13063 if (info->altivec_size)
13064 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13065
13066 if (info->vrsave_size)
13067 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13068
13069 if (info->altivec_padding_size)
13070 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13071 info->altivec_padding_size);
13072
a3170dc6
AH
13073 if (info->spe_padding_size)
13074 fprintf (stderr, "\tspe_padding_size = %5d\n",
13075 info->spe_padding_size);
13076
a4f6c312 13077 if (info->lr_size)
ed947a96 13078 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
b6c9286a 13079
4697a36c
MM
13080 if (info->cr_size)
13081 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13082
a4f6c312 13083 if (info->toc_size)
b6c9286a
MM
13084 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
13085
4697a36c
MM
13086 if (info->save_size)
13087 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13088
13089 if (info->reg_size != 4)
13090 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13091
13092 fprintf (stderr, "\n");
9878760c 13093}
71f123ca
FS
13094
13095rtx
a2369ed3 13096rs6000_return_addr (int count, rtx frame)
71f123ca 13097{
a4f6c312
SS
13098 /* Currently we don't optimize very well between prolog and body
13099 code, and for PIC code the result can actually be quite bad, so
13100 don't try to be too clever here. */
f1384257 13101 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
13102 {
13103 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
13104
13105 return
13106 gen_rtx_MEM
13107 (Pmode,
13108 memory_address
13109 (Pmode,
13110 plus_constant (copy_to_reg
13111 (gen_rtx_MEM (Pmode,
13112 memory_address (Pmode, frame))),
13113 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
13114 }
13115
8c29550d 13116 cfun->machine->ra_need_lr = 1;
9e2f7ec7 13117 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
71f123ca
FS
13118}
13119
5e1bf043
DJ
13120/* Say whether a function is a candidate for sibcall handling or not.
13121 We do not allow indirect calls to be optimized into sibling calls.
13122 Also, we can't do it if there are any vector parameters; there's
13123 nowhere to put the VRsave code so that it works; note that functions with
13124 vector parameters are required to have a prototype, so the argument
13125 type info must be available here. (The tail recursion case can work
13126 with vector parameters, but there's no way to distinguish here.) */
4977bab6 13127static bool
a2369ed3 13128rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
13129{
13130 tree type;
4977bab6 13131 if (decl)
5e1bf043
DJ
13132 {
13133 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 13134 {
4977bab6 13135 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
13136 type; type = TREE_CHAIN (type))
13137 {
c15b529f 13138 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 13139 return false;
5e1bf043 13140 }
c4ad648e 13141 }
5e1bf043 13142 if (DEFAULT_ABI == ABI_DARWIN
4977bab6 13143 || (*targetm.binds_local_p) (decl))
2bcc50d0 13144 {
4977bab6 13145 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
13146
13147 if (!lookup_attribute ("longcall", attr_list)
13148 || lookup_attribute ("shortcall", attr_list))
4977bab6 13149 return true;
2bcc50d0 13150 }
5e1bf043 13151 }
4977bab6 13152 return false;
5e1bf043
DJ
13153}
13154
e7e64a25
AS
13155/* Return NULL if INSN is valid within a low-overhead loop.
13156 Otherwise return a string describing why doloop cannot be applied.
9419649c
DE
13157 PowerPC uses the COUNT register for branch on table instructions. */
13158
e7e64a25
AS
13159static const char *
13160rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
13161{
13162 if (CALL_P (insn))
e7e64a25 13163 return "Function call in the loop.";
9419649c
DE
13164
13165 if (JUMP_P (insn)
13166 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13167 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 13168 return "Computed branch in the loop.";
9419649c 13169
e7e64a25 13170 return NULL;
9419649c
DE
13171}
13172
71f123ca 13173static int
863d938c 13174rs6000_ra_ever_killed (void)
71f123ca
FS
13175{
13176 rtx top;
5e1bf043
DJ
13177 rtx reg;
13178 rtx insn;
71f123ca 13179
dd292d0a 13180 if (current_function_is_thunk)
71f123ca 13181 return 0;
eb0424da 13182
36f7e964
AH
13183 /* regs_ever_live has LR marked as used if any sibcalls are present,
13184 but this should not force saving and restoring in the
13185 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 13186 clobbers LR, so that is inappropriate. */
36f7e964 13187
5e1bf043
DJ
13188 /* Also, the prologue can generate a store into LR that
13189 doesn't really count, like this:
36f7e964 13190
5e1bf043
DJ
13191 move LR->R0
13192 bcl to set PIC register
13193 move LR->R31
13194 move R0->LR
36f7e964
AH
13195
13196 When we're called from the epilogue, we need to avoid counting
13197 this as a store. */
f676971a 13198
71f123ca
FS
13199 push_topmost_sequence ();
13200 top = get_insns ();
13201 pop_topmost_sequence ();
5e1bf043 13202 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
71f123ca 13203
5e1bf043
DJ
13204 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13205 {
13206 if (INSN_P (insn))
13207 {
13208 if (FIND_REG_INC_NOTE (insn, reg))
13209 return 1;
f676971a 13210 else if (GET_CODE (insn) == CALL_INSN
c15b529f 13211 && !SIBLING_CALL_P (insn))
5e1bf043 13212 return 1;
36f7e964
AH
13213 else if (set_of (reg, insn) != NULL_RTX
13214 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
13215 return 1;
13216 }
13217 }
13218 return 0;
71f123ca 13219}
4697a36c 13220\f
8cd8f856
GK
13221/* Add a REG_MAYBE_DEAD note to the insn. */
13222static void
a2369ed3 13223rs6000_maybe_dead (rtx insn)
8cd8f856
GK
13224{
13225 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13226 const0_rtx,
13227 REG_NOTES (insn));
13228}
13229
9ebbca7d 13230/* Emit instructions needed to load the TOC register.
c7ca610e 13231 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 13232 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
13233
13234void
a2369ed3 13235rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 13236{
027fbf43 13237 rtx dest, insn;
1db02437 13238 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 13239
7f970b70 13240 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 13241 {
7f970b70
AM
13242 char buf[30];
13243 rtx lab, tmp1, tmp2, got, tempLR;
13244
13245 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13246 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13247 if (flag_pic == 2)
13248 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13249 else
13250 got = rs6000_got_sym ();
13251 tmp1 = tmp2 = dest;
13252 if (!fromprolog)
13253 {
13254 tmp1 = gen_reg_rtx (Pmode);
13255 tmp2 = gen_reg_rtx (Pmode);
13256 }
13257 tempLR = (fromprolog
13258 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13259 : gen_reg_rtx (Pmode));
13260 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13261 if (fromprolog)
13262 rs6000_maybe_dead (insn);
13263 insn = emit_move_insn (tmp1, tempLR);
13264 if (fromprolog)
13265 rs6000_maybe_dead (insn);
13266 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13267 if (fromprolog)
13268 rs6000_maybe_dead (insn);
13269 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13270 if (fromprolog)
13271 rs6000_maybe_dead (insn);
13272 }
13273 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13274 {
13275 rtx tempLR = (fromprolog
13276 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13277 : gen_reg_rtx (Pmode));
13278
13279 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
027fbf43
JJ
13280 if (fromprolog)
13281 rs6000_maybe_dead (insn);
7f970b70 13282 insn = emit_move_insn (dest, tempLR);
027fbf43
JJ
13283 if (fromprolog)
13284 rs6000_maybe_dead (insn);
20b71b17
AM
13285 }
13286 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13287 {
13288 char buf[30];
13289 rtx tempLR = (fromprolog
13290 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13291 : gen_reg_rtx (Pmode));
13292 rtx temp0 = (fromprolog
13293 ? gen_rtx_REG (Pmode, 0)
13294 : gen_reg_rtx (Pmode));
20b71b17 13295
20b71b17
AM
13296 if (fromprolog)
13297 {
ccbca5e4 13298 rtx symF, symL;
38c1f2d7 13299
20b71b17
AM
13300 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13301 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 13302
20b71b17
AM
13303 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13304 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13305
13306 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13307 symF)));
13308 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13309 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13310 symL,
13311 symF)));
9ebbca7d
GK
13312 }
13313 else
20b71b17
AM
13314 {
13315 rtx tocsym;
20b71b17
AM
13316
13317 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
ccbca5e4 13318 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
027fbf43
JJ
13319 emit_move_insn (dest, tempLR);
13320 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 13321 }
027fbf43
JJ
13322 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13323 if (fromprolog)
13324 rs6000_maybe_dead (insn);
9ebbca7d 13325 }
20b71b17
AM
13326 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13327 {
13328 /* This is for AIX code running in non-PIC ELF32. */
13329 char buf[30];
13330 rtx realsym;
13331 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13332 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13333
027fbf43
JJ
13334 insn = emit_insn (gen_elf_high (dest, realsym));
13335 if (fromprolog)
13336 rs6000_maybe_dead (insn);
13337 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13338 if (fromprolog)
13339 rs6000_maybe_dead (insn);
20b71b17 13340 }
37409796 13341 else
9ebbca7d 13342 {
37409796 13343 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 13344
9ebbca7d 13345 if (TARGET_32BIT)
027fbf43 13346 insn = emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 13347 else
027fbf43
JJ
13348 insn = emit_insn (gen_load_toc_aix_di (dest));
13349 if (fromprolog)
13350 rs6000_maybe_dead (insn);
9ebbca7d
GK
13351 }
13352}
13353
d1d0c603
JJ
13354/* Emit instructions to restore the link register after determining where
13355 its value has been stored. */
13356
13357void
13358rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13359{
13360 rs6000_stack_t *info = rs6000_stack_info ();
13361 rtx operands[2];
13362
13363 operands[0] = source;
13364 operands[1] = scratch;
13365
13366 if (info->lr_save_p)
13367 {
13368 rtx frame_rtx = stack_pointer_rtx;
13369 HOST_WIDE_INT sp_offset = 0;
13370 rtx tmp;
13371
13372 if (frame_pointer_needed
13373 || current_function_calls_alloca
13374 || info->total_size > 32767)
13375 {
8308679f
DE
13376 tmp = gen_rtx_MEM (Pmode, frame_rtx);
13377 MEM_NOTRAP_P (tmp) = 1;
13378 set_mem_alias_set (tmp, rs6000_sr_alias_set);
13379 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
13380 frame_rtx = operands[1];
13381 }
13382 else if (info->push_p)
13383 sp_offset = info->total_size;
13384
13385 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13386 tmp = gen_rtx_MEM (Pmode, tmp);
8308679f
DE
13387 MEM_NOTRAP_P (tmp) = 1;
13388 set_mem_alias_set (tmp, rs6000_sr_alias_set);
d1d0c603
JJ
13389 emit_move_insn (tmp, operands[0]);
13390 }
13391 else
13392 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13393}
13394
f103e34d
GK
13395static GTY(()) int set = -1;
13396
f676971a 13397int
863d938c 13398get_TOC_alias_set (void)
9ebbca7d 13399{
f103e34d
GK
13400 if (set == -1)
13401 set = new_alias_set ();
13402 return set;
f676971a 13403}
9ebbca7d 13404
c1207243 13405/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
13406 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13407 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 13408#if TARGET_ELF
3c9eb5f4 13409static int
f676971a 13410uses_TOC (void)
9ebbca7d 13411{
c4501e62 13412 rtx insn;
38c1f2d7 13413
c4501e62
JJ
13414 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13415 if (INSN_P (insn))
13416 {
13417 rtx pat = PATTERN (insn);
13418 int i;
9ebbca7d 13419
f676971a 13420 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
13421 for (i = 0; i < XVECLEN (pat, 0); i++)
13422 {
13423 rtx sub = XVECEXP (pat, 0, i);
13424 if (GET_CODE (sub) == USE)
13425 {
13426 sub = XEXP (sub, 0);
13427 if (GET_CODE (sub) == UNSPEC
13428 && XINT (sub, 1) == UNSPEC_TOC)
13429 return 1;
13430 }
13431 }
13432 }
13433 return 0;
9ebbca7d 13434}
c954844a 13435#endif
38c1f2d7 13436
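/* Create a TOC reference for SYMBOL: the TOC pointer plus the constant
   (SYMBOL - toc label), which the assembler and linker resolve to the
   symbol's offset within the TOC.  */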
9ebbca7d 13437rtx
f676971a 13438create_TOC_reference (rtx symbol)
9ebbca7d 13439{
f676971a 13440 return gen_rtx_PLUS (Pmode,
a8a05998 13441 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
13442 gen_rtx_CONST (Pmode,
13443 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 13444 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 13445}
38c1f2d7 13446
fc4767bb
JJ
13447/* If _Unwind_* has been called from within the same module,
13448 the TOC register is not guaranteed to have been saved to 40(1) on
13449 function entry. Save it there in that case. */
c7ca610e 13450
9ebbca7d 13451void
863d938c 13452rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
13453{
13454 rtx mem;
13455 rtx stack_top = gen_reg_rtx (Pmode);
13456 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
13457 rtx opcode = gen_reg_rtx (SImode);
13458 rtx tocompare = gen_reg_rtx (SImode);
13459 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 13460
8308679f 13461 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
13462 emit_move_insn (stack_top, mem);
13463
8308679f
DE
13464 mem = gen_frame_mem (Pmode,
13465 gen_rtx_PLUS (Pmode, stack_top,
13466 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 13467 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
13468 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
13469 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 13470 : 0xE8410028, SImode));
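 /* The two constants above appear to be the encodings of the
    instruction that reloads the TOC pointer after a cross-module call:
    "lwz r2,20(r1)" (0x80410014) for 32-bit and "ld r2,40(r1)"
    (0xE8410028) for 64-bit.  If the instruction at the return address
    is that reload, the call already went through glue code that saved
    the TOC; otherwise r2 is stored into its slot below.  */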
9ebbca7d 13471
fc4767bb 13472 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 13473 SImode, NULL_RTX, NULL_RTX,
fc4767bb 13474 no_toc_save_needed);
9ebbca7d 13475
8308679f
DE
13476 mem = gen_frame_mem (Pmode,
13477 gen_rtx_PLUS (Pmode, stack_top,
13478 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
13479 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13480 emit_label (no_toc_save_needed);
9ebbca7d 13481}
38c1f2d7 13482\f
ba4828e0
RK
13483/* This ties together stack memory (MEM with an alias set of
13484 rs6000_sr_alias_set) and the change to the stack pointer. */
13485
9ebbca7d 13486static void
863d938c 13487rs6000_emit_stack_tie (void)
9ebbca7d 13488{
ba4828e0
RK
13489 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13490
13491 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
13492 emit_insn (gen_stack_tie (mem));
13493}
38c1f2d7 13494
9ebbca7d
GK
13495/* Emit the correct code for allocating stack space, as insns.
13496 If COPY_R12, make sure a copy of the old frame is left in r12.
13497 The generated code may use hard register 0 as a temporary. */
13498
13499static void
a2369ed3 13500rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 13501{
9ebbca7d
GK
13502 rtx insn;
13503 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13504 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
13505 rtx todec = gen_int_mode (-size, Pmode);
13506
13507 if (INTVAL (todec) != -size)
13508 {
d4ee4d25 13509 warning (0, "stack frame too large");
61168ff1
RS
13510 emit_insn (gen_trap ());
13511 return;
13512 }
a157febd
GK
13513
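 /* When stack-limit checking is enabled (-fstack-limit-register or
    -fstack-limit-symbol), trap if the allocation would take the stack
    pointer below the limit.  */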
13514 if (current_function_limit_stack)
13515 {
13516 if (REG_P (stack_limit_rtx)
f676971a 13517 && REGNO (stack_limit_rtx) > 1
a157febd
GK
13518 && REGNO (stack_limit_rtx) <= 31)
13519 {
5b71a4e7 13520 emit_insn (TARGET_32BIT
9ebbca7d
GK
13521 ? gen_addsi3 (tmp_reg,
13522 stack_limit_rtx,
13523 GEN_INT (size))
13524 : gen_adddi3 (tmp_reg,
13525 stack_limit_rtx,
13526 GEN_INT (size)));
5b71a4e7 13527
9ebbca7d
GK
13528 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13529 const0_rtx));
a157febd
GK
13530 }
13531 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 13532 && TARGET_32BIT
f607bc57 13533 && DEFAULT_ABI == ABI_V4)
a157febd 13534 {
9ebbca7d 13535 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
13536 gen_rtx_PLUS (Pmode,
13537 stack_limit_rtx,
9ebbca7d 13538 GEN_INT (size)));
5b71a4e7 13539
9ebbca7d
GK
13540 emit_insn (gen_elf_high (tmp_reg, toload));
13541 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13542 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13543 const0_rtx));
a157febd
GK
13544 }
13545 else
d4ee4d25 13546 warning (0, "stack limit expression is not supported");
a157febd
GK
13547 }
13548
9ebbca7d
GK
13549 if (copy_r12 || ! TARGET_UPDATE)
13550 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13551
38c1f2d7
MM
13552 if (TARGET_UPDATE)
13553 {
9ebbca7d 13554 if (size > 32767)
38c1f2d7 13555 {
9ebbca7d 13556 /* Need a note here so that try_split doesn't get confused. */
9390387d 13557 if (get_last_insn () == NULL_RTX)
2e040219 13558 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
13559 insn = emit_move_insn (tmp_reg, todec);
13560 try_split (PATTERN (insn), insn, 0);
13561 todec = tmp_reg;
38c1f2d7 13562 }
5b71a4e7
DE
13563
13564 insn = emit_insn (TARGET_32BIT
13565 ? gen_movsi_update (stack_reg, stack_reg,
13566 todec, stack_reg)
c4ad648e 13567 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 13568 todec, stack_reg));
38c1f2d7
MM
13569 }
13570 else
13571 {
5b71a4e7
DE
13572 insn = emit_insn (TARGET_32BIT
13573 ? gen_addsi3 (stack_reg, stack_reg, todec)
13574 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
13575 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13576 gen_rtx_REG (Pmode, 12));
13577 }
f676971a 13578
9ebbca7d 13579 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 13580 REG_NOTES (insn) =
9ebbca7d 13581 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 13582 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
13583 gen_rtx_PLUS (Pmode, stack_reg,
13584 GEN_INT (-size))),
13585 REG_NOTES (insn));
13586}
13587
a4f6c312
SS
13588/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13589 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13590 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13591 deduce these equivalences by itself so it wasn't necessary to hold
13592 its hand so much. */
9ebbca7d
GK
13593
13594static void
f676971a 13595rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 13596 rtx reg2, rtx rreg)
9ebbca7d
GK
13597{
13598 rtx real, temp;
13599
e56c4463
JL
13600 /* copy_rtx will not make unique copies of registers, so we need to
13601 ensure we don't have unwanted sharing here. */
13602 if (reg == reg2)
13603 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13604
13605 if (reg == rreg)
13606 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13607
9ebbca7d
GK
13608 real = copy_rtx (PATTERN (insn));
13609
89e7058f
AH
13610 if (reg2 != NULL_RTX)
13611 real = replace_rtx (real, reg2, rreg);
f676971a
EC
13612
13613 real = replace_rtx (real, reg,
9ebbca7d
GK
13614 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13615 STACK_POINTER_REGNUM),
13616 GEN_INT (val)));
f676971a 13617
9ebbca7d
GK
13618 /* We expect that 'real' is either a SET or a PARALLEL containing
13619 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13620 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13621
13622 if (GET_CODE (real) == SET)
13623 {
13624 rtx set = real;
f676971a 13625
9ebbca7d
GK
13626 temp = simplify_rtx (SET_SRC (set));
13627 if (temp)
13628 SET_SRC (set) = temp;
13629 temp = simplify_rtx (SET_DEST (set));
13630 if (temp)
13631 SET_DEST (set) = temp;
13632 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 13633 {
9ebbca7d
GK
13634 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13635 if (temp)
13636 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 13637 }
38c1f2d7 13638 }
37409796 13639 else
9ebbca7d
GK
13640 {
13641 int i;
37409796
NS
13642
13643 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
13644 for (i = 0; i < XVECLEN (real, 0); i++)
13645 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13646 {
13647 rtx set = XVECEXP (real, 0, i);
f676971a 13648
9ebbca7d
GK
13649 temp = simplify_rtx (SET_SRC (set));
13650 if (temp)
13651 SET_SRC (set) = temp;
13652 temp = simplify_rtx (SET_DEST (set));
13653 if (temp)
13654 SET_DEST (set) = temp;
13655 if (GET_CODE (SET_DEST (set)) == MEM)
13656 {
13657 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13658 if (temp)
13659 XEXP (SET_DEST (set), 0) = temp;
13660 }
13661 RTX_FRAME_RELATED_P (set) = 1;
13662 }
13663 }
c19de7aa
AH
13664
13665 if (TARGET_SPE)
13666 real = spe_synthesize_frame_save (real);
13667
9ebbca7d
GK
13668 RTX_FRAME_RELATED_P (insn) = 1;
13669 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13670 real,
13671 REG_NOTES (insn));
38c1f2d7
MM
13672}
13673
c19de7aa
AH
13674/* Given an SPE frame note, return a PARALLEL of SETs with the
13675 original note, plus a synthetic register save. */
13676
13677static rtx
a2369ed3 13678spe_synthesize_frame_save (rtx real)
c19de7aa
AH
13679{
13680 rtx synth, offset, reg, real2;
13681
13682 if (GET_CODE (real) != SET
13683 || GET_MODE (SET_SRC (real)) != V2SImode)
13684 return real;
13685
13686 /* For the SPE, registers saved in 64 bits get a PARALLEL for their
13687 frame-related note. The parallel contains a set of the register
41f3a930 13688 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
13689 This is so we can differentiate between 64-bit and 32-bit saves.
13690 Words cannot describe this nastiness. */
13691
37409796
NS
13692 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13693 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13694 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
13695
13696 /* Transform:
13697 (set (mem (plus (reg x) (const y)))
13698 (reg z))
13699 into:
13700 (set (mem (plus (reg x) (const y+4)))
41f3a930 13701 (reg z+1200))
c19de7aa
AH
13702 */
13703
13704 real2 = copy_rtx (real);
13705 PUT_MODE (SET_DEST (real2), SImode);
13706 reg = SET_SRC (real2);
13707 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13708 synth = copy_rtx (real2);
13709
13710 if (BYTES_BIG_ENDIAN)
13711 {
13712 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13713 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13714 }
13715
13716 reg = SET_SRC (synth);
41f3a930 13717
c19de7aa 13718 synth = replace_rtx (synth, reg,
41f3a930 13719 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
13720
13721 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13722 synth = replace_rtx (synth, offset,
13723 GEN_INT (INTVAL (offset)
13724 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13725
13726 RTX_FRAME_RELATED_P (synth) = 1;
13727 RTX_FRAME_RELATED_P (real2) = 1;
13728 if (BYTES_BIG_ENDIAN)
13729 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13730 else
13731 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13732
13733 return real;
13734}
13735
00b960c7
AH
13736/* Returns an insn that has a vrsave set operation with the
13737 appropriate CLOBBERs. */
13738
13739static rtx
a2369ed3 13740generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
13741{
13742 int nclobs, i;
13743 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 13744 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 13745
a004eb82
AH
13746 clobs[0]
13747 = gen_rtx_SET (VOIDmode,
13748 vrsave,
13749 gen_rtx_UNSPEC_VOLATILE (SImode,
13750 gen_rtvec (2, reg, vrsave),
3aca4bff 13751 UNSPECV_SET_VRSAVE));
00b960c7
AH
13752
13753 nclobs = 1;
13754
9aa86737
AH
13755 /* We need to clobber the registers in the mask so the scheduler
13756 does not move sets to VRSAVE before sets of AltiVec registers.
13757
13758 However, if the function receives nonlocal gotos, reload will set
13759 all call saved registers live. We will end up with:
13760
13761 (set (reg 999) (mem))
13762 (parallel [ (set (reg vrsave) (unspec blah))
13763 (clobber (reg 999))])
13764
13765 The clobber will cause the store into reg 999 to be dead, and
13766 flow will attempt to delete an epilogue insn. In this case, we
13767 need an unspec use/set of the register. */
00b960c7
AH
13768
13769 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 13770 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
13771 {
13772 if (!epiloguep || call_used_regs [i])
13773 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13774 gen_rtx_REG (V4SImode, i));
13775 else
13776 {
13777 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
13778
13779 clobs[nclobs++]
a004eb82
AH
13780 = gen_rtx_SET (VOIDmode,
13781 reg,
13782 gen_rtx_UNSPEC (V4SImode,
13783 gen_rtvec (1, reg), 27));
9aa86737
AH
13784 }
13785 }
00b960c7
AH
13786
13787 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13788
13789 for (i = 0; i < nclobs; ++i)
13790 XVECEXP (insn, 0, i) = clobs[i];
13791
13792 return insn;
13793}
13794
89e7058f
AH
13795/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13796 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13797
13798static void
f676971a 13799emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 13800 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
13801{
13802 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13803 rtx replacea, replaceb;
13804
13805 int_rtx = GEN_INT (offset);
13806
13807 /* Some cases that need register indexed addressing. */
13808 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 13809 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
13810 || (TARGET_SPE_ABI
13811 && SPE_VECTOR_MODE (mode)
13812 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
13813 {
13814 /* Whoever calls us must make sure r11 is available in the
c4ad648e 13815 flow path of instructions in the prologue. */
89e7058f
AH
13816 offset_rtx = gen_rtx_REG (Pmode, 11);
13817 emit_move_insn (offset_rtx, int_rtx);
13818
13819 replacea = offset_rtx;
13820 replaceb = int_rtx;
13821 }
13822 else
13823 {
13824 offset_rtx = int_rtx;
13825 replacea = NULL_RTX;
13826 replaceb = NULL_RTX;
13827 }
13828
13829 reg = gen_rtx_REG (mode, regno);
13830 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13831 mem = gen_rtx_MEM (mode, addr);
13832 set_mem_alias_set (mem, rs6000_sr_alias_set);
13833
13834 insn = emit_move_insn (mem, reg);
13835
13836 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13837}
13838
a3170dc6
AH
13839/* Emit an offset memory reference suitable for a frame store, while
13840 converting to a valid addressing mode. */
13841
13842static rtx
a2369ed3 13843gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
13844{
13845 rtx int_rtx, offset_rtx;
13846
13847 int_rtx = GEN_INT (offset);
13848
4d4cbc0e
AH
13849 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13850 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
13851 {
13852 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13853 emit_move_insn (offset_rtx, int_rtx);
13854 }
13855 else
13856 offset_rtx = int_rtx;
13857
13858 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13859}
13860
6d0a8091
DJ
13861/* Look for user-defined global regs. We should not save and restore these,
13862 and cannot use stmw/lmw if there are any in their range. */
13863
13864static bool
13865no_global_regs_above (int first_greg)
13866{
13867 int i;
13868 for (i = 0; i < 32 - first_greg; i++)
13869 if (global_regs[first_greg + i])
13870 return false;
13871 return true;
13872}
13873
699c914a
MS
13874#ifndef TARGET_FIX_AND_CONTINUE
13875#define TARGET_FIX_AND_CONTINUE 0
13876#endif
13877
9ebbca7d
GK
13878/* Emit function prologue as insns. */
13879
9878760c 13880void
863d938c 13881rs6000_emit_prologue (void)
9878760c 13882{
4697a36c 13883 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 13884 enum machine_mode reg_mode = Pmode;
327e5343 13885 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
13886 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13887 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13888 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 13889 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
13890 rtx insn;
13891 int saving_FPRs_inline;
13892 int using_store_multiple;
13893 HOST_WIDE_INT sp_offset = 0;
f676971a 13894
699c914a
MS
13895 if (TARGET_FIX_AND_CONTINUE)
13896 {
13897 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 13898 address by modifying the first 5 instructions of the function
699c914a
MS
13899 to branch to the overriding function. This is necessary to
13900 permit function pointers that point to the old function to
13901 actually forward to the new function. */
13902 emit_insn (gen_nop ());
13903 emit_insn (gen_nop ());
de2ab0ca 13904 emit_insn (gen_nop ());
699c914a
MS
13905 emit_insn (gen_nop ());
13906 emit_insn (gen_nop ());
13907 }
13908
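 /* When the SPE 64-bit GPRs are in use, whole 64-bit registers must be
    saved and restored, so use 8-byte V2SImode slots rather than
    word-sized ones.  */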
13909 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13910 {
13911 reg_mode = V2SImode;
13912 reg_size = 8;
13913 }
a3170dc6 13914
9ebbca7d 13915 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
13916 && (!TARGET_SPE_ABI
13917 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
13918 && info->first_gp_reg_save < 31
13919 && no_global_regs_above (info->first_gp_reg_save));
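 /* Calling an out-of-line FPR save routine clobbers the link register
    (via the bl), which is presumably why an EH return or a function
    that needs its incoming LR saves the FPRs inline instead.  */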
9ebbca7d 13920 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 13921 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 13922 || current_function_calls_eh_return
8c29550d 13923 || cfun->machine->ra_need_lr);
9ebbca7d
GK
13924
13925 /* For V.4, update stack before we do any saving and set back pointer. */
fc4767bb 13926 if (info->push_p
acd0b319
AM
13927 && (DEFAULT_ABI == ABI_V4
13928 || current_function_calls_eh_return))
9ebbca7d
GK
13929 {
13930 if (info->total_size < 32767)
13931 sp_offset = info->total_size;
13932 else
13933 frame_reg_rtx = frame_ptr_rtx;
f676971a 13934 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
13935 (frame_reg_rtx != sp_reg_rtx
13936 && (info->cr_save_p
13937 || info->lr_save_p
13938 || info->first_fp_reg_save < 64
13939 || info->first_gp_reg_save < 32
13940 )));
13941 if (frame_reg_rtx != sp_reg_rtx)
13942 rs6000_emit_stack_tie ();
13943 }
13944
d62294f5 13945 /* Handle world saves specially here. */
f57fe068 13946 if (WORLD_SAVE_P (info))
d62294f5
FJ
13947 {
13948 int i, j, sz;
13949 rtx treg;
13950 rtvec p;
13951
13952 /* save_world expects lr in r0. */
13953 if (info->lr_save_p)
c4ad648e
AM
13954 {
13955 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
13956 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
13957 RTX_FRAME_RELATED_P (insn) = 1;
13958 }
d62294f5
FJ
13959
13960 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 13961 assumptions about the offsets of various bits of the stack
992d08b1 13962 frame. */
37409796
NS
13963 gcc_assert (info->gp_save_offset == -220
13964 && info->fp_save_offset == -144
13965 && info->lr_save_offset == 8
13966 && info->cr_save_offset == 4
13967 && info->push_p
13968 && info->lr_save_p
13969 && (!current_function_calls_eh_return
13970 || info->ehrd_offset == -432)
13971 && info->vrsave_save_offset == -224
13972 && info->altivec_save_offset == (-224 -16 -192));
d62294f5
FJ
13973
13974 treg = gen_rtx_REG (SImode, 11);
13975 emit_move_insn (treg, GEN_INT (-info->total_size));
13976
13977 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 13978 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
13979
13980 /* Preserve CR2 for save_world prologues */
13981 sz = 6;
13982 sz += 32 - info->first_gp_reg_save;
13983 sz += 64 - info->first_fp_reg_save;
13984 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
13985 p = rtvec_alloc (sz);
13986 j = 0;
13987 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
c4ad648e
AM
13988 gen_rtx_REG (Pmode,
13989 LINK_REGISTER_REGNUM));
d62294f5 13990 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
13991 gen_rtx_SYMBOL_REF (Pmode,
13992 "*save_world"));
d62294f5 13993 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
13994 properly. */
13995 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13996 {
13997 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
13998 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13999 GEN_INT (info->fp_save_offset
14000 + sp_offset + 8 * i));
14001 rtx mem = gen_rtx_MEM (DFmode, addr);
14002 set_mem_alias_set (mem, rs6000_sr_alias_set);
14003
14004 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14005 }
d62294f5 14006 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14007 {
14008 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14009 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14010 GEN_INT (info->altivec_save_offset
14011 + sp_offset + 16 * i));
14012 rtx mem = gen_rtx_MEM (V4SImode, addr);
14013 set_mem_alias_set (mem, rs6000_sr_alias_set);
14014
14015 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14016 }
d62294f5 14017 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14018 {
14019 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14020 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14021 GEN_INT (info->gp_save_offset
14022 + sp_offset + reg_size * i));
14023 rtx mem = gen_rtx_MEM (reg_mode, addr);
14024 set_mem_alias_set (mem, rs6000_sr_alias_set);
14025
14026 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14027 }
14028
14029 {
14030 /* CR register traditionally saved as CR2. */
14031 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14032 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14033 GEN_INT (info->cr_save_offset
14034 + sp_offset));
14035 rtx mem = gen_rtx_MEM (reg_mode, addr);
14036 set_mem_alias_set (mem, rs6000_sr_alias_set);
14037
14038 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14039 }
d62294f5
FJ
14040 /* Prevent any attempt to delete the setting of r0 and treg! */
14041 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14042 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14043 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14044
14045 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14046 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
c4ad648e 14047 NULL_RTX, NULL_RTX);
d62294f5
FJ
14048
14049 if (current_function_calls_eh_return)
c4ad648e
AM
14050 {
14051 unsigned int i;
14052 for (i = 0; ; ++i)
14053 {
14054 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14055 if (regno == INVALID_REGNUM)
14056 break;
14057 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14058 info->ehrd_offset + sp_offset
14059 + reg_size * (int) i,
14060 info->total_size);
14061 }
14062 }
d62294f5
FJ
14063 }
14064
9aa86737 14065 /* Save AltiVec registers if needed. */
f57fe068 14066 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9aa86737
AH
14067 {
14068 int i;
14069
14070 /* There should be a non-inline version of this, for when we
14071 are saving lots of vector registers. */
14072 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14073 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14074 {
14075 rtx areg, savereg, mem;
14076 int offset;
14077
14078 offset = info->altivec_save_offset + sp_offset
14079 + 16 * (i - info->first_altivec_reg_save);
14080
14081 savereg = gen_rtx_REG (V4SImode, i);
14082
14083 areg = gen_rtx_REG (Pmode, 0);
14084 emit_move_insn (areg, GEN_INT (offset));
14085
14086 /* AltiVec addressing mode is [reg+reg]. */
14087 mem = gen_rtx_MEM (V4SImode,
14088 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
f676971a 14089
9aa86737
AH
14090 set_mem_alias_set (mem, rs6000_sr_alias_set);
14091
14092 insn = emit_move_insn (mem, savereg);
14093
5c242421
SB
14094 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14095 areg, GEN_INT (offset));
9aa86737
AH
14096 }
14097 }
14098
14099 /* VRSAVE is a bit vector representing which AltiVec registers
14100 are used. The OS uses this to determine which vector
14101 registers to save on a context switch. We need to save
14102 VRSAVE on the stack frame, add whatever AltiVec registers we
14103 used in this function, and do the corresponding magic in the
14104 epilogue. */
14105
4d774ff8 14106 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
bcb604b6 14107 && info->vrsave_mask != 0)
9aa86737 14108 {
a004eb82 14109 rtx reg, mem, vrsave;
9aa86737
AH
14110 int offset;
14111
eab97e44
AM
14112 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14113 as frame_reg_rtx and r11 as the static chain pointer for
14114 nested functions. */
14115 reg = gen_rtx_REG (SImode, 0);
a004eb82 14116 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
b188f760
AH
14117 if (TARGET_MACHO)
14118 emit_insn (gen_get_vrsave_internal (reg));
14119 else
14120 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
9aa86737 14121
bcb604b6
FJ
14122 if (!WORLD_SAVE_P (info))
14123 {
14124 /* Save VRSAVE. */
14125 offset = info->vrsave_save_offset + sp_offset;
14126 mem
14127 = gen_rtx_MEM (SImode,
14128 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
14129 set_mem_alias_set (mem, rs6000_sr_alias_set);
14130 insn = emit_move_insn (mem, reg);
14131 }
9aa86737
AH
14132
14133 /* Include the registers in the mask. */
14134 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14135
14136 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14137 }
14138
9ebbca7d 14139 /* If we use the link register, get it into r0. */
f57fe068 14140 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8
GK
14141 {
14142 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14143 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14144 RTX_FRAME_RELATED_P (insn) = 1;
14145 }
9ebbca7d
GK
14146
14147 /* If we need to save CR, put it into r12. */
f57fe068 14148 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 14149 {
f8a57be8 14150 rtx set;
f676971a 14151
9ebbca7d 14152 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
14153 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14154 RTX_FRAME_RELATED_P (insn) = 1;
14155 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14156 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14157 But that's OK. All we have to do is specify that _one_ condition
14158 code register is saved in this stack slot. The thrower's epilogue
14159 will then restore all the call-saved registers.
14160 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14161 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14162 gen_rtx_REG (SImode, CR2_REGNO));
14163 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14164 set,
14165 REG_NOTES (insn));
9ebbca7d
GK
14166 }
14167
a4f6c312
SS
14168 /* Do any required saving of FPRs. If there are only one or two to
14169 save, do it ourselves. Otherwise, call a function. */
f57fe068 14170 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
14171 {
14172 int i;
14173 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14174 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d 14175 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
14176 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14177 info->first_fp_reg_save + i,
14178 info->fp_save_offset + sp_offset + 8 * i,
14179 info->total_size);
9ebbca7d 14180 }
f57fe068 14181 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
14182 {
14183 int i;
14184 char rname[30];
520a57c8 14185 const char *alloc_rname;
9ebbca7d
GK
14186 rtvec p;
14187 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
14188
14189 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14190 gen_rtx_REG (Pmode,
9ebbca7d
GK
14191 LINK_REGISTER_REGNUM));
14192 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14193 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 14194 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
14195 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14196 gen_rtx_SYMBOL_REF (Pmode,
14197 alloc_rname));
14198 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14199 {
14200 rtx addr, reg, mem;
14201 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14202 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 14203 GEN_INT (info->fp_save_offset
9ebbca7d
GK
14204 + sp_offset + 8*i));
14205 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 14206 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14207
14208 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14209 }
14210 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14211 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
14212 NULL_RTX, NULL_RTX);
14213 }
b6c9286a 14214
9ebbca7d
GK
14215 /* Save GPRs. This is done as a PARALLEL if we are using
14216 the store-multiple instructions. */
f57fe068 14217 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 14218 {
308c142a 14219 rtvec p;
9ebbca7d
GK
14220 int i;
14221 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
14222 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14223 {
14224 rtx addr, reg, mem;
14225 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
14226 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14227 GEN_INT (info->gp_save_offset
14228 + sp_offset
9ebbca7d
GK
14229 + reg_size * i));
14230 mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0 14231 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14232
14233 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14234 }
14235 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14236 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 14237 NULL_RTX, NULL_RTX);
b6c9286a 14238 }
f57fe068 14239 else if (!WORLD_SAVE_P (info))
b6c9286a 14240 {
9ebbca7d
GK
14241 int i;
14242 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14243 if ((regs_ever_live[info->first_gp_reg_save + i]
14244 && (!call_used_regs[info->first_gp_reg_save + i]
14245 || (i + info->first_gp_reg_save
b4db40bf
JJ
14246 == RS6000_PIC_OFFSET_TABLE_REGNUM
14247 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14248 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14249 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14250 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
a3170dc6
AH
14251 {
14252 rtx addr, reg, mem;
14253 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14254
c19de7aa 14255 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14256 {
14257 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14258 rtx b;
14259
14260 if (!SPE_CONST_OFFSET_OK (offset))
14261 {
14262 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14263 emit_move_insn (b, GEN_INT (offset));
14264 }
14265 else
14266 b = GEN_INT (offset);
14267
14268 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14269 mem = gen_rtx_MEM (V2SImode, addr);
14270 set_mem_alias_set (mem, rs6000_sr_alias_set);
14271 insn = emit_move_insn (mem, reg);
14272
14273 if (GET_CODE (b) == CONST_INT)
14274 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14275 NULL_RTX, NULL_RTX);
14276 else
14277 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14278 b, GEN_INT (offset));
14279 }
14280 else
14281 {
f676971a
EC
14282 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14283 GEN_INT (info->gp_save_offset
14284 + sp_offset
a3170dc6
AH
14285 + reg_size * i));
14286 mem = gen_rtx_MEM (reg_mode, addr);
14287 set_mem_alias_set (mem, rs6000_sr_alias_set);
14288
14289 insn = emit_move_insn (mem, reg);
f676971a 14290 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
a3170dc6
AH
14291 NULL_RTX, NULL_RTX);
14292 }
14293 }
9ebbca7d
GK
14294 }
14295
83720594
RH
14296 /* ??? There's no need to emit actual instructions here, but it's the
14297 easiest way to get the frame unwind information emitted. */
f57fe068 14298 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
83720594 14299 {
78e1b90d
DE
14300 unsigned int i, regno;
14301
fc4767bb
JJ
14302 /* In the AIX ABI we need to pretend we save r2 here. */
14303 if (TARGET_AIX)
14304 {
14305 rtx addr, reg, mem;
14306
14307 reg = gen_rtx_REG (reg_mode, 2);
14308 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14309 GEN_INT (sp_offset + 5 * reg_size));
14310 mem = gen_rtx_MEM (reg_mode, addr);
14311 set_mem_alias_set (mem, rs6000_sr_alias_set);
14312
14313 insn = emit_move_insn (mem, reg);
f676971a 14314 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
14315 NULL_RTX, NULL_RTX);
14316 PATTERN (insn) = gen_blockage ();
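 /* Replacing the pattern with a blockage keeps the frame-related
    note (so the unwinder expects r2 in its slot) without emitting an
    actual store here; the store itself is done by the cross-module
    call glue at the call sites.  */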
14317 }
14318
83720594
RH
14319 for (i = 0; ; ++i)
14320 {
83720594
RH
14321 regno = EH_RETURN_DATA_REGNO (i);
14322 if (regno == INVALID_REGNUM)
14323 break;
14324
89e7058f
AH
14325 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14326 info->ehrd_offset + sp_offset
14327 + reg_size * (int) i,
14328 info->total_size);
83720594
RH
14329 }
14330 }
14331
9ebbca7d 14332 /* Save lr if we used it. */
f57fe068 14333 if (!WORLD_SAVE_P (info) && info->lr_save_p)
9ebbca7d
GK
14334 {
14335 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14336 GEN_INT (info->lr_save_offset + sp_offset));
14337 rtx reg = gen_rtx_REG (Pmode, 0);
14338 rtx mem = gen_rtx_MEM (Pmode, addr);
14339 /* This should not be of rs6000_sr_alias_set, because of
14340 __builtin_return_address. */
f676971a 14341
9ebbca7d 14342 insn = emit_move_insn (mem, reg);
f676971a 14343 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14344 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14345 }
14346
14347 /* Save CR if we use any that must be preserved. */
f57fe068 14348 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
14349 {
14350 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14351 GEN_INT (info->cr_save_offset + sp_offset));
14352 rtx mem = gen_rtx_MEM (SImode, addr);
f8a57be8
GK
14353 /* See the large comment above about why CR2_REGNO is used. */
14354 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0
RK
14355
14356 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14357
14358 /* If r12 was used to hold the original sp, copy cr into r0 now
14359 that it's free. */
14360 if (REGNO (frame_reg_rtx) == 12)
14361 {
f8a57be8
GK
14362 rtx set;
14363
9ebbca7d 14364 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
14365 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14366 RTX_FRAME_RELATED_P (insn) = 1;
14367 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14368 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14369 set,
14370 REG_NOTES (insn));
f676971a 14371
9ebbca7d
GK
14372 }
14373 insn = emit_move_insn (mem, cr_save_rtx);
14374
f676971a 14375 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14376 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14377 }
14378
f676971a 14379 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 14380 for which it was done previously. */
f57fe068 14381 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 14382 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
9ebbca7d
GK
14383 rs6000_emit_allocate_stack (info->total_size, FALSE);
14384
14385 /* Set frame pointer, if needed. */
14386 if (frame_pointer_needed)
14387 {
7d5175e1 14388 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
14389 sp_reg_rtx);
14390 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 14391 }
9878760c 14392
1db02437 14393 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 14394 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
14395 || (DEFAULT_ABI == ABI_V4
14396 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
1db02437 14397 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
c4ad648e
AM
14398 {
14399 /* If emit_load_toc_table will use the link register, we need to save
14400 it. We use R12 for this purpose because emit_load_toc_table
14401 can use register 0. This allows us to use a plain 'blr' to return
14402 from the procedure more often. */
14403 int save_LR_around_toc_setup = (TARGET_ELF
14404 && DEFAULT_ABI != ABI_AIX
14405 && flag_pic
14406 && ! info->lr_save_p
14407 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14408 if (save_LR_around_toc_setup)
14409 {
14410 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
f8a57be8 14411
c4ad648e
AM
14412 insn = emit_move_insn (frame_ptr_rtx, lr);
14413 rs6000_maybe_dead (insn);
14414 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 14415
c4ad648e 14416 rs6000_emit_load_toc_table (TRUE);
f8a57be8 14417
c4ad648e
AM
14418 insn = emit_move_insn (lr, frame_ptr_rtx);
14419 rs6000_maybe_dead (insn);
14420 RTX_FRAME_RELATED_P (insn) = 1;
14421 }
14422 else
14423 rs6000_emit_load_toc_table (TRUE);
14424 }
ee890fe2 14425
fcce224d 14426#if TARGET_MACHO
ee890fe2
SS
14427 if (DEFAULT_ABI == ABI_DARWIN
14428 && flag_pic && current_function_uses_pic_offset_table)
14429 {
f8a57be8 14430 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11abc112 14431 rtx src = machopic_function_base_sym ();
ee890fe2 14432
6d0a8091
DJ
14433 /* Save and restore LR locally around this call (in R0). */
14434 if (!info->lr_save_p)
14435 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14436
f8a57be8 14437 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
ee890fe2 14438
f676971a 14439 insn = emit_move_insn (gen_rtx_REG (Pmode,
f8a57be8
GK
14440 RS6000_PIC_OFFSET_TABLE_REGNUM),
14441 lr);
14442 rs6000_maybe_dead (insn);
6d0a8091
DJ
14443
14444 if (!info->lr_save_p)
14445 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
ee890fe2 14446 }
fcce224d 14447#endif
9ebbca7d
GK
14448}
14449
9ebbca7d 14450/* Write function prologue. */
a4f6c312 14451
08c148a8 14452static void
f676971a 14453rs6000_output_function_prologue (FILE *file,
a2369ed3 14454 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
14455{
14456 rs6000_stack_t *info = rs6000_stack_info ();
14457
4697a36c
MM
14458 if (TARGET_DEBUG_STACK)
14459 debug_stack_info (info);
9878760c 14460
a4f6c312
SS
14461 /* Write .extern for any function we will call to save and restore
14462 fp values. */
14463 if (info->first_fp_reg_save < 64
14464 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 14465 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 14466 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
14467 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14468 RESTORE_FP_SUFFIX);
9878760c 14469
c764f757
RK
14470 /* Write .extern for AIX common mode routines, if needed. */
14471 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14472 {
f6709c70
JW
14473 fputs ("\t.extern __mulh\n", file);
14474 fputs ("\t.extern __mull\n", file);
14475 fputs ("\t.extern __divss\n", file);
14476 fputs ("\t.extern __divus\n", file);
14477 fputs ("\t.extern __quoss\n", file);
14478 fputs ("\t.extern __quous\n", file);
c764f757
RK
14479 common_mode_defined = 1;
14480 }
9878760c 14481
9ebbca7d 14482 if (! HAVE_prologue)
979721f8 14483 {
9ebbca7d 14484 start_sequence ();
9dda4cc8 14485
a4f6c312
SS
14486 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14487 the "toplevel" insn chain. */
2e040219 14488 emit_note (NOTE_INSN_DELETED);
9ebbca7d 14489 rs6000_emit_prologue ();
2e040219 14490 emit_note (NOTE_INSN_DELETED);
178c3eff 14491
a3c9585f 14492 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
14493 {
14494 rtx insn;
14495 unsigned addr = 0;
14496 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14497 {
14498 INSN_ADDRESSES_NEW (insn, addr);
14499 addr += 4;
14500 }
14501 }
9dda4cc8 14502
9ebbca7d 14503 if (TARGET_DEBUG_STACK)
a4f6c312 14504 debug_rtx_list (get_insns (), 100);
c9d691e9 14505 final (get_insns (), file, FALSE);
9ebbca7d 14506 end_sequence ();
979721f8
MM
14507 }
14508
9ebbca7d
GK
14509 rs6000_pic_labelno++;
14510}
f676971a 14511
9ebbca7d 14512/* Emit function epilogue as insns.
9878760c 14513
9ebbca7d
GK
14514 At present, dwarf2out_frame_debug_expr doesn't understand
14515 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14516 anywhere in the epilogue. Most of the insns below would in any case
14517 need special notes to explain where r11 is in relation to the stack. */
9878760c 14518
9ebbca7d 14519void
a2369ed3 14520rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
14521{
14522 rs6000_stack_t *info;
14523 int restoring_FPRs_inline;
14524 int using_load_multiple;
14525 int using_mfcr_multiple;
14526 int use_backchain_to_restore_sp;
14527 int sp_offset = 0;
14528 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14529 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 14530 enum machine_mode reg_mode = Pmode;
327e5343 14531 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14532 int i;
14533
c19de7aa
AH
14534 info = rs6000_stack_info ();
14535
14536 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14537 {
14538 reg_mode = V2SImode;
14539 reg_size = 8;
14540 }
14541
9ebbca7d 14542 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14543 && (!TARGET_SPE_ABI
14544 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14545 && info->first_gp_reg_save < 31
14546 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14547 restoring_FPRs_inline = (sibcall
83720594 14548 || current_function_calls_eh_return
9ebbca7d
GK
14549 || info->first_fp_reg_save == 64
14550 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 14551 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
14552 || current_function_calls_alloca
14553 || info->total_size > 32767);
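 /* The 601, 603 and 750 (and -Os) are presumably better served by a
    single mtcrf that updates several CR fields at once; elsewhere one
    single-field mtcrf per field is used instead.  */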
14554 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14555 || rs6000_cpu == PROCESSOR_PPC603
14556 || rs6000_cpu == PROCESSOR_PPC750
14557 || optimize_size);
14558
f57fe068 14559 if (WORLD_SAVE_P (info))
d62294f5
FJ
14560 {
14561 int i, j;
14562 char rname[30];
14563 const char *alloc_rname;
14564 rtvec p;
14565
14566 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
14567 stack slot (which is not likely to be our caller).
14568 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14569 rest_world is similar, except any R10 parameter is ignored.
14570 The exception-handling stuff that was here in 2.95 is no
14571 longer necessary. */
d62294f5
FJ
14572
14573 p = rtvec_alloc (9
14574 + 1
f676971a 14575 + 32 - info->first_gp_reg_save
c4ad648e
AM
14576 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14577 + 63 + 1 - info->first_fp_reg_save);
d62294f5 14578
c4ad648e
AM
14579 strcpy (rname, ((current_function_calls_eh_return) ?
14580 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
14581 alloc_rname = ggc_strdup (rname);
14582
14583 j = 0;
14584 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14585 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14586 gen_rtx_REG (Pmode,
14587 LINK_REGISTER_REGNUM));
d62294f5 14588 RTVEC_ELT (p, j++)
c4ad648e 14589 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 14590 /* The instruction pattern requires a clobber here;
c4ad648e 14591 it is shared with the restVEC helper. */
d62294f5 14592 RTVEC_ELT (p, j++)
c4ad648e 14593 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
14594
14595 {
c4ad648e
AM
14596 /* CR register traditionally saved as CR2. */
14597 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14598 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14599 GEN_INT (info->cr_save_offset));
14600 rtx mem = gen_rtx_MEM (reg_mode, addr);
14601 set_mem_alias_set (mem, rs6000_sr_alias_set);
14602
14603 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
14604 }
14605
14606 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14607 {
14608 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14609 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14610 GEN_INT (info->gp_save_offset
14611 + reg_size * i));
14612 rtx mem = gen_rtx_MEM (reg_mode, addr);
14613 set_mem_alias_set (mem, rs6000_sr_alias_set);
14614
14615 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14616 }
d62294f5 14617 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14618 {
14619 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14620 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14621 GEN_INT (info->altivec_save_offset
14622 + 16 * i));
14623 rtx mem = gen_rtx_MEM (V4SImode, addr);
14624 set_mem_alias_set (mem, rs6000_sr_alias_set);
14625
14626 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14627 }
d62294f5 14628 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
14629 {
14630 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14631 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14632 GEN_INT (info->fp_save_offset
14633 + 8 * i));
14634 rtx mem = gen_rtx_MEM (DFmode, addr);
14635 set_mem_alias_set (mem, rs6000_sr_alias_set);
14636
14637 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14638 }
d62294f5 14639 RTVEC_ELT (p, j++)
c4ad648e 14640 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 14641 RTVEC_ELT (p, j++)
c4ad648e 14642 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 14643 RTVEC_ELT (p, j++)
c4ad648e 14644 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 14645 RTVEC_ELT (p, j++)
c4ad648e 14646 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 14647 RTVEC_ELT (p, j++)
c4ad648e 14648 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
14649 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14650
14651 return;
14652 }
14653
9ebbca7d
GK
14654 /* If we have a frame pointer, a call to alloca, or a large stack
14655 frame, restore the old stack pointer using the backchain. Otherwise,
14656 we know what size to update it with. */
14657 if (use_backchain_to_restore_sp)
bacbde18 14658 {
9ebbca7d
GK
14659 /* Under V.4, don't reset the stack pointer until after we're done
14660 loading the saved registers. */
f607bc57 14661 if (DEFAULT_ABI == ABI_V4)
9ebbca7d 14662 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
4697a36c 14663
9ebbca7d
GK
14664 emit_move_insn (frame_reg_rtx,
14665 gen_rtx_MEM (Pmode, sp_reg_rtx));
f676971a 14666
bacbde18 14667 }
9ebbca7d 14668 else if (info->push_p)
85638c0d 14669 {
fc4767bb
JJ
14670 if (DEFAULT_ABI == ABI_V4
14671 || current_function_calls_eh_return)
9ebbca7d
GK
14672 sp_offset = info->total_size;
14673 else
14674 {
14675 emit_insn (TARGET_32BIT
14676 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14677 GEN_INT (info->total_size))
14678 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14679 GEN_INT (info->total_size)));
14680 }
85638c0d 14681 }
f676971a 14682
9aa86737
AH
14683 /* Restore AltiVec registers if needed. */
14684 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14685 {
14686 int i;
14687
14688 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14689 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14690 {
14691 rtx addr, areg, mem;
14692
14693 areg = gen_rtx_REG (Pmode, 0);
14694 emit_move_insn
14695 (areg, GEN_INT (info->altivec_save_offset
14696 + sp_offset
14697 + 16 * (i - info->first_altivec_reg_save)));
14698
14699 /* AltiVec addressing mode is [reg+reg]. */
14700 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14701 mem = gen_rtx_MEM (V4SImode, addr);
14702 set_mem_alias_set (mem, rs6000_sr_alias_set);
14703
14704 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14705 }
14706 }
14707
14708 /* Restore VRSAVE if needed. */
44688022 14709 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 14710 && info->vrsave_mask != 0)
9aa86737
AH
14711 {
14712 rtx addr, mem, reg;
14713
14714 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14715 GEN_INT (info->vrsave_save_offset + sp_offset));
14716 mem = gen_rtx_MEM (SImode, addr);
14717 set_mem_alias_set (mem, rs6000_sr_alias_set);
14718 reg = gen_rtx_REG (SImode, 12);
14719 emit_move_insn (reg, mem);
14720
14721 emit_insn (generate_set_vrsave (reg, info, 1));
14722 }
14723
9ebbca7d
GK
14724 /* Get the old lr if we saved it. */
14725 if (info->lr_save_p)
b6c9286a 14726 {
a3170dc6
AH
14727 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14728 info->lr_save_offset + sp_offset);
ba4828e0
RK
14729
14730 set_mem_alias_set (mem, rs6000_sr_alias_set);
b6c9286a 14731
9ebbca7d 14732 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 14733 }
f676971a 14734
9ebbca7d
GK
14735 /* Get the old cr if we saved it. */
14736 if (info->cr_save_p)
14737 {
14738 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14739 GEN_INT (info->cr_save_offset + sp_offset));
14740 rtx mem = gen_rtx_MEM (SImode, addr);
ba4828e0
RK
14741
14742 set_mem_alias_set (mem, rs6000_sr_alias_set);
b6c9286a 14743
9ebbca7d
GK
14744 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14745 }
f676971a 14746
9ebbca7d 14747 /* Set LR here to try to overlap restores below. */
4697a36c 14748 if (info->lr_save_p)
9ebbca7d
GK
14749 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14750 gen_rtx_REG (Pmode, 0));
f676971a 14751
83720594
RH
14752 /* Load exception handler data registers, if needed. */
14753 if (current_function_calls_eh_return)
14754 {
78e1b90d
DE
14755 unsigned int i, regno;
14756
fc4767bb
JJ
14757 if (TARGET_AIX)
14758 {
14759 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14760 GEN_INT (sp_offset + 5 * reg_size));
14761 rtx mem = gen_rtx_MEM (reg_mode, addr);
14762
14763 set_mem_alias_set (mem, rs6000_sr_alias_set);
14764
14765 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14766 }
14767
83720594
RH
14768 for (i = 0; ; ++i)
14769 {
a3170dc6 14770 rtx mem;
83720594
RH
14771
14772 regno = EH_RETURN_DATA_REGNO (i);
14773 if (regno == INVALID_REGNUM)
14774 break;
14775
a3170dc6
AH
14776 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14777 info->ehrd_offset + sp_offset
14778 + reg_size * (int) i);
ba4828e0 14779 set_mem_alias_set (mem, rs6000_sr_alias_set);
83720594
RH
14780
14781 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14782 }
14783 }
f676971a 14784
9ebbca7d
GK
14785 /* Restore GPRs. This is done as a PARALLEL if we are using
14786 the load-multiple instructions. */
14787 if (using_load_multiple)
979721f8 14788 {
9ebbca7d
GK
14789 rtvec p;
14790 p = rtvec_alloc (32 - info->first_gp_reg_save);
14791 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 14792 {
f676971a
EC
14793 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14794 GEN_INT (info->gp_save_offset
14795 + sp_offset
9ebbca7d
GK
14796 + reg_size * i));
14797 rtx mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0
RK
14798
14799 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14800
f676971a 14801 RTVEC_ELT (p, i) =
9ebbca7d
GK
14802 gen_rtx_SET (VOIDmode,
14803 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14804 mem);
979721f8 14805 }
9ebbca7d 14806 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 14807 }
9ebbca7d
GK
14808 else
14809 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14810 if ((regs_ever_live[info->first_gp_reg_save + i]
14811 && (!call_used_regs[info->first_gp_reg_save + i]
14812 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
b4db40bf 14813 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14814 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14815 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14816 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9ebbca7d 14817 {
f676971a
EC
14818 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14819 GEN_INT (info->gp_save_offset
14820 + sp_offset
9ebbca7d
GK
14821 + reg_size * i));
14822 rtx mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0 14823
a3170dc6 14824 /* Restore 64-bit quantities for SPE. */
c19de7aa 14825 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14826 {
14827 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14828 rtx b;
14829
14830 if (!SPE_CONST_OFFSET_OK (offset))
14831 {
14832 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14833 emit_move_insn (b, GEN_INT (offset));
14834 }
14835 else
14836 b = GEN_INT (offset);
14837
14838 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14839 mem = gen_rtx_MEM (V2SImode, addr);
14840 }
14841
ba4828e0 14842 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14843
f676971a 14844 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 14845 info->first_gp_reg_save + i), mem);
9ebbca7d 14846 }
9878760c 14847
9ebbca7d
GK
14848 /* Restore FPRs if we need to do it without calling a function. */
14849 if (restoring_FPRs_inline)
14850 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14851 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d
GK
14852 && ! call_used_regs[info->first_fp_reg_save+i]))
14853 {
14854 rtx addr, mem;
14855 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
14856 GEN_INT (info->fp_save_offset
14857 + sp_offset
a4f6c312 14858 + 8 * i));
9ebbca7d 14859 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 14860 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14861
f676971a 14862 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
14863 info->first_fp_reg_save + i),
14864 mem);
14865 }
8d30c4ee 14866
9ebbca7d
GK
14867 /* If we saved cr, restore it here. Just those that were used. */
14868 if (info->cr_save_p)
979721f8 14869 {
9ebbca7d 14870 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 14871 int count = 0;
f676971a 14872
9ebbca7d 14873 if (using_mfcr_multiple)
979721f8 14874 {
9ebbca7d
GK
14875 for (i = 0; i < 8; i++)
14876 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
e35b9579 14877 count++;
37409796 14878 gcc_assert (count);
e35b9579
GK
14879 }
14880
14881 if (using_mfcr_multiple && count > 1)
14882 {
14883 rtvec p;
14884 int ndx;
f676971a 14885
e35b9579 14886 p = rtvec_alloc (count);
9ebbca7d 14887
e35b9579 14888 ndx = 0;
9ebbca7d
GK
14889 for (i = 0; i < 8; i++)
14890 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14891 {
14892 rtvec r = rtvec_alloc (2);
14893 RTVEC_ELT (r, 0) = r12_rtx;
14894 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
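 /* The mtcrf FXM mask numbers CR0 from the most significant bit,
    hence 1 << (7-i) selects CR field i.  */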
e35b9579 14895 RTVEC_ELT (p, ndx) =
f676971a 14896 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 14897 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 14898 ndx++;
9ebbca7d
GK
14899 }
14900 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 14901 gcc_assert (ndx == count);
979721f8
MM
14902 }
14903 else
9ebbca7d
GK
14904 for (i = 0; i < 8; i++)
14905 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
979721f8 14906 {
f676971a 14907 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
14908 CR0_REGNO+i),
14909 r12_rtx));
979721f8 14910 }
979721f8
MM
14911 }
14912
9ebbca7d
GK
14913 /* If this is V.4, unwind the stack pointer after all of the loads
14914 have been done. We need to emit a block here so that sched
14915 doesn't decide to move the sp change before the register restores
14916 (which may not have any obvious dependency on the stack). This
14917 doesn't hurt performance, because there is no scheduling that can
14918 be done after this point. */
fc4767bb
JJ
14919 if (DEFAULT_ABI == ABI_V4
14920 || current_function_calls_eh_return)
b6c9286a 14921 {
9ebbca7d 14922 if (frame_reg_rtx != sp_reg_rtx)
c4ad648e 14923 rs6000_emit_stack_tie ();
b6c9286a 14924
9ebbca7d 14925 if (use_backchain_to_restore_sp)
b6c9286a 14926 {
9ebbca7d 14927 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
b6c9286a 14928 }
9ebbca7d 14929 else if (sp_offset != 0)
13f1623b 14930 {
5b71a4e7 14931 emit_insn (TARGET_32BIT
9ebbca7d
GK
14932 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14933 GEN_INT (sp_offset))
14934 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14935 GEN_INT (sp_offset)));
13f1623b 14936 }
9ebbca7d 14937 }
b6c9286a 14938
83720594
RH
14939 if (current_function_calls_eh_return)
14940 {
14941 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 14942 emit_insn (TARGET_32BIT
83720594
RH
14943 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
14944 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
14945 }
14946
9ebbca7d
GK
14947 if (!sibcall)
14948 {
14949 rtvec p;
14950 if (! restoring_FPRs_inline)
14951 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
14952 else
14953 p = rtvec_alloc (2);
b6c9286a 14954
e35b9579 14955 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
14956 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14957 gen_rtx_REG (Pmode,
9ebbca7d 14958 LINK_REGISTER_REGNUM));
9ebbca7d
GK
14959
14960 /* If we have to restore more than two FP registers, branch to the
14961 restore function. It will return to our caller. */
14962 if (! restoring_FPRs_inline)
14963 {
14964 int i;
14965 char rname[30];
520a57c8 14966 const char *alloc_rname;
979721f8 14967
f676971a 14968 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 14969 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 14970 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
14971 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
14972 gen_rtx_SYMBOL_REF (Pmode,
14973 alloc_rname));
b6c9286a 14974
9ebbca7d
GK
14975 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14976 {
14977 rtx addr, mem;
14978 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
14979 GEN_INT (info->fp_save_offset + 8*i));
14980 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 14981 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14982
f676971a 14983 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
14984 gen_rtx_SET (VOIDmode,
14985 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
14986 mem);
b6c9286a
MM
14987 }
14988 }
f676971a 14989
9ebbca7d 14990 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 14991 }
9878760c
RK
14992}
14993
14994/* Write function epilogue. */
14995
08c148a8 14996static void
f676971a 14997rs6000_output_function_epilogue (FILE *file,
a2369ed3 14998 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 14999{
4697a36c 15000 rs6000_stack_t *info = rs6000_stack_info ();
9878760c 15001
9ebbca7d 15002 if (! HAVE_epilogue)
9878760c 15003 {
9ebbca7d
GK
15004 rtx insn = get_last_insn ();
15005 /* If the last insn was a BARRIER, we don't have to write anything except
15006 the trace table. */
15007 if (GET_CODE (insn) == NOTE)
15008 insn = prev_nonnote_insn (insn);
15009 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15010 {
9ebbca7d
GK
15011 /* This is slightly ugly, but at least we don't have two
15012 copies of the epilogue-emitting code. */
15013 start_sequence ();
15014
15015 /* A NOTE_INSN_DELETED is supposed to be at the start
15016 and end of the "toplevel" insn chain. */
2e040219 15017 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15018 rs6000_emit_epilogue (FALSE);
2e040219 15019 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15020
a3c9585f 15021 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15022 {
15023 rtx insn;
15024 unsigned addr = 0;
15025 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15026 {
15027 INSN_ADDRESSES_NEW (insn, addr);
15028 addr += 4;
15029 }
15030 }
15031
9ebbca7d 15032 if (TARGET_DEBUG_STACK)
a4f6c312 15033 debug_rtx_list (get_insns (), 100);
c9d691e9 15034 final (get_insns (), file, FALSE);
9ebbca7d 15035 end_sequence ();
4697a36c 15036 }
9878760c 15037 }
b4ac57ab 15038
efdba735
SH
15039#if TARGET_MACHO
15040 macho_branch_islands ();
0e5da0be
GK
15041 /* Mach-O doesn't support labels at the end of objects, so if
15042 it looks like we might want one, insert a NOP. */
15043 {
15044 rtx insn = get_last_insn ();
15045 while (insn
15046 && NOTE_P (insn)
15047 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15048 insn = PREV_INSN (insn);
f676971a
EC
15049 if (insn
15050 && (LABEL_P (insn)
0e5da0be
GK
15051 || (NOTE_P (insn)
15052 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15053 fputs ("\tnop\n", file);
15054 }
15055#endif
15056
9b30bae2 15057 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
15058 on its format.
15059
15060 We don't output a traceback table if -finhibit-size-directive was
15061 used. The documentation for -finhibit-size-directive reads
15062 ``don't output a @code{.size} assembler directive, or anything
15063 else that would cause trouble if the function is split in the
15064 middle, and the two halves are placed at locations far apart in
15065 memory.'' The traceback table has this property, since it
15066 includes the offset from the start of the function to the
4d30c363
MM
15067 traceback table itself.
15068
 15069 System V.4 PowerPCs (and the embedded ABI derived from it) use a
b6c9286a 15070 different traceback table. */
57ac7be9
AM
15071 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15072 && rs6000_traceback != traceback_none)
9b30bae2 15073 {
69c75916 15074 const char *fname = NULL;
3ac88239 15075 const char *language_string = lang_hooks.name;
6041bf2f 15076 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 15077 int i;
57ac7be9
AM
15078 int optional_tbtab;
15079
15080 if (rs6000_traceback == traceback_full)
15081 optional_tbtab = 1;
15082 else if (rs6000_traceback == traceback_part)
15083 optional_tbtab = 0;
15084 else
15085 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 15086
69c75916
AM
15087 if (optional_tbtab)
15088 {
15089 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15090 while (*fname == '.') /* V.4 encodes . in the name */
15091 fname++;
15092
15093 /* Need label immediately before tbtab, so we can compute
15094 its offset from the function start. */
15095 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15096 ASM_OUTPUT_LABEL (file, fname);
15097 }
314fc5a9
ILT
15098
15099 /* The .tbtab pseudo-op can only be used for the first eight
15100 expressions, since it can't handle the possibly variable
15101 length fields that follow. However, if you omit the optional
15102 fields, the assembler outputs zeros for all optional fields
 15103 anyway, giving each variable length field its minimum length
 15104 (as defined in sys/debug.h). Thus we cannot use the .tbtab
15105 pseudo-op at all. */
15106
15107 /* An all-zero word flags the start of the tbtab, for debuggers
15108 that have to find it by searching forward from the entry
15109 point or from the current pc. */
19d2d16f 15110 fputs ("\t.long 0\n", file);
314fc5a9
ILT
15111
15112 /* Tbtab format type. Use format type 0. */
19d2d16f 15113 fputs ("\t.byte 0,", file);
314fc5a9 15114
5fc921c1
DE
15115 /* Language type. Unfortunately, there does not seem to be any
15116 official way to discover the language being compiled, so we
15117 use language_string.
15118 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
15119 Java is 13. Objective-C is 14. */
15120 if (! strcmp (language_string, "GNU C"))
314fc5a9 15121 i = 0;
6de9cd9a
DN
15122 else if (! strcmp (language_string, "GNU F77")
15123 || ! strcmp (language_string, "GNU F95"))
314fc5a9 15124 i = 1;
8b83775b 15125 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 15126 i = 2;
5fc921c1
DE
15127 else if (! strcmp (language_string, "GNU Ada"))
15128 i = 3;
314fc5a9
ILT
15129 else if (! strcmp (language_string, "GNU C++"))
15130 i = 9;
9517ead8
AG
15131 else if (! strcmp (language_string, "GNU Java"))
15132 i = 13;
5fc921c1
DE
15133 else if (! strcmp (language_string, "GNU Objective-C"))
15134 i = 14;
314fc5a9 15135 else
37409796 15136 gcc_unreachable ();
314fc5a9
ILT
15137 fprintf (file, "%d,", i);
15138
15139 /* 8 single bit fields: global linkage (not set for C extern linkage,
15140 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15141 from start of procedure stored in tbtab, internal function, function
15142 has controlled storage, function has no toc, function uses fp,
15143 function logs/aborts fp operations. */
15144 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
15145 fprintf (file, "%d,",
15146 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
15147
15148 /* 6 bitfields: function is interrupt handler, name present in
15149 proc table, function calls alloca, on condition directives
15150 (controls stack walks, 3 bits), saves condition reg, saves
15151 link reg. */
15152 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15153 set up as a frame pointer, even when there is no alloca call. */
15154 fprintf (file, "%d,",
6041bf2f
DE
15155 ((optional_tbtab << 6)
15156 | ((optional_tbtab & frame_pointer_needed) << 5)
15157 | (info->cr_save_p << 1)
15158 | (info->lr_save_p)));
314fc5a9 15159
6041bf2f 15160 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
15161 (6 bits). */
15162 fprintf (file, "%d,",
4697a36c 15163 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
15164
15165 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15166 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15167
6041bf2f
DE
15168 if (optional_tbtab)
15169 {
15170 /* Compute the parameter info from the function decl argument
15171 list. */
15172 tree decl;
15173 int next_parm_info_bit = 31;
314fc5a9 15174
6041bf2f
DE
15175 for (decl = DECL_ARGUMENTS (current_function_decl);
15176 decl; decl = TREE_CHAIN (decl))
15177 {
15178 rtx parameter = DECL_INCOMING_RTL (decl);
15179 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 15180
6041bf2f
DE
15181 if (GET_CODE (parameter) == REG)
15182 {
15183 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
15184 {
15185 int bits;
15186
15187 float_parms++;
15188
37409796
NS
15189 switch (mode)
15190 {
15191 case SFmode:
15192 bits = 0x2;
15193 break;
15194
15195 case DFmode:
15196 case TFmode:
15197 bits = 0x3;
15198 break;
15199
15200 default:
15201 gcc_unreachable ();
15202 }
6041bf2f
DE
15203
15204 /* If only one bit will fit, don't or in this entry. */
15205 if (next_parm_info_bit > 0)
15206 parm_info |= (bits << (next_parm_info_bit - 1));
15207 next_parm_info_bit -= 2;
15208 }
15209 else
15210 {
15211 fixed_parms += ((GET_MODE_SIZE (mode)
15212 + (UNITS_PER_WORD - 1))
15213 / UNITS_PER_WORD);
15214 next_parm_info_bit -= 1;
15215 }
15216 }
15217 }
15218 }
314fc5a9
ILT
15219
15220 /* Number of fixed point parameters. */
15221 /* This is actually the number of words of fixed point parameters; thus
15222 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15223 fprintf (file, "%d,", fixed_parms);
15224
15225 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15226 all on stack. */
15227 /* This is actually the number of fp registers that hold parameters;
15228 and thus the maximum value is 13. */
15229 /* Set parameters on stack bit if parameters are not in their original
15230 registers, regardless of whether they are on the stack? Xlc
15231 seems to set the bit when not optimizing. */
15232 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15233
6041bf2f
DE
15234 if (! optional_tbtab)
15235 return;
15236
314fc5a9
ILT
15237 /* Optional fields follow. Some are variable length. */
15238
15239 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15240 11 double float. */
15241 /* There is an entry for each parameter in a register, in the order that
15242 they occur in the parameter list. Any intervening arguments on the
15243 stack are ignored. If the list overflows a long (max possible length
15244 34 bits) then completely leave off all elements that don't fit. */
15245 /* Only emit this long if there was at least one parameter. */
15246 if (fixed_parms || float_parms)
15247 fprintf (file, "\t.long %d\n", parm_info);
15248
15249 /* Offset from start of code to tb table. */
19d2d16f 15250 fputs ("\t.long ", file);
314fc5a9 15251 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
15252 if (TARGET_AIX)
15253 RS6000_OUTPUT_BASENAME (file, fname);
15254 else
15255 assemble_name (file, fname);
15256 putc ('-', file);
15257 rs6000_output_function_entry (file, fname);
19d2d16f 15258 putc ('\n', file);
314fc5a9
ILT
15259
15260 /* Interrupt handler mask. */
15261 /* Omit this long, since we never set the interrupt handler bit
15262 above. */
15263
15264 /* Number of CTL (controlled storage) anchors. */
15265 /* Omit this long, since the has_ctl bit is never set above. */
15266
15267 /* Displacement into stack of each CTL anchor. */
15268 /* Omit this list of longs, because there are no CTL anchors. */
15269
15270 /* Length of function name. */
69c75916
AM
15271 if (*fname == '*')
15272 ++fname;
296b8152 15273 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
15274
15275 /* Function name. */
15276 assemble_string (fname, strlen (fname));
15277
15278 /* Register for alloca automatic storage; this is always reg 31.
15279 Only emit this if the alloca bit was set above. */
15280 if (frame_pointer_needed)
19d2d16f 15281 fputs ("\t.byte 31\n", file);
b1765bde
DE
15282
15283 fputs ("\t.align 2\n", file);
9b30bae2 15284 }
9878760c 15285}
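/* Illustrative sketch, not part of rs6000.c: a stand-alone model of how
   the traceback table's parm_info word is packed by the loop above --
   one 0 bit per fixed-point register parameter, the pair "10" for a
   single-float parameter and "11" for a double-float parameter, filled
   from bit 31 downward.  The enum and the sample parameter list below
   are hypothetical.  */
#include <stdio.h>

enum parm_kind { PARM_FIXED, PARM_SINGLE_FLOAT, PARM_DOUBLE_FLOAT };

static unsigned int
pack_parm_info (const enum parm_kind *parms, int n)
{
  unsigned int parm_info = 0;
  int next_parm_info_bit = 31;
  int i;

  for (i = 0; i < n; i++)
    switch (parms[i])
      {
      case PARM_FIXED:
        /* A fixed-point register parameter contributes a single 0 bit.  */
        next_parm_info_bit -= 1;
        break;
      case PARM_SINGLE_FLOAT:
        if (next_parm_info_bit > 0)
          parm_info |= 0x2u << (next_parm_info_bit - 1);
        next_parm_info_bit -= 2;
        break;
      case PARM_DOUBLE_FLOAT:
        if (next_parm_info_bit > 0)
          parm_info |= 0x3u << (next_parm_info_bit - 1);
        next_parm_info_bit -= 2;
        break;
      }
  return parm_info;
}

int
main (void)
{
  /* f (int, double, float, int) packs as "0 11 10 0" from bit 31 down.  */
  enum parm_kind sample[] =
    { PARM_FIXED, PARM_DOUBLE_FLOAT, PARM_SINGLE_FLOAT, PARM_FIXED };

  printf ("parm_info = 0x%08x\n", pack_parm_info (sample, 4));
  return 0;
}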
17167fd8 15286\f
a4f6c312
SS
15287/* A C compound statement that outputs the assembler code for a thunk
15288 function, used to implement C++ virtual function calls with
15289 multiple inheritance. The thunk acts as a wrapper around a virtual
15290 function, adjusting the implicit object parameter before handing
15291 control off to the real function.
15292
15293 First, emit code to add the integer DELTA to the location that
15294 contains the incoming first argument. Assume that this argument
15295 contains a pointer, and is the one used to pass the `this' pointer
15296 in C++. This is the incoming argument *before* the function
15297 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15298 values of all other incoming arguments.
17167fd8
MM
15299
15300 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
15301 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15302 not touch the return address. Hence returning from FUNCTION will
15303 return to whoever called the current `thunk'.
17167fd8 15304
a4f6c312
SS
15305 The effect must be as if FUNCTION had been called directly with the
15306 adjusted first argument. This macro is responsible for emitting
15307 all of the code for a thunk function; output_function_prologue()
15308 and output_function_epilogue() are not invoked.
17167fd8 15309
a4f6c312
SS
15310 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15311 been extracted from it.) It might possibly be useful on some
15312 targets, but probably not.
17167fd8 15313
a4f6c312
SS
15314 If you do not define this macro, the target-independent code in the
15315 C++ frontend will generate a less efficient heavyweight thunk that
15316 calls FUNCTION instead of jumping to it. The generic approach does
15317 not support varargs. */
17167fd8 15318
3961e8fe 15319static void
f676971a
EC
15320rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15321 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 15322 tree function)
17167fd8 15323{
5b71a4e7 15324 rtx this, insn, funexp;
17167fd8 15325
5b71a4e7 15326 reload_completed = 1;
fe3ad572 15327 epilogue_completed = 1;
5b71a4e7 15328 no_new_pseudos = 1;
6429e3be 15329 reset_block_changes ();
56a7189a 15330
5b71a4e7 15331 /* Mark the end of the (empty) prologue. */
2e040219 15332 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 15333
5b71a4e7
DE
15334 /* Find the "this" pointer. If the function returns a structure,
15335 the structure return pointer is in r3. */
61f71b34 15336 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 15337 this = gen_rtx_REG (Pmode, 4);
56a7189a 15338 else
5b71a4e7 15339 this = gen_rtx_REG (Pmode, 3);
17167fd8 15340
5b71a4e7
DE
15341 /* Apply the constant offset, if required. */
15342 if (delta)
15343 {
15344 rtx delta_rtx = GEN_INT (delta);
15345 emit_insn (TARGET_32BIT
15346 ? gen_addsi3 (this, this, delta_rtx)
15347 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
15348 }
15349
5b71a4e7
DE
15350 /* Apply the offset from the vtable, if required. */
15351 if (vcall_offset)
17167fd8 15352 {
5b71a4e7
DE
15353 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15354 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 15355
5b71a4e7 15356 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
15357 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15358 {
15359 emit_insn (TARGET_32BIT
15360 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15361 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15362 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15363 }
15364 else
15365 {
15366 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15367
15368 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15369 }
5b71a4e7
DE
15370 emit_insn (TARGET_32BIT
15371 ? gen_addsi3 (this, this, tmp)
15372 : gen_adddi3 (this, this, tmp));
17167fd8
MM
15373 }
15374
5b71a4e7
DE
15375 /* Generate a tail call to the target function. */
15376 if (!TREE_USED (function))
15377 {
15378 assemble_external (function);
15379 TREE_USED (function) = 1;
15380 }
15381 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 15382 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
15383
15384#if TARGET_MACHO
ab82a49f 15385 if (MACHOPIC_INDIRECT)
5b71a4e7 15386 funexp = machopic_indirect_call_target (funexp);
ee890fe2 15387#endif
5b71a4e7
DE
15388
15389 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 15390 generate sibcall RTL explicitly. */
5b71a4e7
DE
15391 insn = emit_call_insn (
15392 gen_rtx_PARALLEL (VOIDmode,
15393 gen_rtvec (4,
15394 gen_rtx_CALL (VOIDmode,
15395 funexp, const0_rtx),
15396 gen_rtx_USE (VOIDmode, const0_rtx),
15397 gen_rtx_USE (VOIDmode,
15398 gen_rtx_REG (SImode,
15399 LINK_REGISTER_REGNUM)),
15400 gen_rtx_RETURN (VOIDmode))));
15401 SIBLING_CALL_P (insn) = 1;
15402 emit_barrier ();
15403
15404 /* Run just enough of rest_of_compilation to get the insns emitted.
15405 There's not really enough bulk here to make other passes such as
15406 instruction scheduling worth while. Note that use_thunk calls
15407 assemble_start_function and assemble_end_function. */
15408 insn = get_insns ();
a2855205 15409 insn_locators_initialize ();
5b71a4e7
DE
15410 shorten_branches (insn);
15411 final_start_function (insn, file, 1);
c9d691e9 15412 final (insn, file, 1);
5b71a4e7
DE
15413 final_end_function ();
15414
15415 reload_completed = 0;
fe3ad572 15416 epilogue_completed = 0;
5b71a4e7 15417 no_new_pseudos = 0;
9ebbca7d 15418}
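/* Illustrative sketch, not part of rs6000.c: a C-level model of the
   adjustment the thunk above performs before tail-calling FUNCTION --
   add DELTA to the incoming `this' pointer and, if VCALL_OFFSET is
   nonzero, add the value stored at that offset into the object's
   vtable.  The object layout, vtable contents and target function are
   hypothetical stand-ins.  */
#include <stddef.h>
#include <stdio.h>

static void
real_method (void *adjusted_this)
{
  printf ("called with this = %p\n", adjusted_this);
}

static void
thunk (void *this_ptr, ptrdiff_t delta, ptrdiff_t vcall_offset)
{
  char *p = (char *) this_ptr + delta;

  if (vcall_offset != 0)
    {
      char *vtable = *(char **) p;              /* first word of the object */
      p += *(ptrdiff_t *) (vtable + vcall_offset);
    }

  real_method (p);                              /* a direct jump in the real thunk */
}

int
main (void)
{
  /* Fake object whose first word points at a fake vtable; the second
     vtable slot holds an extra adjustment of 16 bytes.  */
  static ptrdiff_t vtable[2] = { 0, 16 };
  struct { void *vptr; char payload[32]; } obj = { vtable, { 0 } };

  thunk (&obj, 0, (ptrdiff_t) sizeof (ptrdiff_t));   /* second vtable slot */
  return 0;
}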
9ebbca7d
GK
15419\f
15420/* A quick summary of the various types of 'constant-pool tables'
15421 under PowerPC:
15422
f676971a 15423 Target Flags Name One table per
9ebbca7d
GK
15424 AIX (none) AIX TOC object file
15425 AIX -mfull-toc AIX TOC object file
15426 AIX -mminimal-toc AIX minimal TOC translation unit
15427 SVR4/EABI (none) SVR4 SDATA object file
15428 SVR4/EABI -fpic SVR4 pic object file
15429 SVR4/EABI -fPIC SVR4 PIC translation unit
15430 SVR4/EABI -mrelocatable EABI TOC function
15431 SVR4/EABI -maix AIX TOC object file
f676971a 15432 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
15433 AIX minimal TOC translation unit
15434
15435 Name Reg. Set by entries contains:
15436 made by addrs? fp? sum?
15437
15438 AIX TOC 2 crt0 as Y option option
15439 AIX minimal TOC 30 prolog gcc Y Y option
15440 SVR4 SDATA 13 crt0 gcc N Y N
15441 SVR4 pic 30 prolog ld Y not yet N
15442 SVR4 PIC 30 prolog gcc Y option option
15443 EABI TOC 30 prolog gcc Y option option
15444
15445*/
15446
9ebbca7d
GK
15447/* Hash functions for the hash table. */
15448
15449static unsigned
a2369ed3 15450rs6000_hash_constant (rtx k)
9ebbca7d 15451{
46b33600
RH
15452 enum rtx_code code = GET_CODE (k);
15453 enum machine_mode mode = GET_MODE (k);
15454 unsigned result = (code << 3) ^ mode;
15455 const char *format;
15456 int flen, fidx;
f676971a 15457
46b33600
RH
15458 format = GET_RTX_FORMAT (code);
15459 flen = strlen (format);
15460 fidx = 0;
9ebbca7d 15461
46b33600
RH
15462 switch (code)
15463 {
15464 case LABEL_REF:
15465 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15466
15467 case CONST_DOUBLE:
15468 if (mode != VOIDmode)
15469 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15470 flen = 2;
15471 break;
15472
15473 case CODE_LABEL:
15474 fidx = 3;
15475 break;
15476
15477 default:
15478 break;
15479 }
9ebbca7d
GK
15480
15481 for (; fidx < flen; fidx++)
15482 switch (format[fidx])
15483 {
15484 case 's':
15485 {
15486 unsigned i, len;
15487 const char *str = XSTR (k, fidx);
15488 len = strlen (str);
15489 result = result * 613 + len;
15490 for (i = 0; i < len; i++)
15491 result = result * 613 + (unsigned) str[i];
17167fd8
MM
15492 break;
15493 }
9ebbca7d
GK
15494 case 'u':
15495 case 'e':
15496 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15497 break;
15498 case 'i':
15499 case 'n':
15500 result = result * 613 + (unsigned) XINT (k, fidx);
15501 break;
15502 case 'w':
15503 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15504 result = result * 613 + (unsigned) XWINT (k, fidx);
15505 else
15506 {
15507 size_t i;
9390387d 15508 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
15509 result = result * 613 + (unsigned) (XWINT (k, fidx)
15510 >> CHAR_BIT * i);
15511 }
15512 break;
09501938
DE
15513 case '0':
15514 break;
9ebbca7d 15515 default:
37409796 15516 gcc_unreachable ();
9ebbca7d 15517 }
46b33600 15518
9ebbca7d
GK
15519 return result;
15520}
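/* Illustrative sketch, not part of rs6000.c: the string-mixing step of
   rs6000_hash_constant above (multiply the running hash by 613 and add
   the length, then each byte), pulled out so it can be run on ordinary
   C strings.  The seed value stands in for (code << 3) ^ mode and is
   arbitrary.  */
#include <stdio.h>
#include <string.h>

static unsigned
mix_string (unsigned result, const char *str)
{
  size_t i, len = strlen (str);

  result = result * 613 + (unsigned) len;
  for (i = 0; i < len; i++)
    result = result * 613 + (unsigned) str[i];
  return result;
}

int
main (void)
{
  unsigned seed = 1;

  printf ("%u\n", mix_string (seed, "_ZTV3Foo"));
  printf ("%u\n", mix_string (seed, "_ZTV3Bar"));
  return 0;
}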
15521
15522static unsigned
a2369ed3 15523toc_hash_function (const void *hash_entry)
9ebbca7d 15524{
f676971a 15525 const struct toc_hash_struct *thc =
a9098fd0
GK
15526 (const struct toc_hash_struct *) hash_entry;
15527 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
15528}
15529
15530/* Compare H1 and H2 for equivalence. */
15531
15532static int
a2369ed3 15533toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
15534{
15535 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15536 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15537
a9098fd0
GK
15538 if (((const struct toc_hash_struct *) h1)->key_mode
15539 != ((const struct toc_hash_struct *) h2)->key_mode)
15540 return 0;
15541
5692c7bc 15542 return rtx_equal_p (r1, r2);
9ebbca7d
GK
15543}
15544
28e510bd
MM
15545/* These are the names given by the C++ front-end to vtables, and
15546 vtable-like objects. Ideally, this logic should not be here;
15547 instead, there should be some programmatic way of inquiring as
15548 to whether or not an object is a vtable. */
15549
15550#define VTABLE_NAME_P(NAME) \
9390387d 15551 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
15552 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15553 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 15554 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 15555 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
15556
15557void
a2369ed3 15558rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
15559{
15560 /* Currently C++ toc references to vtables can be emitted before it
15561 is decided whether the vtable is public or private. If this is
15562 the case, then the linker will eventually complain that there is
f676971a 15563 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
15564 we emit the TOC reference to reference the symbol and not the
15565 section. */
15566 const char *name = XSTR (x, 0);
54ee9799 15567
f676971a 15568 if (VTABLE_NAME_P (name))
54ee9799
DE
15569 {
15570 RS6000_OUTPUT_BASENAME (file, name);
15571 }
15572 else
15573 assemble_name (file, name);
28e510bd
MM
15574}
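/* Illustrative sketch, not part of rs6000.c: the same prefix test as
   VTABLE_NAME_P above, wrapped in a small program so it can be tried
   on a few sample mangled names.  The sample symbols are made up.  */
#include <stdio.h>
#include <string.h>

static int
looks_like_vtable (const char *name)
{
  return (strncmp ("_vt.", name, strlen ("_vt.")) == 0
          || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0
          || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0
          || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0
          || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0);
}

int
main (void)
{
  const char *samples[] = { "_ZTV3Foo", "_ZTI3Foo", "_Z3barv", "printf" };
  int i, n = sizeof samples / sizeof samples[0];

  for (i = 0; i < n; i++)
    printf ("%-10s %s\n", samples[i],
            looks_like_vtable (samples[i]) ? "vtable-like" : "ordinary");
  return 0;
}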
15575
a4f6c312
SS
15576/* Output a TOC entry. We derive the entry name from what is being
15577 written. */
9878760c
RK
15578
15579void
a2369ed3 15580output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
15581{
15582 char buf[256];
3cce094d 15583 const char *name = buf;
ec940faa 15584 const char *real_name;
9878760c
RK
15585 rtx base = x;
15586 int offset = 0;
15587
37409796 15588 gcc_assert (!TARGET_NO_TOC);
4697a36c 15589
9ebbca7d
GK
15590 /* When the linker won't eliminate them, don't output duplicate
15591 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
15592 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15593 CODE_LABELs. */
15594 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
15595 {
15596 struct toc_hash_struct *h;
15597 void * * found;
f676971a 15598
17211ab5 15599 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 15600 time because GGC is not initialized at that point. */
17211ab5 15601 if (toc_hash_table == NULL)
f676971a 15602 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
15603 toc_hash_eq, NULL);
15604
9ebbca7d
GK
15605 h = ggc_alloc (sizeof (*h));
15606 h->key = x;
a9098fd0 15607 h->key_mode = mode;
9ebbca7d 15608 h->labelno = labelno;
f676971a 15609
9ebbca7d
GK
15610 found = htab_find_slot (toc_hash_table, h, 1);
15611 if (*found == NULL)
15612 *found = h;
f676971a 15613 else /* This is indeed a duplicate.
9ebbca7d
GK
15614 Set this label equal to that label. */
15615 {
15616 fputs ("\t.set ", file);
15617 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15618 fprintf (file, "%d,", labelno);
15619 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 15620 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
15621 found)->labelno));
15622 return;
15623 }
15624 }
15625
15626 /* If we're going to put a double constant in the TOC, make sure it's
15627 aligned properly when strict alignment is on. */
ff1720ed
RK
15628 if (GET_CODE (x) == CONST_DOUBLE
15629 && STRICT_ALIGNMENT
a9098fd0 15630 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
15631 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15632 ASM_OUTPUT_ALIGN (file, 3);
15633 }
15634
4977bab6 15635 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 15636
37c37a57
RK
15637 /* Handle FP constants specially. Note that if we have a minimal
15638 TOC, things we put here aren't actually in the TOC, so we can allow
15639 FP constants. */
fcce224d
DE
15640 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15641 {
15642 REAL_VALUE_TYPE rv;
15643 long k[4];
15644
15645 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15646 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15647
15648 if (TARGET_64BIT)
15649 {
15650 if (TARGET_MINIMAL_TOC)
15651 fputs (DOUBLE_INT_ASM_OP, file);
15652 else
15653 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15654 k[0] & 0xffffffff, k[1] & 0xffffffff,
15655 k[2] & 0xffffffff, k[3] & 0xffffffff);
15656 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15657 k[0] & 0xffffffff, k[1] & 0xffffffff,
15658 k[2] & 0xffffffff, k[3] & 0xffffffff);
15659 return;
15660 }
15661 else
15662 {
15663 if (TARGET_MINIMAL_TOC)
15664 fputs ("\t.long ", file);
15665 else
15666 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15667 k[0] & 0xffffffff, k[1] & 0xffffffff,
15668 k[2] & 0xffffffff, k[3] & 0xffffffff);
15669 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15670 k[0] & 0xffffffff, k[1] & 0xffffffff,
15671 k[2] & 0xffffffff, k[3] & 0xffffffff);
15672 return;
15673 }
15674 }
15675 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9878760c 15676 {
042259f2
DE
15677 REAL_VALUE_TYPE rv;
15678 long k[2];
0adc764e 15679
042259f2
DE
15680 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15681 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 15682
13ded975
DE
15683 if (TARGET_64BIT)
15684 {
15685 if (TARGET_MINIMAL_TOC)
2bfcf297 15686 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 15687 else
2f0552b6
AM
15688 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15689 k[0] & 0xffffffff, k[1] & 0xffffffff);
15690 fprintf (file, "0x%lx%08lx\n",
15691 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
15692 return;
15693 }
1875cc88 15694 else
13ded975
DE
15695 {
15696 if (TARGET_MINIMAL_TOC)
2bfcf297 15697 fputs ("\t.long ", file);
13ded975 15698 else
2f0552b6
AM
15699 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15700 k[0] & 0xffffffff, k[1] & 0xffffffff);
15701 fprintf (file, "0x%lx,0x%lx\n",
15702 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
15703 return;
15704 }
9878760c 15705 }
a9098fd0 15706 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9878760c 15707 {
042259f2
DE
15708 REAL_VALUE_TYPE rv;
15709 long l;
9878760c 15710
042259f2
DE
15711 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15712 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15713
31bfaa0b
DE
15714 if (TARGET_64BIT)
15715 {
15716 if (TARGET_MINIMAL_TOC)
2bfcf297 15717 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 15718 else
2f0552b6
AM
15719 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15720 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
15721 return;
15722 }
042259f2 15723 else
31bfaa0b
DE
15724 {
15725 if (TARGET_MINIMAL_TOC)
2bfcf297 15726 fputs ("\t.long ", file);
31bfaa0b 15727 else
2f0552b6
AM
15728 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15729 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
15730 return;
15731 }
042259f2 15732 }
f176e826 15733 else if (GET_MODE (x) == VOIDmode
a9098fd0 15734 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 15735 {
e2c953b6 15736 unsigned HOST_WIDE_INT low;
042259f2
DE
15737 HOST_WIDE_INT high;
15738
15739 if (GET_CODE (x) == CONST_DOUBLE)
15740 {
15741 low = CONST_DOUBLE_LOW (x);
15742 high = CONST_DOUBLE_HIGH (x);
15743 }
15744 else
15745#if HOST_BITS_PER_WIDE_INT == 32
15746 {
15747 low = INTVAL (x);
0858c623 15748 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
15749 }
15750#else
15751 {
c4ad648e
AM
15752 low = INTVAL (x) & 0xffffffff;
15753 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
15754 }
15755#endif
9878760c 15756
a9098fd0
GK
15757 /* TOC entries are always Pmode-sized, but since this
15758 is a bigendian machine then if we're putting smaller
15759 integer constants in the TOC we have to pad them.
15760 (This is still a win over putting the constants in
15761 a separate constant pool, because then we'd have
02a4ec28
FS
15762 to have both a TOC entry _and_ the actual constant.)
15763
15764 For a 32-bit target, CONST_INT values are loaded and shifted
15765 entirely within `low' and can be stored in one TOC entry. */
15766
37409796
NS
15767 /* It would be easy to make this work, but it doesn't now. */
15768 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
15769
15770 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
15771 {
15772#if HOST_BITS_PER_WIDE_INT == 32
15773 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15774 POINTER_SIZE, &low, &high, 0);
15775#else
15776 low |= high << 32;
15777 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15778 high = (HOST_WIDE_INT) low >> 32;
15779 low &= 0xffffffff;
15780#endif
15781 }
a9098fd0 15782
13ded975
DE
15783 if (TARGET_64BIT)
15784 {
15785 if (TARGET_MINIMAL_TOC)
2bfcf297 15786 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 15787 else
2f0552b6
AM
15788 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15789 (long) high & 0xffffffff, (long) low & 0xffffffff);
15790 fprintf (file, "0x%lx%08lx\n",
15791 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
15792 return;
15793 }
1875cc88 15794 else
13ded975 15795 {
02a4ec28
FS
15796 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15797 {
15798 if (TARGET_MINIMAL_TOC)
2bfcf297 15799 fputs ("\t.long ", file);
02a4ec28 15800 else
2bfcf297 15801 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
15802 (long) high & 0xffffffff, (long) low & 0xffffffff);
15803 fprintf (file, "0x%lx,0x%lx\n",
15804 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 15805 }
13ded975 15806 else
02a4ec28
FS
15807 {
15808 if (TARGET_MINIMAL_TOC)
2bfcf297 15809 fputs ("\t.long ", file);
02a4ec28 15810 else
2f0552b6
AM
15811 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15812 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 15813 }
13ded975
DE
15814 return;
15815 }
9878760c
RK
15816 }
15817
15818 if (GET_CODE (x) == CONST)
15819 {
37409796 15820 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 15821
9878760c
RK
15822 base = XEXP (XEXP (x, 0), 0);
15823 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15824 }
f676971a 15825
37409796
NS
15826 switch (GET_CODE (base))
15827 {
15828 case SYMBOL_REF:
15829 name = XSTR (base, 0);
15830 break;
15831
15832 case LABEL_REF:
15833 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15834 CODE_LABEL_NUMBER (XEXP (base, 0)));
15835 break;
15836
15837 case CODE_LABEL:
15838 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15839 break;
15840
15841 default:
15842 gcc_unreachable ();
15843 }
9878760c 15844
772c5265 15845 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 15846 if (TARGET_MINIMAL_TOC)
2bfcf297 15847 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
15848 else
15849 {
b6c9286a 15850 fprintf (file, "\t.tc %s", real_name);
9878760c 15851
1875cc88
JW
15852 if (offset < 0)
15853 fprintf (file, ".N%d", - offset);
15854 else if (offset)
15855 fprintf (file, ".P%d", offset);
9878760c 15856
19d2d16f 15857 fputs ("[TC],", file);
1875cc88 15858 }
581bc4de
MM
15859
15860 /* Currently C++ toc references to vtables can be emitted before it
15861 is decided whether the vtable is public or private. If this is
15862 the case, then the linker will eventually complain that there is
15863 a TOC reference to an unknown section. Thus, for vtables only,
15864 we emit the TOC reference to reference the symbol and not the
15865 section. */
28e510bd 15866 if (VTABLE_NAME_P (name))
581bc4de 15867 {
54ee9799 15868 RS6000_OUTPUT_BASENAME (file, name);
581bc4de
MM
15869 if (offset < 0)
15870 fprintf (file, "%d", offset);
15871 else if (offset > 0)
15872 fprintf (file, "+%d", offset);
15873 }
15874 else
15875 output_addr_const (file, x);
19d2d16f 15876 putc ('\n', file);
9878760c
RK
15877}
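/* Illustrative sketch, not part of rs6000.c: models the duplicate
   handling in output_toc above -- a constant already in the table is
   emitted as a ".set" alias of the earlier label instead of a second
   ".tc" entry.  The linear search, the integer-only constants and the
   exact label spelling are simplifications of the real hash table and
   of ASM_OUTPUT_INTERNAL_LABEL_PREFIX.  */
#include <stdio.h>

#define MAX_ENTRIES 32

static unsigned long seen_value[MAX_ENTRIES];
static int seen_label[MAX_ENTRIES];
static int n_seen;

static void
output_int_toc_entry (unsigned long value, int labelno)
{
  int i;

  for (i = 0; i < n_seen; i++)
    if (seen_value[i] == value)
      {
        /* Duplicate: set this label equal to the earlier one.  */
        printf ("\t.set LC..%d,LC..%d\n", labelno, seen_label[i]);
        return;
      }

  if (n_seen < MAX_ENTRIES)
    {
      seen_value[n_seen] = value;
      seen_label[n_seen] = labelno;
      n_seen++;
    }

  printf ("LC..%d:\n", labelno);
  printf ("\t.tc IS_%lx[TC],0x%lx\n", value, value);
}

int
main (void)
{
  output_int_toc_entry (0x1234, 0);
  output_int_toc_entry (0xbeef, 1);
  output_int_toc_entry (0x1234, 2);   /* emitted as a .set alias */
  return 0;
}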
15878\f
15879/* Output an assembler pseudo-op to write an ASCII string of N characters
15880 starting at P to FILE.
15881
15882 On the RS/6000, we have to do this using the .byte operation and
15883 write out special characters outside the quoted string.
15884 Also, the assembler is broken; very long strings are truncated,
a4f6c312 15885 so we must artificially break them up early. */
9878760c
RK
15886
15887void
a2369ed3 15888output_ascii (FILE *file, const char *p, int n)
9878760c
RK
15889{
15890 char c;
15891 int i, count_string;
d330fd93
KG
15892 const char *for_string = "\t.byte \"";
15893 const char *for_decimal = "\t.byte ";
15894 const char *to_close = NULL;
9878760c
RK
15895
15896 count_string = 0;
15897 for (i = 0; i < n; i++)
15898 {
15899 c = *p++;
15900 if (c >= ' ' && c < 0177)
15901 {
15902 if (for_string)
15903 fputs (for_string, file);
15904 putc (c, file);
15905
15906 /* Write two quotes to get one. */
15907 if (c == '"')
15908 {
15909 putc (c, file);
15910 ++count_string;
15911 }
15912
15913 for_string = NULL;
15914 for_decimal = "\"\n\t.byte ";
15915 to_close = "\"\n";
15916 ++count_string;
15917
15918 if (count_string >= 512)
15919 {
15920 fputs (to_close, file);
15921
15922 for_string = "\t.byte \"";
15923 for_decimal = "\t.byte ";
15924 to_close = NULL;
15925 count_string = 0;
15926 }
15927 }
15928 else
15929 {
15930 if (for_decimal)
15931 fputs (for_decimal, file);
15932 fprintf (file, "%d", c);
15933
15934 for_string = "\n\t.byte \"";
15935 for_decimal = ", ";
15936 to_close = "\n";
15937 count_string = 0;
15938 }
15939 }
15940
15941 /* Now close the string if we have written one. Then end the line. */
15942 if (to_close)
9ebbca7d 15943 fputs (to_close, file);
9878760c
RK
15944}
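/* Illustrative sketch, not part of rs6000.c: a simplified stand-alone
   version of the quoting scheme used by output_ascii above --
   printable characters go inside a .byte "..." string with quotes
   doubled, everything else is emitted as a decimal .byte -- without
   the 512-character splitting.  The sample input is arbitrary.  */
#include <stdio.h>

static void
emit_ascii (FILE *file, const char *p, int n)
{
  int i, in_string = 0;

  for (i = 0; i < n; i++)
    {
      unsigned char c = (unsigned char) p[i];

      if (c >= ' ' && c < 0177)
        {
          if (!in_string)
            {
              fputs (i == 0 ? "\t.byte \"" : "\n\t.byte \"", file);
              in_string = 1;
            }
          putc (c, file);
          if (c == '"')
            putc (c, file);             /* write two quotes to get one */
        }
      else
        {
          if (in_string)
            {
              fputs ("\"", file);
              in_string = 0;
            }
          fprintf (file, "%s\t.byte %d", i == 0 ? "" : "\n", c);
        }
    }

  if (in_string)
    fputs ("\"", file);
  putc ('\n', file);
}

int
main (void)
{
  /* The quotes come out doubled, the newline as a decimal .byte.  */
  const char sample[] = "say \"hi\"\n";

  emit_ascii (stdout, sample, (int) (sizeof sample - 1));
  return 0;
}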
15945\f
15946/* Generate a unique section name for FILENAME for a section type
15947 represented by SECTION_DESC. Output goes into BUF.
15948
15949 SECTION_DESC can be any string, as long as it is different for each
15950 possible section type.
15951
15952 We name the section in the same manner as xlc. The name begins with an
15953 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
15954 names) with the last period replaced by the string SECTION_DESC. If
15955 FILENAME does not contain a period, SECTION_DESC is appended to the end of
15956 the name. */
9878760c
RK
15957
15958void
f676971a 15959rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 15960 const char *section_desc)
9878760c 15961{
9ebbca7d 15962 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
15963 char *p;
15964 int len;
9878760c
RK
15965
15966 after_last_slash = filename;
15967 for (q = filename; *q; q++)
11e5fe42
RK
15968 {
15969 if (*q == '/')
15970 after_last_slash = q + 1;
15971 else if (*q == '.')
15972 last_period = q;
15973 }
9878760c 15974
11e5fe42 15975 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 15976 *buf = (char *) xmalloc (len);
9878760c
RK
15977
15978 p = *buf;
15979 *p++ = '_';
15980
15981 for (q = after_last_slash; *q; q++)
15982 {
11e5fe42 15983 if (q == last_period)
c4ad648e 15984 {
9878760c
RK
15985 strcpy (p, section_desc);
15986 p += strlen (section_desc);
e3981aab 15987 break;
c4ad648e 15988 }
9878760c 15989
e9a780ec 15990 else if (ISALNUM (*q))
c4ad648e 15991 *p++ = *q;
9878760c
RK
15992 }
15993
11e5fe42 15994 if (last_period == 0)
9878760c
RK
15995 strcpy (p, section_desc);
15996 else
15997 *p = '\0';
15998}
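/* Illustrative sketch, not part of rs6000.c: a stand-alone copy of the
   naming rule documented above -- a leading underscore, the base name
   up to its last period, then SECTION_DESC -- so the mapping can be
   tried on a couple of sample inputs.  The file names and descriptors
   below are made up.  */
#include <ctype.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char *
gen_section_name (const char *filename, const char *section_desc)
{
  const char *q, *after_last_slash = filename, *last_period = 0;
  char *buf, *p;

  for (q = filename; *q; q++)
    {
      if (*q == '/')
        after_last_slash = q + 1;
      else if (*q == '.')
        last_period = q;
    }

  buf = p = malloc (strlen (after_last_slash) + strlen (section_desc) + 2);
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
          strcpy (p, section_desc);
          p += strlen (section_desc);
          break;
        }
      else if (isalnum ((unsigned char) *q))
        *p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
  return buf;
}

int
main (void)
{
  printf ("%s\n", gen_section_name ("src/subdir/foo.c", ".bss_"));   /* _foo.bss_ */
  printf ("%s\n", gen_section_name ("README", ".data_"));            /* _README.data_ */
  return 0;
}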
e165f3f0 15999\f
a4f6c312 16000/* Emit profile function. */
411707f4 16001
411707f4 16002void
a2369ed3 16003output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16004{
858081ad
AH
16005 /* Non-standard profiling for kernels, which just saves LR then calls
16006 _mcount without worrying about arg saves. The idea is to change
16007 the function prologue as little as possible as it isn't easy to
16008 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16009 if (TARGET_PROFILE_KERNEL)
16010 return;
16011
8480e480
CC
16012 if (DEFAULT_ABI == ABI_AIX)
16013 {
9739c90c
JJ
16014#ifndef NO_PROFILE_COUNTERS
16015# define NO_PROFILE_COUNTERS 0
16016#endif
f676971a 16017 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
16018 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16019 else
16020 {
16021 char buf[30];
16022 const char *label_name;
16023 rtx fun;
411707f4 16024
9739c90c
JJ
16025 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16026 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16027 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 16028
9739c90c
JJ
16029 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16030 fun, Pmode);
16031 }
8480e480 16032 }
ee890fe2
SS
16033 else if (DEFAULT_ABI == ABI_DARWIN)
16034 {
d5fa86ba 16035 const char *mcount_name = RS6000_MCOUNT;
ee890fe2
SS
16036 int caller_addr_regno = LINK_REGISTER_REGNUM;
16037
16038 /* Be conservative and always set this, at least for now. */
16039 current_function_uses_pic_offset_table = 1;
16040
16041#if TARGET_MACHO
16042 /* For PIC code, set up a stub and collect the caller's address
16043 from r0, which is where the prologue puts it. */
11abc112
MM
16044 if (MACHOPIC_INDIRECT
16045 && current_function_uses_pic_offset_table)
16046 caller_addr_regno = 0;
ee890fe2
SS
16047#endif
16048 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16049 0, VOIDmode, 1,
16050 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16051 }
411707f4
CC
16052}
16053
a4f6c312 16054/* Write function profiler code. */
e165f3f0
RK
16055
16056void
a2369ed3 16057output_function_profiler (FILE *file, int labelno)
e165f3f0 16058{
3daf36a4 16059 char buf[100];
e165f3f0 16060
38c1f2d7 16061 switch (DEFAULT_ABI)
3daf36a4 16062 {
38c1f2d7 16063 default:
37409796 16064 gcc_unreachable ();
38c1f2d7
MM
16065
16066 case ABI_V4:
09eeeacb
AM
16067 if (!TARGET_32BIT)
16068 {
d4ee4d25 16069 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
16070 return;
16071 }
ffcfcb5f 16072 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 16073 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
16074 if (NO_PROFILE_COUNTERS)
16075 {
16076 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16077 reg_names[0], reg_names[1]);
16078 }
16079 else if (TARGET_SECURE_PLT && flag_pic)
16080 {
16081 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16082 reg_names[0], reg_names[1]);
16083 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16084 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16085 reg_names[12], reg_names[12]);
16086 assemble_name (file, buf);
16087 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16088 assemble_name (file, buf);
16089 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16090 }
16091 else if (flag_pic == 1)
38c1f2d7 16092 {
dfdfa60f 16093 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
16094 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16095 reg_names[0], reg_names[1]);
17167fd8 16096 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 16097 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 16098 assemble_name (file, buf);
17167fd8 16099 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 16100 }
9ebbca7d 16101 else if (flag_pic > 1)
38c1f2d7 16102 {
71625f3d
AM
16103 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16104 reg_names[0], reg_names[1]);
9ebbca7d 16105 /* Now, we need to get the address of the label. */
71625f3d 16106 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 16107 assemble_name (file, buf);
9ebbca7d
GK
16108 fputs ("-.\n1:", file);
16109 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 16110 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
16111 reg_names[0], reg_names[11]);
16112 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16113 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 16114 }
38c1f2d7
MM
16115 else
16116 {
17167fd8 16117 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 16118 assemble_name (file, buf);
dfdfa60f 16119 fputs ("@ha\n", file);
71625f3d
AM
16120 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16121 reg_names[0], reg_names[1]);
a260abc9 16122 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 16123 assemble_name (file, buf);
17167fd8 16124 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
16125 }
16126
50d440bc 16127 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
16128 fprintf (file, "\tbl %s%s\n",
16129 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
16130 break;
16131
16132 case ABI_AIX:
ee890fe2 16133 case ABI_DARWIN:
ffcfcb5f
AM
16134 if (!TARGET_PROFILE_KERNEL)
16135 {
a3c9585f 16136 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
16137 }
16138 else
16139 {
37409796 16140 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
16141
16142 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16143 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16144
6de9cd9a 16145 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
16146 {
16147 asm_fprintf (file, "\tstd %s,24(%s)\n",
16148 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16149 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16150 asm_fprintf (file, "\tld %s,24(%s)\n",
16151 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16152 }
16153 else
16154 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16155 }
38c1f2d7
MM
16156 break;
16157 }
e165f3f0 16158}
a251ffd0 16159
b54cf83a 16160\f
b54cf83a
DE
16161/* Power4 load update and store update instructions are cracked into a
16162 load or store and an integer insn which are executed in the same cycle.
16163 Branches have their own dispatch slot which does not count against the
16164 GCC issue rate, but it changes the program flow so there are no other
16165 instructions to issue in this cycle. */
16166
16167static int
f676971a
EC
16168rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16169 int verbose ATTRIBUTE_UNUSED,
a2369ed3 16170 rtx insn, int more)
b54cf83a
DE
16171{
16172 if (GET_CODE (PATTERN (insn)) == USE
16173 || GET_CODE (PATTERN (insn)) == CLOBBER)
16174 return more;
16175
ec507f2d 16176 if (rs6000_sched_groups)
b54cf83a 16177 {
cbe26ab8 16178 if (is_microcoded_insn (insn))
c4ad648e 16179 return 0;
cbe26ab8 16180 else if (is_cracked_insn (insn))
c4ad648e 16181 return more > 2 ? more - 2 : 0;
b54cf83a 16182 }
165b263e
DE
16183
16184 return more - 1;
b54cf83a
DE
16185}
16186
a251ffd0
TG
16187/* Adjust the cost of a scheduling dependency. Return the new cost of
16188 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16189
c237e94a 16190static int
0a4f0294 16191rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0
TG
16192{
16193 if (! recog_memoized (insn))
16194 return 0;
16195
16196 if (REG_NOTE_KIND (link) != 0)
16197 return 0;
16198
16199 if (REG_NOTE_KIND (link) == 0)
16200 {
ed947a96
DJ
16201 /* Data dependency; DEP_INSN writes a register that INSN reads
16202 some cycles later. */
c9dbf840
DE
16203
16204 /* Separate a load from a narrower, dependent store. */
16205 if (rs6000_sched_groups
16206 && GET_CODE (PATTERN (insn)) == SET
16207 && GET_CODE (PATTERN (dep_insn)) == SET
16208 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16209 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16210 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16211 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16212 return cost + 14;
16213
ed947a96
DJ
16214 switch (get_attr_type (insn))
16215 {
16216 case TYPE_JMPREG:
309323c2 16217 /* Tell the first scheduling pass about the latency between
ed947a96
DJ
16218 a mtctr and bctr (and mtlr and br/blr). The first
16219 scheduling pass will not know about this latency since
16220 the mtctr instruction, which has the latency associated
16221 to it, will be generated by reload. */
309323c2 16222 return TARGET_POWER ? 5 : 4;
ed947a96
DJ
16223 case TYPE_BRANCH:
16224 /* Leave some extra cycles between a compare and its
16225 dependent branch, to inhibit expensive mispredicts. */
309323c2
DE
16226 if ((rs6000_cpu_attr == CPU_PPC603
16227 || rs6000_cpu_attr == CPU_PPC604
16228 || rs6000_cpu_attr == CPU_PPC604E
16229 || rs6000_cpu_attr == CPU_PPC620
16230 || rs6000_cpu_attr == CPU_PPC630
16231 || rs6000_cpu_attr == CPU_PPC750
16232 || rs6000_cpu_attr == CPU_PPC7400
16233 || rs6000_cpu_attr == CPU_PPC7450
ec507f2d
DE
16234 || rs6000_cpu_attr == CPU_POWER4
16235 || rs6000_cpu_attr == CPU_POWER5)
ed947a96
DJ
16236 && recog_memoized (dep_insn)
16237 && (INSN_CODE (dep_insn) >= 0)
b54cf83a
DE
16238 && (get_attr_type (dep_insn) == TYPE_CMP
16239 || get_attr_type (dep_insn) == TYPE_COMPARE
ed947a96 16240 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9259f3b0
DE
16241 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16242 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
ed947a96 16243 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
b54cf83a
DE
16244 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16245 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
ed947a96
DJ
16246 return cost + 2;
16247 default:
16248 break;
16249 }
a251ffd0
TG
16250 /* Fall out to return default cost. */
16251 }
16252
16253 return cost;
16254}
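/* Illustrative sketch, not part of rs6000.c: a toy version of the two
   cost adjustments made by rs6000_adjust_cost above -- keep a dependent
   branch a couple of cycles away from the compare that feeds it, and
   charge a large store-forwarding penalty when a load depends on a
   narrower store.  The insn model and the numbers are stand-ins for
   the real insn attributes.  */
#include <stdio.h>

enum toy_type { TOY_CMP, TOY_BRANCH, TOY_STORE, TOY_LOAD, TOY_OTHER };

struct toy_insn
{
  enum toy_type type;
  int access_size;              /* bytes, for loads and stores */
};

static int
toy_adjust_cost (const struct toy_insn *insn,
                 const struct toy_insn *dep_insn, int cost)
{
  /* Separate a load from a narrower, dependent store.  */
  if (insn->type == TOY_LOAD
      && dep_insn->type == TOY_STORE
      && insn->access_size > dep_insn->access_size)
    return cost + 14;

  /* Leave extra cycles between a compare and its dependent branch.  */
  if (insn->type == TOY_BRANCH && dep_insn->type == TOY_CMP)
    return cost + 2;

  return cost;
}

int
main (void)
{
  struct toy_insn cmp = { TOY_CMP, 0 };
  struct toy_insn br = { TOY_BRANCH, 0 };
  struct toy_insn byte_store = { TOY_STORE, 1 };
  struct toy_insn word_load = { TOY_LOAD, 4 };

  printf ("cmp -> branch: %d\n", toy_adjust_cost (&br, &cmp, 1));
  printf ("byte store -> word load: %d\n",
          toy_adjust_cost (&word_load, &byte_store, 2));
  return 0;
}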
b6c9286a 16255
cbe26ab8 16256/* The function returns true if INSN is microcoded.
839a4992 16257 Return false otherwise. */
cbe26ab8
DN
16258
16259static bool
16260is_microcoded_insn (rtx insn)
16261{
16262 if (!insn || !INSN_P (insn)
16263 || GET_CODE (PATTERN (insn)) == USE
16264 || GET_CODE (PATTERN (insn)) == CLOBBER)
16265 return false;
16266
ec507f2d 16267 if (rs6000_sched_groups)
cbe26ab8
DN
16268 {
16269 enum attr_type type = get_attr_type (insn);
16270 if (type == TYPE_LOAD_EXT_U
16271 || type == TYPE_LOAD_EXT_UX
16272 || type == TYPE_LOAD_UX
16273 || type == TYPE_STORE_UX
16274 || type == TYPE_MFCR)
c4ad648e 16275 return true;
cbe26ab8
DN
16276 }
16277
16278 return false;
16279}
16280
5c425df5 16281/* The function returns a nonzero value if INSN can be scheduled only
cbe26ab8
DN
16282 as the first insn in a dispatch group ("dispatch-slot restricted").
16283 In this case, the returned value indicates how many dispatch slots
16284 the insn occupies (at the beginning of the group).
79ae11c4
DN
16285 Return 0 otherwise. */
16286
cbe26ab8 16287static int
79ae11c4
DN
16288is_dispatch_slot_restricted (rtx insn)
16289{
16290 enum attr_type type;
16291
ec507f2d 16292 if (!rs6000_sched_groups)
79ae11c4
DN
16293 return 0;
16294
16295 if (!insn
16296 || insn == NULL_RTX
16297 || GET_CODE (insn) == NOTE
16298 || GET_CODE (PATTERN (insn)) == USE
16299 || GET_CODE (PATTERN (insn)) == CLOBBER)
16300 return 0;
16301
16302 type = get_attr_type (insn);
16303
ec507f2d
DE
16304 switch (type)
16305 {
16306 case TYPE_MFCR:
16307 case TYPE_MFCRF:
16308 case TYPE_MTCR:
16309 case TYPE_DELAYED_CR:
16310 case TYPE_CR_LOGICAL:
16311 case TYPE_MTJMPR:
16312 case TYPE_MFJMPR:
16313 return 1;
16314 case TYPE_IDIV:
16315 case TYPE_LDIV:
16316 return 2;
b52110d4
DE
16317 case TYPE_LOAD_L:
16318 case TYPE_STORE_C:
16319 case TYPE_ISYNC:
16320 case TYPE_SYNC:
16321 return 4;
ec507f2d
DE
16322 default:
16323 if (rs6000_cpu == PROCESSOR_POWER5
16324 && is_cracked_insn (insn))
16325 return 2;
16326 return 0;
16327 }
79ae11c4
DN
16328}
16329
cbe26ab8
DN
16330/* The function returns true if INSN is cracked into 2 instructions
16331 by the processor (and therefore occupies 2 issue slots). */
16332
16333static bool
16334is_cracked_insn (rtx insn)
16335{
16336 if (!insn || !INSN_P (insn)
16337 || GET_CODE (PATTERN (insn)) == USE
16338 || GET_CODE (PATTERN (insn)) == CLOBBER)
16339 return false;
16340
ec507f2d 16341 if (rs6000_sched_groups)
cbe26ab8
DN
16342 {
16343 enum attr_type type = get_attr_type (insn);
16344 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
16345 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16346 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16347 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16348 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16349 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16350 || type == TYPE_IDIV || type == TYPE_LDIV
16351 || type == TYPE_INSERT_WORD)
16352 return true;
cbe26ab8
DN
16353 }
16354
16355 return false;
16356}
16357
16358/* The function returns true if INSN can be issued only from
a3c9585f 16359 the branch slot. */
cbe26ab8
DN
16360
16361static bool
16362is_branch_slot_insn (rtx insn)
16363{
16364 if (!insn || !INSN_P (insn)
16365 || GET_CODE (PATTERN (insn)) == USE
16366 || GET_CODE (PATTERN (insn)) == CLOBBER)
16367 return false;
16368
ec507f2d 16369 if (rs6000_sched_groups)
cbe26ab8
DN
16370 {
16371 enum attr_type type = get_attr_type (insn);
16372 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 16373 return true;
cbe26ab8
DN
16374 return false;
16375 }
16376
16377 return false;
16378}
79ae11c4 16379
a4f6c312 16380/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
16381 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16382 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
16383 define this macro if you do not need to adjust the scheduling
16384 priorities of insns. */
bef84347 16385
c237e94a 16386static int
a2369ed3 16387rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 16388{
a4f6c312
SS
16389 /* On machines (like the 750) which have asymmetric integer units,
16390 where one integer unit can do multiply and divides and the other
16391 can't, reduce the priority of multiply/divide so it is scheduled
16392 before other integer operations. */
bef84347
VM
16393
16394#if 0
2c3c49de 16395 if (! INSN_P (insn))
bef84347
VM
16396 return priority;
16397
16398 if (GET_CODE (PATTERN (insn)) == USE)
16399 return priority;
16400
16401 switch (rs6000_cpu_attr) {
16402 case CPU_PPC750:
16403 switch (get_attr_type (insn))
16404 {
16405 default:
16406 break;
16407
16408 case TYPE_IMUL:
16409 case TYPE_IDIV:
3cb999d8
DE
16410 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16411 priority, priority);
bef84347
VM
16412 if (priority >= 0 && priority < 0x01000000)
16413 priority >>= 3;
16414 break;
16415 }
16416 }
16417#endif
16418
79ae11c4
DN
16419 if (is_dispatch_slot_restricted (insn)
16420 && reload_completed
f676971a 16421 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
16422 && rs6000_sched_restricted_insns_priority)
16423 {
16424
c4ad648e
AM
16425 /* Prioritize insns that can be dispatched only in the first
16426 dispatch slot. */
79ae11c4 16427 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
16428 /* Attach highest priority to insn. This means that in
16429 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 16430 precede 'priority' (critical path) considerations. */
f676971a 16431 return current_sched_info->sched_max_insns_priority;
79ae11c4 16432 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 16433 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
16434 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16435 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
16436 return (priority + 1);
16437 }
79ae11c4 16438
bef84347
VM
16439 return priority;
16440}
16441
a4f6c312
SS
16442/* Return how many instructions the machine can issue per cycle. */
16443
c237e94a 16444static int
863d938c 16445rs6000_issue_rate (void)
b6c9286a 16446{
3317bab1
DE
16447 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16448 if (!reload_completed)
16449 return 1;
16450
b6c9286a 16451 switch (rs6000_cpu_attr) {
3cb999d8
DE
16452 case CPU_RIOS1: /* ? */
16453 case CPU_RS64A:
16454 case CPU_PPC601: /* ? */
ed947a96 16455 case CPU_PPC7450:
3cb999d8 16456 return 3;
b54cf83a 16457 case CPU_PPC440:
b6c9286a 16458 case CPU_PPC603:
bef84347 16459 case CPU_PPC750:
ed947a96 16460 case CPU_PPC7400:
be12c2b0 16461 case CPU_PPC8540:
f676971a 16462 return 2;
3cb999d8 16463 case CPU_RIOS2:
b6c9286a 16464 case CPU_PPC604:
19684119 16465 case CPU_PPC604E:
b6c9286a 16466 case CPU_PPC620:
3cb999d8 16467 case CPU_PPC630:
b6c9286a 16468 return 4;
cbe26ab8 16469 case CPU_POWER4:
ec507f2d 16470 case CPU_POWER5:
cbe26ab8 16471 return 5;
b6c9286a
MM
16472 default:
16473 return 1;
16474 }
16475}
16476
be12c2b0
VM
16477/* Return how many instructions to look ahead for better insn
16478 scheduling. */
16479
16480static int
863d938c 16481rs6000_use_sched_lookahead (void)
be12c2b0
VM
16482{
16483 if (rs6000_cpu_attr == CPU_PPC8540)
16484 return 4;
16485 return 0;
16486}
16487
569fa502
DN
 16488/* Determine if PAT refers to memory. */
16489
16490static bool
16491is_mem_ref (rtx pat)
16492{
16493 const char * fmt;
16494 int i, j;
16495 bool ret = false;
16496
16497 if (GET_CODE (pat) == MEM)
16498 return true;
16499
16500 /* Recursively process the pattern. */
16501 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16502
16503 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16504 {
16505 if (fmt[i] == 'e')
16506 ret |= is_mem_ref (XEXP (pat, i));
16507 else if (fmt[i] == 'E')
16508 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16509 ret |= is_mem_ref (XVECEXP (pat, i, j));
16510 }
16511
16512 return ret;
16513}
16514
16515/* Determine if PAT is a PATTERN of a load insn. */
f676971a 16516
569fa502
DN
16517static bool
16518is_load_insn1 (rtx pat)
16519{
16520 if (!pat || pat == NULL_RTX)
16521 return false;
16522
16523 if (GET_CODE (pat) == SET)
16524 return is_mem_ref (SET_SRC (pat));
16525
16526 if (GET_CODE (pat) == PARALLEL)
16527 {
16528 int i;
16529
16530 for (i = 0; i < XVECLEN (pat, 0); i++)
16531 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16532 return true;
16533 }
16534
16535 return false;
16536}
16537
16538/* Determine if INSN loads from memory. */
16539
16540static bool
16541is_load_insn (rtx insn)
16542{
16543 if (!insn || !INSN_P (insn))
16544 return false;
16545
16546 if (GET_CODE (insn) == CALL_INSN)
16547 return false;
16548
16549 return is_load_insn1 (PATTERN (insn));
16550}
16551
16552/* Determine if PAT is a PATTERN of a store insn. */
16553
16554static bool
16555is_store_insn1 (rtx pat)
16556{
16557 if (!pat || pat == NULL_RTX)
16558 return false;
16559
16560 if (GET_CODE (pat) == SET)
16561 return is_mem_ref (SET_DEST (pat));
16562
16563 if (GET_CODE (pat) == PARALLEL)
16564 {
16565 int i;
16566
16567 for (i = 0; i < XVECLEN (pat, 0); i++)
16568 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16569 return true;
16570 }
16571
16572 return false;
16573}
16574
16575/* Determine if INSN stores to memory. */
16576
16577static bool
16578is_store_insn (rtx insn)
16579{
16580 if (!insn || !INSN_P (insn))
16581 return false;
16582
16583 return is_store_insn1 (PATTERN (insn));
16584}
16585
16586/* Returns whether the dependence between INSN and NEXT is considered
16587 costly by the given target. */
16588
16589static bool
c4ad648e
AM
16590rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16591 int distance)
f676971a 16592{
aabcd309 16593 /* If the flag is not enabled - no dependence is considered costly;
f676971a 16594 allow all dependent insns in the same group.
569fa502
DN
16595 This is the most aggressive option. */
16596 if (rs6000_sched_costly_dep == no_dep_costly)
16597 return false;
16598
f676971a 16599 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
16600 do not allow dependent instructions in the same group.
16601 This is the most conservative option. */
16602 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 16603 return true;
569fa502 16604
f676971a
EC
16605 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16606 && is_load_insn (next)
569fa502
DN
16607 && is_store_insn (insn))
16608 /* Prevent load after store in the same group. */
16609 return true;
16610
16611 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 16612 && is_load_insn (next)
569fa502
DN
16613 && is_store_insn (insn)
16614 && (!link || (int) REG_NOTE_KIND (link) == 0))
c4ad648e
AM
16615 /* Prevent load after store in the same group if it is a true
16616 dependence. */
569fa502 16617 return true;
f676971a
EC
16618
16619 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
16620 and will not be scheduled in the same group. */
16621 if (rs6000_sched_costly_dep <= max_dep_latency
16622 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16623 return true;
16624
16625 return false;
16626}
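
/* Illustrative summary, not part of the original file: the tests above
   reduce to the following, using the enumerators visible in the code
   (any other, numeric, setting acts as a latency threshold):

       rs6000_sched_costly_dep          dependence INSN -> NEXT is costly when
       no_dep_costly                    never
       all_deps_costly                  always
       store_to_load_dep_costly         INSN stores and NEXT loads
       true_store_to_load_dep_costly    INSN stores, NEXT loads, true dependence
       numeric value N                  cost - distance >= N  */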
16627
f676971a 16628/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
16629 skipping any "non-active" insns - insns that will not actually occupy
16630 an issue slot. Return NULL_RTX if such an insn is not found. */
16631
16632static rtx
16633get_next_active_insn (rtx insn, rtx tail)
16634{
f489aff8 16635 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
16636 return NULL_RTX;
16637
f489aff8 16638 while (1)
cbe26ab8 16639 {
f489aff8
AM
16640 insn = NEXT_INSN (insn);
16641 if (insn == NULL_RTX || insn == tail)
16642 return NULL_RTX;
cbe26ab8 16643
f489aff8
AM
16644 if (CALL_P (insn)
16645 || JUMP_P (insn)
16646 || (NONJUMP_INSN_P (insn)
16647 && GET_CODE (PATTERN (insn)) != USE
16648 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 16649 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
16650 break;
16651 }
16652 return insn;
cbe26ab8
DN
16653}
16654
839a4992 16655/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
16656 of group WHICH_GROUP.
16657
16658 If WHICH_GROUP == current_group, this function will return true if INSN
16659    causes the termination of the current group (i.e., the dispatch group to
16660 which INSN belongs). This means that INSN will be the last insn in the
16661 group it belongs to.
16662
16663 If WHICH_GROUP == previous_group, this function will return true if INSN
16664    causes the termination of the previous group (i.e., the dispatch group that
16665    precedes the group to which INSN belongs).  This means that INSN will be
16666    the first insn in the group it belongs to. */
16667
16668static bool
16669insn_terminates_group_p (rtx insn, enum group_termination which_group)
16670{
16671 enum attr_type type;
16672
16673 if (! insn)
16674 return false;
569fa502 16675
cbe26ab8
DN
16676 type = get_attr_type (insn);
16677
16678 if (is_microcoded_insn (insn))
16679 return true;
16680
16681 if (which_group == current_group)
16682 {
16683 if (is_branch_slot_insn (insn))
c4ad648e 16684 return true;
cbe26ab8
DN
16685 return false;
16686 }
16687 else if (which_group == previous_group)
16688 {
16689 if (is_dispatch_slot_restricted (insn))
c4ad648e 16690 return true;
cbe26ab8
DN
16691 return false;
16692 }
16693
16694 return false;
16695}
16696
839a4992 16697/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
16698 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16699
16700static bool
16701is_costly_group (rtx *group_insns, rtx next_insn)
16702{
16703 int i;
16704 rtx link;
16705 int cost;
16706 int issue_rate = rs6000_issue_rate ();
16707
16708 for (i = 0; i < issue_rate; i++)
16709 {
16710 rtx insn = group_insns[i];
16711 if (!insn)
c4ad648e 16712 continue;
cbe26ab8 16713 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
c4ad648e
AM
16714 {
16715 rtx next = XEXP (link, 0);
16716 if (next == next_insn)
16717 {
16718 cost = insn_cost (insn, link, next_insn);
16719 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16720 return true;
16721 }
16722 }
cbe26ab8
DN
16723 }
16724
16725 return false;
16726}
16727
f676971a 16728/* Utility function used by redefine_groups.
cbe26ab8
DN
16729 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16730 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16731 to keep it "far" (in a separate group) from GROUP_INSNS, following
16732 one of the following schemes, depending on the value of the flag
16733    -minsert-sched-nops = X:
16734 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 16735 in order to force NEXT_INSN into a separate group.
f676971a
EC
16736 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16737 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
16738    insertion (whether a group has just ended, how many vacant issue slots remain
16739    in the last group, and how many dispatch groups have been encountered so far). */
16740
f676971a 16741static int
c4ad648e
AM
16742force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16743 rtx next_insn, bool *group_end, int can_issue_more,
16744 int *group_count)
cbe26ab8
DN
16745{
16746 rtx nop;
16747 bool force;
16748 int issue_rate = rs6000_issue_rate ();
16749 bool end = *group_end;
16750 int i;
16751
16752 if (next_insn == NULL_RTX)
16753 return can_issue_more;
16754
16755 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16756 return can_issue_more;
16757
16758 force = is_costly_group (group_insns, next_insn);
16759 if (!force)
16760 return can_issue_more;
16761
16762 if (sched_verbose > 6)
16763    fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 16764	     *group_count, can_issue_more);
cbe26ab8
DN
16765
16766 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16767 {
16768 if (*group_end)
c4ad648e 16769 can_issue_more = 0;
cbe26ab8
DN
16770
16771 /* Since only a branch can be issued in the last issue_slot, it is
16772 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16773 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
16774 in this case the last nop will start a new group and the branch
16775 will be forced to the new group. */
cbe26ab8 16776 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 16777 can_issue_more--;
cbe26ab8
DN
16778
16779 while (can_issue_more > 0)
c4ad648e 16780 {
9390387d 16781 nop = gen_nop ();
c4ad648e
AM
16782 emit_insn_before (nop, next_insn);
16783 can_issue_more--;
16784 }
cbe26ab8
DN
16785
16786 *group_end = true;
16787 return 0;
f676971a 16788 }
cbe26ab8
DN
16789
16790 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16791 {
16792 int n_nops = rs6000_sched_insert_nops;
16793
f676971a 16794 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 16795 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 16796 if (can_issue_more == 0)
c4ad648e 16797 can_issue_more = issue_rate;
cbe26ab8
DN
16798 can_issue_more--;
16799 if (can_issue_more == 0)
c4ad648e
AM
16800 {
16801 can_issue_more = issue_rate - 1;
16802 (*group_count)++;
16803 end = true;
16804 for (i = 0; i < issue_rate; i++)
16805 {
16806 group_insns[i] = 0;
16807 }
16808 }
cbe26ab8
DN
16809
16810 while (n_nops > 0)
c4ad648e
AM
16811 {
16812 nop = gen_nop ();
16813 emit_insn_before (nop, next_insn);
16814 if (can_issue_more == issue_rate - 1) /* new group begins */
16815 end = false;
16816 can_issue_more--;
16817 if (can_issue_more == 0)
16818 {
16819 can_issue_more = issue_rate - 1;
16820 (*group_count)++;
16821 end = true;
16822 for (i = 0; i < issue_rate; i++)
16823 {
16824 group_insns[i] = 0;
16825 }
16826 }
16827 n_nops--;
16828 }
cbe26ab8
DN
16829
16830 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 16831 can_issue_more++;
cbe26ab8 16832
c4ad648e
AM
16833 /* Is next_insn going to start a new group? */
16834 *group_end
16835 = (end
cbe26ab8
DN
16836 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16837 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16838 || (can_issue_more < issue_rate &&
c4ad648e 16839 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 16840 if (*group_end && end)
c4ad648e 16841 (*group_count)--;
cbe26ab8
DN
16842
16843 if (sched_verbose > 6)
c4ad648e
AM
16844 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16845 *group_count, can_issue_more);
f676971a
EC
16846 return can_issue_more;
16847 }
cbe26ab8
DN
16848
16849 return can_issue_more;
16850}
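
/* A minimal illustrative sketch, not part of the original file: how many
   nops the sched_finish_regroup_exact scheme above emits before NEXT_INSN,
   written as a stand-alone helper.  VACANT_SLOTS stands for can_issue_more
   after the *group_end adjustment and NEXT_IS_BRANCH for
   is_branch_slot_insn (next_insn); both names are hypothetical.  */

static int
example_exact_regroup_nop_count (int vacant_slots, bool next_is_branch)
{
  /* A non-branch insn cannot use the branch slot, so filling all but that
     slot already pushes it into a new group; a branch could still fit in
     the branch slot, so every vacant slot must be filled.  */
  if (!next_is_branch && vacant_slots > 0)
    return vacant_slots - 1;
  return vacant_slots;
}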
16851
16852/* This function tries to synchronize the dispatch groups that the compiler "sees"
f676971a 16853 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
16854 form in practice. It tries to achieve this synchronization by forcing the
16855 estimated processor grouping on the compiler (as opposed to the function
16856   'pad_groups' which tries to force the scheduler's grouping on the processor).
16857
16858 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16859 examines the (estimated) dispatch groups that will be formed by the processor
16860 dispatcher. It marks these group boundaries to reflect the estimated
16861 processor grouping, overriding the grouping that the scheduler had marked.
16862 Depending on the value of the flag '-minsert-sched-nops' this function can
16863 force certain insns into separate groups or force a certain distance between
16864 them by inserting nops, for example, if there exists a "costly dependence"
16865 between the insns.
16866
16867 The function estimates the group boundaries that the processor will form as
0fa2e4df 16868 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
16869 each insn. A subsequent insn will start a new group if one of the following
16870 4 cases applies:
16871 - no more vacant issue slots remain in the current dispatch group.
16872 - only the last issue slot, which is the branch slot, is vacant, but the next
16873 insn is not a branch.
16874   - only the last 2 or fewer issue slots, including the branch slot, are vacant,
16875 which means that a cracked insn (which occupies two issue slots) can't be
16876 issued in this group.
f676971a 16877   - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
16878 start a new group. */
16879
16880static int
16881redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16882{
16883 rtx insn, next_insn;
16884 int issue_rate;
16885 int can_issue_more;
16886 int slot, i;
16887 bool group_end;
16888 int group_count = 0;
16889 rtx *group_insns;
16890
16891 /* Initialize. */
16892 issue_rate = rs6000_issue_rate ();
16893 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 16894 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
16895 {
16896 group_insns[i] = 0;
16897 }
16898 can_issue_more = issue_rate;
16899 slot = 0;
16900 insn = get_next_active_insn (prev_head_insn, tail);
16901 group_end = false;
16902
16903 while (insn != NULL_RTX)
16904 {
16905 slot = (issue_rate - can_issue_more);
16906 group_insns[slot] = insn;
16907 can_issue_more =
c4ad648e 16908 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 16909 if (insn_terminates_group_p (insn, current_group))
c4ad648e 16910 can_issue_more = 0;
cbe26ab8
DN
16911
16912 next_insn = get_next_active_insn (insn, tail);
16913 if (next_insn == NULL_RTX)
c4ad648e 16914 return group_count + 1;
cbe26ab8 16915
c4ad648e
AM
16916 /* Is next_insn going to start a new group? */
16917 group_end
16918 = (can_issue_more == 0
16919 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16920 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16921 || (can_issue_more < issue_rate &&
16922 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 16923
f676971a 16924 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
16925 next_insn, &group_end, can_issue_more,
16926 &group_count);
cbe26ab8
DN
16927
16928 if (group_end)
c4ad648e
AM
16929 {
16930 group_count++;
16931 can_issue_more = 0;
16932 for (i = 0; i < issue_rate; i++)
16933 {
16934 group_insns[i] = 0;
16935 }
16936 }
cbe26ab8
DN
16937
16938 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 16939 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 16940 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 16941 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
16942
16943 insn = next_insn;
16944 if (can_issue_more == 0)
c4ad648e
AM
16945 can_issue_more = issue_rate;
16946 } /* while */
cbe26ab8
DN
16947
16948 return group_count;
16949}
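
/* A minimal illustrative sketch, not part of the original file, of the
   four-way "does the next insn start a new group" test used above (and
   described in the comment before redefine_groups).  The flag parameters
   are hypothetical stand-ins for is_branch_slot_insn, is_cracked_insn and
   insn_terminates_group_p (next_insn, previous_group).  */

static bool
example_starts_new_group_p (int vacant_slots, int issue_rate,
			    bool next_is_branch, bool next_is_cracked,
			    bool next_must_lead_group)
{
  return (vacant_slots == 0
	  || (vacant_slots == 1 && !next_is_branch)
	  || (vacant_slots <= 2 && next_is_cracked)
	  || (vacant_slots < issue_rate && next_must_lead_group));
}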
16950
16951/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16952 dispatch group boundaries that the scheduler had marked. Pad with nops
16953 any dispatch groups which have vacant issue slots, in order to force the
16954 scheduler's grouping on the processor dispatcher. The function
16955 returns the number of dispatch groups found. */
16956
16957static int
16958pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16959{
16960 rtx insn, next_insn;
16961 rtx nop;
16962 int issue_rate;
16963 int can_issue_more;
16964 int group_end;
16965 int group_count = 0;
16966
16967 /* Initialize issue_rate. */
16968 issue_rate = rs6000_issue_rate ();
16969 can_issue_more = issue_rate;
16970
16971 insn = get_next_active_insn (prev_head_insn, tail);
16972 next_insn = get_next_active_insn (insn, tail);
16973
16974 while (insn != NULL_RTX)
16975 {
16976 can_issue_more =
16977 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
16978
16979 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
16980
16981 if (next_insn == NULL_RTX)
c4ad648e 16982 break;
cbe26ab8
DN
16983
16984 if (group_end)
c4ad648e
AM
16985 {
16986 /* If the scheduler had marked group termination at this location
16987	     (between insn and next_insn), and neither insn nor next_insn will
16988 force group termination, pad the group with nops to force group
16989 termination. */
16990 if (can_issue_more
16991 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
16992 && !insn_terminates_group_p (insn, current_group)
16993 && !insn_terminates_group_p (next_insn, previous_group))
16994 {
9390387d 16995 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
16996 can_issue_more--;
16997
16998 while (can_issue_more)
16999 {
17000 nop = gen_nop ();
17001 emit_insn_before (nop, next_insn);
17002 can_issue_more--;
17003 }
17004 }
17005
17006 can_issue_more = issue_rate;
17007 group_count++;
17008 }
cbe26ab8
DN
17009
17010 insn = next_insn;
17011 next_insn = get_next_active_insn (insn, tail);
17012 }
17013
17014 return group_count;
17015}
17016
17017/* The following function is called at the end of scheduling BB.
17018   After reload, it inserts nops to enforce the insn group bundling. */
17019
17020static void
38f391a5 17021rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
17022{
17023 int n_groups;
17024
17025 if (sched_verbose)
17026 fprintf (dump, "=== Finishing schedule.\n");
17027
ec507f2d 17028 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
17029 {
17030 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 17031 return;
cbe26ab8
DN
17032
17033 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
17034 n_groups = pad_groups (dump, sched_verbose,
17035 current_sched_info->prev_head,
17036 current_sched_info->next_tail);
cbe26ab8 17037 else
c4ad648e
AM
17038 n_groups = redefine_groups (dump, sched_verbose,
17039 current_sched_info->prev_head,
17040 current_sched_info->next_tail);
cbe26ab8
DN
17041
17042 if (sched_verbose >= 6)
17043 {
17044 fprintf (dump, "ngroups = %d\n", n_groups);
17045 print_rtl (dump, current_sched_info->prev_head);
17046 fprintf (dump, "Done finish_sched\n");
17047 }
17048 }
17049}
b6c9286a 17050\f
b6c9286a
MM
17051/* Length in units of the trampoline for entering a nested function. */
17052
17053int
863d938c 17054rs6000_trampoline_size (void)
b6c9286a
MM
17055{
17056 int ret = 0;
17057
17058 switch (DEFAULT_ABI)
17059 {
17060 default:
37409796 17061 gcc_unreachable ();
b6c9286a
MM
17062
17063 case ABI_AIX:
8f802bfb 17064 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
17065 break;
17066
4dabc42d 17067 case ABI_DARWIN:
b6c9286a 17068 case ABI_V4:
03a7e1a5 17069 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 17070 break;
b6c9286a
MM
17071 }
17072
17073 return ret;
17074}
17075
17076/* Emit RTL insns to initialize the variable parts of a trampoline.
17077 FNADDR is an RTX for the address of the function's pure code.
17078 CXT is an RTX for the static chain value for the function. */
17079
17080void
a2369ed3 17081rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 17082{
ac2a93a1 17083 enum machine_mode pmode = Pmode;
8bd04c56
MM
17084 int regsize = (TARGET_32BIT) ? 4 : 8;
17085 rtx ctx_reg = force_reg (pmode, cxt);
b6c9286a
MM
17086
17087 switch (DEFAULT_ABI)
17088 {
17089 default:
37409796 17090 gcc_unreachable ();
b6c9286a 17091
8bd04c56 17092/* Macros to shorten the code expansions below. */
39403d82 17093#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
c5c76735
JL
17094#define MEM_PLUS(addr,offset) \
17095 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
7c59dc5d 17096
b6c9286a
MM
17097    /* Under AIX, just build the 3-word function descriptor.  */
17098 case ABI_AIX:
8bd04c56
MM
17099 {
17100 rtx fn_reg = gen_reg_rtx (pmode);
17101 rtx toc_reg = gen_reg_rtx (pmode);
17102 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 17103 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
17104 emit_move_insn (MEM_DEREF (addr), fn_reg);
17105 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17106 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17107 }
b6c9286a
MM
17108 break;
17109
4dabc42d
TC
17110 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17111 case ABI_DARWIN:
b6c9286a 17112 case ABI_V4:
39403d82 17113 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
eaf1bcf1
MM
17114 FALSE, VOIDmode, 4,
17115 addr, pmode,
17116 GEN_INT (rs6000_trampoline_size ()), SImode,
17117 fnaddr, pmode,
17118 ctx_reg, pmode);
b6c9286a 17119 break;
b6c9286a
MM
17120 }
17121
17122 return;
17123}
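
/* For illustration only (not part of the original file): under ABI_AIX the
   moves above make the trampoline itself a function descriptor,

       offset 0*regsize   code address  (loaded from FNADDR's descriptor)
       offset 1*regsize   TOC pointer   (loaded from FNADDR's descriptor)
       offset 2*regsize   static chain  (CXT)

   with regsize being 4 under -m32 and 8 under -m64, matching the 12- and
   24-byte sizes returned by rs6000_trampoline_size for ABI_AIX.  */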
7509c759
MM
17124
17125\f
91d231cb 17126/* Table of valid machine attributes. */
a4f6c312 17127
91d231cb 17128const struct attribute_spec rs6000_attribute_table[] =
7509c759 17129{
91d231cb 17130 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 17131 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
17132 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17133 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
005c1a13
GK
17134#ifdef SUBTARGET_ATTRIBUTE_TABLE
17135 SUBTARGET_ATTRIBUTE_TABLE,
17136#endif
a5c76ee6 17137 { NULL, 0, 0, false, false, false, NULL }
91d231cb 17138};
7509c759 17139
8bb418a3
ZL
17140/* Handle the "altivec" attribute. The attribute may have
17141 arguments as follows:
f676971a 17142
8bb418a3
ZL
17143 __attribute__((altivec(vector__)))
17144 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17145 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17146
17147 and may appear more than once (e.g., 'vector bool char') in a
17148 given declaration. */
17149
17150static tree
f90ac3f0
UP
17151rs6000_handle_altivec_attribute (tree *node,
17152 tree name ATTRIBUTE_UNUSED,
17153 tree args,
8bb418a3
ZL
17154 int flags ATTRIBUTE_UNUSED,
17155 bool *no_add_attrs)
17156{
17157 tree type = *node, result = NULL_TREE;
17158 enum machine_mode mode;
17159 int unsigned_p;
17160 char altivec_type
17161 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17162 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17163 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 17164 : '?');
8bb418a3
ZL
17165
17166 while (POINTER_TYPE_P (type)
17167 || TREE_CODE (type) == FUNCTION_TYPE
17168 || TREE_CODE (type) == METHOD_TYPE
17169 || TREE_CODE (type) == ARRAY_TYPE)
17170 type = TREE_TYPE (type);
17171
17172 mode = TYPE_MODE (type);
17173
f90ac3f0
UP
17174 /* Check for invalid AltiVec type qualifiers. */
17175 if (type == long_unsigned_type_node || type == long_integer_type_node)
17176 {
17177 if (TARGET_64BIT)
17178 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17179 else if (rs6000_warn_altivec_long)
d4ee4d25 17180 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
17181 }
17182 else if (type == long_long_unsigned_type_node
17183 || type == long_long_integer_type_node)
17184 error ("use of %<long long%> in AltiVec types is invalid");
17185 else if (type == double_type_node)
17186 error ("use of %<double%> in AltiVec types is invalid");
17187 else if (type == long_double_type_node)
17188 error ("use of %<long double%> in AltiVec types is invalid");
17189 else if (type == boolean_type_node)
17190 error ("use of boolean types in AltiVec types is invalid");
17191 else if (TREE_CODE (type) == COMPLEX_TYPE)
17192 error ("use of %<complex%> in AltiVec types is invalid");
8bb418a3
ZL
17193
17194 switch (altivec_type)
17195 {
17196 case 'v':
8df83eae 17197 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
17198 switch (mode)
17199 {
c4ad648e
AM
17200 case SImode:
17201 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17202 break;
17203 case HImode:
17204 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17205 break;
17206 case QImode:
17207 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17208 break;
17209 case SFmode: result = V4SF_type_node; break;
17210 /* If the user says 'vector int bool', we may be handed the 'bool'
17211 attribute _before_ the 'vector' attribute, and so select the
17212 proper type in the 'b' case below. */
17213 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17214 result = type;
17215 default: break;
8bb418a3
ZL
17216 }
17217 break;
17218 case 'b':
17219 switch (mode)
17220 {
c4ad648e
AM
17221 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17222 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17223 case QImode: case V16QImode: result = bool_V16QI_type_node;
17224 default: break;
8bb418a3
ZL
17225 }
17226 break;
17227 case 'p':
17228 switch (mode)
17229 {
c4ad648e
AM
17230 case V8HImode: result = pixel_V8HI_type_node;
17231 default: break;
8bb418a3
ZL
17232 }
17233 default: break;
17234 }
17235
7958a2a6
FJ
17236 if (result && result != type && TYPE_READONLY (type))
17237 result = build_qualified_type (result, TYPE_QUAL_CONST);
17238
8bb418a3
ZL
17239 *no_add_attrs = true; /* No need to hang on to the attribute. */
17240
f90ac3f0 17241 if (result)
8bb418a3
ZL
17242 *node = reconstruct_complex_type (*node, result);
17243
17244 return NULL_TREE;
17245}
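
/* For illustration only (not part of the original file): given the lowering
   described in the comment above, user declarations such as

       vector signed int  vsi;   ->  V4SI_type_node
       vector bool short  vbs;   ->  bool_V8HI_type_node   (seen as unsigned short)
       vector pixel       vp;    ->  pixel_V8HI_type_node  (seen as unsigned short)

   end up, once every altivec attribute on the declaration has been
   processed, with their types rewritten by this handler via
   reconstruct_complex_type.  */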
17246
f18eca82
ZL
17247/* AltiVec defines four built-in scalar types that serve as vector
17248 elements; we must teach the compiler how to mangle them. */
17249
17250static const char *
17251rs6000_mangle_fundamental_type (tree type)
17252{
17253 if (type == bool_char_type_node) return "U6__boolc";
17254 if (type == bool_short_type_node) return "U6__bools";
17255 if (type == pixel_type_node) return "u7__pixel";
17256 if (type == bool_int_type_node) return "U6__booli";
17257
17258 /* For all other types, use normal C++ mangling. */
17259 return NULL;
17260}
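
/* For illustration only (not part of the original file): with the table
   above, the AltiVec 'bool int' element type contributes "U6__booli" and
   'pixel' contributes "u7__pixel" to a C++ mangled name, while every other
   fundamental type falls back to the default mangling (the NULL return).  */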
17261
a5c76ee6
ZW
17262/* Handle a "longcall" or "shortcall" attribute; arguments as in
17263 struct attribute_spec.handler. */
a4f6c312 17264
91d231cb 17265static tree
f676971a
EC
17266rs6000_handle_longcall_attribute (tree *node, tree name,
17267 tree args ATTRIBUTE_UNUSED,
17268 int flags ATTRIBUTE_UNUSED,
a2369ed3 17269 bool *no_add_attrs)
91d231cb
JM
17270{
17271 if (TREE_CODE (*node) != FUNCTION_TYPE
17272 && TREE_CODE (*node) != FIELD_DECL
17273 && TREE_CODE (*node) != TYPE_DECL)
17274 {
5c498b10 17275 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
17276 IDENTIFIER_POINTER (name));
17277 *no_add_attrs = true;
17278 }
6a4cee5f 17279
91d231cb 17280 return NULL_TREE;
7509c759
MM
17281}
17282
a5c76ee6
ZW
17283/* Set longcall attributes on all functions declared when
17284 rs6000_default_long_calls is true. */
17285static void
a2369ed3 17286rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
17287{
17288 if (rs6000_default_long_calls
17289 && (TREE_CODE (type) == FUNCTION_TYPE
17290 || TREE_CODE (type) == METHOD_TYPE))
17291 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17292 NULL_TREE,
17293 TYPE_ATTRIBUTES (type));
17294}
17295
3cb999d8
DE
17296/* Return a reference suitable for calling a function with the
17297 longcall attribute. */
a4f6c312 17298
9390387d 17299rtx
a2369ed3 17300rs6000_longcall_ref (rtx call_ref)
6a4cee5f 17301{
d330fd93 17302 const char *call_name;
6a4cee5f
MM
17303 tree node;
17304
17305 if (GET_CODE (call_ref) != SYMBOL_REF)
17306 return call_ref;
17307
17308 /* System V adds '.' to the internal name, so skip them. */
17309 call_name = XSTR (call_ref, 0);
17310 if (*call_name == '.')
17311 {
17312 while (*call_name == '.')
17313 call_name++;
17314
17315 node = get_identifier (call_name);
39403d82 17316 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
17317 }
17318
17319 return force_reg (Pmode, call_ref);
17320}
7509c759 17321\f
b64a1b53
RH
17322#ifdef USING_ELFOS_H
17323
7509c759
MM
17324/* A C statement or statements to switch to the appropriate section
17325 for output of RTX in mode MODE. You can assume that RTX is some
17326 kind of constant in RTL. The argument MODE is redundant except in
17327 the case of a `const_int' rtx. Select the section by calling
17328 `text_section' or one of the alternatives for other sections.
17329
17330 Do not define this macro if you put all constants in the read-only
17331 data section. */
17332
b64a1b53 17333static void
f676971a 17334rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 17335 unsigned HOST_WIDE_INT align)
7509c759 17336{
a9098fd0 17337 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
7509c759 17338 toc_section ();
7509c759 17339 else
b64a1b53 17340 default_elf_select_rtx_section (mode, x, align);
7509c759
MM
17341}
17342
17343/* A C statement or statements to switch to the appropriate
17344 section for output of DECL. DECL is either a `VAR_DECL' node
17345 or a constant of some sort. RELOC indicates whether forming
17346 the initial value of DECL requires link-time relocations. */
17347
ae46c4e0 17348static void
f676971a 17349rs6000_elf_select_section (tree decl, int reloc,
a2369ed3 17350 unsigned HOST_WIDE_INT align)
7509c759 17351{
f1384257
AM
17352 /* Pretend that we're always building for a shared library when
17353 ABI_AIX, because otherwise we end up with dynamic relocations
17354 in read-only sections. This happens for function pointers,
17355 references to vtables in typeinfo, and probably other cases. */
0e5dbd9b
DE
17356 default_elf_select_section_1 (decl, reloc, align,
17357 flag_pic || DEFAULT_ABI == ABI_AIX);
63019373
GK
17358}
17359
17360/* A C statement to build up a unique section name, expressed as a
17361 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17362 RELOC indicates whether the initial value of EXP requires
17363 link-time relocations. If you do not define this macro, GCC will use
17364 the symbol name prefixed by `.' as the section name. Note - this
f5143c46 17365 macro can now be called for uninitialized data items as well as
4912a07c 17366 initialized data and functions. */
63019373 17367
ae46c4e0 17368static void
a2369ed3 17369rs6000_elf_unique_section (tree decl, int reloc)
63019373 17370{
f1384257
AM
17371 /* As above, pretend that we're always building for a shared library
17372 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
0e5dbd9b
DE
17373 default_unique_section_1 (decl, reloc,
17374 flag_pic || DEFAULT_ABI == ABI_AIX);
7509c759 17375}
d9407988 17376\f
d1908feb
JJ
17377/* For a SYMBOL_REF, set generic flags and then perform some
17378 target-specific processing.
17379
d1908feb
JJ
17380 When the AIX ABI is requested on a non-AIX system, replace the
17381 function name with the real name (with a leading .) rather than the
17382 function descriptor name. This saves a lot of overriding code to
17383 read the prefixes. */
d9407988 17384
fb49053f 17385static void
a2369ed3 17386rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 17387{
d1908feb 17388 default_encode_section_info (decl, rtl, first);
b2003250 17389
d1908feb
JJ
17390 if (first
17391 && TREE_CODE (decl) == FUNCTION_DECL
17392 && !TARGET_AIX
17393 && DEFAULT_ABI == ABI_AIX)
d9407988 17394 {
c6a2438a 17395 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
17396 size_t len = strlen (XSTR (sym_ref, 0));
17397 char *str = alloca (len + 2);
17398 str[0] = '.';
17399 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17400 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 17401 }
d9407988
MM
17402}
17403
0e5dbd9b 17404static bool
a2369ed3 17405rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
17406{
17407 if (rs6000_sdata == SDATA_NONE)
17408 return false;
17409
7482ad25
AF
17410 /* We want to merge strings, so we never consider them small data. */
17411 if (TREE_CODE (decl) == STRING_CST)
17412 return false;
17413
17414 /* Functions are never in the small data area. */
17415 if (TREE_CODE (decl) == FUNCTION_DECL)
17416 return false;
17417
0e5dbd9b
DE
17418 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17419 {
17420 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17421 if (strcmp (section, ".sdata") == 0
17422 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
17423 || strcmp (section, ".sbss") == 0
17424 || strcmp (section, ".sbss2") == 0
17425 || strcmp (section, ".PPC.EMB.sdata0") == 0
17426 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
17427 return true;
17428 }
17429 else
17430 {
17431 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17432
17433 if (size > 0
307b599c 17434 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
17435 /* If it's not public, and we're not going to reference it there,
17436 there's no need to put it in the small data section. */
0e5dbd9b
DE
17437 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17438 return true;
17439 }
17440
17441 return false;
17442}
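
/* For illustration only (not part of the original file): under -msdata, a
   definition such as

       int counter;        -- size 4, assumed to be <= the -G threshold

   passes the size test above and goes to small data, while an explicit
   section placement is honored only for the small-data section names
   checked above (.sdata, .sdata2, .sbss, .sbss2, ...).  */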
17443
b91da81f 17444#endif /* USING_ELFOS_H */
000034eb 17445
a6c2a102 17446\f
000034eb 17447/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
17448 ADDR can be effectively incremented by incrementing REG.
17449
17450 r0 is special and we must not select it as an address
17451 register by this routine since our caller will try to
17452 increment the returned register via an "la" instruction. */
000034eb 17453
9390387d 17454rtx
a2369ed3 17455find_addr_reg (rtx addr)
000034eb
DE
17456{
17457 while (GET_CODE (addr) == PLUS)
17458 {
02441cd6
JL
17459 if (GET_CODE (XEXP (addr, 0)) == REG
17460 && REGNO (XEXP (addr, 0)) != 0)
000034eb 17461 addr = XEXP (addr, 0);
02441cd6
JL
17462 else if (GET_CODE (XEXP (addr, 1)) == REG
17463 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
17464 addr = XEXP (addr, 1);
17465 else if (CONSTANT_P (XEXP (addr, 0)))
17466 addr = XEXP (addr, 1);
17467 else if (CONSTANT_P (XEXP (addr, 1)))
17468 addr = XEXP (addr, 0);
17469 else
37409796 17470 gcc_unreachable ();
000034eb 17471 }
37409796
NS
17472 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17473 return addr;
000034eb
DE
17474}
17475
a6c2a102 17476void
a2369ed3 17477rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
17478{
17479 fatal_insn ("bad address", op);
17480}
c8023011 17481
ee890fe2
SS
17482#if TARGET_MACHO
17483
efdba735 17484static tree branch_island_list = 0;
ee890fe2 17485
efdba735
SH
17486/* Remember to generate a branch island for far calls to the given
17487 function. */
ee890fe2 17488
f676971a 17489static void
c4ad648e
AM
17490add_compiler_branch_island (tree label_name, tree function_name,
17491 int line_number)
ee890fe2 17492{
efdba735 17493 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 17494 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
17495 TREE_CHAIN (branch_island) = branch_island_list;
17496 branch_island_list = branch_island;
ee890fe2
SS
17497}
17498
efdba735
SH
17499#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17500#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17501#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17502 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 17503
efdba735
SH
17504/* Generate far-jump branch islands for everything on the
17505 branch_island_list. Invoked immediately after the last instruction
17506 of the epilogue has been emitted; the branch-islands must be
17507 appended to, and contiguous with, the function body. Mach-O stubs
17508 are generated in machopic_output_stub(). */
ee890fe2 17509
efdba735
SH
17510static void
17511macho_branch_islands (void)
17512{
17513 char tmp_buf[512];
17514 tree branch_island;
17515
17516 for (branch_island = branch_island_list;
17517 branch_island;
17518 branch_island = TREE_CHAIN (branch_island))
17519 {
17520 const char *label =
17521 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17522 const char *name =
11abc112 17523 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
17524 char name_buf[512];
17525 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17526 if (name[0] == '*' || name[0] == '&')
17527 strcpy (name_buf, name+1);
17528 else
17529 {
17530 name_buf[0] = '_';
17531 strcpy (name_buf+1, name);
17532 }
17533 strcpy (tmp_buf, "\n");
17534 strcat (tmp_buf, label);
ee890fe2 17535#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17536 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17537 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17538#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
17539 if (flag_pic)
17540 {
17541 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17542 strcat (tmp_buf, label);
17543 strcat (tmp_buf, "_pic\n");
17544 strcat (tmp_buf, label);
17545 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 17546
efdba735
SH
17547 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17548 strcat (tmp_buf, name_buf);
17549 strcat (tmp_buf, " - ");
17550 strcat (tmp_buf, label);
17551 strcat (tmp_buf, "_pic)\n");
f676971a 17552
efdba735 17553 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 17554
efdba735
SH
17555 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17556 strcat (tmp_buf, name_buf);
17557 strcat (tmp_buf, " - ");
17558 strcat (tmp_buf, label);
17559 strcat (tmp_buf, "_pic)\n");
f676971a 17560
efdba735
SH
17561 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17562 }
17563 else
17564 {
17565 strcat (tmp_buf, ":\nlis r12,hi16(");
17566 strcat (tmp_buf, name_buf);
17567 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17568 strcat (tmp_buf, name_buf);
17569 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17570 }
17571 output_asm_insn (tmp_buf, 0);
ee890fe2 17572#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17573 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17574 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17575#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 17576 }
ee890fe2 17577
efdba735 17578 branch_island_list = 0;
ee890fe2
SS
17579}
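
/* For illustration only (not part of the original file): for a non-PIC far
   call to a function _foo with a hypothetical island label L42$island, the
   strcat sequence above produces assembly of roughly this shape:

       L42$island:
               lis r12,hi16(_foo)
               ori r12,r12,lo16(_foo)
               mtctr r12
               bctr

   The PIC variant instead materializes the address relative to a local
   label obtained via mflr after the "bcl 20,31" sequence.  */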
17580
17581/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
17582 already there or not. */
17583
efdba735 17584static int
a2369ed3 17585no_previous_def (tree function_name)
ee890fe2 17586{
efdba735
SH
17587 tree branch_island;
17588 for (branch_island = branch_island_list;
17589 branch_island;
17590 branch_island = TREE_CHAIN (branch_island))
17591 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
17592 return 0;
17593 return 1;
17594}
17595
17596/* GET_PREV_LABEL gets the label name from the previous definition of
17597 the function. */
17598
efdba735 17599static tree
a2369ed3 17600get_prev_label (tree function_name)
ee890fe2 17601{
efdba735
SH
17602 tree branch_island;
17603 for (branch_island = branch_island_list;
17604 branch_island;
17605 branch_island = TREE_CHAIN (branch_island))
17606 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17607 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
17608 return 0;
17609}
17610
17611/* INSN is either a function call or a millicode call. It may have an
f676971a 17612 unconditional jump in its delay slot.
ee890fe2
SS
17613
17614 CALL_DEST is the routine we are calling. */
17615
17616char *
c4ad648e
AM
17617output_call (rtx insn, rtx *operands, int dest_operand_number,
17618 int cookie_operand_number)
ee890fe2
SS
17619{
17620 static char buf[256];
efdba735
SH
17621 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17622 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
17623 {
17624 tree labelname;
efdba735 17625 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 17626
ee890fe2
SS
17627 if (no_previous_def (funname))
17628 {
308c142a 17629 int line_number = 0;
ee890fe2
SS
17630 rtx label_rtx = gen_label_rtx ();
17631 char *label_buf, temp_buf[256];
17632 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17633 CODE_LABEL_NUMBER (label_rtx));
17634 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17635 labelname = get_identifier (label_buf);
17636 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17637 if (insn)
17638 line_number = NOTE_LINE_NUMBER (insn);
efdba735 17639 add_compiler_branch_island (labelname, funname, line_number);
ee890fe2
SS
17640 }
17641 else
17642 labelname = get_prev_label (funname);
17643
efdba735
SH
17644 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17645 instruction will reach 'foo', otherwise link as 'bl L42'".
17646 "L42" should be a 'branch island', that will do a far jump to
17647 'foo'. Branch islands are generated in
17648 macho_branch_islands(). */
ee890fe2 17649 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 17650 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
17651 }
17652 else
efdba735
SH
17653 sprintf (buf, "bl %%z%d", dest_operand_number);
17654 return buf;
ee890fe2
SS
17655}
17656
ee890fe2
SS
17657/* Generate PIC and indirect symbol stubs. */
17658
17659void
a2369ed3 17660machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
17661{
17662 unsigned int length;
a4f6c312
SS
17663 char *symbol_name, *lazy_ptr_name;
17664 char *local_label_0;
ee890fe2
SS
17665 static int label = 0;
17666
df56a27f 17667 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 17668 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 17669
ee890fe2 17670
ee890fe2
SS
17671 length = strlen (symb);
17672 symbol_name = alloca (length + 32);
17673 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17674
17675 lazy_ptr_name = alloca (length + 32);
17676 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17677
ee890fe2 17678 if (flag_pic == 2)
d3c300d2 17679 machopic_picsymbol_stub1_section ();
ee890fe2 17680 else
d3c300d2 17681 machopic_symbol_stub1_section ();
ee890fe2
SS
17682
17683 if (flag_pic == 2)
17684 {
d974312d
DJ
17685 fprintf (file, "\t.align 5\n");
17686
17687 fprintf (file, "%s:\n", stub);
17688 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17689
876455fa 17690 label++;
89da1f32 17691 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 17692 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 17693
ee890fe2
SS
17694 fprintf (file, "\tmflr r0\n");
17695 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17696 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17697 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17698 lazy_ptr_name, local_label_0);
17699 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
17700 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17701 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
17702 lazy_ptr_name, local_label_0);
17703 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
17704 fprintf (file, "\tbctr\n");
17705 }
17706 else
d974312d
DJ
17707 {
17708 fprintf (file, "\t.align 4\n");
17709
17710 fprintf (file, "%s:\n", stub);
17711 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17712
17713 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
17714 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17715 (TARGET_64BIT ? "ldu" : "lwzu"),
17716 lazy_ptr_name);
d974312d
DJ
17717 fprintf (file, "\tmtctr r12\n");
17718 fprintf (file, "\tbctr\n");
17719 }
f676971a 17720
ee890fe2
SS
17721 machopic_lazy_symbol_ptr_section ();
17722 fprintf (file, "%s:\n", lazy_ptr_name);
17723 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
17724 fprintf (file, "%sdyld_stub_binding_helper\n",
17725 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
17726}
17727
17728/* Legitimize PIC addresses. If the address is already
17729 position-independent, we return ORIG. Newly generated
17730   position-independent addresses go into a reg.  This is REG if nonzero,
17731   otherwise we allocate register(s) as necessary. */
17732
9390387d 17733#define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
17734
17735rtx
f676971a 17736rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 17737 rtx reg)
ee890fe2
SS
17738{
17739 rtx base, offset;
17740
17741 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17742 reg = gen_reg_rtx (Pmode);
17743
17744 if (GET_CODE (orig) == CONST)
17745 {
37409796
NS
17746 rtx reg_temp;
17747
ee890fe2
SS
17748 if (GET_CODE (XEXP (orig, 0)) == PLUS
17749 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17750 return orig;
17751
37409796 17752 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 17753
37409796
NS
17754 /* Use a different reg for the intermediate value, as
17755 it will be marked UNCHANGING. */
17756 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17757 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17758 Pmode, reg_temp);
17759 offset =
17760 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17761 Pmode, reg);
bb8df8a6 17762
ee890fe2
SS
17763 if (GET_CODE (offset) == CONST_INT)
17764 {
17765 if (SMALL_INT (offset))
ed8908e7 17766 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
17767 else if (! reload_in_progress && ! reload_completed)
17768 offset = force_reg (Pmode, offset);
17769 else
c859cda6
DJ
17770 {
17771 rtx mem = force_const_mem (Pmode, orig);
17772 return machopic_legitimize_pic_address (mem, Pmode, reg);
17773 }
ee890fe2 17774 }
f1c25d3b 17775 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
17776 }
17777
17778 /* Fall back on generic machopic code. */
17779 return machopic_legitimize_pic_address (orig, mode, reg);
17780}
17781
17782/* This is just a placeholder to make linking work without having to
17783 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17784 ever needed for Darwin (not too likely!) this would have to get a
17785 real definition. */
17786
17787void
863d938c 17788toc_section (void)
ee890fe2
SS
17789{
17790}
17791
c4e18b1c
GK
17792/* Output a .machine directive for the Darwin assembler, and call
17793 the generic start_file routine. */
17794
17795static void
17796rs6000_darwin_file_start (void)
17797{
94ff898d 17798 static const struct
c4e18b1c
GK
17799 {
17800 const char *arg;
17801 const char *name;
17802 int if_set;
17803 } mapping[] = {
55dbfb48 17804 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
17805 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17806 { "power4", "ppc970", 0 },
17807 { "G5", "ppc970", 0 },
17808 { "7450", "ppc7450", 0 },
17809 { "7400", "ppc7400", MASK_ALTIVEC },
17810 { "G4", "ppc7400", 0 },
17811 { "750", "ppc750", 0 },
17812 { "740", "ppc750", 0 },
17813 { "G3", "ppc750", 0 },
17814 { "604e", "ppc604e", 0 },
17815 { "604", "ppc604", 0 },
17816 { "603e", "ppc603", 0 },
17817 { "603", "ppc603", 0 },
17818 { "601", "ppc601", 0 },
17819 { NULL, "ppc", 0 } };
17820 const char *cpu_id = "";
17821 size_t i;
94ff898d 17822
9390387d 17823 rs6000_file_start ();
c4e18b1c
GK
17824
17825 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17826 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17827 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17828 && rs6000_select[i].string[0] != '\0')
17829 cpu_id = rs6000_select[i].string;
17830
17831 /* Look through the mapping array. Pick the first name that either
17832 matches the argument, has a bit set in IF_SET that is also set
17833 in the target flags, or has a NULL name. */
17834
17835 i = 0;
17836 while (mapping[i].arg != NULL
17837 && strcmp (mapping[i].arg, cpu_id) != 0
17838 && (mapping[i].if_set & target_flags) == 0)
17839 i++;
17840
17841 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17842}
17843
ee890fe2 17844#endif /* TARGET_MACHO */
7c262518
RH
17845
17846#if TARGET_ELF
17847static unsigned int
a2369ed3 17848rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
7c262518 17849{
1ff8f81a
AM
17850 return default_section_type_flags_1 (decl, name, reloc,
17851 flag_pic || DEFAULT_ABI == ABI_AIX);
7c262518 17852}
d9f6800d
RH
17853
17854/* Record an element in the table of global constructors. SYMBOL is
17855 a SYMBOL_REF of the function to be called; PRIORITY is a number
17856 between 0 and MAX_INIT_PRIORITY.
17857
17858 This differs from default_named_section_asm_out_constructor in
17859 that we have special handling for -mrelocatable. */
17860
17861static void
a2369ed3 17862rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
17863{
17864 const char *section = ".ctors";
17865 char buf[16];
17866
17867 if (priority != DEFAULT_INIT_PRIORITY)
17868 {
17869 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
17870 /* Invert the numbering so the linker puts us in the proper
17871 order; constructors are run from right to left, and the
17872 linker sorts in increasing order. */
17873 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
17874 section = buf;
17875 }
17876
715bdd29
RH
17877 named_section_flags (section, SECTION_WRITE);
17878 assemble_align (POINTER_SIZE);
d9f6800d
RH
17879
17880 if (TARGET_RELOCATABLE)
17881 {
17882 fputs ("\t.long (", asm_out_file);
17883 output_addr_const (asm_out_file, symbol);
17884 fputs (")@fixup\n", asm_out_file);
17885 }
17886 else
c8af3574 17887 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
17888}
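
/* For illustration only (not part of the original file), assuming the
   usual MAX_INIT_PRIORITY of 65535: priority 65535 yields ".ctors.00000"
   and priority 101 yields ".ctors.65434", so the linker's increasing sort
   of the inverted numbers matches the right-to-left execution order noted
   in the comment above.  */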
17889
17890static void
a2369ed3 17891rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
17892{
17893 const char *section = ".dtors";
17894 char buf[16];
17895
17896 if (priority != DEFAULT_INIT_PRIORITY)
17897 {
17898 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
17899 /* Invert the numbering so the linker puts us in the proper
17900 order; constructors are run from right to left, and the
17901 linker sorts in increasing order. */
17902 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
17903 section = buf;
17904 }
17905
715bdd29
RH
17906 named_section_flags (section, SECTION_WRITE);
17907 assemble_align (POINTER_SIZE);
d9f6800d
RH
17908
17909 if (TARGET_RELOCATABLE)
17910 {
17911 fputs ("\t.long (", asm_out_file);
17912 output_addr_const (asm_out_file, symbol);
17913 fputs (")@fixup\n", asm_out_file);
17914 }
17915 else
c8af3574 17916 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 17917}
9739c90c
JJ
17918
17919void
a2369ed3 17920rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
17921{
17922 if (TARGET_64BIT)
17923 {
17924 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17925 ASM_OUTPUT_LABEL (file, name);
17926 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
17927 rs6000_output_function_entry (file, name);
17928 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17929 if (DOT_SYMBOLS)
9739c90c 17930 {
85b776df 17931 fputs ("\t.size\t", file);
9739c90c 17932 assemble_name (file, name);
85b776df
AM
17933 fputs (",24\n\t.type\t.", file);
17934 assemble_name (file, name);
17935 fputs (",@function\n", file);
17936 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
17937 {
17938 fputs ("\t.globl\t.", file);
17939 assemble_name (file, name);
17940 putc ('\n', file);
17941 }
9739c90c 17942 }
85b776df
AM
17943 else
17944 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 17945 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
17946 rs6000_output_function_entry (file, name);
17947 fputs (":\n", file);
9739c90c
JJ
17948 return;
17949 }
17950
17951 if (TARGET_RELOCATABLE
7f970b70 17952 && !TARGET_SECURE_PLT
9739c90c 17953 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 17954 && uses_TOC ())
9739c90c
JJ
17955 {
17956 char buf[256];
17957
17958 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
17959
17960 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
17961 fprintf (file, "\t.long ");
17962 assemble_name (file, buf);
17963 putc ('-', file);
17964 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
17965 assemble_name (file, buf);
17966 putc ('\n', file);
17967 }
17968
17969 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
17970 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
17971
17972 if (DEFAULT_ABI == ABI_AIX)
17973 {
17974 const char *desc_name, *orig_name;
17975
17976 orig_name = (*targetm.strip_name_encoding) (name);
17977 desc_name = orig_name;
17978 while (*desc_name == '.')
17979 desc_name++;
17980
17981 if (TREE_PUBLIC (decl))
17982 fprintf (file, "\t.globl %s\n", desc_name);
17983
17984 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
17985 fprintf (file, "%s:\n", desc_name);
17986 fprintf (file, "\t.long %s\n", orig_name);
17987 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
17988 if (DEFAULT_ABI == ABI_AIX)
17989 fputs ("\t.long 0\n", file);
17990 fprintf (file, "\t.previous\n");
17991 }
17992 ASM_OUTPUT_LABEL (file, name);
17993}
1334b570
AM
17994
17995static void
17996rs6000_elf_end_indicate_exec_stack (void)
17997{
17998 if (TARGET_32BIT)
17999 file_end_indicate_exec_stack ();
18000}
7c262518
RH
18001#endif
18002
cbaaba19 18003#if TARGET_XCOFF
7c262518 18004static void
a2369ed3 18005rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
18006{
18007 fputs (GLOBAL_ASM_OP, stream);
18008 RS6000_OUTPUT_BASENAME (stream, name);
18009 putc ('\n', stream);
18010}
18011
18012static void
c18a5b6c
MM
18013rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18014 tree decl ATTRIBUTE_UNUSED)
7c262518 18015{
0e5dbd9b
DE
18016 int smclass;
18017 static const char * const suffix[3] = { "PR", "RO", "RW" };
18018
18019 if (flags & SECTION_CODE)
18020 smclass = 0;
18021 else if (flags & SECTION_WRITE)
18022 smclass = 2;
18023 else
18024 smclass = 1;
18025
5b5198f7 18026 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 18027 (flags & SECTION_CODE) ? "." : "",
5b5198f7 18028 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 18029}
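
/* For illustration only (not part of the original file): for a code
   section the call above emits a directive of the form

       .csect .text_sec[PR],2

   where the leading dot marks code, the storage-mapping class comes from
   the suffix table, the section name "text_sec" is hypothetical, and the
   trailing number is the log2 alignment encoded in the SECTION_ENTSIZE
   bits.  */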
ae46c4e0
RH
18030
18031static void
f676971a 18032rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 18033 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 18034{
5add3202 18035 if (decl_readonly_section_1 (decl, reloc, 1))
ae46c4e0 18036 {
0e5dbd9b 18037 if (TREE_PUBLIC (decl))
c4ad648e 18038 read_only_data_section ();
ae46c4e0 18039 else
c4ad648e 18040 read_only_private_data_section ();
ae46c4e0
RH
18041 }
18042 else
18043 {
0e5dbd9b 18044 if (TREE_PUBLIC (decl))
c4ad648e 18045 data_section ();
ae46c4e0 18046 else
c4ad648e 18047 private_data_section ();
ae46c4e0
RH
18048 }
18049}
18050
18051static void
a2369ed3 18052rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
18053{
18054 const char *name;
ae46c4e0 18055
5b5198f7
DE
18056 /* Use select_section for private and uninitialized data. */
18057 if (!TREE_PUBLIC (decl)
18058 || DECL_COMMON (decl)
0e5dbd9b
DE
18059 || DECL_INITIAL (decl) == NULL_TREE
18060 || DECL_INITIAL (decl) == error_mark_node
18061 || (flag_zero_initialized_in_bss
18062 && initializer_zerop (DECL_INITIAL (decl))))
18063 return;
18064
18065 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18066 name = (*targetm.strip_name_encoding) (name);
18067 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 18068}
b64a1b53 18069
fb49053f
RH
18070/* Select section for constant in constant pool.
18071
18072 On RS/6000, all constants are in the private read-only data area.
18073 However, if this is being placed in the TOC it must be output as a
18074 toc entry. */
18075
b64a1b53 18076static void
f676971a 18077rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 18078 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
18079{
18080 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18081 toc_section ();
18082 else
18083 read_only_private_data_section ();
18084}
772c5265
RH
18085
18086/* Remove any trailing [DS] or the like from the symbol name. */
18087
18088static const char *
a2369ed3 18089rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
18090{
18091 size_t len;
18092 if (*name == '*')
18093 name++;
18094 len = strlen (name);
18095 if (name[len - 1] == ']')
18096 return ggc_alloc_string (name, len - 4);
18097 else
18098 return name;
18099}
18100
5add3202
DE
18101/* Section attributes. AIX is always PIC. */
18102
18103static unsigned int
a2369ed3 18104rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 18105{
5b5198f7
DE
18106 unsigned int align;
18107 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18108
18109 /* Align to at least UNIT size. */
18110 if (flags & SECTION_CODE)
18111 align = MIN_UNITS_PER_WORD;
18112 else
18113 /* Increase alignment of large objects if not already stricter. */
18114 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18115 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18116 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18117
18118 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 18119}
a5fe455b 18120
1bc7c5b6
ZW
18121/* Output at beginning of assembler file.
18122
18123 Initialize the section names for the RS/6000 at this point.
18124
18125 Specify filename, including full path, to assembler.
18126
18127 We want to go into the TOC section so at least one .toc will be emitted.
18128 Also, in order to output proper .bs/.es pairs, we need at least one static
18129 [RW] section emitted.
18130
18131 Finally, declare mcount when profiling to make the assembler happy. */
18132
18133static void
863d938c 18134rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
18135{
18136 rs6000_gen_section_name (&xcoff_bss_section_name,
18137 main_input_filename, ".bss_");
18138 rs6000_gen_section_name (&xcoff_private_data_section_name,
18139 main_input_filename, ".rw_");
18140 rs6000_gen_section_name (&xcoff_read_only_section_name,
18141 main_input_filename, ".ro_");
18142
18143 fputs ("\t.file\t", asm_out_file);
18144 output_quoted_string (asm_out_file, main_input_filename);
18145 fputc ('\n', asm_out_file);
1bc7c5b6
ZW
18146 if (write_symbols != NO_DEBUG)
18147 private_data_section ();
18148 text_section ();
18149 if (profile_flag)
18150 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18151 rs6000_file_start ();
18152}
18153
a5fe455b
ZW
18154/* Output at end of assembler file.
18155 On the RS/6000, referencing data should automatically pull in text. */
18156
18157static void
863d938c 18158rs6000_xcoff_file_end (void)
a5fe455b
ZW
18159{
18160 text_section ();
18161 fputs ("_section_.text:\n", asm_out_file);
18162 data_section ();
18163 fputs (TARGET_32BIT
18164 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18165 asm_out_file);
18166}
f1384257 18167#endif /* TARGET_XCOFF */
0e5dbd9b 18168
3c50106f
RH
18169/* Compute a (partial) cost for rtx X. Return true if the complete
18170 cost has been computed, and false if subexpressions should be
18171 scanned. In either case, *TOTAL contains the cost result. */
18172
18173static bool
1494c534 18174rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 18175{
f0517163
RS
18176 enum machine_mode mode = GET_MODE (x);
18177
3c50106f
RH
18178 switch (code)
18179 {
30a555d9 18180 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 18181 case CONST_INT:
066cd967
DE
18182 if (((outer_code == SET
18183 || outer_code == PLUS
18184 || outer_code == MINUS)
18185 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18186 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
066cd967
DE
18187 || (outer_code == AND
18188 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
22e54023
DE
18189 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18190 mode == SImode ? 'L' : 'J'))
1990cd79
AM
18191 || mask_operand (x, mode)
18192 || (mode == DImode
18193 && mask64_operand (x, DImode))))
22e54023
DE
18194 || ((outer_code == IOR || outer_code == XOR)
18195 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18196 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18197 mode == SImode ? 'L' : 'J'))))
066cd967
DE
18198 || outer_code == ASHIFT
18199 || outer_code == ASHIFTRT
18200 || outer_code == LSHIFTRT
18201 || outer_code == ROTATE
18202 || outer_code == ROTATERT
d5861a7a 18203 || outer_code == ZERO_EXTRACT
066cd967
DE
18204 || (outer_code == MULT
18205 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
22e54023
DE
18206 || ((outer_code == DIV || outer_code == UDIV
18207 || outer_code == MOD || outer_code == UMOD)
18208 && exact_log2 (INTVAL (x)) >= 0)
066cd967
DE
18209 || (outer_code == COMPARE
18210 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
22e54023
DE
18211 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
18212 || (outer_code == EQ
18213 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18214 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18215 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18216 mode == SImode ? 'L' : 'J'))))
18217 || (outer_code == GTU
18218 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18219 || (outer_code == LTU
18220 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
066cd967
DE
18221 {
18222 *total = 0;
18223 return true;
18224 }
18225 else if ((outer_code == PLUS
4ae234b0 18226 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 18227 || (outer_code == MINUS
4ae234b0 18228 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
18229 || ((outer_code == SET
18230 || outer_code == IOR
18231 || outer_code == XOR)
18232 && (INTVAL (x)
18233 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18234 {
18235 *total = COSTS_N_INSNS (1);
18236 return true;
18237 }
18238 /* FALLTHRU */
18239
18240 case CONST_DOUBLE:
18241 if (mode == DImode
18242 && ((outer_code == AND
18243 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18244 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
1990cd79
AM
18245 || mask_operand (x, DImode)
18246 || mask64_operand (x, DImode)))
066cd967
DE
18247 || ((outer_code == IOR || outer_code == XOR)
18248 && CONST_DOUBLE_HIGH (x) == 0
18249 && (CONST_DOUBLE_LOW (x)
18250 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
18251 {
18252 *total = 0;
18253 return true;
18254 }
18255 else if (mode == DImode
18256 && (outer_code == SET
18257 || outer_code == IOR
18258 || outer_code == XOR)
18259 && CONST_DOUBLE_HIGH (x) == 0)
18260 {
18261 *total = COSTS_N_INSNS (1);
18262 return true;
18263 }
18264 /* FALLTHRU */
18265
3c50106f 18266 case CONST:
066cd967 18267 case HIGH:
3c50106f 18268 case SYMBOL_REF:
066cd967
DE
18269 case MEM:
18270 /* When optimizing for size, MEM should be slightly more expensive
18271 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 18272 L1 cache latency is about two instructions. */
066cd967 18273 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
18274 return true;
18275
30a555d9
DE
18276 case LABEL_REF:
18277 *total = 0;
18278 return true;
18279
3c50106f 18280 case PLUS:
f0517163 18281 if (mode == DFmode)
066cd967
DE
18282 {
18283 if (GET_CODE (XEXP (x, 0)) == MULT)
18284 {
18285 /* FNMA accounted in outer NEG. */
18286 if (outer_code == NEG)
18287 *total = rs6000_cost->dmul - rs6000_cost->fp;
18288 else
18289 *total = rs6000_cost->dmul;
18290 }
18291 else
18292 *total = rs6000_cost->fp;
18293 }
f0517163 18294 else if (mode == SFmode)
066cd967
DE
18295 {
18296 /* FNMA accounted in outer NEG. */
18297 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18298 *total = 0;
18299 else
18300 *total = rs6000_cost->fp;
18301 }
938bf747
RS
18302 else if (GET_CODE (XEXP (x, 0)) == MULT)
18303 {
18304 /* The rs6000 doesn't have shift-and-add instructions. */
18305 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
18306 *total += COSTS_N_INSNS (1);
18307 }
f0517163 18308 else
066cd967
DE
18309 *total = COSTS_N_INSNS (1);
18310 return false;
3c50106f 18311
52190329 18312 case MINUS:
f0517163 18313 if (mode == DFmode)
066cd967
DE
18314 {
18315 if (GET_CODE (XEXP (x, 0)) == MULT)
18316 {
18317 /* FNMA accounted in outer NEG. */
18318 if (outer_code == NEG)
18319 *total = 0;
18320 else
18321 *total = rs6000_cost->dmul;
18322 }
18323 else
18324 *total = rs6000_cost->fp;
18325 }
f0517163 18326 else if (mode == SFmode)
066cd967
DE
18327 {
18328 /* FNMA accounted in outer NEG. */
18329 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18330 *total = 0;
18331 else
18332 *total = rs6000_cost->fp;
18333 }
938bf747
RS
18334 else if (GET_CODE (XEXP (x, 0)) == MULT)
18335 {
18336 /* The rs6000 doesn't have shift-and-sub instructions. */
18337 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
18338 *total += COSTS_N_INSNS (1);
18339 }
f0517163 18340 else
c4ad648e 18341 *total = COSTS_N_INSNS (1);
066cd967 18342 return false;
3c50106f
RH
18343
18344 case MULT:
c9dbf840
DE
18345 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18346 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
3c50106f 18347 {
8b897cfa
RS
18348 if (INTVAL (XEXP (x, 1)) >= -256
18349 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 18350 *total = rs6000_cost->mulsi_const9;
8b897cfa 18351 else
06a67bdd 18352 *total = rs6000_cost->mulsi_const;
3c50106f 18353 }
066cd967
DE
18354 /* FMA accounted in outer PLUS/MINUS. */
18355 else if ((mode == DFmode || mode == SFmode)
18356 && (outer_code == PLUS || outer_code == MINUS))
18357 *total = 0;
f0517163 18358 else if (mode == DFmode)
06a67bdd 18359 *total = rs6000_cost->dmul;
f0517163 18360 else if (mode == SFmode)
06a67bdd 18361 *total = rs6000_cost->fp;
f0517163 18362 else if (mode == DImode)
06a67bdd 18363 *total = rs6000_cost->muldi;
8b897cfa 18364 else
06a67bdd 18365 *total = rs6000_cost->mulsi;
066cd967 18366 return false;
3c50106f
RH
18367
18368 case DIV:
18369 case MOD:
f0517163
RS
18370 if (FLOAT_MODE_P (mode))
18371 {
06a67bdd
RS
18372 *total = mode == DFmode ? rs6000_cost->ddiv
18373 : rs6000_cost->sdiv;
066cd967 18374 return false;
f0517163 18375 }
5efb1046 18376 /* FALLTHRU */
3c50106f
RH
18377
18378 case UDIV:
18379 case UMOD:
627b6fe2
DJ
18380 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18381 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18382 {
18383 if (code == DIV || code == MOD)
18384 /* Shift, addze */
18385 *total = COSTS_N_INSNS (2);
18386 else
18387 /* Shift */
18388 *total = COSTS_N_INSNS (1);
18389 }
c4ad648e 18390 else
627b6fe2
DJ
18391 {
18392 if (GET_MODE (XEXP (x, 1)) == DImode)
18393 *total = rs6000_cost->divdi;
18394 else
18395 *total = rs6000_cost->divsi;
18396 }
18397 /* Add in shift and subtract for MOD. */
18398 if (code == MOD || code == UMOD)
18399 *total += COSTS_N_INSNS (2);
066cd967 18400 return false;
3c50106f
RH
18401
18402 case FFS:
18403 *total = COSTS_N_INSNS (4);
066cd967 18404 return false;
3c50106f 18405
06a67bdd 18406 case NOT:
066cd967
DE
18407 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18408 {
18409 *total = 0;
18410 return false;
18411 }
18412 /* FALLTHRU */
18413
18414 case AND:
18415 case IOR:
18416 case XOR:
d5861a7a
DE
18417 case ZERO_EXTRACT:
18418 *total = COSTS_N_INSNS (1);
18419 return false;
18420
066cd967
DE
18421 case ASHIFT:
18422 case ASHIFTRT:
18423 case LSHIFTRT:
18424 case ROTATE:
18425 case ROTATERT:
d5861a7a 18426 /* Handle mul_highpart. */
066cd967
DE
18427 if (outer_code == TRUNCATE
18428 && GET_CODE (XEXP (x, 0)) == MULT)
18429 {
18430 if (mode == DImode)
18431 *total = rs6000_cost->muldi;
18432 else
18433 *total = rs6000_cost->mulsi;
18434 return true;
18435 }
d5861a7a
DE
18436 else if (outer_code == AND)
18437 *total = 0;
18438 else
18439 *total = COSTS_N_INSNS (1);
18440 return false;
18441
18442 case SIGN_EXTEND:
18443 case ZERO_EXTEND:
18444 if (GET_CODE (XEXP (x, 0)) == MEM)
18445 *total = 0;
18446 else
18447 *total = COSTS_N_INSNS (1);
066cd967 18448 return false;
06a67bdd 18449
066cd967
DE
18450 case COMPARE:
18451 case NEG:
18452 case ABS:
18453 if (!FLOAT_MODE_P (mode))
18454 {
18455 *total = COSTS_N_INSNS (1);
18456 return false;
18457 }
18458 /* FALLTHRU */
18459
18460 case FLOAT:
18461 case UNSIGNED_FLOAT:
18462 case FIX:
18463 case UNSIGNED_FIX:
06a67bdd
RS
18464 case FLOAT_TRUNCATE:
18465 *total = rs6000_cost->fp;
066cd967 18466 return false;
06a67bdd 18467
a2af5043
DJ
18468 case FLOAT_EXTEND:
18469 if (mode == DFmode)
18470 *total = 0;
18471 else
18472 *total = rs6000_cost->fp;
18473 return false;
18474
06a67bdd
RS
18475 case UNSPEC:
18476 switch (XINT (x, 1))
18477 {
18478 case UNSPEC_FRSP:
18479 *total = rs6000_cost->fp;
18480 return true;
18481
18482 default:
18483 break;
18484 }
18485 break;
18486
18487 case CALL:
18488 case IF_THEN_ELSE:
18489 if (optimize_size)
18490 {
18491 *total = COSTS_N_INSNS (1);
18492 return true;
18493 }
066cd967
DE
18494 else if (FLOAT_MODE_P (mode)
18495 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18496 {
18497 *total = rs6000_cost->fp;
18498 return false;
18499 }
06a67bdd
RS
18500 break;
18501
c0600ecd
DE
18502 case EQ:
18503 case GTU:
18504 case LTU:
22e54023
DE
18505 /* Carry bit requires mode == Pmode.
18506 NEG or PLUS already counted so only add one. */
18507 if (mode == Pmode
18508 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 18509 {
22e54023
DE
18510 *total = COSTS_N_INSNS (1);
18511 return true;
18512 }
18513 if (outer_code == SET)
18514 {
18515 if (XEXP (x, 1) == const0_rtx)
c0600ecd 18516 {
22e54023 18517 *total = COSTS_N_INSNS (2);
c0600ecd 18518 return true;
c0600ecd 18519 }
22e54023
DE
18520 else if (mode == Pmode)
18521 {
18522 *total = COSTS_N_INSNS (3);
18523 return false;
18524 }
18525 }
18526 /* FALLTHRU */
18527
18528 case GT:
18529 case LT:
18530 case UNORDERED:
18531 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
18532 {
18533 *total = COSTS_N_INSNS (2);
18534 return true;
c0600ecd 18535 }
22e54023
DE
18536 /* CC COMPARE. */
18537 if (outer_code == COMPARE)
18538 {
18539 *total = 0;
18540 return true;
18541 }
18542 break;
c0600ecd 18543
3c50106f 18544 default:
06a67bdd 18545 break;
3c50106f 18546 }
06a67bdd
RS
18547
18548 return false;
3c50106f
RH
18549}
18550
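For readers unfamiliar with this target hook, the true/false protocol documented above is easiest to see on a toy tree: "true" means *TOTAL already accounts for the whole expression, "false" means the caller must still add the operands' costs. The sketch below is illustrative only and is not part of rs6000.c; the node layout, enum names and cost numbers are invented for the demo.

/* Illustrative sketch only -- not part of rs6000.c.  It mimics the
   *TOTAL / return-value convention of the hook above on a made-up
   expression tree.  */
#include <stdio.h>

struct toy_rtx { int code; struct toy_rtx *op0, *op1; };
enum { TOY_CONST, TOY_PLUS };

static int
toy_rtx_costs (const struct toy_rtx *x, int *total)
{
  switch (x->code)
    {
    case TOY_CONST:
      *total = 0;               /* free, and fully accounted for     */
      return 1;                 /* "true": stop scanning             */
    case TOY_PLUS:
      *total = 1;               /* one insn for the add itself...    */
      return 0;                 /* ..."false": still cost operands   */
    default:
      *total = 4;               /* made-up expensive default         */
      return 1;
    }
}

static int
toy_cost (const struct toy_rtx *x)
{
  int total;
  if (toy_rtx_costs (x, &total))
    return total;
  return total + toy_cost (x->op0) + toy_cost (x->op1);
}

int
main (void)
{
  struct toy_rtx a = { TOY_CONST, 0, 0 }, b = { TOY_CONST, 0, 0 };
  struct toy_rtx sum = { TOY_PLUS, &a, &b };
  printf ("%d\n", toy_cost (&sum));   /* prints 1: add is 1, operands free */
  return 0;
}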
34bb030a
DE
18551/* A C expression returning the cost of moving data from a register of class
18552 CLASS1 to one of CLASS2. */
18553
18554int
f676971a 18555rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 18556 enum reg_class from, enum reg_class to)
34bb030a
DE
18557{
18558 /* Moves from/to GENERAL_REGS. */
18559 if (reg_classes_intersect_p (to, GENERAL_REGS)
18560 || reg_classes_intersect_p (from, GENERAL_REGS))
18561 {
18562 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18563 from = to;
18564
18565 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18566 return (rs6000_memory_move_cost (mode, from, 0)
18567 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18568
c4ad648e
AM
18569 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18570 shift. */
34bb030a
DE
18571 else if (from == CR_REGS)
18572 return 4;
18573
18574 else
c4ad648e 18575 /* A move will cost one instruction per GPR moved. */
c8b622ff 18576 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
18577 }
18578
c4ad648e 18579 /* Moving between two similar registers is just one instruction. */
34bb030a
DE
18580 else if (reg_classes_intersect_p (to, from))
18581 return mode == TFmode ? 4 : 2;
18582
c4ad648e 18583 /* Everything else has to go through GENERAL_REGS. */
34bb030a 18584 else
f676971a 18585 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
18586 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18587}
18588
18589/* A C expression returning the cost of moving data of MODE from a register to
18590 or from memory. */
18591
18592int
f676971a 18593rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 18594 int in ATTRIBUTE_UNUSED)
34bb030a
DE
18595{
18596 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 18597 return 4 * hard_regno_nregs[0][mode];
34bb030a 18598 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 18599 return 4 * hard_regno_nregs[32][mode];
34bb030a 18600 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 18601 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
18602 else
18603 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18604}
18605
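To make the two cost routines above concrete, the snippet below evaluates their formulas for DFmode on a 32-bit configuration. It is illustration only: the hard_regno_nregs values are assumed (one FPR but two GPRs per double), not taken from a real build.

/* Illustration only: what the move-cost formulas above yield for
   DFmode, 32-bit, assuming a double needs one FPR but two GPRs.  */
#include <stdio.h>

int
main (void)
{
  int nregs_gpr = 2, nregs_fpr = 1;            /* assumed hard_regno_nregs */
  int mem_general = 4 * nregs_gpr;             /* memory cost, GENERAL_REGS: 8 */
  int mem_float   = 4 * nregs_fpr;             /* memory cost, FLOAT_REGS:   4 */

  /* FPR <-> GPR moves are modeled as going through memory.  */
  printf ("FPR<->GPR move: %d\n", mem_float + mem_general);   /* 12 */
  /* GPR <-> GPR moves cost one instruction (2) per GPR moved.  */
  printf ("GPR<->GPR move: %d\n", 2 * nregs_gpr);             /* 4  */
  return 0;
}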
ef765ea9
DE
18606/* Newton-Raphson approximation of single-precision floating point divide n/d.
18607 Assumes no trapping math and finite arguments. */
18608
18609void
18610rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
18611{
18612 rtx x0, e0, e1, y1, u0, v0, one;
18613
18614 x0 = gen_reg_rtx (SFmode);
18615 e0 = gen_reg_rtx (SFmode);
18616 e1 = gen_reg_rtx (SFmode);
18617 y1 = gen_reg_rtx (SFmode);
18618 u0 = gen_reg_rtx (SFmode);
18619 v0 = gen_reg_rtx (SFmode);
18620 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
18621
18622 /* x0 = 1./d estimate */
18623 emit_insn (gen_rtx_SET (VOIDmode, x0,
18624 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
18625 UNSPEC_FRES)));
18626 /* e0 = 1. - d * x0 */
18627 emit_insn (gen_rtx_SET (VOIDmode, e0,
18628 gen_rtx_MINUS (SFmode, one,
18629 gen_rtx_MULT (SFmode, d, x0))));
18630 /* e1 = e0 + e0 * e0 */
18631 emit_insn (gen_rtx_SET (VOIDmode, e1,
18632 gen_rtx_PLUS (SFmode,
18633 gen_rtx_MULT (SFmode, e0, e0), e0)));
18634 /* y1 = x0 + e1 * x0 */
18635 emit_insn (gen_rtx_SET (VOIDmode, y1,
18636 gen_rtx_PLUS (SFmode,
18637 gen_rtx_MULT (SFmode, e1, x0), x0)));
18638 /* u0 = n * y1 */
18639 emit_insn (gen_rtx_SET (VOIDmode, u0,
18640 gen_rtx_MULT (SFmode, n, y1)));
18641 /* v0 = n - d * u0 */
18642 emit_insn (gen_rtx_SET (VOIDmode, v0,
18643 gen_rtx_MINUS (SFmode, n,
18644 gen_rtx_MULT (SFmode, d, u0))));
18645 /* res = u0 + v0 * y1 */
18646 emit_insn (gen_rtx_SET (VOIDmode, res,
18647 gen_rtx_PLUS (SFmode,
18648 gen_rtx_MULT (SFmode, v0, y1), u0)));
18649}
18650
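The RTL sequence above is ordinary Newton-Raphson refinement of a hardware reciprocal estimate, with a final correction step (u0 + v0*y1) that recovers the quotient. A standalone restatement of the same arithmetic in plain C is shown below for orientation; it is not part of rs6000.c, and crude_recip() merely stands in for the fres estimate with an assumed, deliberately poor accuracy.

/* Illustration only: the refinement described by the RTL above,
   written in plain C.  crude_recip() stands in for the hardware
   reciprocal-estimate instruction; its ~1% error is an assumption
   made just for the demo.  */
#include <stdio.h>

static float
crude_recip (float d)
{
  return 1.0f / d * (1.0f + 0.01f);        /* deliberately ~1% off */
}

int
main (void)
{
  float n = 355.0f, d = 113.0f;

  float x0  = crude_recip (d);             /* x0 = 1/d estimate    */
  float e0  = 1.0f - d * x0;               /* e0 = 1 - d*x0        */
  float e1  = e0 + e0 * e0;                /* e1 = e0 + e0*e0      */
  float y1  = x0 + e1 * x0;                /* y1 = x0 + e1*x0      */
  float u0  = n * y1;                      /* u0 = n*y1            */
  float v0  = n - d * u0;                  /* v0 = n - d*u0        */
  float res = u0 + v0 * y1;                /* res = u0 + v0*y1     */

  printf ("%.9g vs %.9g\n", res, n / d);   /* agree to float precision */
  return 0;
}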
18651/* Newton-Raphson approximation of double-precision floating point divide n/d.
18652 Assumes no trapping math and finite arguments. */
18653
18654void
18655rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18656{
18657 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18658
18659 x0 = gen_reg_rtx (DFmode);
18660 e0 = gen_reg_rtx (DFmode);
18661 e1 = gen_reg_rtx (DFmode);
18662 e2 = gen_reg_rtx (DFmode);
18663 y1 = gen_reg_rtx (DFmode);
18664 y2 = gen_reg_rtx (DFmode);
18665 y3 = gen_reg_rtx (DFmode);
18666 u0 = gen_reg_rtx (DFmode);
18667 v0 = gen_reg_rtx (DFmode);
18668 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18669
18670 /* x0 = 1./d estimate */
18671 emit_insn (gen_rtx_SET (VOIDmode, x0,
18672 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18673 UNSPEC_FRES)));
18674 /* e0 = 1. - d * x0 */
18675 emit_insn (gen_rtx_SET (VOIDmode, e0,
18676 gen_rtx_MINUS (DFmode, one,
18677 gen_rtx_MULT (DFmode, d, x0))));
18678 /* y1 = x0 + e0 * x0 */
18679 emit_insn (gen_rtx_SET (VOIDmode, y1,
18680 gen_rtx_PLUS (DFmode,
18681 gen_rtx_MULT (DFmode, e0, x0), x0)));
18682 /* e1 = e0 * e0 */
18683 emit_insn (gen_rtx_SET (VOIDmode, e1,
18684 gen_rtx_MULT (DFmode, e0, e0)));
18685 /* y2 = y1 + e1 * y1 */
18686 emit_insn (gen_rtx_SET (VOIDmode, y2,
18687 gen_rtx_PLUS (DFmode,
18688 gen_rtx_MULT (DFmode, e1, y1), y1)));
18689 /* e2 = e1 * e1 */
18690 emit_insn (gen_rtx_SET (VOIDmode, e2,
18691 gen_rtx_MULT (DFmode, e1, e1)));
18692 /* y3 = y2 + e2 * y2 */
18693 emit_insn (gen_rtx_SET (VOIDmode, y3,
18694 gen_rtx_PLUS (DFmode,
18695 gen_rtx_MULT (DFmode, e2, y2), y2)));
18696 /* u0 = n * y3 */
18697 emit_insn (gen_rtx_SET (VOIDmode, u0,
18698 gen_rtx_MULT (DFmode, n, y3)));
18699 /* v0 = n - d * u0 */
18700 emit_insn (gen_rtx_SET (VOIDmode, v0,
18701 gen_rtx_MINUS (DFmode, n,
18702 gen_rtx_MULT (DFmode, d, u0))));
18703 /* res = u0 + v0 * y3 */
18704 emit_insn (gen_rtx_SET (VOIDmode, res,
18705 gen_rtx_PLUS (DFmode,
18706 gen_rtx_MULT (DFmode, v0, y3), u0)));
18707}
18708
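Relative to the single-precision routine, the only substantive difference above is the number of refinement steps: each y = y + e*y step roughly squares the relative error of the estimate, so a few extra steps are enough to reach double precision. The small demo below shows that quadratic convergence; the starting error is an arbitrary assumption and the code is not part of rs6000.c.

/* Illustration only: one Newton step squares the relative error of a
   reciprocal estimate, which is why the DFmode routine above simply
   runs more refinement steps than the SFmode one.  */
#include <stdio.h>

int
main (void)
{
  double d = 113.0;
  double x = (1.0 / d) * (1.0 + 0.01);     /* assumed ~1% initial error */
  int i;

  for (i = 0; i < 4; i++)
    {
      double e = 1.0 - d * x;              /* current residual          */
      x = x + e * x;                       /* one Newton step           */
      printf ("step %d: rel. error %.3e\n", i + 1, 1.0 - d * x);
    }
  return 0;
}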
ded9bf77
AH
18709/* Return an RTX representing where to find the function value of a
18710 function returning MODE. */
18711static rtx
18712rs6000_complex_function_value (enum machine_mode mode)
18713{
18714 unsigned int regno;
18715 rtx r1, r2;
18716 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 18717 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 18718
18f63bfa
AH
18719 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18720 regno = FP_ARG_RETURN;
354ed18f
AH
18721 else
18722 {
18f63bfa 18723 regno = GP_ARG_RETURN;
ded9bf77 18724
18f63bfa
AH
18725 /* 32-bit is OK since it'll go in r3/r4. */
18726 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
18727 return gen_rtx_REG (mode, regno);
18728 }
18729
18f63bfa
AH
18730 if (inner_bytes >= 8)
18731 return gen_rtx_REG (mode, regno);
18732
ded9bf77
AH
18733 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18734 const0_rtx);
18735 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 18736 GEN_INT (inner_bytes));
ded9bf77
AH
18737 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18738}
18739
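For orientation, the comment block below spells out the shape of the value the routine above builds for a complex float return on a hard-float target. It is derived only from the code itself; the register numbers are left symbolic since they depend on FP_ARG_RETURN.

/* Shape of the rtx built above for a complex float (SCmode) return on
   a hard-float target: inner = SFmode, inner_bytes = 4, so the result
   is roughly

     (parallel:SC [(expr_list (reg:SF fp_ret)     (const_int 0))
                   (expr_list (reg:SF fp_ret + 1) (const_int 4))])

   i.e. the real part in the first FP return register at byte offset 0
   and the imaginary part in the next FP register at offset 4.  */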
a6ebc39a
AH
18740/* Define how to find the value returned by a function.
18741 VALTYPE is the data type of the value (as a tree).
18742 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18743 otherwise, FUNC is 0.
18744
18745 On the SPE, both FPs and vectors are returned in r3.
18746
18747 On RS/6000 an integer value is in r3 and a floating-point value is in
18748 fp1, unless -msoft-float. */
18749
18750rtx
18751rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18752{
18753 enum machine_mode mode;
2a8fa26c 18754 unsigned int regno;
a6ebc39a 18755
594a51fe
SS
18756 /* Special handling for structs in darwin64. */
18757 if (rs6000_darwin64_abi
18758 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
18759 && TREE_CODE (valtype) == RECORD_TYPE
18760 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
18761 {
18762 CUMULATIVE_ARGS valcum;
18763 rtx valret;
18764
0b5383eb 18765 valcum.words = 0;
594a51fe
SS
18766 valcum.fregno = FP_ARG_MIN_REG;
18767 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
18768 /* Do a trial code generation as if this were going to be passed as
18769 an argument; if any part goes in memory, we return NULL. */
18770 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
18771 if (valret)
18772 return valret;
18773 /* Otherwise fall through to standard ABI rules. */
18774 }
18775
0e67400a
FJ
18776 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18777 {
18778 /* Long long return values need to be split in the 32-bit -mpowerpc64 ABI. */
18779 return gen_rtx_PARALLEL (DImode,
18780 gen_rtvec (2,
18781 gen_rtx_EXPR_LIST (VOIDmode,
18782 gen_rtx_REG (SImode, GP_ARG_RETURN),
18783 const0_rtx),
18784 gen_rtx_EXPR_LIST (VOIDmode,
18785 gen_rtx_REG (SImode,
18786 GP_ARG_RETURN + 1),
18787 GEN_INT (4))));
18788 }
0f086e42
FJ
18789 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
18790 {
18791 return gen_rtx_PARALLEL (DCmode,
18792 gen_rtvec (4,
18793 gen_rtx_EXPR_LIST (VOIDmode,
18794 gen_rtx_REG (SImode, GP_ARG_RETURN),
18795 const0_rtx),
18796 gen_rtx_EXPR_LIST (VOIDmode,
18797 gen_rtx_REG (SImode,
18798 GP_ARG_RETURN + 1),
18799 GEN_INT (4)),
18800 gen_rtx_EXPR_LIST (VOIDmode,
18801 gen_rtx_REG (SImode,
18802 GP_ARG_RETURN + 2),
18803 GEN_INT (8)),
18804 gen_rtx_EXPR_LIST (VOIDmode,
18805 gen_rtx_REG (SImode,
18806 GP_ARG_RETURN + 3),
18807 GEN_INT (12))));
18808 }
a6ebc39a
AH
18809 if ((INTEGRAL_TYPE_P (valtype)
18810 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18811 || POINTER_TYPE_P (valtype))
b78d48dd 18812 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a
AH
18813 else
18814 mode = TYPE_MODE (valtype);
18815
4ed78545 18816 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 18817 regno = FP_ARG_RETURN;
ded9bf77 18818 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 18819 && targetm.calls.split_complex_arg)
ded9bf77 18820 return rs6000_complex_function_value (mode);
44688022 18821 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 18822 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 18823 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 18824 regno = ALTIVEC_ARG_RETURN;
18f63bfa
AH
18825 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18826 && (mode == DFmode || mode == DCmode))
18827 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
18828 else
18829 regno = GP_ARG_RETURN;
18830
18831 return gen_rtx_REG (mode, regno);
18832}
18833
ded9bf77
AH
18834/* Define how to find the value returned by a library function
18835 assuming the value has mode MODE. */
18836rtx
18837rs6000_libcall_value (enum machine_mode mode)
18838{
18839 unsigned int regno;
18840
2e6c9641
FJ
18841 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18842 {
18843 /* Long long return values need to be split in the 32-bit -mpowerpc64 ABI. */
18844 return gen_rtx_PARALLEL (DImode,
18845 gen_rtvec (2,
18846 gen_rtx_EXPR_LIST (VOIDmode,
18847 gen_rtx_REG (SImode, GP_ARG_RETURN),
18848 const0_rtx),
18849 gen_rtx_EXPR_LIST (VOIDmode,
18850 gen_rtx_REG (SImode,
18851 GP_ARG_RETURN + 1),
18852 GEN_INT (4))));
18853 }
18854
ded9bf77
AH
18855 if (GET_MODE_CLASS (mode) == MODE_FLOAT
18856 && TARGET_HARD_FLOAT && TARGET_FPRS)
18857 regno = FP_ARG_RETURN;
44688022
AM
18858 else if (ALTIVEC_VECTOR_MODE (mode)
18859 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 18860 regno = ALTIVEC_ARG_RETURN;
42ba5130 18861 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 18862 return rs6000_complex_function_value (mode);
18f63bfa
AH
18863 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18864 && (mode == DFmode || mode == DCmode))
18865 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
18866 else
18867 regno = GP_ARG_RETURN;
18868
18869 return gen_rtx_REG (mode, regno);
18870}
18871
d1d0c603
JJ
18872/* Define the offset between two registers, FROM to be eliminated and its
18873 replacement TO, at the start of a routine. */
18874HOST_WIDE_INT
18875rs6000_initial_elimination_offset (int from, int to)
18876{
18877 rs6000_stack_t *info = rs6000_stack_info ();
18878 HOST_WIDE_INT offset;
18879
7d5175e1 18880 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 18881 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
18882 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18883 {
18884 offset = info->push_p ? 0 : -info->total_size;
18885 if (FRAME_GROWS_DOWNWARD)
5b667039 18886 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
18887 }
18888 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18889 offset = FRAME_GROWS_DOWNWARD
5b667039 18890 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
18891 : 0;
18892 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
18893 offset = info->total_size;
18894 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18895 offset = info->push_p ? info->total_size : 0;
18896 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18897 offset = 0;
18898 else
37409796 18899 gcc_unreachable ();
d1d0c603
JJ
18900
18901 return offset;
18902}
18903
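The small program below evaluates the elimination offsets above for a hypothetical frame, just to make the arithmetic concrete. Every number in it is invented for illustration; none comes from rs6000_stack_info or a real build.

/* Illustration only: the elimination arithmetic above applied to a
   made-up frame that is pushed (push_p), with total_size = 144 and
   fixed_size + vars_size + parm_size = 96.  */
#include <stdio.h>

int
main (void)
{
  int push_p = 1;
  int total_size = 144;
  int fixed_vars_parm = 96;
  int frame_grows_downward = 1;

  /* HARD_FRAME_POINTER -> STACK_POINTER.  */
  printf ("%d\n", push_p ? 0 : -total_size);                      /* 0   */
  /* FRAME_POINTER -> STACK_POINTER.  */
  printf ("%d\n", (push_p ? 0 : -total_size)
                  + (frame_grows_downward ? fixed_vars_parm : 0)); /* 96  */
  /* ARG_POINTER -> STACK_POINTER.  */
  printf ("%d\n", push_p ? total_size : 0);                       /* 144 */
  return 0;
}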
58646b77 18904/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 18905
c8e4f0e9 18906static bool
58646b77 18907rs6000_is_opaque_type (tree type)
62e1dfcf 18908{
58646b77 18909 return (type == opaque_V2SI_type_node
2abe3e28 18910 || type == opaque_V2SF_type_node
58646b77
PB
18911 || type == opaque_p_V2SI_type_node
18912 || type == opaque_V4SI_type_node);
62e1dfcf
NC
18913}
18914
96714395 18915static rtx
a2369ed3 18916rs6000_dwarf_register_span (rtx reg)
96714395
AH
18917{
18918 unsigned regno;
18919
4d4cbc0e
AH
18920 if (TARGET_SPE
18921 && (SPE_VECTOR_MODE (GET_MODE (reg))
18922 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18923 ;
18924 else
96714395
AH
18925 return NULL_RTX;
18926
18927 regno = REGNO (reg);
18928
18929 /* The duality of the SPE register size wreaks all kinds of havoc.
18930 This is a way of distinguishing r0 in 32-bits from r0 in
18931 64-bits. */
18932 return
18933 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
18934 BYTES_BIG_ENDIAN
18935 ? gen_rtvec (2,
18936 gen_rtx_REG (SImode, regno + 1200),
18937 gen_rtx_REG (SImode, regno))
18938 : gen_rtvec (2,
18939 gen_rtx_REG (SImode, regno),
18940 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
18941}
18942
93c9d1ba
AM
18943/* Map internal gcc register numbers to DWARF2 register numbers. */
18944
18945unsigned int
18946rs6000_dbx_register_number (unsigned int regno)
18947{
18948 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
18949 return regno;
18950 if (regno == MQ_REGNO)
18951 return 100;
18952 if (regno == LINK_REGISTER_REGNUM)
18953 return 108;
18954 if (regno == COUNT_REGISTER_REGNUM)
18955 return 109;
18956 if (CR_REGNO_P (regno))
18957 return regno - CR0_REGNO + 86;
18958 if (regno == XER_REGNO)
18959 return 101;
18960 if (ALTIVEC_REGNO_P (regno))
18961 return regno - FIRST_ALTIVEC_REGNO + 1124;
18962 if (regno == VRSAVE_REGNO)
18963 return 356;
18964 if (regno == VSCR_REGNO)
18965 return 67;
18966 if (regno == SPE_ACC_REGNO)
18967 return 99;
18968 if (regno == SPEFSCR_REGNO)
18969 return 612;
18970 /* SPE high reg number. We get these values of regno from
18971 rs6000_dwarf_register_span. */
37409796
NS
18972 gcc_assert (regno >= 1200 && regno < 1232);
18973 return regno;
93c9d1ba
AM
18974}
18975
93f90be6 18976/* target hook eh_return_filter_mode */
f676971a 18977static enum machine_mode
93f90be6
FJ
18978rs6000_eh_return_filter_mode (void)
18979{
18980 return TARGET_32BIT ? SImode : word_mode;
18981}
18982
f676971a
EC
18983/* Target hook for vector_mode_supported_p. */
18984static bool
18985rs6000_vector_mode_supported_p (enum machine_mode mode)
18986{
18987
18988 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
18989 return true;
18990
18991 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
18992 return true;
18993
18994 else
18995 return false;
18996}
18997
bb8df8a6
EC
18998/* Target hook for invalid_arg_for_unprototyped_fn. */
18999static const char *
4d3e6fae
FJ
19000invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19001{
19002 return (!rs6000_darwin64_abi
19003 && typelist == 0
19004 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19005 && (funcdecl == NULL_TREE
19006 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19007 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19008 ? N_("AltiVec argument passed to unprototyped function")
19009 : NULL;
19010}
19011
3aebbe5f
JJ
19012/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
19013 setup by using the hidden function __stack_chk_fail_local instead of
19014 calling __stack_chk_fail directly. Otherwise it is better to call
19015 __stack_chk_fail directly. */
19016
19017static tree
19018rs6000_stack_protect_fail (void)
19019{
19020 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19021 ? default_hidden_stack_protect_fail ()
19022 : default_external_stack_protect_fail ();
19023}
19024
17211ab5 19025#include "gt-rs6000.h"