/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack.  */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int toc_save_p;               /* true if the TOC needs to be saved */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31 */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs */
  int toc_save_offset;          /* offset to save the TOC pointer */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int lr_size;                  /* size to hold LR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
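  /* Size of alignment padding for the SPE GPR save area, if not in
     save_size (parallels altivec_padding_size above).  */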
  int spe_padding_size;
  int toc_size;                 /* size to hold TOC if not in save_size */
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
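  /* Nonzero if the function uses any 64-bit SPE registers (see
     spe_func_has_64bit_regs_p below).  */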
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type.  */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
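/* Nonzero if the comparison saved above is a floating-point comparison.  */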
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section.  */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which ABI to adhere to.  */
const char *rs6000_abi_name;

/* Semantics of the small data area.  */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use.  */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags.  */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized.  */
int toc_initialized;
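/* Buffer holding the name of the internal LCTOC label used to address the
   TOC; set up in rs6000_override_options when TARGET_TOC.  */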
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static GTY(()) int rs6000_sr_alias_set;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi= was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
};

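/* Cost table for the processor we are tuning for; used when computing
   RTX costs (see rs6000_rtx_costs).  */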
const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add).  */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_maybe_dead (rtx);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
                             int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_indexed_address_p (rtx, int);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_fundamental_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
static void rs6000_elf_unique_section (tree, int);
static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
                                           unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
static bool rs6000_elf_in_small_data_p (tree);
#endif
#if TARGET_XCOFF
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
                                             unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static bool is_microcoded_insn (rtx);
static int is_dispatch_slot_restricted (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx, enum group_termination);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_use_sched_lookahead (void);
static tree rs6000_builtin_mask_for_load (void);

static void def_builtin (int, const char *, tree, int);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
                                      enum rs6000_builtins,
                                      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
                                             const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
                                                      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
                                                        tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
                                              HOST_WIDE_INT,
                                              rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
                                                tree, HOST_WIDE_INT,
                                                rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree,
                                    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static void add_compiler_branch_island (tree, tree, int);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
                             enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
                                       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

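/* Sentinel value meaning "no suitable instruction pattern available".  */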
const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "mq", "lr", "ctr", "ap",
  "0", "1", "2", "3", "4", "5", "6", "7",
  "xer",
  /* AltiVec registers.  */
  "0", "1", "2", "3", "4", "5", "6", "7",
  "8", "9", "10", "11", "12", "13", "14", "15",
  "16", "17", "18", "19", "20", "21", "22", "23",
  "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
  "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
  "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
  "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
  "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
  "mq", "lr", "ctr", "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
  "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
  "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT | MASK_SCHED_PROLOG)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

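/* The hook table for this target, built from the TARGET_xxx definitions
   above.  */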
struct gcc_target targetm = TARGET_INITIALIZER;

/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general registers and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;            /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;           /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
         {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_MULHW},
         {"440", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW},
         {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_MULHW},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"power5+", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

5248c961 1174
66188a7e
GK
1175 /* Some OSs don't support saving the high part of 64-bit registers on
1176 context switch. Other OSs don't support saving Altivec registers.
1177 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1178 settings; if the user wants either, the user must explicitly specify
1179 them and we won't interfere with the user's specification. */
1180
1181 enum {
1182 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
f676971a 1183 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
66188a7e 1184 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
131aeb82 1185 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW)
66188a7e 1186 };
0d1fbc8c
AH
1187
1188 rs6000_init_hard_regno_mode_ok ();
1189
c4ad648e 1190 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1191#ifdef OS_MISSING_POWERPC64
1192 if (OS_MISSING_POWERPC64)
1193 set_masks &= ~MASK_POWERPC64;
1194#endif
1195#ifdef OS_MISSING_ALTIVEC
1196 if (OS_MISSING_ALTIVEC)
1197 set_masks &= ~MASK_ALTIVEC;
1198#endif
1199
768875a8
AM
1200 /* Don't override by the processor default if given explicitly. */
1201 set_masks &= ~target_flags_explicit;
957211c3 1202
a4f6c312 1203 /* Identify the processor type. */
8e3f41e7 1204 rs6000_select[0].string = default_cpu;
3cb999d8 1205 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1206
b6a1cbae 1207 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1208 {
8e3f41e7
MM
1209 ptr = &rs6000_select[i];
1210 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1211 {
8e3f41e7
MM
1212 for (j = 0; j < ptt_size; j++)
1213 if (! strcmp (ptr->string, processor_target_table[j].name))
1214 {
1215 if (ptr->set_tune_p)
1216 rs6000_cpu = processor_target_table[j].processor;
1217
1218 if (ptr->set_arch_p)
1219 {
66188a7e
GK
1220 target_flags &= ~set_masks;
1221 target_flags |= (processor_target_table[j].target_enable
1222 & set_masks);
8e3f41e7
MM
1223 }
1224 break;
1225 }
1226
4406229e 1227 if (j == ptt_size)
8e3f41e7 1228 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1229 }
1230 }
8a61d227 1231
993f19a8 1232 if (TARGET_E500)
a3170dc6
AH
1233 rs6000_isel = 1;
1234
dff9f1b6
DE
1235 /* If we are optimizing big endian systems for space, use the load/store
1236 multiple and string instructions. */
ef792183 1237 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1238 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1239
a4f6c312
SS
1240 /* Don't allow -mmultiple or -mstring on little endian systems
1241 unless the cpu is a 750, because the hardware doesn't support the
1242 instructions used in little endian mode, and causes an alignment
1243 trap. The 750 does not cause an alignment trap (except when the
1244 target is unaligned). */
bef84347 1245
b21fb038 1246 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1247 {
1248 if (TARGET_MULTIPLE)
1249 {
1250 target_flags &= ~MASK_MULTIPLE;
b21fb038 1251 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1252 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1253 }
1254
1255 if (TARGET_STRING)
1256 {
1257 target_flags &= ~MASK_STRING;
b21fb038 1258 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1259 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1260 }
1261 }
3933e0e1 1262
  /* Set debug flags.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
        rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
        rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
        rs6000_debug_arg = 1;
      else
        error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
        rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
        rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
        rs6000_traceback = traceback_none;
      else
        error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
               rs6000_traceback_name);
    }

  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
        error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
         MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
        target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
         default, so let's unset them if we manually set another
         CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
        rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
        rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
        rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
        rs6000_isel = 0;
      if (!rs6000_explicit_options.long_double)
        rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
                        && rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
                         || rs6000_cpu == PROCESSOR_POWER5);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
        rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
        rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
        rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
        rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
        rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
        rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      if (rs6000_sched_groups)
        {
          if (align_functions <= 0)
            align_functions = 16;
          if (align_jumps <= 0)
            align_jumps = 16;
          if (align_loops <= 0)
            align_loops = 16;
        }
      if (align_jumps_max_skip <= 0)
        align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
        align_loops_max_skip = 15;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
18f63bfa 1446 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1447 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1448
1449 /* Initialize rs6000_cost with the appropriate target costs. */
1450 if (optimize_size)
1451 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1452 else
1453 switch (rs6000_cpu)
1454 {
1455 case PROCESSOR_RIOS1:
1456 rs6000_cost = &rios1_cost;
1457 break;
1458
1459 case PROCESSOR_RIOS2:
1460 rs6000_cost = &rios2_cost;
1461 break;
1462
1463 case PROCESSOR_RS64A:
1464 rs6000_cost = &rs64a_cost;
1465 break;
1466
1467 case PROCESSOR_MPCCORE:
1468 rs6000_cost = &mpccore_cost;
1469 break;
1470
1471 case PROCESSOR_PPC403:
1472 rs6000_cost = &ppc403_cost;
1473 break;
1474
1475 case PROCESSOR_PPC405:
1476 rs6000_cost = &ppc405_cost;
1477 break;
1478
1479 case PROCESSOR_PPC440:
1480 rs6000_cost = &ppc440_cost;
1481 break;
1482
1483 case PROCESSOR_PPC601:
1484 rs6000_cost = &ppc601_cost;
1485 break;
1486
1487 case PROCESSOR_PPC603:
1488 rs6000_cost = &ppc603_cost;
1489 break;
1490
1491 case PROCESSOR_PPC604:
1492 rs6000_cost = &ppc604_cost;
1493 break;
1494
1495 case PROCESSOR_PPC604e:
1496 rs6000_cost = &ppc604e_cost;
1497 break;
1498
1499 case PROCESSOR_PPC620:
8b897cfa
RS
1500 rs6000_cost = &ppc620_cost;
1501 break;
1502
f0517163
RS
1503 case PROCESSOR_PPC630:
1504 rs6000_cost = &ppc630_cost;
1505 break;
1506
8b897cfa
RS
1507 case PROCESSOR_PPC750:
1508 case PROCESSOR_PPC7400:
1509 rs6000_cost = &ppc750_cost;
1510 break;
1511
1512 case PROCESSOR_PPC7450:
1513 rs6000_cost = &ppc7450_cost;
1514 break;
1515
1516 case PROCESSOR_PPC8540:
1517 rs6000_cost = &ppc8540_cost;
1518 break;
1519
1520 case PROCESSOR_POWER4:
1521 case PROCESSOR_POWER5:
1522 rs6000_cost = &power4_cost;
1523 break;
1524
1525 default:
37409796 1526 gcc_unreachable ();
8b897cfa 1527 }
5248c961 1528}
5accd822 1529
7ccf35ed
DN
1530/* Implement targetm.vectorize.builtin_mask_for_load. */
1531static tree
1532rs6000_builtin_mask_for_load (void)
1533{
1534 if (TARGET_ALTIVEC)
1535 return altivec_builtin_mask_for_load;
1536 else
1537 return 0;
1538}
1539
5da702b1
AH
1540/* Handle generic options of the form -mfoo=yes/no.
1541 NAME is the option name.
1542 VALUE is the option value.
 1543 FLAG points to the flag in which to store 1 or 0, depending on
 1544 whether the option value is 'yes' or 'no', respectively. */
993f19a8 1545static void
5da702b1 1546rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1547{
5da702b1 1548 if (value == 0)
993f19a8 1549 return;
5da702b1
AH
1550 else if (!strcmp (value, "yes"))
1551 *flag = 1;
1552 else if (!strcmp (value, "no"))
1553 *flag = 0;
08b57fb3 1554 else
5da702b1 1555 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1556}
1557
c4501e62
JJ
1558/* Validate and record the size specified with the -mtls-size option. */
1559
1560static void
863d938c 1561rs6000_parse_tls_size_option (void)
c4501e62
JJ
1562{
1563 if (rs6000_tls_size_string == 0)
1564 return;
1565 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1566 rs6000_tls_size = 16;
1567 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1568 rs6000_tls_size = 32;
1569 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1570 rs6000_tls_size = 64;
1571 else
9e637a26 1572 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1573}
1574
5accd822 1575void
a2369ed3 1576optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1577{
2e3f0db6
DJ
1578 if (DEFAULT_ABI == ABI_DARWIN)
1579 /* The Darwin libraries never set errno, so we might as well
1580 avoid calling them when that's the only reason we would. */
1581 flag_errno_math = 0;
59d6560b
DE
1582
1583 /* Double growth factor to counter reduced min jump length. */
1584 set_param_value ("max-grow-copy-bb-insns", 16);
5accd822 1585}
78f5898b
AH
1586
1587/* Implement TARGET_HANDLE_OPTION. */
1588
1589static bool
1590rs6000_handle_option (size_t code, const char *arg, int value)
1591{
1592 switch (code)
1593 {
1594 case OPT_mno_power:
1595 target_flags &= ~(MASK_POWER | MASK_POWER2
1596 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
1597 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1598 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
1599 break;
1600 case OPT_mno_powerpc:
1601 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1602 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
1603 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1604 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
1605 break;
1606 case OPT_mfull_toc:
1607 target_flags &= ~(MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1608 | MASK_NO_SUM_IN_TOC);
c2dba4ab
AH
1609 target_flags_explicit |= (MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC
1610 | MASK_NO_SUM_IN_TOC);
78f5898b
AH
1611#ifdef TARGET_USES_SYSV4_OPT
 1612 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
1613 just the same as -mminimal-toc. */
1614 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1615 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1616#endif
1617 break;
1618
1619#ifdef TARGET_USES_SYSV4_OPT
1620 case OPT_mtoc:
1621 /* Make -mtoc behave like -mminimal-toc. */
1622 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1623 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1624 break;
1625#endif
1626
1627#ifdef TARGET_USES_AIX64_OPT
1628 case OPT_maix64:
1629#else
1630 case OPT_m64:
1631#endif
2c9c9afd
AM
1632 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1633 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1634 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
1635 break;
1636
1637#ifdef TARGET_USES_AIX64_OPT
1638 case OPT_maix32:
1639#else
1640 case OPT_m32:
1641#endif
1642 target_flags &= ~MASK_POWERPC64;
c2dba4ab 1643 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
1644 break;
1645
1646 case OPT_minsert_sched_nops_:
1647 rs6000_sched_insert_nops_str = arg;
1648 break;
1649
1650 case OPT_mminimal_toc:
1651 if (value == 1)
1652 {
c2dba4ab
AH
1653 target_flags &= ~(MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
1654 target_flags_explicit |= (MASK_NO_FP_IN_TOC | MASK_NO_SUM_IN_TOC);
78f5898b
AH
1655 }
1656 break;
1657
1658 case OPT_mpower:
1659 if (value == 1)
c2dba4ab
AH
1660 {
1661 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1662 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1663 }
78f5898b
AH
1664 break;
1665
1666 case OPT_mpower2:
1667 if (value == 1)
c2dba4ab
AH
1668 {
1669 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1670 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1671 }
78f5898b
AH
1672 break;
1673
1674 case OPT_mpowerpc_gpopt:
1675 case OPT_mpowerpc_gfxopt:
1676 if (value == 1)
c2dba4ab
AH
1677 {
1678 target_flags |= MASK_POWERPC;
1679 target_flags_explicit |= MASK_POWERPC;
1680 }
78f5898b
AH
1681 break;
1682
df01da37
DE
1683 case OPT_maix_struct_return:
1684 case OPT_msvr4_struct_return:
1685 rs6000_explicit_options.aix_struct_ret = true;
1686 break;
1687
78f5898b
AH
1688 case OPT_mvrsave_:
1689 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1690 break;
78f5898b
AH
1691
1692 case OPT_misel_:
1693 rs6000_explicit_options.isel = true;
1694 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1695 break;
1696
1697 case OPT_mspe_:
1698 rs6000_explicit_options.spe = true;
1699 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1700 /* No SPE means 64-bit long doubles, even if an E500. */
1701 if (!rs6000_spe)
1702 rs6000_long_double_type_size = 64;
1703 break;
1704
1705 case OPT_mdebug_:
1706 rs6000_debug_name = arg;
1707 break;
1708
1709#ifdef TARGET_USES_SYSV4_OPT
1710 case OPT_mcall_:
1711 rs6000_abi_name = arg;
1712 break;
1713
1714 case OPT_msdata_:
1715 rs6000_sdata_name = arg;
1716 break;
1717
1718 case OPT_mtls_size_:
1719 rs6000_tls_size_string = arg;
1720 break;
1721
1722 case OPT_mrelocatable:
1723 if (value == 1)
c2dba4ab
AH
1724 {
1725 target_flags |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1726 target_flags_explicit |= MASK_MINIMAL_TOC | MASK_NO_FP_IN_TOC;
1727 }
78f5898b
AH
1728 break;
1729
1730 case OPT_mrelocatable_lib:
1731 if (value == 1)
c2dba4ab
AH
1732 {
1733 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1734 | MASK_NO_FP_IN_TOC;
1735 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC
1736 | MASK_NO_FP_IN_TOC;
1737 }
78f5898b 1738 else
c2dba4ab
AH
1739 {
1740 target_flags &= ~MASK_RELOCATABLE;
1741 target_flags_explicit |= MASK_RELOCATABLE;
1742 }
78f5898b
AH
1743 break;
1744#endif
1745
1746 case OPT_mabi_:
1747 rs6000_explicit_options.abi = true;
1748 if (!strcmp (arg, "altivec"))
1749 {
1750 rs6000_altivec_abi = 1;
1751 rs6000_spe_abi = 0;
1752 }
1753 else if (! strcmp (arg, "no-altivec"))
1754 rs6000_altivec_abi = 0;
1755 else if (! strcmp (arg, "spe"))
1756 {
1757 rs6000_spe_abi = 1;
1758 rs6000_altivec_abi = 0;
1759 if (!TARGET_SPE_ABI)
1760 error ("not configured for ABI: '%s'", arg);
1761 }
1762 else if (! strcmp (arg, "no-spe"))
1763 rs6000_spe_abi = 0;
1764
 1765 /* These are here for testing during development only; do not
 1766 document them in the manual, please. */
1767 else if (! strcmp (arg, "d64"))
1768 {
1769 rs6000_darwin64_abi = 1;
1770 warning (0, "Using darwin64 ABI");
1771 }
1772 else if (! strcmp (arg, "d32"))
1773 {
1774 rs6000_darwin64_abi = 0;
1775 warning (0, "Using old darwin ABI");
1776 }
1777
1778 else
1779 {
1780 error ("unknown ABI specified: '%s'", arg);
1781 return false;
1782 }
1783 break;
1784
1785 case OPT_mcpu_:
1786 rs6000_select[1].string = arg;
1787 break;
1788
1789 case OPT_mtune_:
1790 rs6000_select[2].string = arg;
1791 break;
1792
1793 case OPT_mtraceback_:
1794 rs6000_traceback_name = arg;
1795 break;
1796
1797 case OPT_mfloat_gprs_:
1798 rs6000_explicit_options.float_gprs = true;
1799 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1800 rs6000_float_gprs = 1;
1801 else if (! strcmp (arg, "double"))
1802 rs6000_float_gprs = 2;
1803 else if (! strcmp (arg, "no"))
1804 rs6000_float_gprs = 0;
1805 else
1806 {
1807 error ("invalid option for -mfloat-gprs: '%s'", arg);
1808 return false;
1809 }
1810 break;
1811
1812 case OPT_mlong_double_:
1813 rs6000_explicit_options.long_double = true;
1814 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1815 if (value != 64 && value != 128)
1816 {
1817 error ("Unknown switch -mlong-double-%s", arg);
1818 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1819 return false;
1820 }
1821 else
1822 rs6000_long_double_type_size = value;
1823 break;
1824
1825 case OPT_msched_costly_dep_:
1826 rs6000_sched_costly_dep_str = arg;
1827 break;
1828
1829 case OPT_malign_:
1830 rs6000_explicit_options.alignment = true;
1831 if (! strcmp (arg, "power"))
1832 {
1833 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1834 some C library functions, so warn about it. The flag may be
1835 useful for performance studies from time to time though, so
1836 don't disable it entirely. */
1837 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1838 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1839 " it is incompatible with the installed C and C++ libraries");
1840 rs6000_alignment_flags = MASK_ALIGN_POWER;
1841 }
1842 else if (! strcmp (arg, "natural"))
1843 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1844 else
1845 {
1846 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1847 return false;
1848 }
1849 break;
1850 }
1851 return true;
1852}
3cfa4909
MM
1853\f
1854/* Do anything needed at the start of the asm file. */
1855
1bc7c5b6 1856static void
863d938c 1857rs6000_file_start (void)
3cfa4909 1858{
c4d38ccb 1859 size_t i;
3cfa4909 1860 char buffer[80];
d330fd93 1861 const char *start = buffer;
3cfa4909 1862 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
1863 const char *default_cpu = TARGET_CPU_DEFAULT;
1864 FILE *file = asm_out_file;
1865
1866 default_file_start ();
1867
1868#ifdef TARGET_BI_ARCH
1869 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1870 default_cpu = 0;
1871#endif
3cfa4909
MM
1872
1873 if (flag_verbose_asm)
1874 {
1875 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1876 rs6000_select[0].string = default_cpu;
1877
b6a1cbae 1878 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
1879 {
1880 ptr = &rs6000_select[i];
1881 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1882 {
1883 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1884 start = "";
1885 }
1886 }
1887
9c6b4ed9 1888 if (PPC405_ERRATUM77)
b0bfee6e 1889 {
9c6b4ed9 1890 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
1891 start = "";
1892 }
b0bfee6e 1893
b91da81f 1894#ifdef USING_ELFOS_H
3cfa4909
MM
1895 switch (rs6000_sdata)
1896 {
1897 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1898 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1899 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1900 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1901 }
1902
1903 if (rs6000_sdata && g_switch_value)
1904 {
307b599c
MK
1905 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1906 g_switch_value);
3cfa4909
MM
1907 start = "";
1908 }
1909#endif
1910
1911 if (*start == '\0')
949ea356 1912 putc ('\n', file);
3cfa4909 1913 }
b723e82f
JJ
1914
1915 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1916 {
1917 toc_section ();
1918 text_section ();
1919 }
3cfa4909 1920}
c4e18b1c 1921
5248c961 1922\f
a0ab749a 1923/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
1924
1925int
863d938c 1926direct_return (void)
9878760c 1927{
4697a36c
MM
1928 if (reload_completed)
1929 {
1930 rs6000_stack_t *info = rs6000_stack_info ();
1931
1932 if (info->first_gp_reg_save == 32
1933 && info->first_fp_reg_save == 64
00b960c7 1934 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
1935 && ! info->lr_save_p
1936 && ! info->cr_save_p
00b960c7 1937 && info->vrsave_mask == 0
c81fc13e 1938 && ! info->push_p)
4697a36c
MM
1939 return 1;
1940 }
1941
1942 return 0;
9878760c
RK
1943}
1944
4e74d8ec
MM
1945/* Return the number of instructions it takes to form a constant in an
1946 integer register. */
1947
48d72335 1948int
a2369ed3 1949num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
1950{
1951 /* signed constant loadable with {cal|addi} */
5f59ecb7 1952 if (CONST_OK_FOR_LETTER_P (value, 'I'))
0865c631
GK
1953 return 1;
1954
4e74d8ec 1955 /* constant loadable with {cau|addis} */
5f59ecb7 1956 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
4e74d8ec
MM
1957 return 1;
1958
5f59ecb7 1959#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 1960 else if (TARGET_POWERPC64)
4e74d8ec 1961 {
a65c591c
DE
1962 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1963 HOST_WIDE_INT high = value >> 31;
4e74d8ec 1964
a65c591c 1965 if (high == 0 || high == -1)
4e74d8ec
MM
1966 return 2;
1967
a65c591c 1968 high >>= 1;
4e74d8ec 1969
a65c591c 1970 if (low == 0)
4e74d8ec 1971 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
1972 else
1973 return (num_insns_constant_wide (high)
e396202a 1974 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
1975 }
1976#endif
1977
1978 else
1979 return 2;
1980}
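
/* A minimal standalone sketch of the 32-bit case above, for exposition
   only, assuming the usual meanings of the 'I' (signed 16-bit immediate)
   and 'L' (signed 16-bit immediate shifted left 16) constraint letters:
   a value satisfying either loads in one instruction (addi or addis);
   any other 32-bit value needs an addis/ori pair, i.e. two.  */
#include <stdint.h>

static int
sketch_num_insns_for_si (int32_t value)
{
  if (value >= -0x8000 && value <= 0x7fff)   /* 'I': one addi  */
    return 1;
  if ((value & 0xffff) == 0)                 /* 'L': one addis */
    return 1;
  return 2;                                  /* addis + ori    */
}
/* e.g. sketch_num_insns_for_si (0x7fff) == 1, (0x70000000) == 1,
   (0x12345678) == 2.  */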
1981
1982int
a2369ed3 1983num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 1984{
37409796 1985 HOST_WIDE_INT low, high;
bb8df8a6 1986
37409796 1987 switch (GET_CODE (op))
0d30d435 1988 {
37409796 1989 case CONST_INT:
0d30d435 1990#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 1991 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 1992 && mask64_operand (op, mode))
c4ad648e 1993 return 2;
0d30d435
DE
1994 else
1995#endif
1996 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 1997
37409796
NS
1998 case CONST_DOUBLE:
1999 if (mode == SFmode)
2000 {
2001 long l;
2002 REAL_VALUE_TYPE rv;
bb8df8a6 2003
37409796
NS
2004 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2005 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2006 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2007 }
a260abc9 2008
37409796
NS
2009 if (mode == VOIDmode || mode == DImode)
2010 {
2011 high = CONST_DOUBLE_HIGH (op);
2012 low = CONST_DOUBLE_LOW (op);
2013 }
2014 else
2015 {
2016 long l[2];
2017 REAL_VALUE_TYPE rv;
bb8df8a6 2018
37409796
NS
2019 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2020 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2021 high = l[WORDS_BIG_ENDIAN == 0];
2022 low = l[WORDS_BIG_ENDIAN != 0];
2023 }
47ad8c61 2024
37409796
NS
2025 if (TARGET_32BIT)
2026 return (num_insns_constant_wide (low)
2027 + num_insns_constant_wide (high));
2028 else
2029 {
2030 if ((high == 0 && low >= 0)
2031 || (high == -1 && low < 0))
2032 return num_insns_constant_wide (low);
bb8df8a6 2033
1990cd79 2034 else if (mask64_operand (op, mode))
37409796 2035 return 2;
bb8df8a6 2036
37409796
NS
2037 else if (low == 0)
2038 return num_insns_constant_wide (high) + 1;
bb8df8a6 2039
37409796
NS
2040 else
2041 return (num_insns_constant_wide (high)
2042 + num_insns_constant_wide (low) + 1);
2043 }
bb8df8a6 2044
37409796
NS
2045 default:
2046 gcc_unreachable ();
4e74d8ec 2047 }
4e74d8ec
MM
2048}
2049
452a7d36 2050
66180ff3
PB
2051/* Return true if OP can be synthesized with a particular vspltisb, vspltish
2052 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2053 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2054 all items are set to the same value and contain COPIES replicas of the
2055 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2056 operand and the others are set to the value of the operand's msb. */
2057
2058static bool
2059vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2060{
66180ff3
PB
2061 enum machine_mode mode = GET_MODE (op);
2062 enum machine_mode inner = GET_MODE_INNER (mode);
2063
2064 unsigned i;
2065 unsigned nunits = GET_MODE_NUNITS (mode);
2066 unsigned bitsize = GET_MODE_BITSIZE (inner);
2067 unsigned mask = GET_MODE_MASK (inner);
2068
2069 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2070 HOST_WIDE_INT val = INTVAL (last);
2071 HOST_WIDE_INT splat_val = val;
2072 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2073
2074 /* Construct the value to be splatted, if possible. If not, return 0. */
2075 for (i = 2; i <= copies; i *= 2)
452a7d36 2076 {
66180ff3
PB
2077 HOST_WIDE_INT small_val;
2078 bitsize /= 2;
2079 small_val = splat_val >> bitsize;
2080 mask >>= bitsize;
2081 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2082 return false;
2083 splat_val = small_val;
2084 }
c4ad648e 2085
66180ff3
PB
2086 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2087 if (EASY_VECTOR_15 (splat_val))
2088 ;
2089
2090 /* Also check if we can splat, and then add the result to itself. Do so if
 2091 the value is positive, or if the splat instruction is using OP's mode;
2092 for splat_val < 0, the splat and the add should use the same mode. */
2093 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2094 && (splat_val >= 0 || (step == 1 && copies == 1)))
2095 ;
2096
2097 else
2098 return false;
2099
2100 /* Check if VAL is present in every STEP-th element, and the
2101 other elements are filled with its most significant bit. */
2102 for (i = 0; i < nunits - 1; ++i)
2103 {
2104 HOST_WIDE_INT desired_val;
2105 if (((i + 1) & (step - 1)) == 0)
2106 desired_val = val;
2107 else
2108 desired_val = msb_val;
2109
2110 if (desired_val != INTVAL (CONST_VECTOR_ELT (op, i)))
2111 return false;
452a7d36 2112 }
66180ff3
PB
2113
2114 return true;
452a7d36
HP
2115}
2116
69ef87e2 2117
66180ff3
PB
2118/* Return true if OP is of the given MODE and can be synthesized
2119 with a vspltisb, vspltish or vspltisw. */
2120
2121bool
2122easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2123{
66180ff3 2124 unsigned step, copies;
d744e06e 2125
66180ff3
PB
2126 if (mode == VOIDmode)
2127 mode = GET_MODE (op);
2128 else if (mode != GET_MODE (op))
2129 return false;
d744e06e 2130
66180ff3
PB
2131 /* Start with a vspltisw. */
2132 step = GET_MODE_NUNITS (mode) / 4;
2133 copies = 1;
2134
2135 if (vspltis_constant (op, step, copies))
2136 return true;
2137
2138 /* Then try with a vspltish. */
2139 if (step == 1)
2140 copies <<= 1;
2141 else
2142 step >>= 1;
2143
2144 if (vspltis_constant (op, step, copies))
2145 return true;
2146
2147 /* And finally a vspltisb. */
2148 if (step == 1)
2149 copies <<= 1;
2150 else
2151 step >>= 1;
2152
2153 if (vspltis_constant (op, step, copies))
2154 return true;
2155
2156 return false;
d744e06e
AH
2157}
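
/* A minimal standalone sketch of the splat test above, for a single
   element rather than a whole CONST_VECTOR, assuming EASY_VECTOR_15 (v)
   means -16 <= v <= 15 (a 5-bit signed immediate).  It asks whether a
   32-bit element could come from vspltisw directly, or from vspltish /
   vspltisb whose narrower result is reinterpreted as this wider element
   (i.e. the element is made of identical halves).  The add-to-itself
   (EASY_VECTOR_15_ADD_SELF) case is not covered here.  */
#include <stdbool.h>
#include <stdint.h>

static bool
sketch_is_vspltis_element (int32_t elem)
{
  int64_t splat = elem;                 /* candidate for vspltisw */
  int64_t mask, low, high;
  int bits = 32;

  for (;;)
    {
      if (splat >= -16 && splat <= 15)  /* fits a vspltis[bhw] immediate */
	return true;
      if (bits == 8)
	return false;

      /* Try the next narrower splat: both halves must be identical.  */
      bits /= 2;
      mask = ((int64_t) 1 << bits) - 1;
      low = splat & mask;
      high = (splat >> bits) & mask;
      if (low != high)
	return false;
      /* Sign-extend the narrower half and repeat.  */
      splat = (low ^ ((int64_t) 1 << (bits - 1)))
	      - ((int64_t) 1 << (bits - 1));
    }
}
/* e.g. 0xfffffff3 (-13) -> true (vspltisw), 0x00050005 -> true (vspltish),
   0x01010101 -> true (vspltisb), 0x00120012 -> false.  */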
2158
66180ff3
PB
2159/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2160 result is OP. Abort if it is not possible. */
d744e06e 2161
f676971a 2162rtx
66180ff3 2163gen_easy_altivec_constant (rtx op)
452a7d36 2164{
66180ff3
PB
2165 enum machine_mode mode = GET_MODE (op);
2166 int nunits = GET_MODE_NUNITS (mode);
2167 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2168 unsigned step = nunits / 4;
2169 unsigned copies = 1;
2170
2171 /* Start with a vspltisw. */
2172 if (vspltis_constant (op, step, copies))
2173 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2174
2175 /* Then try with a vspltish. */
2176 if (step == 1)
2177 copies <<= 1;
2178 else
2179 step >>= 1;
2180
2181 if (vspltis_constant (op, step, copies))
2182 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2183
2184 /* And finally a vspltisb. */
2185 if (step == 1)
2186 copies <<= 1;
2187 else
2188 step >>= 1;
2189
2190 if (vspltis_constant (op, step, copies))
2191 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2192
2193 gcc_unreachable ();
d744e06e
AH
2194}
2195
2196const char *
a2369ed3 2197output_vec_const_move (rtx *operands)
d744e06e
AH
2198{
2199 int cst, cst2;
2200 enum machine_mode mode;
2201 rtx dest, vec;
2202
2203 dest = operands[0];
2204 vec = operands[1];
d744e06e 2205 mode = GET_MODE (dest);
69ef87e2 2206
d744e06e
AH
2207 if (TARGET_ALTIVEC)
2208 {
66180ff3 2209 rtx splat_vec;
d744e06e
AH
2210 if (zero_constant (vec, mode))
2211 return "vxor %0,%0,%0";
37409796 2212
66180ff3
PB
2213 splat_vec = gen_easy_altivec_constant (vec);
2214 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2215 operands[1] = XEXP (splat_vec, 0);
2216 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2217 return "#";
bb8df8a6 2218
66180ff3 2219 switch (GET_MODE (splat_vec))
98ef3137 2220 {
37409796 2221 case V4SImode:
66180ff3 2222 return "vspltisw %0,%1";
c4ad648e 2223
37409796 2224 case V8HImode:
66180ff3 2225 return "vspltish %0,%1";
c4ad648e 2226
37409796 2227 case V16QImode:
66180ff3 2228 return "vspltisb %0,%1";
bb8df8a6 2229
37409796
NS
2230 default:
2231 gcc_unreachable ();
98ef3137 2232 }
69ef87e2
AH
2233 }
2234
37409796 2235 gcc_assert (TARGET_SPE);
bb8df8a6 2236
37409796
NS
2237 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2238 pattern of V1DI, V4HI, and V2SF.
2239
2240 FIXME: We should probably return # and add post reload
2241 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2242 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2243 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2244 operands[1] = CONST_VECTOR_ELT (vec, 0);
2245 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2246 if (cst == cst2)
2247 return "li %0,%1\n\tevmergelo %0,%0,%0";
2248 else
2249 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2250}
2251
7a4eca66
DE
2252/* Initialize vector TARGET to VALS. */
2253
2254void
2255rs6000_expand_vector_init (rtx target, rtx vals)
2256{
2257 enum machine_mode mode = GET_MODE (target);
2258 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2259 int n_elts = GET_MODE_NUNITS (mode);
2260 int n_var = 0, one_var = -1;
2261 bool all_same = true, all_const_zero = true;
2262 rtx x, mem;
2263 int i;
2264
2265 for (i = 0; i < n_elts; ++i)
2266 {
2267 x = XVECEXP (vals, 0, i);
2268 if (!CONSTANT_P (x))
2269 ++n_var, one_var = i;
2270 else if (x != CONST0_RTX (inner_mode))
2271 all_const_zero = false;
2272
2273 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2274 all_same = false;
2275 }
2276
2277 if (n_var == 0)
2278 {
2279 if (mode != V4SFmode && all_const_zero)
2280 {
2281 /* Zero register. */
2282 emit_insn (gen_rtx_SET (VOIDmode, target,
2283 gen_rtx_XOR (mode, target, target)));
2284 return;
2285 }
66180ff3 2286 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2287 {
2288 /* Splat immediate. */
66180ff3 2289 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2290 return;
2291 }
2292 else if (all_same)
2293 ; /* Splat vector element. */
2294 else
2295 {
2296 /* Load from constant pool. */
2297 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2298 return;
2299 }
2300 }
2301
2302 /* Store value to stack temp. Load vector element. Splat. */
2303 if (all_same)
2304 {
2305 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2306 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2307 XVECEXP (vals, 0, 0));
2308 x = gen_rtx_UNSPEC (VOIDmode,
2309 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2310 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2311 gen_rtvec (2,
2312 gen_rtx_SET (VOIDmode,
2313 target, mem),
2314 x)));
2315 x = gen_rtx_VEC_SELECT (inner_mode, target,
2316 gen_rtx_PARALLEL (VOIDmode,
2317 gen_rtvec (1, const0_rtx)));
2318 emit_insn (gen_rtx_SET (VOIDmode, target,
2319 gen_rtx_VEC_DUPLICATE (mode, x)));
2320 return;
2321 }
2322
2323 /* One field is non-constant. Load constant then overwrite
2324 varying field. */
2325 if (n_var == 1)
2326 {
2327 rtx copy = copy_rtx (vals);
2328
57b51d4d 2329 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2330 varying element. */
2331 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2332 rs6000_expand_vector_init (target, copy);
2333
2334 /* Insert variable. */
2335 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2336 return;
2337 }
2338
2339 /* Construct the vector in memory one field at a time
2340 and load the whole vector. */
2341 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2342 for (i = 0; i < n_elts; i++)
2343 emit_move_insn (adjust_address_nv (mem, inner_mode,
2344 i * GET_MODE_SIZE (inner_mode)),
2345 XVECEXP (vals, 0, i));
2346 emit_move_insn (target, mem);
2347}
2348
2349/* Set field ELT of TARGET to VAL. */
2350
2351void
2352rs6000_expand_vector_set (rtx target, rtx val, int elt)
2353{
2354 enum machine_mode mode = GET_MODE (target);
2355 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2356 rtx reg = gen_reg_rtx (mode);
2357 rtx mask, mem, x;
2358 int width = GET_MODE_SIZE (inner_mode);
2359 int i;
2360
2361 /* Load single variable value. */
2362 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2363 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2364 x = gen_rtx_UNSPEC (VOIDmode,
2365 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2366 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2367 gen_rtvec (2,
2368 gen_rtx_SET (VOIDmode,
2369 reg, mem),
2370 x)));
2371
2372 /* Linear sequence. */
2373 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2374 for (i = 0; i < 16; ++i)
2375 XVECEXP (mask, 0, i) = GEN_INT (i);
2376
2377 /* Set permute mask to insert element into target. */
2378 for (i = 0; i < width; ++i)
2379 XVECEXP (mask, 0, elt*width + i)
2380 = GEN_INT (i + 0x10);
2381 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2382 x = gen_rtx_UNSPEC (mode,
2383 gen_rtvec (3, target, reg,
2384 force_reg (V16QImode, x)),
2385 UNSPEC_VPERM);
2386 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2387}
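
/* A small standalone sketch of the vperm control vector built above,
   for exposition only: start from the identity byte selection 0..15
   (take every byte from the first vperm input, i.e. TARGET), then
   redirect the WIDTH bytes of element ELT to selectors 0x10..0x10+WIDTH-1,
   which name the leading bytes of the second vperm input.  */
static void
sketch_build_insert_permute (unsigned char mask[16], int elt, int width)
{
  int i;

  for (i = 0; i < 16; i++)
    mask[i] = i;                          /* byte i of the first input  */
  for (i = 0; i < width; i++)
    mask[elt * width + i] = 0x10 + i;     /* byte i of the second input */
}
/* For a V4SI insert at element 2 (elt == 2, width == 4) this gives
   0 1 2 3 4 5 6 7 16 17 18 19 12 13 14 15.  */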
2388
2389/* Extract field ELT from VEC into TARGET. */
2390
2391void
2392rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2393{
2394 enum machine_mode mode = GET_MODE (vec);
2395 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2396 rtx mem, x;
2397
2398 /* Allocate mode-sized buffer. */
2399 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2400
2401 /* Add offset to field within buffer matching vector element. */
2402 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2403
2404 /* Store single field into mode-sized buffer. */
2405 x = gen_rtx_UNSPEC (VOIDmode,
2406 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2407 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2408 gen_rtvec (2,
2409 gen_rtx_SET (VOIDmode,
2410 mem, vec),
2411 x)));
2412 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2413}
2414
0ba1b2ff
AM
2415/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2416 implement ANDing by the mask IN. */
2417void
a2369ed3 2418build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2419{
2420#if HOST_BITS_PER_WIDE_INT >= 64
2421 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2422 int shift;
2423
37409796 2424 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2425
2426 c = INTVAL (in);
2427 if (c & 1)
2428 {
2429 /* Assume c initially something like 0x00fff000000fffff. The idea
2430 is to rotate the word so that the middle ^^^^^^ group of zeros
2431 is at the MS end and can be cleared with an rldicl mask. We then
2432 rotate back and clear off the MS ^^ group of zeros with a
2433 second rldicl. */
2434 c = ~c; /* c == 0xff000ffffff00000 */
2435 lsb = c & -c; /* lsb == 0x0000000000100000 */
2436 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2437 c = ~c; /* c == 0x00fff000000fffff */
2438 c &= -lsb; /* c == 0x00fff00000000000 */
2439 lsb = c & -c; /* lsb == 0x0000100000000000 */
2440 c = ~c; /* c == 0xff000fffffffffff */
2441 c &= -lsb; /* c == 0xff00000000000000 */
2442 shift = 0;
2443 while ((lsb >>= 1) != 0)
2444 shift++; /* shift == 44 on exit from loop */
2445 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2446 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2447 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2448 }
2449 else
0ba1b2ff
AM
2450 {
2451 /* Assume c initially something like 0xff000f0000000000. The idea
2452 is to rotate the word so that the ^^^ middle group of zeros
2453 is at the LS end and can be cleared with an rldicr mask. We then
2454 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2455 a second rldicr. */
2456 lsb = c & -c; /* lsb == 0x0000010000000000 */
2457 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2458 c = ~c; /* c == 0x00fff0ffffffffff */
2459 c &= -lsb; /* c == 0x00fff00000000000 */
2460 lsb = c & -c; /* lsb == 0x0000100000000000 */
2461 c = ~c; /* c == 0xff000fffffffffff */
2462 c &= -lsb; /* c == 0xff00000000000000 */
2463 shift = 0;
2464 while ((lsb >>= 1) != 0)
2465 shift++; /* shift == 44 on exit from loop */
2466 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2467 m1 >>= shift; /* m1 == 0x0000000000000fff */
2468 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2469 }
2470
2471 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2472 masks will be all 1's. We are guaranteed more than one transition. */
2473 out[0] = GEN_INT (64 - shift);
2474 out[1] = GEN_INT (m1);
2475 out[2] = GEN_INT (shift);
2476 out[3] = GEN_INT (m2);
2477#else
045572c7
GK
2478 (void)in;
2479 (void)out;
37409796 2480 gcc_unreachable ();
0ba1b2ff 2481#endif
a260abc9
DE
2482}
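
/* A standalone sketch of the mask derivation above, restricted to the
   odd-mask (c & 1) branch and intended only to make the bit manipulation
   easy to replay; it mirrors the arithmetic and does not model the
   rldicl instructions themselves.  */
#include <stdint.h>

static void
sketch_mask64_2 (uint64_t c, int *shift, uint64_t *m1, uint64_t *m2)
{
  uint64_t lsb;

  c = ~c;
  lsb = c & -c;               /* lowest set bit of the complement */
  *m1 = -lsb;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  *shift = 0;
  while ((lsb >>= 1) != 0)
    (*shift)++;
  *m1 <<= 64 - *shift;
  *m1 = ~*m1;
  *m2 = ~c;
}
/* Feeding the worked example 0x00fff000000fffff through this yields
   shift == 44, m1 == 0x000000ffffffffff and m2 == 0x00ffffffffffffff,
   matching the running commentary in the function above.  */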
2483
54b695e7 2484/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2485
2486bool
54b695e7
AH
2487invalid_e500_subreg (rtx op, enum machine_mode mode)
2488{
2489 /* Reject (subreg:SI (reg:DF)). */
2490 if (GET_CODE (op) == SUBREG
2491 && mode == SImode
2492 && REG_P (SUBREG_REG (op))
2493 && GET_MODE (SUBREG_REG (op)) == DFmode)
2494 return true;
2495
2496 /* Reject (subreg:DF (reg:DI)). */
2497 if (GET_CODE (op) == SUBREG
2498 && mode == DFmode
2499 && REG_P (SUBREG_REG (op))
2500 && GET_MODE (SUBREG_REG (op)) == DImode)
2501 return true;
2502
2503 return false;
2504}
2505
95727fb8
AP
 2506/* Darwin and AIX increase natural record alignment to doubleword if the first
 2507 field is an FP double, while the FP fields themselves remain word aligned. */
2508
19d66194 2509unsigned int
95727fb8
AP
2510rs6000_special_round_type_align (tree type, int computed, int specified)
2511{
2512 tree field = TYPE_FIELDS (type);
95727fb8 2513
bb8df8a6 2514 /* Skip all non-field decls. */
85962ac8 2515 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2516 field = TREE_CHAIN (field);
2517
3ce5437a 2518 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
95727fb8
AP
2519 return MAX (computed, specified);
2520
2521 return MAX (MAX (computed, specified), 64);
2522}
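
/* A hedged illustration of the rule above (example shapes only; the
   final figure also depends on the ABI's ROUND_TYPE_ALIGN, which calls
   this function): only a record whose first field is a double has its
   alignment raised to 64 bits here.  */
struct sketch_first_double { double d; int i; };  /* raised to 64-bit alignment   */
struct sketch_other_order  { int i; double d; };  /* left to the computed default */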
2523
a4f6c312 2524/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
2525
2526int
f676971a 2527small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 2528 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 2529{
38c1f2d7 2530#if TARGET_ELF
5f59ecb7 2531 rtx sym_ref;
7509c759 2532
d9407988 2533 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 2534 return 0;
a54d04b7 2535
f607bc57 2536 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
2537 return 0;
2538
88228c4b
MM
2539 if (GET_CODE (op) == SYMBOL_REF)
2540 sym_ref = op;
2541
2542 else if (GET_CODE (op) != CONST
2543 || GET_CODE (XEXP (op, 0)) != PLUS
2544 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2545 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
2546 return 0;
2547
88228c4b 2548 else
dbf55e53
MM
2549 {
2550 rtx sum = XEXP (op, 0);
2551 HOST_WIDE_INT summand;
2552
2553 /* We have to be careful here, because it is the referenced address
c4ad648e 2554 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 2555 summand = INTVAL (XEXP (sum, 1));
307b599c 2556 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 2557 return 0;
dbf55e53
MM
2558
2559 sym_ref = XEXP (sum, 0);
2560 }
88228c4b 2561
20bfcd69 2562 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
2563#else
2564 return 0;
2565#endif
7509c759 2566}
46c07df8 2567
3a1f863f 2568/* Return true if either operand is a general purpose register. */
46c07df8 2569
3a1f863f
DE
2570bool
2571gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 2572{
3a1f863f
DE
2573 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2574 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
2575}
2576
9ebbca7d 2577\f
4d588c14
RH
2578/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2579
f676971a
EC
2580static int
2581constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 2582{
9390387d 2583 switch (GET_CODE (op))
9ebbca7d
GK
2584 {
2585 case SYMBOL_REF:
c4501e62
JJ
2586 if (RS6000_SYMBOL_REF_TLS_P (op))
2587 return 0;
2588 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
2589 {
2590 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2591 {
2592 *have_sym = 1;
2593 return 1;
2594 }
2595 else
2596 return 0;
2597 }
2598 else if (! strcmp (XSTR (op, 0), toc_label_name))
2599 {
2600 *have_toc = 1;
2601 return 1;
2602 }
2603 else
2604 return 0;
9ebbca7d
GK
2605 case PLUS:
2606 case MINUS:
c1f11548
DE
2607 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2608 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 2609 case CONST:
a4f6c312 2610 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 2611 case CONST_INT:
a4f6c312 2612 return 1;
9ebbca7d 2613 default:
a4f6c312 2614 return 0;
9ebbca7d
GK
2615 }
2616}
2617
4d588c14 2618static bool
a2369ed3 2619constant_pool_expr_p (rtx op)
9ebbca7d
GK
2620{
2621 int have_sym = 0;
2622 int have_toc = 0;
2623 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2624}
2625
48d72335 2626bool
a2369ed3 2627toc_relative_expr_p (rtx op)
9ebbca7d 2628{
4d588c14
RH
2629 int have_sym = 0;
2630 int have_toc = 0;
2631 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2632}
2633
4d588c14 2634bool
a2369ed3 2635legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
2636{
2637 return (TARGET_TOC
2638 && GET_CODE (x) == PLUS
2639 && GET_CODE (XEXP (x, 0)) == REG
2640 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2641 && constant_pool_expr_p (XEXP (x, 1)));
2642}
2643
0c380712
AM
2644bool
2645rs6000_legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
2646{
2647 return (DEFAULT_ABI == ABI_V4
2648 && !flag_pic && !TARGET_TOC
2649 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2650 && small_data_operand (x, mode));
2651}
2652
60cdabab
DE
 2653/* SPE offset addressing is limited to 5 bits' worth of doublewords. */
2654#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
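
/* A quick standalone check of the macro above, for exposition only: the
   offsets it accepts are exactly the multiples of 8 from 0 to 0xf8,
   i.e. a 5-bit doubleword index scaled by 8.  */
#include <assert.h>

static void
sketch_spe_offset_ok_examples (void)
{
  assert (SPE_CONST_OFFSET_OK (0));      /* doubleword 0            */
  assert (SPE_CONST_OFFSET_OK (8));      /* doubleword 1            */
  assert (SPE_CONST_OFFSET_OK (0xf8));   /* doubleword 31           */
  assert (!SPE_CONST_OFFSET_OK (4));     /* not doubleword aligned  */
  assert (!SPE_CONST_OFFSET_OK (0x100)); /* beyond the 5-bit range  */
}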
2655
76d2b81d
DJ
2656bool
2657rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2658{
2659 unsigned HOST_WIDE_INT offset, extra;
2660
2661 if (GET_CODE (x) != PLUS)
2662 return false;
2663 if (GET_CODE (XEXP (x, 0)) != REG)
2664 return false;
2665 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2666 return false;
60cdabab
DE
2667 if (legitimate_constant_pool_address_p (x))
2668 return true;
4d588c14
RH
2669 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2670 return false;
2671
2672 offset = INTVAL (XEXP (x, 1));
2673 extra = 0;
2674 switch (mode)
2675 {
2676 case V16QImode:
2677 case V8HImode:
2678 case V4SFmode:
2679 case V4SImode:
7a4eca66
DE
2680 /* AltiVec vector modes. Only reg+reg addressing is valid and
2681 constant offset zero should not occur due to canonicalization.
2682 Allow any offset when not strict before reload. */
2683 return !strict;
4d588c14
RH
2684
2685 case V4HImode:
2686 case V2SImode:
2687 case V1DImode:
2688 case V2SFmode:
2689 /* SPE vector modes. */
2690 return SPE_CONST_OFFSET_OK (offset);
2691
2692 case DFmode:
4d4cbc0e
AH
2693 if (TARGET_E500_DOUBLE)
2694 return SPE_CONST_OFFSET_OK (offset);
2695
4d588c14 2696 case DImode:
54b695e7
AH
2697 /* On e500v2, we may have:
2698
2699 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2700
2701 Which gets addressed with evldd instructions. */
2702 if (TARGET_E500_DOUBLE)
2703 return SPE_CONST_OFFSET_OK (offset);
2704
3364872d 2705 if (mode == DFmode || !TARGET_POWERPC64)
4d588c14
RH
2706 extra = 4;
2707 else if (offset & 3)
2708 return false;
2709 break;
2710
2711 case TFmode:
2712 case TImode:
3364872d 2713 if (mode == TFmode || !TARGET_POWERPC64)
4d588c14
RH
2714 extra = 12;
2715 else if (offset & 3)
2716 return false;
2717 else
2718 extra = 8;
2719 break;
2720
2721 default:
2722 break;
2723 }
2724
b1917422
AM
2725 offset += 0x8000;
2726 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
2727}
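
/* Exposition-only sketch of the unsigned range trick used just above:
   biasing by 0x8000 and comparing against 0x10000 accepts exactly the
   signed 16-bit displacements -0x8000 .. 0x7fff (EXTRA then reserves
   room at the top for the later words of a multi-word access).  */
#include <stdbool.h>
#include <stdint.h>

static bool
sketch_fits_signed_16bit (int64_t offset)
{
  uint64_t biased = (uint64_t) offset + 0x8000;
  return biased < 0x10000;   /* true iff -0x8000 <= offset <= 0x7fff */
}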
2728
2729static bool
a2369ed3 2730legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
2731{
2732 rtx op0, op1;
2733
2734 if (GET_CODE (x) != PLUS)
2735 return false;
850e8d3d 2736
4d588c14
RH
2737 op0 = XEXP (x, 0);
2738 op1 = XEXP (x, 1);
2739
2740 if (!REG_P (op0) || !REG_P (op1))
2741 return false;
2742
2743 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2744 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2745 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2746 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
9ebbca7d
GK
2747}
2748
48d72335 2749inline bool
a2369ed3 2750legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
2751{
2752 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2753}
2754
48d72335 2755bool
4c81e946
FJ
2756macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2757{
c4ad648e 2758 if (!TARGET_MACHO || !flag_pic
9390387d 2759 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
2760 return false;
2761 x = XEXP (x, 0);
4c81e946
FJ
2762
2763 if (GET_CODE (x) != LO_SUM)
2764 return false;
2765 if (GET_CODE (XEXP (x, 0)) != REG)
2766 return false;
2767 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2768 return false;
2769 x = XEXP (x, 1);
2770
2771 return CONSTANT_P (x);
2772}
2773
4d588c14 2774static bool
a2369ed3 2775legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
2776{
2777 if (GET_CODE (x) != LO_SUM)
2778 return false;
2779 if (GET_CODE (XEXP (x, 0)) != REG)
2780 return false;
2781 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2782 return false;
54b695e7
AH
2783 /* Restrict addressing for DI because of our SUBREG hackery. */
2784 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
f82f556d 2785 return false;
4d588c14
RH
2786 x = XEXP (x, 1);
2787
8622e235 2788 if (TARGET_ELF || TARGET_MACHO)
4d588c14 2789 {
a29077da 2790 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
2791 return false;
2792 if (TARGET_TOC)
2793 return false;
2794 if (GET_MODE_NUNITS (mode) != 1)
2795 return false;
5e5f01b9 2796 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
2797 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2798 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
2799 return false;
2800
2801 return CONSTANT_P (x);
2802 }
2803
2804 return false;
2805}
2806
2807
9ebbca7d
GK
2808/* Try machine-dependent ways of modifying an illegitimate address
2809 to be legitimate. If we find one, return the new, valid address.
2810 This is used from only one place: `memory_address' in explow.c.
2811
a4f6c312
SS
2812 OLDX is the address as it was before break_out_memory_refs was
2813 called. In some cases it is useful to look at this to decide what
2814 needs to be done.
9ebbca7d 2815
a4f6c312 2816 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 2817
a4f6c312
SS
2818 It is always safe for this function to do nothing. It exists to
2819 recognize opportunities to optimize the output.
9ebbca7d
GK
2820
2821 On RS/6000, first check for the sum of a register with a constant
2822 integer that is out of range. If so, generate code to add the
2823 constant with the low-order 16 bits masked to the register and force
2824 this result into another register (this can be done with `cau').
2825 Then generate an address of REG+(CONST&0xffff), allowing for the
2826 possibility of bit 16 being a one.
2827
2828 Then check for the sum of a register and something not constant, try to
2829 load the other things into a register and return the sum. */
4d588c14 2830
9ebbca7d 2831rtx
a2369ed3
DJ
2832rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2833 enum machine_mode mode)
0ac081f6 2834{
c4501e62
JJ
2835 if (GET_CODE (x) == SYMBOL_REF)
2836 {
2837 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2838 if (model != 0)
2839 return rs6000_legitimize_tls_address (x, model);
2840 }
2841
f676971a 2842 if (GET_CODE (x) == PLUS
9ebbca7d
GK
2843 && GET_CODE (XEXP (x, 0)) == REG
2844 && GET_CODE (XEXP (x, 1)) == CONST_INT
2845 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 2846 {
9ebbca7d
GK
2847 HOST_WIDE_INT high_int, low_int;
2848 rtx sum;
a65c591c
DE
2849 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2850 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
2851 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2852 GEN_INT (high_int)), 0);
2853 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2854 }
f676971a 2855 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
2856 && GET_CODE (XEXP (x, 0)) == REG
2857 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 2858 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
2859 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2860 || TARGET_POWERPC64
54b695e7
AH
2861 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2862 && mode != TFmode))
9ebbca7d
GK
2863 && (TARGET_POWERPC64 || mode != DImode)
2864 && mode != TImode)
2865 {
2866 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2867 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2868 }
0ac081f6
AH
2869 else if (ALTIVEC_VECTOR_MODE (mode))
2870 {
2871 rtx reg;
2872
2873 /* Make sure both operands are registers. */
2874 if (GET_CODE (x) == PLUS)
9f85ed45 2875 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
2876 force_reg (Pmode, XEXP (x, 1)));
2877
2878 reg = force_reg (Pmode, x);
2879 return reg;
2880 }
4d4cbc0e 2881 else if (SPE_VECTOR_MODE (mode)
54b695e7
AH
2882 || (TARGET_E500_DOUBLE && (mode == DFmode
2883 || mode == DImode)))
a3170dc6 2884 {
54b695e7
AH
2885 if (mode == DImode)
2886 return NULL_RTX;
a3170dc6
AH
2887 /* We accept [reg + reg] and [reg + OFFSET]. */
2888
2889 if (GET_CODE (x) == PLUS)
c4ad648e
AM
2890 {
2891 rtx op1 = XEXP (x, 0);
2892 rtx op2 = XEXP (x, 1);
a3170dc6 2893
c4ad648e 2894 op1 = force_reg (Pmode, op1);
a3170dc6 2895
c4ad648e
AM
2896 if (GET_CODE (op2) != REG
2897 && (GET_CODE (op2) != CONST_INT
2898 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2899 op2 = force_reg (Pmode, op2);
a3170dc6 2900
c4ad648e
AM
2901 return gen_rtx_PLUS (Pmode, op1, op2);
2902 }
a3170dc6
AH
2903
2904 return force_reg (Pmode, x);
2905 }
f1384257
AM
2906 else if (TARGET_ELF
2907 && TARGET_32BIT
2908 && TARGET_NO_TOC
2909 && ! flag_pic
9ebbca7d 2910 && GET_CODE (x) != CONST_INT
f676971a 2911 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 2912 && CONSTANT_P (x)
6ac7bf2c
GK
2913 && GET_MODE_NUNITS (mode) == 1
2914 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 2915 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
2916 {
2917 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
2918 emit_insn (gen_elf_high (reg, x));
2919 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 2920 }
ee890fe2
SS
2921 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2922 && ! flag_pic
ab82a49f
AP
2923#if TARGET_MACHO
2924 && ! MACHO_DYNAMIC_NO_PIC_P
2925#endif
ee890fe2 2926 && GET_CODE (x) != CONST_INT
f676971a 2927 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 2928 && CONSTANT_P (x)
f82f556d 2929 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 2930 && mode != DImode
ee890fe2
SS
2931 && mode != TImode)
2932 {
2933 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
2934 emit_insn (gen_macho_high (reg, x));
2935 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 2936 }
f676971a 2937 else if (TARGET_TOC
4d588c14 2938 && constant_pool_expr_p (x)
a9098fd0 2939 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
2940 {
2941 return create_TOC_reference (x);
2942 }
2943 else
2944 return NULL_RTX;
2945}
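
/* Exposition-only sketch of the high/low split performed near the top of
   this function: the low part is the sign-extended bottom 16 bits (it
   fits the 16-bit D field of the memory access), and the high part is
   the remainder, which addis can materialize; the two always add back to
   the original value.  */
#include <stdint.h>

static void
sketch_split_high_low (int64_t val, int64_t *high, int64_t *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;   /* sign-extend low 16 bits */
  *high = val - *low;                          /* multiple of 0x10000     */
}
/* e.g. val == 0x1234abcd gives low == -0x5433 and high == 0x12350000,
   so addis supplies 0x1235 and the displacement supplies -0x5433.  */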
258bfae2 2946
fdbe66f2 2947/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
2948 We need to emit DTP-relative relocations. */
2949
fdbe66f2 2950static void
c973d557
JJ
2951rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2952{
2953 switch (size)
2954 {
2955 case 4:
2956 fputs ("\t.long\t", file);
2957 break;
2958 case 8:
2959 fputs (DOUBLE_INT_ASM_OP, file);
2960 break;
2961 default:
37409796 2962 gcc_unreachable ();
c973d557
JJ
2963 }
2964 output_addr_const (file, x);
2965 fputs ("@dtprel+0x8000", file);
2966}
2967
c4501e62
JJ
2968/* Construct the SYMBOL_REF for the tls_get_addr function. */
2969
2970static GTY(()) rtx rs6000_tls_symbol;
2971static rtx
863d938c 2972rs6000_tls_get_addr (void)
c4501e62
JJ
2973{
2974 if (!rs6000_tls_symbol)
2975 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2976
2977 return rs6000_tls_symbol;
2978}
2979
2980/* Construct the SYMBOL_REF for TLS GOT references. */
2981
2982static GTY(()) rtx rs6000_got_symbol;
2983static rtx
863d938c 2984rs6000_got_sym (void)
c4501e62
JJ
2985{
2986 if (!rs6000_got_symbol)
2987 {
2988 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2989 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2990 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 2991 }
c4501e62
JJ
2992
2993 return rs6000_got_symbol;
2994}
2995
2996/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2997 this (thread-local) address. */
2998
2999static rtx
a2369ed3 3000rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3001{
3002 rtx dest, insn;
3003
3004 dest = gen_reg_rtx (Pmode);
3005 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3006 {
3007 rtx tlsreg;
3008
3009 if (TARGET_64BIT)
3010 {
3011 tlsreg = gen_rtx_REG (Pmode, 13);
3012 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3013 }
3014 else
3015 {
3016 tlsreg = gen_rtx_REG (Pmode, 2);
3017 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3018 }
3019 emit_insn (insn);
3020 }
3021 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3022 {
3023 rtx tlsreg, tmp;
3024
3025 tmp = gen_reg_rtx (Pmode);
3026 if (TARGET_64BIT)
3027 {
3028 tlsreg = gen_rtx_REG (Pmode, 13);
3029 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3030 }
3031 else
3032 {
3033 tlsreg = gen_rtx_REG (Pmode, 2);
3034 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3035 }
3036 emit_insn (insn);
3037 if (TARGET_64BIT)
3038 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3039 else
3040 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3041 emit_insn (insn);
3042 }
3043 else
3044 {
3045 rtx r3, got, tga, tmp1, tmp2, eqv;
3046
4fed8f8f
AM
3047 /* We currently use relocations like @got@tlsgd for tls, which
3048 means the linker will handle allocation of tls entries, placing
3049 them in the .got section. So use a pointer to the .got section,
3050 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3051 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3052 if (TARGET_64BIT)
972f427b 3053 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3054 else
3055 {
3056 if (flag_pic == 1)
3057 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3058 else
3059 {
3060 rtx gsym = rs6000_got_sym ();
3061 got = gen_reg_rtx (Pmode);
3062 if (flag_pic == 0)
3063 rs6000_emit_move (got, gsym, Pmode);
3064 else
3065 {
ccbca5e4 3066 rtx tempLR, tmp3, mem;
c4501e62
JJ
3067 rtx first, last;
3068
c4501e62
JJ
3069 tempLR = gen_reg_rtx (Pmode);
3070 tmp1 = gen_reg_rtx (Pmode);
3071 tmp2 = gen_reg_rtx (Pmode);
3072 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3073 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3074
ccbca5e4 3075 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
c4501e62
JJ
3076 emit_move_insn (tmp1, tempLR);
3077 emit_move_insn (tmp2, mem);
3078 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3079 last = emit_move_insn (got, tmp3);
3080 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3081 REG_NOTES (last));
3082 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3083 REG_NOTES (first));
3084 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3085 REG_NOTES (last));
3086 }
3087 }
3088 }
3089
3090 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3091 {
3092 r3 = gen_rtx_REG (Pmode, 3);
3093 if (TARGET_64BIT)
3094 insn = gen_tls_gd_64 (r3, got, addr);
3095 else
3096 insn = gen_tls_gd_32 (r3, got, addr);
3097 start_sequence ();
3098 emit_insn (insn);
3099 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3100 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3101 insn = emit_call_insn (insn);
3102 CONST_OR_PURE_CALL_P (insn) = 1;
3103 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3104 insn = get_insns ();
3105 end_sequence ();
3106 emit_libcall_block (insn, dest, r3, addr);
3107 }
3108 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3109 {
3110 r3 = gen_rtx_REG (Pmode, 3);
3111 if (TARGET_64BIT)
3112 insn = gen_tls_ld_64 (r3, got);
3113 else
3114 insn = gen_tls_ld_32 (r3, got);
3115 start_sequence ();
3116 emit_insn (insn);
3117 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3118 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3119 insn = emit_call_insn (insn);
3120 CONST_OR_PURE_CALL_P (insn) = 1;
3121 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3122 insn = get_insns ();
3123 end_sequence ();
3124 tmp1 = gen_reg_rtx (Pmode);
3125 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3126 UNSPEC_TLSLD);
3127 emit_libcall_block (insn, tmp1, r3, eqv);
3128 if (rs6000_tls_size == 16)
3129 {
3130 if (TARGET_64BIT)
3131 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3132 else
3133 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3134 }
3135 else if (rs6000_tls_size == 32)
3136 {
3137 tmp2 = gen_reg_rtx (Pmode);
3138 if (TARGET_64BIT)
3139 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3140 else
3141 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3142 emit_insn (insn);
3143 if (TARGET_64BIT)
3144 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3145 else
3146 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3147 }
3148 else
3149 {
3150 tmp2 = gen_reg_rtx (Pmode);
3151 if (TARGET_64BIT)
3152 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3153 else
3154 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3155 emit_insn (insn);
3156 insn = gen_rtx_SET (Pmode, dest,
3157 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3158 }
3159 emit_insn (insn);
3160 }
3161 else
3162 {
3163 /* IE, or 64 bit offset LE. */
3164 tmp2 = gen_reg_rtx (Pmode);
3165 if (TARGET_64BIT)
3166 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3167 else
3168 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3169 emit_insn (insn);
3170 if (TARGET_64BIT)
3171 insn = gen_tls_tls_64 (dest, tmp2, addr);
3172 else
3173 insn = gen_tls_tls_32 (dest, tmp2, addr);
3174 emit_insn (insn);
3175 }
3176 }
3177
3178 return dest;
3179}
3180
c4501e62
JJ
3181/* Return 1 if X contains a thread-local symbol. */
3182
3183bool
a2369ed3 3184rs6000_tls_referenced_p (rtx x)
c4501e62 3185{
cd413cab
AP
3186 if (! TARGET_HAVE_TLS)
3187 return false;
3188
c4501e62
JJ
3189 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3190}
3191
3192/* Return 1 if *X is a thread-local symbol. This is the same as
3193 rs6000_tls_symbol_ref except for the type of the unused argument. */
3194
9390387d 3195static int
a2369ed3 3196rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3197{
3198 return RS6000_SYMBOL_REF_TLS_P (*x);
3199}
3200
24ea750e
DJ
3201/* The convention appears to be to define this wherever it is used.
3202 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3203 is now used here. */
3204#ifndef REG_MODE_OK_FOR_BASE_P
3205#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3206#endif
3207
3208/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3209 replace the input X, or the original X if no replacement is called for.
3210 The output parameter *WIN is 1 if the calling macro should goto WIN,
3211 0 if it should not.
3212
3213 For RS/6000, we wish to handle large displacements off a base
3214 register by splitting the addend across an addi/addis pair and the mem insn.
3215 This cuts the number of extra insns needed from 3 to 1.
3216
3217 On Darwin, we use this to generate code for floating point constants.
3218 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3219 The Darwin code is inside #if TARGET_MACHO because only then is
3220 machopic_function_base_name() defined. */
3221rtx
f676971a 3222rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3223 int opnum, int type,
3224 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3225{
f676971a 3226 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3227 if (GET_CODE (x) == PLUS
3228 && GET_CODE (XEXP (x, 0)) == PLUS
3229 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3230 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3231 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3232 {
3233 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3234 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3235 opnum, (enum reload_type)type);
24ea750e
DJ
3236 *win = 1;
3237 return x;
3238 }
3deb2758 3239
24ea750e
DJ
3240#if TARGET_MACHO
3241 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3242 && GET_CODE (x) == LO_SUM
3243 && GET_CODE (XEXP (x, 0)) == PLUS
3244 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3245 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3246 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3247 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3248 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3249 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3250 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3251 {
3252 /* Result of a previous invocation of this function on a Darwin
6f317ef3 3253 floating-point constant. */
24ea750e 3254 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3255 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3256 opnum, (enum reload_type)type);
24ea750e
DJ
3257 *win = 1;
3258 return x;
3259 }
3260#endif
4937d02d
DE
3261
3262 /* Force ld/std non-word aligned offset into base register by wrapping
3263 in offset 0. */
3264 if (GET_CODE (x) == PLUS
3265 && GET_CODE (XEXP (x, 0)) == REG
3266 && REGNO (XEXP (x, 0)) < 32
3267 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3268 && GET_CODE (XEXP (x, 1)) == CONST_INT
3269 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3270 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3271 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3272 && TARGET_POWERPC64)
3273 {
3274 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3275 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3276 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3277 opnum, (enum reload_type) type);
3278 *win = 1;
3279 return x;
3280 }
3281
24ea750e
DJ
3282 if (GET_CODE (x) == PLUS
3283 && GET_CODE (XEXP (x, 0)) == REG
3284 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3285 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3286 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3287 && !SPE_VECTOR_MODE (mode)
54b695e7
AH
3288 && !(TARGET_E500_DOUBLE && (mode == DFmode
3289 || mode == DImode))
78c875e8 3290 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3291 {
3292 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3293 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3294 HOST_WIDE_INT high
c4ad648e 3295 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3296
3297 /* Check for 32-bit overflow. */
3298 if (high + low != val)
c4ad648e 3299 {
24ea750e
DJ
3300 *win = 0;
3301 return x;
3302 }
3303
3304 /* Reload the high part into a base reg; leave the low part
c4ad648e 3305 in the mem directly. */
24ea750e
DJ
3306
3307 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3308 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3309 GEN_INT (high)),
3310 GEN_INT (low));
24ea750e
DJ
3311
3312 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3313 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3314 opnum, (enum reload_type)type);
24ea750e
DJ
3315 *win = 1;
3316 return x;
3317 }
4937d02d 3318
24ea750e 3319 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3320 && !ALTIVEC_VECTOR_MODE (mode)
8308679f
DE
3321#if TARGET_MACHO
3322 && DEFAULT_ABI == ABI_DARWIN
a29077da 3323 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3324#else
3325 && DEFAULT_ABI == ABI_V4
3326 && !flag_pic
3327#endif
0d8c1c97
AM
3328 /* Don't do this for TFmode, since the result isn't offsettable.
3329 The same goes for DImode without 64-bit gprs. */
3330 && mode != TFmode
3331 && (mode != DImode || TARGET_POWERPC64))
24ea750e 3332 {
8308679f 3333#if TARGET_MACHO
a29077da
GK
3334 if (flag_pic)
3335 {
3336 rtx offset = gen_rtx_CONST (Pmode,
3337 gen_rtx_MINUS (Pmode, x,
11abc112 3338 machopic_function_base_sym ()));
a29077da
GK
3339 x = gen_rtx_LO_SUM (GET_MODE (x),
3340 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3341 gen_rtx_HIGH (Pmode, offset)), offset);
3342 }
3343 else
8308679f 3344#endif
a29077da 3345 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3346 gen_rtx_HIGH (Pmode, x), x);
a29077da 3347
24ea750e 3348 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3349 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3350 opnum, (enum reload_type)type);
24ea750e
DJ
3351 *win = 1;
3352 return x;
3353 }
4937d02d 3354
dec1f3aa
DE
3355 /* Reload an offset address wrapped by an AND that represents the
3356 masking of the lower bits. Strip the outer AND and let reload
3357 convert the offset address into an indirect address. */
3358 if (TARGET_ALTIVEC
3359 && ALTIVEC_VECTOR_MODE (mode)
3360 && GET_CODE (x) == AND
3361 && GET_CODE (XEXP (x, 0)) == PLUS
3362 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3363 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3364 && GET_CODE (XEXP (x, 1)) == CONST_INT
3365 && INTVAL (XEXP (x, 1)) == -16)
3366 {
3367 x = XEXP (x, 0);
3368 *win = 1;
3369 return x;
3370 }
3371
24ea750e 3372 if (TARGET_TOC
4d588c14 3373 && constant_pool_expr_p (x)
c1f11548 3374 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e
DJ
3375 {
3376 (x) = create_TOC_reference (x);
3377 *win = 1;
3378 return x;
3379 }
3380 *win = 0;
3381 return x;
f676971a 3382}
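/* Worked illustration of the splitting done above (editorial sketch, not
   part of the original file): for val = 0x1234abcd the computation gives
   low = ((0xabcd ^ 0x8000) - 0x8000) = -0x5433 and high = 0x12350000,
   and high + low == val, so the high part can be added with a single
   addis while the low part fits the signed 16-bit D field of the memory
   insn.  A minimal stand-alone version of the arithmetic (the helper
   name is illustrative only):  */
static void
rs6000_split_offset_example (HOST_WIDE_INT val,
			     HOST_WIDE_INT *high, HOST_WIDE_INT *low)
{
  /* Sign-extend the low 16 bits.  */
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  /* What remains, sign-extended to 32 bits; high + low == val whenever
     the original value fits in 32 bits.  */
  *high = (((val - *low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
}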
24ea750e 3383
258bfae2
FS
3384/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3385 that is a valid memory address for an instruction.
3386 The MODE argument is the machine mode for the MEM expression
3387 that wants to use this address.
3388
3389 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3390 refers to a constant pool entry of an address (or the sum of it
3391 plus a constant), a short (16-bit signed) constant plus a register,
3392 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3393 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3394 we must ensure that both words are addressable, or on PowerPC64
3395 that the offset is word-aligned.
3396
3397 For modes spanning multiple registers (DFmode in 32-bit GPRs,
76d2b81d 3398 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
258bfae2
FS
3399 adjacent memory cells are accessed by adding word-sized offsets
3400 during assembly output. */
3401int
a2369ed3 3402rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3403{
850e8d3d
DN
3404 /* If this is an unaligned stvx/lvx type address, discard the outer AND. */
3405 if (TARGET_ALTIVEC
3406 && ALTIVEC_VECTOR_MODE (mode)
3407 && GET_CODE (x) == AND
3408 && GET_CODE (XEXP (x, 1)) == CONST_INT
3409 && INTVAL (XEXP (x, 1)) == -16)
3410 x = XEXP (x, 0);
3411
c4501e62
JJ
3412 if (RS6000_SYMBOL_REF_TLS_P (x))
3413 return 0;
4d588c14 3414 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3415 return 1;
3416 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3417 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3418 && !SPE_VECTOR_MODE (mode)
54b695e7
AH
3419 /* Restrict addressing for DI because of our SUBREG hackery. */
3420 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
258bfae2 3421 && TARGET_UPDATE
4d588c14 3422 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3423 return 1;
0c380712 3424 if (rs6000_legitimate_small_data_p (mode, x))
258bfae2 3425 return 1;
4d588c14 3426 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3427 return 1;
3428 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3429 if (! reg_ok_strict
3430 && GET_CODE (x) == PLUS
3431 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3432 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3433 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3434 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3435 return 1;
76d2b81d 3436 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3437 return 1;
3438 if (mode != TImode
76d2b81d 3439 && mode != TFmode
a3170dc6
AH
3440 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3441 || TARGET_POWERPC64
4d4cbc0e 3442 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3443 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3444 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3445 return 1;
4d588c14 3446 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3447 return 1;
3448 return 0;
3449}
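/* For illustration (not from the original source), the forms accepted
   above roughly correspond to:
     (reg r9)                            lwz r3,0(r9)     register indirect
     (plus (reg r9) (const_int 8))       lwz r3,8(r9)     reg + 16-bit offset
     (plus (reg r9) (reg r10))           lwzx r3,r9,r10   indexed (reg + reg)
     (lo_sum (reg r9) (symbol_ref "x"))  lwz r3,x@l(r9)   LO_SUM absolute/small data
   with PRE_INC/PRE_DEC additionally mapping to the update forms
   (lwzu, stwu, ...) when TARGET_UPDATE is set.  */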
4d588c14
RH
3450
3451/* Go to LABEL if ADDR (a legitimate address expression)
3452 has an effect that depends on the machine mode it is used for.
3453
3454 On the RS/6000 this is true of any integral offset (since AltiVec
3455 modes don't allow them) and of pre-increment or pre-decrement addresses.
3456
3457 ??? Except that due to conceptual problems in offsettable_address_p
3458 we can't really report the problems of integral offsets. So leave
f676971a 3459 this assuming that the adjustable offset must be valid for the
4d588c14
RH
3460 sub-words of a TFmode operand, which is what we had before. */
3461
3462bool
a2369ed3 3463rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
3464{
3465 switch (GET_CODE (addr))
3466 {
3467 case PLUS:
3468 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3469 {
3470 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3471 return val + 12 + 0x8000 >= 0x10000;
3472 }
3473 break;
3474
3475 case LO_SUM:
3476 return true;
3477
3478 case PRE_INC:
3479 case PRE_DEC:
3480 return TARGET_UPDATE;
3481
3482 default:
3483 break;
3484 }
3485
3486 return false;
3487}
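/* Illustration (not from the original source): with a constant offset
   of 0x7ff0 the last word of a 16-byte TFmode operand lies at 0x7ffc,
   still within the signed 16-bit displacement range, so the address is
   not mode dependent; at offset 0x7ff8 the last word would be at
   0x8004, which overflows the D field, and the test above
   (val + 12 + 0x8000 >= 0x10000) reports the dependence.  */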
d8ecbcdb
AH
3488
3489/* Return number of consecutive hard regs needed starting at reg REGNO
3490 to hold something of mode MODE.
3491 This is ordinarily the length in words of a value of mode MODE
3492 but can be less for certain modes in special long registers.
3493
3494 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3495 scalar instructions. The upper 32 bits are only available to the
3496 SIMD instructions.
3497
3498 POWER and PowerPC GPRs hold 32 bits worth;
3499 PowerPC64 GPRs and FPRs hold 64 bits worth. */
3500
3501int
3502rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3503{
3504 if (FP_REGNO_P (regno))
3505 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3506
4d4cbc0e
AH
3507 if (TARGET_E500_DOUBLE && mode == DFmode)
3508 return 1;
3509
d8ecbcdb
AH
3510 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3511 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3512
3513 if (ALTIVEC_REGNO_P (regno))
3514 return
3515 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3516
3517 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3518}
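/* Worked examples of the computation above (illustration only):
     DFmode   (8 bytes):  1 FPR; 2 GPRs in 32-bit mode, 1 GPR in 64-bit mode.
     TFmode  (16 bytes):  2 FPRs; 4 GPRs in 32-bit mode, 2 in 64-bit mode.
     V4SImode (16 bytes): 1 AltiVec register.  */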
2aa4498c
AH
3519
3520/* Change register usage conditional on target flags. */
3521void
3522rs6000_conditional_register_usage (void)
3523{
3524 int i;
3525
3526 /* Set MQ register fixed (already call_used) if not POWER
3527 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3528 be allocated. */
3529 if (! TARGET_POWER)
3530 fixed_regs[64] = 1;
3531
7c9ac5c0 3532 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
3533 if (TARGET_64BIT)
3534 fixed_regs[13] = call_used_regs[13]
3535 = call_really_used_regs[13] = 1;
3536
3537 /* Conditionally disable FPRs. */
3538 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3539 for (i = 32; i < 64; i++)
3540 fixed_regs[i] = call_used_regs[i]
c4ad648e 3541 = call_really_used_regs[i] = 1;
2aa4498c 3542
7c9ac5c0
PH
3543 /* The TOC register is not killed across calls in a way that is
3544 visible to the compiler. */
3545 if (DEFAULT_ABI == ABI_AIX)
3546 call_really_used_regs[2] = 0;
3547
2aa4498c
AH
3548 if (DEFAULT_ABI == ABI_V4
3549 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3550 && flag_pic == 2)
3551 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3552
3553 if (DEFAULT_ABI == ABI_V4
3554 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3555 && flag_pic == 1)
3556 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3557 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3558 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3559
3560 if (DEFAULT_ABI == ABI_DARWIN
3561 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 3562 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
3563 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3564 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3565
b4db40bf
JJ
3566 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3567 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3568 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3569
2aa4498c
AH
3570 if (TARGET_ALTIVEC)
3571 global_regs[VSCR_REGNO] = 1;
3572
3573 if (TARGET_SPE)
3574 {
3575 global_regs[SPEFSCR_REGNO] = 1;
3576 fixed_regs[FIXED_SCRATCH]
c4ad648e 3577 = call_used_regs[FIXED_SCRATCH]
2aa4498c
AH
3578 = call_really_used_regs[FIXED_SCRATCH] = 1;
3579 }
3580
3581 if (! TARGET_ALTIVEC)
3582 {
3583 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3584 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3585 call_really_used_regs[VRSAVE_REGNO] = 1;
3586 }
3587
3588 if (TARGET_ALTIVEC_ABI)
3589 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3590 call_used_regs[i] = call_really_used_regs[i] = 1;
3591}
fb4d4348 3592\f
a4f6c312
SS
3593/* Try to output insns to set TARGET equal to the constant C if it can
3594 be done in less than N insns. Do all computations in MODE.
3595 Returns the place where the output has been placed if it can be
3596 done and the insns have been emitted. If it would take more than N
3597 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
3598
3599rtx
f676971a 3600rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 3601 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 3602{
af8cb5c5 3603 rtx result, insn, set;
2bfcf297
DB
3604 HOST_WIDE_INT c0, c1;
3605
37409796 3606 switch (mode)
2bfcf297 3607 {
37409796
NS
3608 case QImode:
3609 case HImode:
2bfcf297 3610 if (dest == NULL)
c4ad648e 3611 dest = gen_reg_rtx (mode);
2bfcf297
DB
3612 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3613 return dest;
bb8df8a6 3614
37409796 3615 case SImode:
af8cb5c5 3616 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
bb8df8a6 3617
af8cb5c5
DE
3618 emit_insn (gen_rtx_SET (VOIDmode, result,
3619 GEN_INT (INTVAL (source)
3620 & (~ (HOST_WIDE_INT) 0xffff))));
3621 emit_insn (gen_rtx_SET (VOIDmode, dest,
3622 gen_rtx_IOR (SImode, result,
3623 GEN_INT (INTVAL (source) & 0xffff))));
3624 result = dest;
37409796
NS
3625 break;
3626
3627 case DImode:
3628 switch (GET_CODE (source))
af8cb5c5 3629 {
37409796 3630 case CONST_INT:
af8cb5c5
DE
3631 c0 = INTVAL (source);
3632 c1 = -(c0 < 0);
37409796 3633 break;
bb8df8a6 3634
37409796 3635 case CONST_DOUBLE:
2bfcf297 3636#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
3637 c0 = CONST_DOUBLE_LOW (source);
3638 c1 = -(c0 < 0);
2bfcf297 3639#else
af8cb5c5
DE
3640 c0 = CONST_DOUBLE_LOW (source);
3641 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 3642#endif
37409796
NS
3643 break;
3644
3645 default:
3646 gcc_unreachable ();
af8cb5c5 3647 }
af8cb5c5
DE
3648
3649 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
3650 break;
3651
3652 default:
3653 gcc_unreachable ();
2bfcf297 3654 }
2bfcf297 3655
af8cb5c5
DE
3656 insn = get_last_insn ();
3657 set = single_set (insn);
3658 if (! CONSTANT_P (SET_SRC (set)))
3659 set_unique_reg_note (insn, REG_EQUAL, source);
3660
3661 return result;
2bfcf297
DB
3662}
3663
3664/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3665 fall back to a straightforward decomposition. We do this to avoid
3666 exponential run times encountered when looking for longer sequences
3667 with rs6000_emit_set_const. */
3668static rtx
a2369ed3 3669rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
3670{
3671 if (!TARGET_POWERPC64)
3672 {
3673 rtx operand1, operand2;
3674
3675 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3676 DImode);
3677 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3678 DImode);
3679 emit_move_insn (operand1, GEN_INT (c1));
3680 emit_move_insn (operand2, GEN_INT (c2));
3681 }
3682 else
3683 {
bc06712d 3684 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 3685
bc06712d 3686 ud1 = c1 & 0xffff;
f921c9c9 3687 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 3688#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 3689 c2 = c1 >> 32;
2bfcf297 3690#endif
bc06712d 3691 ud3 = c2 & 0xffff;
f921c9c9 3692 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 3693
f676971a 3694 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 3695 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 3696 {
bc06712d 3697 if (ud1 & 0x8000)
b78d48dd 3698 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
3699 else
3700 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 3701 }
2bfcf297 3702
f676971a 3703 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 3704 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 3705 {
bc06712d 3706 if (ud2 & 0x8000)
f676971a 3707 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 3708 - 0x80000000));
252b88f7 3709 else
bc06712d
TR
3710 emit_move_insn (dest, GEN_INT (ud2 << 16));
3711 if (ud1 != 0)
3712 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
252b88f7 3713 }
f676971a 3714 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
3715 || (ud4 == 0 && ! (ud3 & 0x8000)))
3716 {
3717 if (ud3 & 0x8000)
f676971a 3718 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
3719 - 0x80000000));
3720 else
3721 emit_move_insn (dest, GEN_INT (ud3 << 16));
3722
3723 if (ud2 != 0)
3724 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3725 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3726 if (ud1 != 0)
3727 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3728 }
f676971a 3729 else
bc06712d
TR
3730 {
3731 if (ud4 & 0x8000)
f676971a 3732 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
3733 - 0x80000000));
3734 else
3735 emit_move_insn (dest, GEN_INT (ud4 << 16));
3736
3737 if (ud3 != 0)
3738 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2bfcf297 3739
bc06712d
TR
3740 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3741 if (ud2 != 0)
f676971a
EC
3742 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3743 GEN_INT (ud2 << 16)));
bc06712d
TR
3744 if (ud1 != 0)
3745 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3746 }
3747 }
2bfcf297
DB
3748 return dest;
3749}
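/* Worked example (illustration only): for the 64-bit constant
   0x123456789abcdef0 the halfwords are ud4 = 0x1234, ud3 = 0x5678,
   ud2 = 0x9abc and ud1 = 0xdef0, so the general case above emits
   roughly
       lis   rD,0x1234
       ori   rD,rD,0x5678
       sldi  rD,rD,32
       oris  rD,rD,0x9abc
       ori   rD,rD,0xdef0
   i.e. five instructions for an arbitrary 64-bit constant.  */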
3750
76d2b81d
DJ
3751/* Helper for the following. Get rid of [r+r] memory refs
3752 in cases where they won't work (TImode, TFmode). */
3753
3754static void
3755rs6000_eliminate_indexed_memrefs (rtx operands[2])
3756{
3757 if (GET_CODE (operands[0]) == MEM
3758 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 3759 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
3760 && ! reload_in_progress)
3761 operands[0]
3762 = replace_equiv_address (operands[0],
3763 copy_addr_to_reg (XEXP (operands[0], 0)));
3764
3765 if (GET_CODE (operands[1]) == MEM
3766 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 3767 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
3768 && ! reload_in_progress)
3769 operands[1]
3770 = replace_equiv_address (operands[1],
3771 copy_addr_to_reg (XEXP (operands[1], 0)));
3772}
3773
fb4d4348
GK
3774/* Emit a move from SOURCE to DEST in mode MODE. */
3775void
a2369ed3 3776rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
3777{
3778 rtx operands[2];
3779 operands[0] = dest;
3780 operands[1] = source;
f676971a 3781
fb4d4348
GK
3782 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3783 if (GET_CODE (operands[1]) == CONST_DOUBLE
3784 && ! FLOAT_MODE_P (mode)
3785 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3786 {
3787 /* FIXME. This should never happen. */
3788 /* Since it seems that it does, do the safe thing and convert
3789 to a CONST_INT. */
2496c7bd 3790 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 3791 }
37409796
NS
3792 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3793 || FLOAT_MODE_P (mode)
3794 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3795 || CONST_DOUBLE_LOW (operands[1]) < 0)
3796 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3797 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 3798
c9e8cb32
DD
3799 /* Check if GCC is setting up a block move that will end up using FP
3800 registers as temporaries. We must make sure this is acceptable. */
3801 if (GET_CODE (operands[0]) == MEM
3802 && GET_CODE (operands[1]) == MEM
3803 && mode == DImode
41543739
GK
3804 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3805 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3806 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3807 ? 32 : MEM_ALIGN (operands[0])))
3808 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 3809 ? 32
41543739
GK
3810 : MEM_ALIGN (operands[1]))))
3811 && ! MEM_VOLATILE_P (operands [0])
3812 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 3813 {
41543739
GK
3814 emit_move_insn (adjust_address (operands[0], SImode, 0),
3815 adjust_address (operands[1], SImode, 0));
3816 emit_move_insn (adjust_address (operands[0], SImode, 4),
3817 adjust_address (operands[1], SImode, 4));
c9e8cb32
DD
3818 return;
3819 }
630d42a0 3820
55aa0757 3821 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
c9dbf840 3822 && !gpc_reg_operand (operands[1], mode))
f6219a5e 3823 operands[1] = force_reg (mode, operands[1]);
a9098fd0 3824
a3170dc6
AH
3825 if (mode == SFmode && ! TARGET_POWERPC
3826 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 3827 && GET_CODE (operands[0]) == MEM)
fb4d4348 3828 {
ffc14f31
GK
3829 int regnum;
3830
3831 if (reload_in_progress || reload_completed)
3832 regnum = true_regnum (operands[1]);
3833 else if (GET_CODE (operands[1]) == REG)
3834 regnum = REGNO (operands[1]);
3835 else
3836 regnum = -1;
f676971a 3837
fb4d4348
GK
3838 /* If operands[1] is a register, on POWER it may have
3839 double-precision data in it, so truncate it to single
3840 precision. */
3841 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3842 {
3843 rtx newreg;
3844 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3845 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3846 operands[1] = newreg;
3847 }
3848 }
3849
c4501e62
JJ
3850 /* Recognize the case where operand[1] is a reference to thread-local
3851 data and load its address to a register. */
84f52ebd 3852 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 3853 {
84f52ebd
RH
3854 enum tls_model model;
3855 rtx tmp = operands[1];
3856 rtx addend = NULL;
3857
3858 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3859 {
3860 addend = XEXP (XEXP (tmp, 0), 1);
3861 tmp = XEXP (XEXP (tmp, 0), 0);
3862 }
3863
3864 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3865 model = SYMBOL_REF_TLS_MODEL (tmp);
3866 gcc_assert (model != 0);
3867
3868 tmp = rs6000_legitimize_tls_address (tmp, model);
3869 if (addend)
3870 {
3871 tmp = gen_rtx_PLUS (mode, tmp, addend);
3872 tmp = force_operand (tmp, operands[0]);
3873 }
3874 operands[1] = tmp;
c4501e62
JJ
3875 }
3876
8f4e6caf
RH
3877 /* Handle the case where reload calls us with an invalid address. */
3878 if (reload_in_progress && mode == Pmode
69ef87e2 3879 && (! general_operand (operands[1], mode)
8f4e6caf
RH
3880 || ! nonimmediate_operand (operands[0], mode)))
3881 goto emit_set;
3882
a9baceb1
GK
3883 /* 128-bit constant floating-point values on Darwin should really be
3884 loaded as two parts. */
3885 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3886 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3887 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3888 {
3889 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3890 know how to get a DFmode SUBREG of a TFmode. */
3891 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3892 simplify_gen_subreg (DImode, operands[1], mode, 0),
3893 DImode);
3894 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3895 GET_MODE_SIZE (DImode)),
3896 simplify_gen_subreg (DImode, operands[1], mode,
3897 GET_MODE_SIZE (DImode)),
3898 DImode);
3899 return;
3900 }
3901
fb4d4348
GK
3902 /* FIXME: In the long term, this switch statement should go away
3903 and be replaced by a sequence of tests based on things like
3904 mode == Pmode. */
3905 switch (mode)
3906 {
3907 case HImode:
3908 case QImode:
3909 if (CONSTANT_P (operands[1])
3910 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 3911 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
3912 break;
3913
06f4e019 3914 case TFmode:
76d2b81d
DJ
3915 rs6000_eliminate_indexed_memrefs (operands);
3916 /* fall through */
3917
fb4d4348
GK
3918 case DFmode:
3919 case SFmode:
f676971a 3920 if (CONSTANT_P (operands[1])
fb4d4348 3921 && ! easy_fp_constant (operands[1], mode))
a9098fd0 3922 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 3923 break;
f676971a 3924
0ac081f6
AH
3925 case V16QImode:
3926 case V8HImode:
3927 case V4SFmode:
3928 case V4SImode:
a3170dc6
AH
3929 case V4HImode:
3930 case V2SFmode:
3931 case V2SImode:
00a892b8 3932 case V1DImode:
69ef87e2 3933 if (CONSTANT_P (operands[1])
d744e06e 3934 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
3935 operands[1] = force_const_mem (mode, operands[1]);
3936 break;
f676971a 3937
fb4d4348 3938 case SImode:
a9098fd0 3939 case DImode:
fb4d4348
GK
3940 /* Use the default pattern for addresses of ELF small data. */
3941 if (TARGET_ELF
a9098fd0 3942 && mode == Pmode
f607bc57 3943 && DEFAULT_ABI == ABI_V4
f676971a 3944 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
3945 || GET_CODE (operands[1]) == CONST)
3946 && small_data_operand (operands[1], mode))
fb4d4348
GK
3947 {
3948 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3949 return;
3950 }
3951
f607bc57 3952 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
3953 && mode == Pmode && mode == SImode
3954 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
3955 {
3956 emit_insn (gen_movsi_got (operands[0], operands[1]));
3957 return;
3958 }
3959
ee890fe2 3960 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
3961 && TARGET_NO_TOC
3962 && ! flag_pic
a9098fd0 3963 && mode == Pmode
fb4d4348
GK
3964 && CONSTANT_P (operands[1])
3965 && GET_CODE (operands[1]) != HIGH
3966 && GET_CODE (operands[1]) != CONST_INT)
3967 {
a9098fd0 3968 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
fb4d4348
GK
3969
3970 /* If this is a function address on -mcall-aixdesc,
3971 convert it to the address of the descriptor. */
3972 if (DEFAULT_ABI == ABI_AIX
3973 && GET_CODE (operands[1]) == SYMBOL_REF
3974 && XSTR (operands[1], 0)[0] == '.')
3975 {
3976 const char *name = XSTR (operands[1], 0);
3977 rtx new_ref;
3978 while (*name == '.')
3979 name++;
3980 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3981 CONSTANT_POOL_ADDRESS_P (new_ref)
3982 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 3983 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 3984 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
d1908feb 3985 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
fb4d4348
GK
3986 operands[1] = new_ref;
3987 }
7509c759 3988
ee890fe2
SS
3989 if (DEFAULT_ABI == ABI_DARWIN)
3990 {
ab82a49f
AP
3991#if TARGET_MACHO
3992 if (MACHO_DYNAMIC_NO_PIC_P)
3993 {
3994 /* Take care of any required data indirection. */
3995 operands[1] = rs6000_machopic_legitimize_pic_address (
3996 operands[1], mode, operands[0]);
3997 if (operands[0] != operands[1])
3998 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 3999 operands[0], operands[1]));
ab82a49f
AP
4000 return;
4001 }
4002#endif
b8a55285
AP
4003 emit_insn (gen_macho_high (target, operands[1]));
4004 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4005 return;
4006 }
4007
fb4d4348
GK
4008 emit_insn (gen_elf_high (target, operands[1]));
4009 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4010 return;
4011 }
4012
a9098fd0
GK
4013 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4014 and we have put it in the TOC, we just need to make a TOC-relative
4015 reference to it. */
4016 if (TARGET_TOC
4017 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4018 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4019 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4020 get_pool_mode (operands[1])))
fb4d4348 4021 {
a9098fd0 4022 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4023 }
a9098fd0
GK
4024 else if (mode == Pmode
4025 && CONSTANT_P (operands[1])
38886f37
AO
4026 && ((GET_CODE (operands[1]) != CONST_INT
4027 && ! easy_fp_constant (operands[1], mode))
4028 || (GET_CODE (operands[1]) == CONST_INT
4029 && num_insns_constant (operands[1], mode) > 2)
4030 || (GET_CODE (operands[0]) == REG
4031 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4032 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4033 && ! legitimate_constant_pool_address_p (operands[1])
4034 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4035 {
4036 /* Emit a USE operation so that the constant isn't deleted if
4037 expensive optimizations are turned on because nobody
4038 references it. This should only be done for operands that
4039 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4040 This should not be done for operands that contain LABEL_REFs.
4041 For now, we just handle the obvious case. */
4042 if (GET_CODE (operands[1]) != LABEL_REF)
4043 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4044
c859cda6 4045#if TARGET_MACHO
ee890fe2 4046 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4047 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4048 {
ee890fe2
SS
4049 operands[1] =
4050 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4051 operands[0]);
4052 if (operands[0] != operands[1])
4053 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4054 return;
4055 }
c859cda6 4056#endif
ee890fe2 4057
fb4d4348
GK
4058 /* If we are to limit the number of things we put in the TOC and
4059 this is a symbol plus a constant we can add in one insn,
4060 just put the symbol in the TOC and add the constant. Don't do
4061 this if reload is in progress. */
4062 if (GET_CODE (operands[1]) == CONST
4063 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4064 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4065 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4066 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4067 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4068 && ! side_effects_p (operands[0]))
4069 {
a4f6c312
SS
4070 rtx sym =
4071 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4072 rtx other = XEXP (XEXP (operands[1], 0), 1);
4073
a9098fd0
GK
4074 sym = force_reg (mode, sym);
4075 if (mode == SImode)
4076 emit_insn (gen_addsi3 (operands[0], sym, other));
4077 else
4078 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4079 return;
4080 }
4081
a9098fd0 4082 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4083
f676971a 4084 if (TARGET_TOC
4d588c14 4085 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4086 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4087 get_pool_constant (XEXP (operands[1], 0)),
4088 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4089 {
ba4828e0 4090 operands[1]
542a8afa 4091 = gen_const_mem (mode,
c4ad648e 4092 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4093 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4094 }
fb4d4348
GK
4095 }
4096 break;
a9098fd0 4097
fb4d4348 4098 case TImode:
76d2b81d
DJ
4099 rs6000_eliminate_indexed_memrefs (operands);
4100
27dc0551
DE
4101 if (TARGET_POWER)
4102 {
4103 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4104 gen_rtvec (2,
4105 gen_rtx_SET (VOIDmode,
4106 operands[0], operands[1]),
4107 gen_rtx_CLOBBER (VOIDmode,
4108 gen_rtx_SCRATCH (SImode)))));
4109 return;
4110 }
fb4d4348
GK
4111 break;
4112
4113 default:
37409796 4114 gcc_unreachable ();
fb4d4348
GK
4115 }
4116
a9098fd0
GK
4117 /* Above, we may have called force_const_mem which may have returned
4118 an invalid address. If we can, fix this up; otherwise, reload will
4119 have to deal with it. */
8f4e6caf
RH
4120 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4121 operands[1] = validize_mem (operands[1]);
a9098fd0 4122
8f4e6caf 4123 emit_set:
fb4d4348
GK
4124 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4125}
4697a36c 4126\f
2858f73a
GK
4127/* Nonzero if we can use a floating-point register to pass this arg. */
4128#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4129 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
4130 && (CUM)->fregno <= FP_ARG_MAX_REG \
4131 && TARGET_HARD_FLOAT && TARGET_FPRS)
4132
4133/* Nonzero if we can use an AltiVec register to pass this arg. */
4134#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4135 (ALTIVEC_VECTOR_MODE (MODE) \
4136 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4137 && TARGET_ALTIVEC_ABI \
83953138 4138 && (NAMED))
2858f73a 4139
c6e8c921
GK
4140/* Return a nonzero value to say to return the function value in
4141 memory, just as large structures are always returned. TYPE will be
4142 the data type of the value, and FNTYPE will be the type of the
4143 function doing the returning, or @code{NULL} for libcalls.
4144
4145 The AIX ABI for the RS/6000 specifies that all structures are
4146 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4147 specifies that structures <= 8 bytes are returned in r3/r4, but a
4148 draft put them in memory, and GCC used to implement the draft
df01da37 4149 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4150 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4151 compatibility can change DRAFT_V4_STRUCT_RET to override the
4152 default, and -m switches get the final word. See
4153 rs6000_override_options for more details.
4154
4155 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4156 long double support is enabled. These values are returned in memory.
4157
4158 int_size_in_bytes returns -1 for variable size objects, which go in
4159 memory always. The cast to unsigned makes -1 > 8. */
4160
4161static bool
4162rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4163{
594a51fe
SS
4164/* In the darwin64 ABI, try to use registers for larger structs
4165 if possible. */
0b5383eb 4166 if (rs6000_darwin64_abi
594a51fe 4167 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4168 && int_size_in_bytes (type) > 0)
4169 {
4170 CUMULATIVE_ARGS valcum;
4171 rtx valret;
4172
4173 valcum.words = 0;
4174 valcum.fregno = FP_ARG_MIN_REG;
4175 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4176 /* Do a trial code generation as if this were going to be passed
4177 as an argument; if any part goes in memory, we return NULL. */
4178 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4179 if (valret)
4180 return false;
4181 /* Otherwise fall through to more conventional ABI rules. */
4182 }
594a51fe 4183
c6e8c921 4184 if (AGGREGATE_TYPE_P (type)
df01da37 4185 && (aix_struct_return
c6e8c921
GK
4186 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4187 return true;
b693336b 4188
bada2eb8
DE
4189 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4190 modes only exist for GCC vector types if -maltivec. */
4191 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4192 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4193 return false;
4194
b693336b
PB
4195 /* Return synthetic vectors in memory. */
4196 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4197 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4198 {
4199 static bool warned_for_return_big_vectors = false;
4200 if (!warned_for_return_big_vectors)
4201 {
d4ee4d25 4202 warning (0, "GCC vector returned by reference: "
b693336b
PB
4203 "non-standard ABI extension with no compatibility guarantee");
4204 warned_for_return_big_vectors = true;
4205 }
4206 return true;
4207 }
4208
c6e8c921
GK
4209 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4210 return true;
ad630bef 4211
c6e8c921
GK
4212 return false;
4213}
4214
4697a36c
MM
4215/* Initialize a variable CUM of type CUMULATIVE_ARGS
4216 for a call to a function whose data type is FNTYPE.
4217 For a library call, FNTYPE is 0.
4218
4219 For incoming args we set the number of arguments in the prototype large
1c20ae99 4220 so we never return a PARALLEL. */
4697a36c
MM
4221
4222void
f676971a 4223init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4224 rtx libname ATTRIBUTE_UNUSED, int incoming,
4225 int libcall, int n_named_args)
4697a36c
MM
4226{
4227 static CUMULATIVE_ARGS zero_cumulative;
4228
4229 *cum = zero_cumulative;
4230 cum->words = 0;
4231 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4232 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4233 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4234 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4235 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4236 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4237 cum->stdarg = fntype
4238 && (TYPE_ARG_TYPES (fntype) != 0
4239 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4240 != void_type_node));
4697a36c 4241
0f6937fe
AM
4242 cum->nargs_prototype = 0;
4243 if (incoming || cum->prototype)
4244 cum->nargs_prototype = n_named_args;
4697a36c 4245
a5c76ee6 4246 /* Check for a longcall attribute. */
3eb4e360
AM
4247 if ((!fntype && rs6000_default_long_calls)
4248 || (fntype
4249 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4250 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4251 cum->call_cookie |= CALL_LONG;
6a4cee5f 4252
4697a36c
MM
4253 if (TARGET_DEBUG_ARG)
4254 {
4255 fprintf (stderr, "\ninit_cumulative_args:");
4256 if (fntype)
4257 {
4258 tree ret_type = TREE_TYPE (fntype);
4259 fprintf (stderr, " ret code = %s,",
4260 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4261 }
4262
6a4cee5f
MM
4263 if (cum->call_cookie & CALL_LONG)
4264 fprintf (stderr, " longcall,");
4265
4697a36c
MM
4266 fprintf (stderr, " proto = %d, nargs = %d\n",
4267 cum->prototype, cum->nargs_prototype);
4268 }
f676971a 4269
c4ad648e
AM
4270 if (fntype
4271 && !TARGET_ALTIVEC
4272 && TARGET_ALTIVEC_ABI
4273 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4274 {
c85ce869 4275 error ("cannot return value in vector register because"
c4ad648e 4276 " altivec instructions are disabled, use -maltivec"
c85ce869 4277 " to enable them");
c4ad648e 4278 }
4697a36c
MM
4279}
4280\f
fe984136
RH
4281/* Return true if TYPE must be passed on the stack and not in registers. */
4282
4283static bool
4284rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4285{
4286 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4287 return must_pass_in_stack_var_size (mode, type);
4288 else
4289 return must_pass_in_stack_var_size_or_pad (mode, type);
4290}
4291
c229cba9
DE
4292/* If defined, a C expression which determines whether, and in which
4293 direction, to pad out an argument with extra space. The value
4294 should be of type `enum direction': either `upward' to pad above
4295 the argument, `downward' to pad below, or `none' to inhibit
4296 padding.
4297
4298 For the AIX ABI, structs are always stored left-shifted in their
4299 argument slot. */
4300
9ebbca7d 4301enum direction
a2369ed3 4302function_arg_padding (enum machine_mode mode, tree type)
c229cba9 4303{
6e985040
AM
4304#ifndef AGGREGATE_PADDING_FIXED
4305#define AGGREGATE_PADDING_FIXED 0
4306#endif
4307#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4308#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4309#endif
4310
4311 if (!AGGREGATE_PADDING_FIXED)
4312 {
4313 /* GCC used to pass structures of the same size as integer types as
4314 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4315 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4316 passed padded downward, except that -mstrict-align further
4317 muddied the water in that multi-component structures of 2 and 4
4318 bytes in size were passed padded upward.
4319
4320 The following arranges for best compatibility with previous
4321 versions of gcc, but removes the -mstrict-align dependency. */
4322 if (BYTES_BIG_ENDIAN)
4323 {
4324 HOST_WIDE_INT size = 0;
4325
4326 if (mode == BLKmode)
4327 {
4328 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4329 size = int_size_in_bytes (type);
4330 }
4331 else
4332 size = GET_MODE_SIZE (mode);
4333
4334 if (size == 1 || size == 2 || size == 4)
4335 return downward;
4336 }
4337 return upward;
4338 }
4339
4340 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4341 {
4342 if (type != 0 && AGGREGATE_TYPE_P (type))
4343 return upward;
4344 }
c229cba9 4345
d3704c46
KH
4346 /* Fall back to the default. */
4347 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4348}
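/* Examples of the rule above (illustration only): on a big-endian
   target a 1-, 2- or 4-byte struct is padded downward, i.e. placed in
   its slot exactly as an integer of that size would be, while a
   3-byte struct is padded upward (left-justified in its slot).  */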
4349
b6c9286a 4350/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4351 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4352 PARM_BOUNDARY is used for all arguments.
f676971a 4353
b693336b
PB
4354 V.4 wants long longs to be double word aligned.
4355 Doubleword align SPE vectors.
4356 Quadword align Altivec vectors.
4357 Quadword align large synthetic vector types. */
b6c9286a
MM
4358
4359int
b693336b 4360function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4361{
4ed78545
AM
4362 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4363 return 64;
ad630bef
DE
4364 else if (SPE_VECTOR_MODE (mode)
4365 || (type && TREE_CODE (type) == VECTOR_TYPE
4366 && int_size_in_bytes (type) >= 8
4367 && int_size_in_bytes (type) < 16))
e1f83b4d 4368 return 64;
ad630bef
DE
4369 else if (ALTIVEC_VECTOR_MODE (mode)
4370 || (type && TREE_CODE (type) == VECTOR_TYPE
4371 && int_size_in_bytes (type) >= 16))
0ac081f6 4372 return 128;
0b5383eb
DJ
4373 else if (rs6000_darwin64_abi && mode == BLKmode
4374 && type && TYPE_ALIGN (type) > 64)
4375 return 128;
9ebbca7d 4376 else
b6c9286a 4377 return PARM_BOUNDARY;
b6c9286a 4378}
c53bdcf5 4379
294bd182
AM
4380/* For a function parm of MODE and TYPE, return the starting word in
4381 the parameter area. NWORDS of the parameter area are already used. */
4382
4383static unsigned int
4384rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4385{
4386 unsigned int align;
4387 unsigned int parm_offset;
4388
4389 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4390 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4391 return nwords + (-(parm_offset + nwords) & align);
4392}
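/* Worked example (illustration only): under the 32-bit AIX ABI,
   PARM_BOUNDARY is 32 and parm_offset is 6 (the parameter save area
   begins 24 bytes into the frame).  A 16-byte-aligned vector argument
   with NWORDS = 0 gets align = 128/32 - 1 = 3 and starts at word
   0 + (-(6 + 0) & 3) = 2, i.e. byte offset 24 + 2*4 = 32, which is
   16-byte aligned as required.  */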
4393
c53bdcf5
AM
4394/* Compute the size (in words) of a function argument. */
4395
4396static unsigned long
4397rs6000_arg_size (enum machine_mode mode, tree type)
4398{
4399 unsigned long size;
4400
4401 if (mode != BLKmode)
4402 size = GET_MODE_SIZE (mode);
4403 else
4404 size = int_size_in_bytes (type);
4405
4406 if (TARGET_32BIT)
4407 return (size + 3) >> 2;
4408 else
4409 return (size + 7) >> 3;
4410}
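/* Example (illustration only): a 10-byte BLKmode aggregate occupies
   (10 + 3) >> 2 = 3 argument words in 32-bit mode and
   (10 + 7) >> 3 = 2 doublewords in 64-bit mode.  */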
b6c9286a 4411\f
0b5383eb 4412/* Use this to flush pending int fields. */
594a51fe
SS
4413
4414static void
0b5383eb
DJ
4415rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4416 HOST_WIDE_INT bitpos)
594a51fe 4417{
0b5383eb
DJ
4418 unsigned int startbit, endbit;
4419 int intregs, intoffset;
4420 enum machine_mode mode;
594a51fe 4421
0b5383eb
DJ
4422 if (cum->intoffset == -1)
4423 return;
594a51fe 4424
0b5383eb
DJ
4425 intoffset = cum->intoffset;
4426 cum->intoffset = -1;
4427
4428 if (intoffset % BITS_PER_WORD != 0)
4429 {
4430 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4431 MODE_INT, 0);
4432 if (mode == BLKmode)
594a51fe 4433 {
0b5383eb
DJ
4434 /* We couldn't find an appropriate mode, which happens,
4435 e.g., in packed structs when there are 3 bytes to load.
4436 Move intoffset back to the beginning of the word in this
4437 case. */
4438 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 4439 }
594a51fe 4440 }
0b5383eb
DJ
4441
4442 startbit = intoffset & -BITS_PER_WORD;
4443 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4444 intregs = (endbit - startbit) / BITS_PER_WORD;
4445 cum->words += intregs;
4446}
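/* Worked example (illustration only), with 64-bit words: pending int
   fields starting at bit 32 (cum->intoffset == 32) and flushed at
   BITPOS = 160 give startbit = 0 and endbit = (160 + 63) & -64 = 192,
   so intregs = 192/64 = 3 GPR words are charged to the argument.  */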
4447
4448/* The darwin64 ABI calls for us to recurse down through structs,
4449 looking for elements passed in registers. Unfortunately, we have
4450 to track int register count here also because of misalignments
4451 in powerpc alignment mode. */
4452
4453static void
4454rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4455 tree type,
4456 HOST_WIDE_INT startbitpos)
4457{
4458 tree f;
4459
4460 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4461 if (TREE_CODE (f) == FIELD_DECL)
4462 {
4463 HOST_WIDE_INT bitpos = startbitpos;
4464 tree ftype = TREE_TYPE (f);
4465 enum machine_mode mode = TYPE_MODE (ftype);
4466
4467 if (DECL_SIZE (f) != 0
4468 && host_integerp (bit_position (f), 1))
4469 bitpos += int_bit_position (f);
4470
4471 /* ??? FIXME: else assume zero offset. */
4472
4473 if (TREE_CODE (ftype) == RECORD_TYPE)
4474 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4475 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4476 {
4477 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4478 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4479 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4480 }
4481 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4482 {
4483 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4484 cum->vregno++;
4485 cum->words += 2;
4486 }
4487 else if (cum->intoffset == -1)
4488 cum->intoffset = bitpos;
4489 }
594a51fe
SS
4490}
4491
4697a36c
MM
4492/* Update the data in CUM to advance over an argument
4493 of mode MODE and data type TYPE.
b2d04ecf
AM
4494 (TYPE is null for libcalls where that information may not be available.)
4495
4496 Note that for args passed by reference, function_arg will be called
4497 with MODE and TYPE set to that of the pointer to the arg, not the arg
4498 itself. */
4697a36c
MM
4499
4500void
f676971a 4501function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 4502 tree type, int named, int depth)
4697a36c 4503{
0b5383eb
DJ
4504 int size;
4505
594a51fe
SS
4506 /* Only tick off an argument if we're not recursing. */
4507 if (depth == 0)
4508 cum->nargs_prototype--;
4697a36c 4509
ad630bef
DE
4510 if (TARGET_ALTIVEC_ABI
4511 && (ALTIVEC_VECTOR_MODE (mode)
4512 || (type && TREE_CODE (type) == VECTOR_TYPE
4513 && int_size_in_bytes (type) == 16)))
0ac081f6 4514 {
4ed78545
AM
4515 bool stack = false;
4516
2858f73a 4517 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 4518 {
6d0ef01e
HP
4519 cum->vregno++;
4520 if (!TARGET_ALTIVEC)
c85ce869 4521 error ("cannot pass argument in vector register because"
6d0ef01e 4522 " altivec instructions are disabled, use -maltivec"
c85ce869 4523 " to enable them");
4ed78545
AM
4524
4525 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 4526 even if it is going to be passed in a vector register.
4ed78545
AM
4527 Darwin does the same for variable-argument functions. */
4528 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4529 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4530 stack = true;
6d0ef01e 4531 }
4ed78545
AM
4532 else
4533 stack = true;
4534
4535 if (stack)
c4ad648e 4536 {
a594a19c 4537 int align;
f676971a 4538
2858f73a
GK
4539 /* Vector parameters must be 16-byte aligned. This places
4540 them at 2 mod 4 in terms of words in 32-bit mode, since
4541 the parameter save area starts at offset 24 from the
4542 stack. In 64-bit mode, they just have to start on an
4543 even word, since the parameter save area is 16-byte
4544 aligned. Space for GPRs is reserved even if the argument
4545 will be passed in memory. */
4546 if (TARGET_32BIT)
4ed78545 4547 align = (2 - cum->words) & 3;
2858f73a
GK
4548 else
4549 align = cum->words & 1;
c53bdcf5 4550 cum->words += align + rs6000_arg_size (mode, type);
f676971a 4551
a594a19c
GK
4552 if (TARGET_DEBUG_ARG)
4553 {
f676971a 4554 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
4555 cum->words, align);
4556 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 4557 cum->nargs_prototype, cum->prototype,
2858f73a 4558 GET_MODE_NAME (mode));
a594a19c
GK
4559 }
4560 }
0ac081f6 4561 }
a4b0320c 4562 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
4563 && !cum->stdarg
4564 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 4565 cum->sysv_gregno++;
594a51fe
SS
4566
4567 else if (rs6000_darwin64_abi
4568 && mode == BLKmode
0b5383eb
DJ
4569 && TREE_CODE (type) == RECORD_TYPE
4570 && (size = int_size_in_bytes (type)) > 0)
4571 {
4572 /* Variable sized types have size == -1 and are
4573 treated as if consisting entirely of ints.
4574 Pad to 16 byte boundary if needed. */
4575 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4576 && (cum->words % 2) != 0)
4577 cum->words++;
4578 /* For varargs, we can just go up by the size of the struct. */
4579 if (!named)
4580 cum->words += (size + 7) / 8;
4581 else
4582 {
4583 /* It is tempting to say int register count just goes up by
4584 sizeof(type)/8, but this is wrong in a case such as
4585 { int; double; int; } [powerpc alignment]. We have to
4586 grovel through the fields for these too. */
4587 cum->intoffset = 0;
4588 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 4589 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
4590 size * BITS_PER_UNIT);
4591 }
4592 }
f607bc57 4593 else if (DEFAULT_ABI == ABI_V4)
4697a36c 4594 {
a3170dc6 4595 if (TARGET_HARD_FLOAT && TARGET_FPRS
4cc833b7 4596 && (mode == SFmode || mode == DFmode))
4697a36c 4597 {
4cc833b7
RH
4598 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4599 cum->fregno++;
4600 else
4601 {
4602 if (mode == DFmode)
c4ad648e 4603 cum->words += cum->words & 1;
c53bdcf5 4604 cum->words += rs6000_arg_size (mode, type);
4cc833b7 4605 }
4697a36c 4606 }
4cc833b7
RH
4607 else
4608 {
b2d04ecf 4609 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
4610 int gregno = cum->sysv_gregno;
4611
4ed78545
AM
4612 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4613 (r7,r8) or (r9,r10), as is any other 2-word item such
4614 as complex int, due to a historical mistake. */
4615 if (n_words == 2)
4616 gregno += (1 - gregno) & 1;
4cc833b7 4617
4ed78545 4618 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
4619 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4620 {
4ed78545
AM
4621 /* Long long and SPE vectors are aligned on the stack.
4622 So are other 2 word items such as complex int due to
4623 a historical mistake. */
4cc833b7
RH
4624 if (n_words == 2)
4625 cum->words += cum->words & 1;
4626 cum->words += n_words;
4627 }
4697a36c 4628
4cc833b7
RH
4629 /* Note: we continue to accumulate gregno past the point where
4630 we've started spilling to the stack; this indicates to
4631 expand_builtin_saveregs that spilling has started. */
4632 cum->sysv_gregno = gregno + n_words;
4633 }
4697a36c 4634
4cc833b7
RH
4635 if (TARGET_DEBUG_ARG)
4636 {
4637 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4638 cum->words, cum->fregno);
4639 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4640 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4641 fprintf (stderr, "mode = %4s, named = %d\n",
4642 GET_MODE_NAME (mode), named);
4643 }
4697a36c
MM
4644 }
4645 else
4cc833b7 4646 {
b2d04ecf 4647 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
4648 int start_words = cum->words;
4649 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 4650
294bd182 4651 cum->words = align_words + n_words;
4697a36c 4652
ebb109ad 4653 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 4654 && TARGET_HARD_FLOAT && TARGET_FPRS)
c53bdcf5 4655 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
4656
4657 if (TARGET_DEBUG_ARG)
4658 {
4659 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4660 cum->words, cum->fregno);
4661 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4662 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 4663 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 4664 named, align_words - start_words, depth);
4cc833b7
RH
4665 }
4666 }
4697a36c 4667}
a6c9bed4 4668
f82f556d
AH
4669static rtx
4670spe_build_register_parallel (enum machine_mode mode, int gregno)
4671{
54b695e7 4672 rtx r1, r3;
f82f556d 4673
37409796 4674 switch (mode)
f82f556d 4675 {
37409796 4676 case DFmode:
54b695e7
AH
4677 r1 = gen_rtx_REG (DImode, gregno);
4678 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4679 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
4680
4681 case DCmode:
54b695e7
AH
4682 r1 = gen_rtx_REG (DImode, gregno);
4683 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
4684 r3 = gen_rtx_REG (DImode, gregno + 2);
4685 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
4686 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796
NS
4687
4688 default:
4689 gcc_unreachable ();
f82f556d 4690 }
f82f556d 4691}
b78d48dd 4692
f82f556d 4693/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 4694static rtx
f676971a 4695rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 4696 tree type)
a6c9bed4 4697{
f82f556d
AH
4698 int gregno = cum->sysv_gregno;
4699
4700 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 4701 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
18f63bfa 4702 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode))
f82f556d 4703 {
b5870bee
AH
4704 int n_words = rs6000_arg_size (mode, type);
4705
f82f556d 4706 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
4707 if (mode == DFmode)
4708 gregno += (1 - gregno) & 1;
f82f556d 4709
b5870bee
AH
4710 /* Multi-reg args are not split between registers and stack. */
4711 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
4712 return NULL_RTX;
4713
4714 return spe_build_register_parallel (mode, gregno);
4715 }
a6c9bed4
AH
4716 if (cum->stdarg)
4717 {
c53bdcf5 4718 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
4719
4720 /* SPE vectors are put in odd registers. */
4721 if (n_words == 2 && (gregno & 1) == 0)
4722 gregno += 1;
4723
4724 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4725 {
4726 rtx r1, r2;
4727 enum machine_mode m = SImode;
4728
4729 r1 = gen_rtx_REG (m, gregno);
4730 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4731 r2 = gen_rtx_REG (m, gregno + 1);
4732 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4733 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4734 }
4735 else
b78d48dd 4736 return NULL_RTX;
a6c9bed4
AH
4737 }
4738 else
4739 {
f82f556d
AH
4740 if (gregno <= GP_ARG_MAX_REG)
4741 return gen_rtx_REG (mode, gregno);
a6c9bed4 4742 else
b78d48dd 4743 return NULL_RTX;
a6c9bed4
AH
4744 }
4745}
4746
0b5383eb
DJ
4747/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4748 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 4749
0b5383eb 4750static void
bb8df8a6 4751rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 4752 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 4753{
0b5383eb
DJ
4754 enum machine_mode mode;
4755 unsigned int regno;
4756 unsigned int startbit, endbit;
4757 int this_regno, intregs, intoffset;
4758 rtx reg;
594a51fe 4759
0b5383eb
DJ
4760 if (cum->intoffset == -1)
4761 return;
4762
4763 intoffset = cum->intoffset;
4764 cum->intoffset = -1;
4765
4766 /* If this is the trailing part of a word, try to only load that
4767 much into the register. Otherwise load the whole register. Note
4768 that in the latter case we may pick up unwanted bits. It's not a
 4769      problem at the moment, but we may wish to revisit this.  */
4770
4771 if (intoffset % BITS_PER_WORD != 0)
594a51fe 4772 {
0b5383eb
DJ
4773 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4774 MODE_INT, 0);
4775 if (mode == BLKmode)
4776 {
4777 /* We couldn't find an appropriate mode, which happens,
4778 e.g., in packed structs when there are 3 bytes to load.
 4779 		 Move intoffset back to the beginning of the word in this
 4780 		 case.  */
4781 intoffset = intoffset & -BITS_PER_WORD;
4782 mode = word_mode;
4783 }
4784 }
4785 else
4786 mode = word_mode;
4787
4788 startbit = intoffset & -BITS_PER_WORD;
4789 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4790 intregs = (endbit - startbit) / BITS_PER_WORD;
4791 this_regno = cum->words + intoffset / BITS_PER_WORD;
4792
4793 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
4794 cum->use_stack = 1;
bb8df8a6 4795
0b5383eb
DJ
4796 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
4797 if (intregs <= 0)
4798 return;
4799
4800 intoffset /= BITS_PER_UNIT;
4801 do
4802 {
4803 regno = GP_ARG_MIN_REG + this_regno;
4804 reg = gen_rtx_REG (mode, regno);
4805 rvec[(*k)++] =
4806 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
4807
4808 this_regno += 1;
4809 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
4810 mode = word_mode;
4811 intregs -= 1;
4812 }
4813 while (intregs > 0);
4814}
4815
4816/* Recursive workhorse for the following. */
4817
4818static void
bb8df8a6 4819rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
0b5383eb
DJ
4820 HOST_WIDE_INT startbitpos, rtx rvec[],
4821 int *k)
4822{
4823 tree f;
4824
4825 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4826 if (TREE_CODE (f) == FIELD_DECL)
4827 {
4828 HOST_WIDE_INT bitpos = startbitpos;
4829 tree ftype = TREE_TYPE (f);
4830 enum machine_mode mode = TYPE_MODE (ftype);
4831
4832 if (DECL_SIZE (f) != 0
4833 && host_integerp (bit_position (f), 1))
4834 bitpos += int_bit_position (f);
4835
4836 /* ??? FIXME: else assume zero offset. */
4837
4838 if (TREE_CODE (ftype) == RECORD_TYPE)
4839 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
4840 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 4841 {
0b5383eb
DJ
4842#if 0
4843 switch (mode)
594a51fe 4844 {
0b5383eb
DJ
4845 case SCmode: mode = SFmode; break;
4846 case DCmode: mode = DFmode; break;
4847 case TCmode: mode = TFmode; break;
4848 default: break;
594a51fe 4849 }
0b5383eb
DJ
4850#endif
4851 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4852 rvec[(*k)++]
bb8df8a6 4853 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
4854 gen_rtx_REG (mode, cum->fregno++),
4855 GEN_INT (bitpos / BITS_PER_UNIT));
4856 if (mode == TFmode)
4857 cum->fregno++;
594a51fe 4858 }
0b5383eb
DJ
4859 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
4860 {
4861 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
4862 rvec[(*k)++]
bb8df8a6
EC
4863 = gen_rtx_EXPR_LIST (VOIDmode,
4864 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
4865 GEN_INT (bitpos / BITS_PER_UNIT));
4866 }
4867 else if (cum->intoffset == -1)
4868 cum->intoffset = bitpos;
4869 }
4870}
594a51fe 4871
0b5383eb
DJ
4872/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4873 the register(s) to be used for each field and subfield of a struct
4874 being passed by value, along with the offset of where the
4875 register's value may be found in the block. FP fields go in FP
 4876   registers, vector fields go in vector registers, and everything
bb8df8a6 4877 else goes in int registers, packed as in memory.
8ff40a74 4878
0b5383eb
DJ
4879 This code is also used for function return values. RETVAL indicates
4880 whether this is the case.
8ff40a74 4881
a4d05547 4882 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 4883 calling convention. */
594a51fe 4884
0b5383eb
DJ
4885static rtx
4886rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
4887 int named, bool retval)
4888{
4889 rtx rvec[FIRST_PSEUDO_REGISTER];
4890 int k = 1, kbase = 1;
4891 HOST_WIDE_INT typesize = int_size_in_bytes (type);
4892 /* This is a copy; modifications are not visible to our caller. */
4893 CUMULATIVE_ARGS copy_cum = *orig_cum;
4894 CUMULATIVE_ARGS *cum = &copy_cum;
4895
4896 /* Pad to 16 byte boundary if needed. */
4897 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
4898 && (cum->words % 2) != 0)
4899 cum->words++;
4900
4901 cum->intoffset = 0;
4902 cum->use_stack = 0;
4903 cum->named = named;
4904
4905 /* Put entries into rvec[] for individual FP and vector fields, and
4906 for the chunks of memory that go in int regs. Note we start at
4907 element 1; 0 is reserved for an indication of using memory, and
4908 may or may not be filled in below. */
4909 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
4910 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
4911
4912 /* If any part of the struct went on the stack put all of it there.
4913 This hack is because the generic code for
4914 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4915 parts of the struct are not at the beginning. */
4916 if (cum->use_stack)
4917 {
4918 if (retval)
4919 return NULL_RTX; /* doesn't go in registers at all */
4920 kbase = 0;
4921 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4922 }
4923 if (k > 1 || cum->use_stack)
4924 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
4925 else
4926 return NULL_RTX;
4927}
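/* Illustrative example (not part of rs6000.c), assuming hardware
   floating point and that this is the first, named argument of a call:
   for the hypothetical type below the recursion assigns the double to
   the first FP argument register at byte offset 0, and the flush
   routine covers the remaining word with a GPR, giving roughly

     (parallel:BLK [(expr_list (reg:DF f1) (const_int 0))
                    (expr_list (reg:DI r4) (const_int 8))])

   so the caller loads f1 from offset 0 of the struct and r4 from
   offset 8.  */

struct example_darwin64_arg { double d; int i; };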
4928
b78d48dd
FJ
4929/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4930
4931static rtx
ec6376ab 4932rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 4933{
ec6376ab
AM
4934 int n_units;
4935 int i, k;
4936 rtx rvec[GP_ARG_NUM_REG + 1];
4937
4938 if (align_words >= GP_ARG_NUM_REG)
4939 return NULL_RTX;
4940
4941 n_units = rs6000_arg_size (mode, type);
4942
4943 /* Optimize the simple case where the arg fits in one gpr, except in
4944 the case of BLKmode due to assign_parms assuming that registers are
4945 BITS_PER_WORD wide. */
4946 if (n_units == 0
4947 || (n_units == 1 && mode != BLKmode))
4948 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4949
4950 k = 0;
4951 if (align_words + n_units > GP_ARG_NUM_REG)
4952 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4953 using a magic NULL_RTX component.
4954 FIXME: This is not strictly correct. Only some of the arg
4955 belongs in memory, not all of it. However, there isn't any way
4956 to do this currently, apart from building rtx descriptions for
4957 the pieces of memory we want stored. Due to bugs in the generic
4958 code we can't use the normal function_arg_partial_nregs scheme
4959 with the PARALLEL arg description we emit here.
4960 In any case, the code to store the whole arg to memory is often
4961 more efficient than code to store pieces, and we know that space
4962 is available in the right place for the whole arg. */
78a52f11
RH
4963 /* FIXME: This should be fixed since the conversion to
4964 TARGET_ARG_PARTIAL_BYTES. */
ec6376ab
AM
4965 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4966
4967 i = 0;
4968 do
36a454e1 4969 {
ec6376ab
AM
4970 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4971 rtx off = GEN_INT (i++ * 4);
4972 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 4973 }
ec6376ab
AM
4974 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4975
4976 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
4977}
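/* Illustrative sketch (not part of rs6000.c): with -m32 -mpowerpc64, an
   8-byte DImode argument whose first word falls in the slot of r9
   (align_words == 6) is split into two SImode pieces, much like the
   hypothetical construction below; at align_words == 7 only the first
   word fits in r10 and the PARALLEL additionally carries the magic
   NULL_RTX memory component described above.  */

static rtx
example_mixed_dimode_arg (void)
{
  rtx lo = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 9), const0_rtx);
  rtx hi = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 10), GEN_INT (4));
  return gen_rtx_PARALLEL (DImode, gen_rtvec (2, lo, hi));
}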
4978
4697a36c
MM
4979/* Determine where to put an argument to a function.
4980 Value is zero to push the argument on the stack,
4981 or a hard register in which to store the argument.
4982
4983 MODE is the argument's machine mode.
4984 TYPE is the data type of the argument (as a tree).
4985 This is null for libcalls where that information may
4986 not be available.
4987 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
4988 the preceding args and about the function being called. It is
4989 not modified in this routine.
4697a36c
MM
4990 NAMED is nonzero if this argument is a named parameter
4991 (otherwise it is an extra parameter matching an ellipsis).
4992
4993 On RS/6000 the first eight words of non-FP are normally in registers
4994 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4995 Under V.4, the first 8 FP args are in registers.
4996
4997 If this is floating-point and no prototype is specified, we use
4998 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 4999 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5000 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5001 doesn't support PARALLEL anyway.
5002
5003 Note that for args passed by reference, function_arg will be called
5004 with MODE and TYPE set to that of the pointer to the arg, not the arg
5005 itself. */
4697a36c 5006
9390387d 5007rtx
f676971a 5008function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5009 tree type, int named)
4697a36c 5010{
4cc833b7 5011 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5012
a4f6c312
SS
 5013   /* Return a marker to indicate whether we need to set or clear in CR1 the
 5014      bit that V.4 uses to say fp args were passed in registers.
5015 Assume that we don't need the marker for software floating point,
5016 or compiler generated library calls. */
4697a36c
MM
5017 if (mode == VOIDmode)
5018 {
f607bc57 5019 if (abi == ABI_V4
b9599e46 5020 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5021 && (cum->stdarg
5022 || (cum->nargs_prototype < 0
5023 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5024 {
a3170dc6
AH
5025 /* For the SPE, we need to crxor CR6 always. */
5026 if (TARGET_SPE_ABI)
5027 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5028 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5029 return GEN_INT (cum->call_cookie
5030 | ((cum->fregno == FP_ARG_MIN_REG)
5031 ? CALL_V4_SET_FP_ARGS
5032 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5033 }
4697a36c 5034
7509c759 5035 return GEN_INT (cum->call_cookie);
4697a36c
MM
5036 }
5037
0b5383eb
DJ
5038 if (rs6000_darwin64_abi && mode == BLKmode
5039 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5040 {
0b5383eb 5041 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5042 if (rslt != NULL_RTX)
5043 return rslt;
5044 /* Else fall through to usual handling. */
5045 }
5046
2858f73a 5047 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5048 if (TARGET_64BIT && ! cum->prototype)
5049 {
c4ad648e
AM
5050 /* Vector parameters get passed in vector register
5051 and also in GPRs or memory, in absence of prototype. */
5052 int align_words;
5053 rtx slot;
5054 align_words = (cum->words + 1) & ~1;
5055
5056 if (align_words >= GP_ARG_NUM_REG)
5057 {
5058 slot = NULL_RTX;
5059 }
5060 else
5061 {
5062 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5063 }
5064 return gen_rtx_PARALLEL (mode,
5065 gen_rtvec (2,
5066 gen_rtx_EXPR_LIST (VOIDmode,
5067 slot, const0_rtx),
5068 gen_rtx_EXPR_LIST (VOIDmode,
5069 gen_rtx_REG (mode, cum->vregno),
5070 const0_rtx)));
c72d6c26
HP
5071 }
5072 else
5073 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5074 else if (TARGET_ALTIVEC_ABI
5075 && (ALTIVEC_VECTOR_MODE (mode)
5076 || (type && TREE_CODE (type) == VECTOR_TYPE
5077 && int_size_in_bytes (type) == 16)))
0ac081f6 5078 {
2858f73a 5079 if (named || abi == ABI_V4)
a594a19c 5080 return NULL_RTX;
0ac081f6 5081 else
a594a19c
GK
5082 {
5083 /* Vector parameters to varargs functions under AIX or Darwin
5084 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5085 int align, align_words, n_words;
5086 enum machine_mode part_mode;
a594a19c
GK
5087
5088 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5089 2 mod 4 in terms of words in 32-bit mode, since the parameter
5090 save area starts at offset 24 from the stack. In 64-bit mode,
5091 they just have to start on an even word, since the parameter
5092 save area is 16-byte aligned. */
5093 if (TARGET_32BIT)
4ed78545 5094 align = (2 - cum->words) & 3;
2858f73a
GK
5095 else
5096 align = cum->words & 1;
a594a19c
GK
5097 align_words = cum->words + align;
5098
5099 /* Out of registers? Memory, then. */
5100 if (align_words >= GP_ARG_NUM_REG)
5101 return NULL_RTX;
ec6376ab
AM
5102
5103 if (TARGET_32BIT && TARGET_POWERPC64)
5104 return rs6000_mixed_function_arg (mode, type, align_words);
5105
2858f73a
GK
5106 /* The vector value goes in GPRs. Only the part of the
5107 value in GPRs is reported here. */
ec6376ab
AM
5108 part_mode = mode;
5109 n_words = rs6000_arg_size (mode, type);
5110 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5111 	    /* Fortunately, there are only two possibilities: the value
2858f73a
GK
5112 is either wholly in GPRs or half in GPRs and half not. */
5113 part_mode = DImode;
ec6376ab
AM
5114
5115 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5116 }
0ac081f6 5117 }
f82f556d
AH
5118 else if (TARGET_SPE_ABI && TARGET_SPE
5119 && (SPE_VECTOR_MODE (mode)
18f63bfa
AH
5120 || (TARGET_E500_DOUBLE && (mode == DFmode
5121 || mode == DCmode))))
a6c9bed4 5122 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5123
f607bc57 5124 else if (abi == ABI_V4)
4697a36c 5125 {
a3170dc6 5126 if (TARGET_HARD_FLOAT && TARGET_FPRS
4cc833b7
RH
5127 && (mode == SFmode || mode == DFmode))
5128 {
5129 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5130 return gen_rtx_REG (mode, cum->fregno);
5131 else
b78d48dd 5132 return NULL_RTX;
4cc833b7
RH
5133 }
5134 else
5135 {
b2d04ecf 5136 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5137 int gregno = cum->sysv_gregno;
5138
4ed78545
AM
5139 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
 5140 	     (r7,r8) or (r9,r10), as is any other 2-word item such
 5141 	     as complex int, due to a historical mistake.  */
5142 if (n_words == 2)
5143 gregno += (1 - gregno) & 1;
4cc833b7 5144
4ed78545 5145 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5146 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5147 return NULL_RTX;
ec6376ab
AM
5148
5149 if (TARGET_32BIT && TARGET_POWERPC64)
5150 return rs6000_mixed_function_arg (mode, type,
5151 gregno - GP_ARG_MIN_REG);
5152 return gen_rtx_REG (mode, gregno);
4cc833b7 5153 }
4697a36c 5154 }
4cc833b7
RH
5155 else
5156 {
294bd182 5157 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5158
2858f73a 5159 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5160 {
ec6376ab
AM
5161 rtx rvec[GP_ARG_NUM_REG + 1];
5162 rtx r;
5163 int k;
c53bdcf5
AM
5164 bool needs_psave;
5165 enum machine_mode fmode = mode;
c53bdcf5
AM
5166 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5167
5168 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5169 {
c53bdcf5
AM
5170 /* Currently, we only ever need one reg here because complex
5171 doubles are split. */
37409796 5172 gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);
ec6376ab
AM
5173
5174 /* Long double split over regs and memory. */
5175 fmode = DFmode;
c53bdcf5 5176 }
c53bdcf5
AM
5177
5178 /* Do we also need to pass this arg in the parameter save
5179 area? */
5180 needs_psave = (type
5181 && (cum->nargs_prototype <= 0
5182 || (DEFAULT_ABI == ABI_AIX
de17c25f 5183 && TARGET_XL_COMPAT
c53bdcf5
AM
5184 && align_words >= GP_ARG_NUM_REG)));
5185
5186 if (!needs_psave && mode == fmode)
ec6376ab 5187 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5188
ec6376ab 5189 k = 0;
c53bdcf5
AM
5190 if (needs_psave)
5191 {
ec6376ab 5192 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5193 This piece must come first, before the fprs. */
c53bdcf5
AM
5194 if (align_words < GP_ARG_NUM_REG)
5195 {
5196 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5197
5198 if (align_words + n_words > GP_ARG_NUM_REG
5199 || (TARGET_32BIT && TARGET_POWERPC64))
5200 {
5201 /* If this is partially on the stack, then we only
5202 include the portion actually in registers here. */
5203 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5204 rtx off;
2e6c9641 5205 int i=0;
c4ad648e
AM
5206 if (align_words + n_words > GP_ARG_NUM_REG
5207 && (TARGET_32BIT && TARGET_POWERPC64))
5208 /* Not all of the arg fits in gprs. Say that it
5209 goes in memory too, using a magic NULL_RTX
5210 component. Also see comment in
5211 rs6000_mixed_function_arg for why the normal
5212 function_arg_partial_nregs scheme doesn't work
5213 in this case. */
5214 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5215 const0_rtx);
ec6376ab
AM
5216 do
5217 {
5218 r = gen_rtx_REG (rmode,
5219 GP_ARG_MIN_REG + align_words);
2e6c9641 5220 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5221 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5222 }
5223 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5224 }
5225 else
5226 {
5227 /* The whole arg fits in gprs. */
5228 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5229 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5230 }
c53bdcf5 5231 }
ec6376ab
AM
5232 else
5233 /* It's entirely in memory. */
5234 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5235 }
5236
ec6376ab
AM
5237 /* Describe where this piece goes in the fprs. */
5238 r = gen_rtx_REG (fmode, cum->fregno);
5239 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5240
5241 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5242 }
5243 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5244 {
ec6376ab
AM
5245 if (TARGET_32BIT && TARGET_POWERPC64)
5246 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5247
4eeca74f
AM
5248 if (mode == BLKmode)
5249 mode = Pmode;
5250
b2d04ecf
AM
5251 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5252 }
4cc833b7
RH
5253 else
5254 return NULL_RTX;
4697a36c 5255 }
4697a36c
MM
5256}
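/* Worked example (illustrative, not part of rs6000.c): under the AIX
   ABI, a DFmode argument to an unprototyped function (nargs_prototype
   <= 0) must be described both in its FPR and in the GPR/parameter save
   area, per the comment above.  For the first argument on a 64-bit
   target the code returns roughly

     (parallel:DF [(expr_list (reg:DF r3) (const_int 0))
                   (expr_list (reg:DF f1) (const_int 0))])

   i.e. the value is passed in r3 (or the corresponding save-area slot)
   as well as in f1, because the callee's expectations are unknown.  */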
5257\f
ec6376ab 5258/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5259 the number of bytes passed in registers. For args passed entirely in
5260 registers or entirely in memory, zero. When an arg is described by a
5261 PARALLEL, perhaps using more than one register type, this function
5262 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5263
78a52f11
RH
5264static int
5265rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5266 tree type, bool named)
4697a36c 5267{
c53bdcf5 5268 int ret = 0;
ec6376ab 5269 int align_words;
c53bdcf5 5270
f607bc57 5271 if (DEFAULT_ABI == ABI_V4)
4697a36c 5272 return 0;
4697a36c 5273
c53bdcf5
AM
5274 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5275 && cum->nargs_prototype >= 0)
5276 return 0;
5277
0b5383eb
DJ
5278 /* In this complicated case we just disable the partial_nregs code. */
5279 if (rs6000_darwin64_abi && mode == BLKmode
5280 && TREE_CODE (type) == RECORD_TYPE
5281 && int_size_in_bytes (type) > 0)
5282 return 0;
5283
294bd182 5284 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab
AM
5285
5286 if (USE_FP_FOR_ARG_P (cum, mode, type)
fb63c729
AM
5287 /* If we are passing this arg in the fixed parameter save area
5288 (gprs or memory) as well as fprs, then this function should
5289 return the number of bytes passed in the parameter save area
bb8df8a6 5290 rather than bytes passed in fprs. */
ec6376ab
AM
5291 && !(type
5292 && (cum->nargs_prototype <= 0
5293 || (DEFAULT_ABI == ABI_AIX
de17c25f 5294 && TARGET_XL_COMPAT
ec6376ab 5295 && align_words >= GP_ARG_NUM_REG))))
4697a36c 5296 {
c53bdcf5 5297 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
ac7e839c 5298 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5299 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5300 return 0;
5301 }
5302
ec6376ab
AM
5303 if (align_words < GP_ARG_NUM_REG
5304 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5305 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5306
c53bdcf5 5307 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5308 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5309
c53bdcf5 5310 return ret;
4697a36c
MM
5311}
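/* Worked example (illustrative, not part of rs6000.c): on a 32-bit AIX
   target, a 16-byte BLKmode argument whose first word falls in the last
   argument GPR, r10 (align_words == 7), occupies rs6000_arg_size == 4
   words, of which only GP_ARG_NUM_REG - align_words == 1 fits in
   registers, so the function above returns (8 - 7) * 4 == 4 bytes; the
   remaining 12 bytes go to the parameter save area.  */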
5312\f
5313/* A C expression that indicates when an argument must be passed by
5314 reference. If nonzero for an argument, a copy of that argument is
5315 made in memory and a pointer to the argument is passed instead of
5316 the argument itself. The pointer is passed in whatever way is
5317 appropriate for passing a pointer to that type.
5318
b2d04ecf
AM
5319 Under V.4, aggregates and long double are passed by reference.
5320
5321 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5322 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5323
5324 As an extension to all ABIs, variable sized types are passed by
5325 reference. */
4697a36c 5326
8cd5a4e0 5327static bool
f676971a 5328rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
bada2eb8
DE
5329 enum machine_mode mode, tree type,
5330 bool named ATTRIBUTE_UNUSED)
4697a36c 5331{
bada2eb8 5332 if (DEFAULT_ABI == ABI_V4 && mode == TFmode)
4697a36c
MM
5333 {
5334 if (TARGET_DEBUG_ARG)
bada2eb8
DE
5335 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5336 return 1;
5337 }
5338
5339 if (!type)
5340 return 0;
4697a36c 5341
bada2eb8
DE
5342 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5343 {
5344 if (TARGET_DEBUG_ARG)
5345 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5346 return 1;
5347 }
5348
5349 if (int_size_in_bytes (type) < 0)
5350 {
5351 if (TARGET_DEBUG_ARG)
5352 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5353 return 1;
5354 }
5355
5356 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5357 modes only exist for GCC vector types if -maltivec. */
5358 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5359 {
5360 if (TARGET_DEBUG_ARG)
5361 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
5362 return 1;
5363 }
b693336b
PB
5364
5365 /* Pass synthetic vectors in memory. */
bada2eb8 5366 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5367 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5368 {
5369 static bool warned_for_pass_big_vectors = false;
5370 if (TARGET_DEBUG_ARG)
5371 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5372 if (!warned_for_pass_big_vectors)
5373 {
d4ee4d25 5374 warning (0, "GCC vector passed by reference: "
b693336b
PB
5375 "non-standard ABI extension with no compatibility guarantee");
5376 warned_for_pass_big_vectors = true;
5377 }
5378 return 1;
5379 }
5380
b2d04ecf 5381 return 0;
4697a36c 5382}
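/* Illustrative summary (not part of rs6000.c) of the rules above for
   the V.4 ABI, assuming hardware floating point:

     struct S { int a, b; }    aggregate                -> by reference
     long double               128-bit TFmode           -> by reference
     variably sized types      int_size_in_bytes < 0    -> by reference
     double                    ordinary scalar          -> by value

   AltiVec vectors are passed by value only when the AltiVec ABI is in
   force; otherwise they too go by reference, as do oversized GCC
   "synthetic" vectors, which additionally draw the one-time warning
   above.  */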
5985c7a6
FJ
5383
5384static void
2d9db8eb 5385rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
5386{
5387 int i;
5388 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5389
5390 if (nregs == 0)
5391 return;
5392
c4ad648e 5393 for (i = 0; i < nregs; i++)
5985c7a6 5394 {
9390387d 5395 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 5396 if (reload_completed)
c4ad648e
AM
5397 {
5398 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5399 tem = NULL_RTX;
5400 else
5401 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 5402 i * GET_MODE_SIZE (reg_mode));
c4ad648e 5403 }
5985c7a6
FJ
5404 else
5405 tem = replace_equiv_address (tem, XEXP (tem, 0));
5406
37409796 5407 gcc_assert (tem);
5985c7a6
FJ
5408
5409 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5410 }
5411}
4697a36c
MM
5412\f
5413/* Perform any actions needed for a function that is receiving a
f676971a 5414 variable number of arguments.
4697a36c
MM
5415
5416 CUM is as above.
5417
5418 MODE and TYPE are the mode and type of the current parameter.
5419
5420 PRETEND_SIZE is a variable that should be set to the amount of stack
5421 that must be pushed by the prolog to pretend that our caller pushed
5422 it.
5423
5424 Normally, this macro will push all remaining incoming registers on the
5425 stack and set PRETEND_SIZE to the length of the registers pushed. */
5426
c6e8c921 5427static void
f676971a 5428setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
5429 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5430 int no_rtl)
4697a36c 5431{
4cc833b7
RH
5432 CUMULATIVE_ARGS next_cum;
5433 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 5434 rtx save_area = NULL_RTX, mem;
dfafc897 5435 int first_reg_offset, set;
4697a36c 5436
f31bf321 5437 /* Skip the last named argument. */
d34c5b80 5438 next_cum = *cum;
594a51fe 5439 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 5440
f607bc57 5441 if (DEFAULT_ABI == ABI_V4)
d34c5b80 5442 {
5b667039
JJ
5443 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5444
60e2d0ca 5445 if (! no_rtl)
5b667039
JJ
5446 {
5447 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5448 HOST_WIDE_INT offset = 0;
5449
5450 /* Try to optimize the size of the varargs save area.
5451 The ABI requires that ap.reg_save_area is doubleword
5452 aligned, but we don't need to allocate space for all
 5453 	     the bytes, only those into which we will actually save
 5454 	     anything.  */
5455 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5456 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5457 if (TARGET_HARD_FLOAT && TARGET_FPRS
5458 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5459 && cfun->va_list_fpr_size)
5460 {
5461 if (gpr_reg_num)
5462 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5463 * UNITS_PER_FP_WORD;
5464 if (cfun->va_list_fpr_size
5465 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5466 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5467 else
5468 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5469 * UNITS_PER_FP_WORD;
5470 }
5471 if (gpr_reg_num)
5472 {
5473 offset = -((first_reg_offset * reg_size) & ~7);
5474 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5475 {
5476 gpr_reg_num = cfun->va_list_gpr_size;
5477 if (reg_size == 4 && (first_reg_offset & 1))
5478 gpr_reg_num++;
5479 }
5480 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5481 }
5482 else if (fpr_size)
5483 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5484 * UNITS_PER_FP_WORD
5485 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 5486
5b667039
JJ
5487 if (gpr_size + fpr_size)
5488 {
5489 rtx reg_save_area
5490 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5491 gcc_assert (GET_CODE (reg_save_area) == MEM);
5492 reg_save_area = XEXP (reg_save_area, 0);
5493 if (GET_CODE (reg_save_area) == PLUS)
5494 {
5495 gcc_assert (XEXP (reg_save_area, 0)
5496 == virtual_stack_vars_rtx);
5497 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5498 offset += INTVAL (XEXP (reg_save_area, 1));
5499 }
5500 else
5501 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5502 }
5503
5504 cfun->machine->varargs_save_offset = offset;
5505 save_area = plus_constant (virtual_stack_vars_rtx, offset);
5506 }
4697a36c 5507 }
60e2d0ca 5508 else
4697a36c 5509 {
d34c5b80 5510 first_reg_offset = next_cum.words;
4cc833b7 5511 save_area = virtual_incoming_args_rtx;
4697a36c 5512
fe984136 5513 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 5514 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 5515 }
4697a36c 5516
dfafc897 5517 set = get_varargs_alias_set ();
9d30f3c1
JJ
5518 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
5519 && cfun->va_list_gpr_size)
4cc833b7 5520 {
9d30f3c1
JJ
5521 int nregs = GP_ARG_NUM_REG - first_reg_offset;
5522
5523 if (va_list_gpr_counter_field)
5524 {
5525 /* V4 va_list_gpr_size counts number of registers needed. */
5526 if (nregs > cfun->va_list_gpr_size)
5527 nregs = cfun->va_list_gpr_size;
5528 }
5529 else
5530 {
5531 /* char * va_list instead counts number of bytes needed. */
5532 if (nregs > cfun->va_list_gpr_size / reg_size)
5533 nregs = cfun->va_list_gpr_size / reg_size;
5534 }
5535
dfafc897 5536 mem = gen_rtx_MEM (BLKmode,
c4ad648e 5537 plus_constant (save_area,
13e2e16e
DE
5538 first_reg_offset * reg_size));
5539 MEM_NOTRAP_P (mem) = 1;
ba4828e0 5540 set_mem_alias_set (mem, set);
8ac61af7 5541 set_mem_align (mem, BITS_PER_WORD);
dfafc897 5542
f676971a 5543 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 5544 nregs);
4697a36c
MM
5545 }
5546
4697a36c 5547 /* Save FP registers if needed. */
f607bc57 5548 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
5549 && TARGET_HARD_FLOAT && TARGET_FPRS
5550 && ! no_rtl
9d30f3c1
JJ
5551 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5552 && cfun->va_list_fpr_size)
4697a36c 5553 {
9d30f3c1 5554 int fregno = next_cum.fregno, nregs;
9ebbca7d 5555 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 5556 rtx lab = gen_label_rtx ();
5b667039
JJ
5557 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
5558 * UNITS_PER_FP_WORD);
4697a36c 5559
c4ad648e
AM
5560 emit_jump_insn
5561 (gen_rtx_SET (VOIDmode,
5562 pc_rtx,
5563 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 5564 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 5565 const0_rtx),
39403d82 5566 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
5567 pc_rtx)));
5568
9d30f3c1
JJ
5569 for (nregs = 0;
5570 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 5571 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 5572 {
5496b36f 5573 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 5574 MEM_NOTRAP_P (mem) = 1;
c4ad648e 5575 set_mem_alias_set (mem, set);
94ff898d 5576 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 5577 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 5578 }
4cc833b7
RH
5579
5580 emit_label (lab);
4697a36c 5581 }
4697a36c 5582}
4697a36c 5583
dfafc897 5584/* Create the va_list data type. */
2c4974b7 5585
c35d187f
RH
5586static tree
5587rs6000_build_builtin_va_list (void)
dfafc897 5588{
64c2816f 5589 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 5590
9ebbca7d
GK
5591 /* For AIX, prefer 'char *' because that's what the system
5592 header files like. */
f607bc57 5593 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 5594 return build_pointer_type (char_type_node);
dfafc897 5595
f1e639b1 5596 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 5597 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 5598
f676971a 5599 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 5600 unsigned_char_type_node);
f676971a 5601 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 5602 unsigned_char_type_node);
64c2816f
DT
5603 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5604 every user file. */
5605 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5606 short_unsigned_type_node);
dfafc897
FS
5607 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5608 ptr_type_node);
5609 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5610 ptr_type_node);
5611
9d30f3c1
JJ
5612 va_list_gpr_counter_field = f_gpr;
5613 va_list_fpr_counter_field = f_fpr;
5614
dfafc897
FS
5615 DECL_FIELD_CONTEXT (f_gpr) = record;
5616 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 5617 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
5618 DECL_FIELD_CONTEXT (f_ovf) = record;
5619 DECL_FIELD_CONTEXT (f_sav) = record;
5620
bab45a51
FS
5621 TREE_CHAIN (record) = type_decl;
5622 TYPE_NAME (record) = type_decl;
dfafc897
FS
5623 TYPE_FIELDS (record) = f_gpr;
5624 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
5625 TREE_CHAIN (f_fpr) = f_res;
5626 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
5627 TREE_CHAIN (f_ovf) = f_sav;
5628
5629 layout_type (record);
5630
5631 /* The correct type is an array type of one element. */
5632 return build_array_type (record, build_index_type (size_zero_node));
5633}
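/* For reference, the record built above corresponds to the familiar
   PowerPC SVR4 va_list layout.  A rough C rendering follows (sketch
   only; the type names below are illustrative, the real tag is the
   __va_list_tag created above):  */

typedef struct example_va_list_tag
{
  unsigned char gpr;		/* GP argument registers consumed so far */
  unsigned char fpr;		/* FP argument registers consumed so far */
  unsigned short reserved;	/* padding, named to keep -Wpadded quiet */
  void *overflow_arg_area;	/* arguments that went to the stack */
  void *reg_save_area;		/* base of the GP/FP register save area */
} example_va_list[1];		/* array type of one element, as above */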
5634
5635/* Implement va_start. */
5636
5637void
a2369ed3 5638rs6000_va_start (tree valist, rtx nextarg)
4697a36c 5639{
dfafc897 5640 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 5641 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 5642 tree gpr, fpr, ovf, sav, t;
2c4974b7 5643
dfafc897 5644 /* Only SVR4 needs something special. */
f607bc57 5645 if (DEFAULT_ABI != ABI_V4)
dfafc897 5646 {
e5faf155 5647 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
5648 return;
5649 }
5650
973a648b 5651 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 5652 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
5653 f_res = TREE_CHAIN (f_fpr);
5654 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
5655 f_sav = TREE_CHAIN (f_ovf);
5656
872a65b5 5657 valist = build_va_arg_indirect_ref (valist);
44de5aeb
RK
5658 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5659 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5660 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5661 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
5662
5663 /* Count number of gp and fp argument registers used. */
4cc833b7 5664 words = current_function_args_info.words;
987732e0
DE
5665 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
5666 GP_ARG_NUM_REG);
5667 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
5668 FP_ARG_NUM_REG);
dfafc897
FS
5669
5670 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
5671 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5672 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5673 words, n_gpr, n_fpr);
dfafc897 5674
9d30f3c1
JJ
5675 if (cfun->va_list_gpr_size)
5676 {
5677 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5678 build_int_cst (NULL_TREE, n_gpr));
5679 TREE_SIDE_EFFECTS (t) = 1;
5680 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5681 }
58c8adc1 5682
9d30f3c1
JJ
5683 if (cfun->va_list_fpr_size)
5684 {
5685 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5686 build_int_cst (NULL_TREE, n_fpr));
5687 TREE_SIDE_EFFECTS (t) = 1;
5688 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5689 }
dfafc897
FS
5690
5691 /* Find the overflow area. */
5692 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5693 if (words != 0)
5694 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
7d60be94 5695 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
dfafc897
FS
5696 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5697 TREE_SIDE_EFFECTS (t) = 1;
5698 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5699
9d30f3c1
JJ
5700 /* If there were no va_arg invocations, don't set up the register
5701 save area. */
5702 if (!cfun->va_list_gpr_size
5703 && !cfun->va_list_fpr_size
5704 && n_gpr < GP_ARG_NUM_REG
5705 && n_fpr < FP_ARG_V4_MAX_REG)
5706 return;
5707
dfafc897
FS
5708 /* Find the register save area. */
5709 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039
JJ
5710 if (cfun->machine->varargs_save_offset)
5711 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5712 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
dfafc897
FS
5713 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5714 TREE_SIDE_EFFECTS (t) = 1;
5715 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5716}
5717
5718/* Implement va_arg. */
5719
23a60a04
JM
5720tree
5721rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 5722{
cd3ce9b4
JM
5723 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5724 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 5725 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
5726 tree lab_false, lab_over, addr;
5727 int align;
5728 tree ptrtype = build_pointer_type (type);
5729
08b0dc1b
RH
5730 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5731 {
5732 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 5733 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
5734 }
5735
cd3ce9b4
JM
5736 if (DEFAULT_ABI != ABI_V4)
5737 {
08b0dc1b 5738 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
5739 {
5740 tree elem_type = TREE_TYPE (type);
5741 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5742 int elem_size = GET_MODE_SIZE (elem_mode);
5743
5744 if (elem_size < UNITS_PER_WORD)
5745 {
23a60a04 5746 tree real_part, imag_part;
cd3ce9b4
JM
5747 tree post = NULL_TREE;
5748
23a60a04
JM
5749 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5750 &post);
5751 /* Copy the value into a temporary, lest the formal temporary
5752 be reused out from under us. */
5753 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
5754 append_to_statement_list (post, pre_p);
5755
23a60a04
JM
5756 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5757 post_p);
cd3ce9b4 5758
23a60a04 5759 return build (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
5760 }
5761 }
5762
23a60a04 5763 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
5764 }
5765
5766 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5767 f_fpr = TREE_CHAIN (f_gpr);
5768 f_res = TREE_CHAIN (f_fpr);
5769 f_ovf = TREE_CHAIN (f_res);
5770 f_sav = TREE_CHAIN (f_ovf);
5771
872a65b5 5772 valist = build_va_arg_indirect_ref (valist);
44de5aeb
RK
5773 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5774 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5775 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5776 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
5777
5778 size = int_size_in_bytes (type);
5779 rsize = (size + 3) / 4;
5780 align = 1;
5781
08b0dc1b
RH
5782 if (TARGET_HARD_FLOAT && TARGET_FPRS
5783 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
cd3ce9b4
JM
5784 {
5785 /* FP args go in FP registers, if present. */
cd3ce9b4
JM
5786 reg = fpr;
5787 n_reg = 1;
5788 sav_ofs = 8*4;
5789 sav_scale = 8;
5790 if (TYPE_MODE (type) == DFmode)
5791 align = 8;
5792 }
5793 else
5794 {
5795 /* Otherwise into GP registers. */
cd3ce9b4
JM
5796 reg = gpr;
5797 n_reg = rsize;
5798 sav_ofs = 0;
5799 sav_scale = 4;
5800 if (n_reg == 2)
5801 align = 8;
5802 }
5803
5804 /* Pull the value out of the saved registers.... */
5805
5806 lab_over = NULL;
5807 addr = create_tmp_var (ptr_type_node, "addr");
5808 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5809
5810 /* AltiVec vectors never go in registers when -mabi=altivec. */
5811 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5812 align = 16;
5813 else
5814 {
5815 lab_false = create_artificial_label ();
5816 lab_over = create_artificial_label ();
5817
5818 /* Long long and SPE vectors are aligned in the registers.
 5819 	    So is any other 2-gpr item, such as complex int, due to a
 5820 	    historical mistake.  */
5821 u = reg;
5822 if (n_reg == 2)
5823 {
5824 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
95674810 5825 size_int (n_reg - 1));
cd3ce9b4
JM
5826 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5827 }
5828
95674810 5829 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
5830 t = build2 (GE_EXPR, boolean_type_node, u, t);
5831 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5832 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5833 gimplify_and_add (t, pre_p);
5834
5835 t = sav;
5836 if (sav_ofs)
95674810 5837 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 5838
95674810 5839 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
cd3ce9b4 5840 u = build1 (CONVERT_EXPR, integer_type_node, u);
95674810 5841 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
cd3ce9b4
JM
5842 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5843
5844 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5845 gimplify_and_add (t, pre_p);
5846
5847 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5848 gimplify_and_add (t, pre_p);
5849
5850 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5851 append_to_statement_list (t, pre_p);
5852
5853 if (n_reg > 2)
5854 {
5855 /* Ensure that we don't find any more args in regs.
5856 Alignment has taken care of the n_reg == 2 case. */
95674810 5857 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
5858 gimplify_and_add (t, pre_p);
5859 }
5860 }
5861
5862 /* ... otherwise out of the overflow area. */
5863
5864 /* Care for on-stack alignment if needed. */
5865 t = ovf;
5866 if (align != 1)
5867 {
95674810 5868 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
4a90aeeb 5869 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
7d60be94 5870 build_int_cst (NULL_TREE, -align));
cd3ce9b4
JM
5871 }
5872 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5873
5874 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5875 gimplify_and_add (u, pre_p);
5876
95674810 5877 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
cd3ce9b4
JM
5878 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5879 gimplify_and_add (t, pre_p);
5880
5881 if (lab_over)
5882 {
5883 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5884 append_to_statement_list (t, pre_p);
5885 }
5886
08b0dc1b 5887 addr = fold_convert (ptrtype, addr);
872a65b5 5888 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
5889}
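/* A hand-written C sketch (illustrative only, not the trees GCC builds
   verbatim) of what the code above computes for va_arg of a 4-byte int
   under the V.4 ABI, using the example_va_list_tag layout sketched
   after rs6000_build_builtin_va_list; for GP registers sav_ofs is 0 and
   sav_scale is 4.  The double and long long cases additionally align
   the register counter and the overflow pointer.  */

static void *
example_v4_va_arg_int (struct example_va_list_tag *ap)
{
  void *addr;

  if (ap->gpr < 8)
    {
      /* Still within the 8 GP argument registers: index the register
	 save area with the old counter value, then advance it.  */
      addr = (char *) ap->reg_save_area + ap->gpr * 4;
      ap->gpr += 1;
    }
  else
    {
      /* Out of registers: take the value from the overflow area and
	 bump the overflow pointer by the argument size.  */
      addr = ap->overflow_arg_area;
      ap->overflow_arg_area = (char *) ap->overflow_arg_area + 4;
    }
  return addr;
}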
5890
0ac081f6
AH
5891/* Builtins. */
5892
58646b77
PB
5893static void
5894def_builtin (int mask, const char *name, tree type, int code)
5895{
5896 if (mask & target_flags)
5897 {
5898 if (rs6000_builtin_decls[code])
5899 abort ();
5900
5901 rs6000_builtin_decls[code] =
5902 lang_hooks.builtin_function (name, type, code, BUILT_IN_MD,
5903 NULL, NULL_TREE);
5904 }
5905}
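/* Illustrative note (not a quotation from this file): the descriptor
   tables below are registered through def_builtin by the builtin
   initialization code, with calls of roughly this shape (the type-node
   variable here is hypothetical):

     def_builtin (d->mask, d->name, v4sf_ftype_v4sf_v4sf, d->code);

   so an entry is skipped unless its MASK_* bits are set in
   target_flags, and otherwise becomes a BUILT_IN_MD function decl.  */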
0ac081f6 5906
24408032
AH
5907/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5908
2212663f 5909static const struct builtin_description bdesc_3arg[] =
24408032
AH
5910{
5911 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5912 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5913 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5914 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5915 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5916 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5917 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5918 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5919 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5920 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 5921 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
5922 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5923 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5924 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5925 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5926 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5927 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5928 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5929 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5930 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5931 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5932 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5933 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
5934
5935 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
5936 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
5937 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
5938 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
5939 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
5940 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
5941 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
5942 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
5943 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
5944 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
5945 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
5946 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
5947 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
5948 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
5949 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 5950};
2212663f 5951
95385cbb
AH
5952/* DST operations: void foo (void *, const int, const char). */
5953
5954static const struct builtin_description bdesc_dst[] =
5955{
5956 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5957 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5958 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
5959 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
5960
5961 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
5962 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
5963 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
5964 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
5965};
5966
2212663f 5967/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 5968
a3170dc6 5969static struct builtin_description bdesc_2arg[] =
0ac081f6 5970{
f18c054f
DB
5971 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5972 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5973 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5974 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
5975 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5976 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5977 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5978 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5979 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5980 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5981 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 5982 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 5983 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
5984 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5985 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5986 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5987 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5988 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5989 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
5990 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5991 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
5992 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5993 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5994 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5995 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5996 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5997 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6005 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6006 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6007 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6008 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6009 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6010 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6011 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6012 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6013 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6014 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6015 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6016 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6017 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6018 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6019 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6020 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6021 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6022 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6023 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6024 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6025 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6026 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6027 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6028 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6029 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6030 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6031 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6032 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6033 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6034 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6035 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6036 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6037 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6039 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6040 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6041 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6042 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6043 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6046 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6049 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6051 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6052 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6053 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6054 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6055 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6056 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6057 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6058 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6059 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6060 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6061 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6062 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6063 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6064 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6065 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6066 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6067 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6068 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6069 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6070 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6071 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6072 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6073 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6074 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6075 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6076 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6077 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6078 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6079 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6080 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6081 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6082
58646b77
PB
6083 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6084 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6085 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6086 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6087 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6088 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6089 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6090 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6091 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6092 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6093 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6094 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6095 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6096 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6097 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6098 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6099 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6100 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6101 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6102 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6107 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6108 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6109 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6110 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6111 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6112 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6113 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6114 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6149 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6150 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6155 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6156 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6157 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6158 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6159 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6160 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6161 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6162 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6163 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6164 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6165 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6166 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6167 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6168 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6169 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6170 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6171 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6172 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6173 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6174 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6175 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6176 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6177 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6178 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6179 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6180 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6181 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6182 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6183 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6184 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6185 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6186 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6187 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6188 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6189 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6190 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6191 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6192 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6193 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6194 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6195 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6196 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6197 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6198 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6199 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6200 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6201 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6202 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6203 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6204 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6205 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6206 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6207 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6208 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6209 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6210
a3170dc6
AH
 6211 /* Place-holder.  Leave as first SPE builtin.  */
6212 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6213 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6214 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6215 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6216 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6217 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6218 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6219 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6220 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6221 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6222 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6223 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6224 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6225 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6226 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6227 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6228 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6229 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6230 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6231 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6232 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6233 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6234 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6235 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6236 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6237 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6238 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6239 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6240 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6241 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6242 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6243 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6244 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6245 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6246 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6247 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6248 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6249 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6250 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6251 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6252 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6253 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6254 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6255 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6256 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6257 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6258 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6259 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6260 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6261 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6262 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6263 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6264 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6265 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6266 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6267 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6268 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6269 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6270 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6271 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6272 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6273 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6274 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6275 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6276 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6277 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6278 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6279 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6280 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6281 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6282 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6283 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6284 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6285 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
6286 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6287 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
6288 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6289 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6290 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6291 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6292 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6293 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6294 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6295 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6296 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6297 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6298 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6299 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6300 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6301 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6302 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6303 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6304 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6305 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6306 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6307 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6308 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6309 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6310 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6311 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6312 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6313 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6314 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6315 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6316 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6317 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6318 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6319 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6320 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6321
6322 /* SPE binary operations expecting a 5-bit unsigned literal. */
6323 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6324
6325 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6326 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6327 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6328 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6329 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6330 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6331 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6332 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6333 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6334 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6335 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6336 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6337 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6338 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6339 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6340 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6341 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6342 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6343 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6344 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6345 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6346 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6347 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6348 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6349 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6350 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6351
6352 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 6353 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
6354};
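/* Editorial example (not part of rs6000.c): the table above is pure data;
   each entry ties a target mask, an insn_code from the machine description,
   a builtin name and an rs6000_builtins value together.  A minimal sketch of
   how such a table is typically walked, assuming the array is the bdesc_2arg
   table whose declaration precedes this excerpt and that a def_builtin
   helper (used by this port's builtin initialization) honors d->mask; the
   real code also derives a per-entry function type rather than the single
   FNTYPE used here.  */

static void
example_register_2arg_builtins (tree fntype)
{
  const struct builtin_description *d = bdesc_2arg;
  size_t i;

  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    def_builtin (d->mask, d->name, fntype, d->code);
}

/* Expansion is the mirror image: find the entry whose code matches the
   DECL_FUNCTION_CODE of the call and hand its icode to the generic
   two-operand expander defined later in this file.  */

static rtx
example_expand_2arg_builtin (unsigned int fcode, tree arglist, rtx target)
{
  const struct builtin_description *d = bdesc_2arg;
  size_t i;

  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if ((unsigned int) d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  return NULL_RTX;
}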
6355
6356/* AltiVec predicates. */
6357
6358struct builtin_description_predicates
6359{
6360 const unsigned int mask;
6361 const enum insn_code icode;
6362 const char *opcode;
6363 const char *const name;
6364 const enum rs6000_builtins code;
6365};
6366
6367static const struct builtin_description_predicates bdesc_altivec_preds[] =
6368{
6369 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6370 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6371 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6372 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6373 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6374 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6375 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6376 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6377 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6378 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6379 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6380 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
6381 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6382
6383 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6384 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6385 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 6386};
24408032 6387
a3170dc6
AH
6388/* SPE predicates. */
6389static struct builtin_description bdesc_spe_predicates[] =
6390{
6391 /* Place-holder. Leave as first. */
6392 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6393 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6394 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6395 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6396 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6397 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6398 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6399 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6400 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6401 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6402 /* Place-holder. Leave as last. */
6403 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6404};
6405
6406/* SPE evsel predicates. */
6407static struct builtin_description bdesc_spe_evsel[] =
6408{
6409 /* Place-holder. Leave as first. */
6410 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6411 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6412 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6413 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6414 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6415 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6416 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6417 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6418 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6419 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6420 /* Place-holder. Leave as last. */
6421 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6422};
6423
b6d08ca1 6424/* ABS* operations. */
100c4561
AH
6425
6426static const struct builtin_description bdesc_abs[] =
6427{
6428 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6429 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6430 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6431 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6432 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6433 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6434 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6435};
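/* Editorial example (not part of rs6000.c): the ABS table above wires the
   generic abs patterns (absv4si2 and friends) and the saturating
   altivec_abss_* patterns to builtins.  A hedged usage sketch, assuming
   <altivec.h> and -maltivec; vec_abs and vec_abss are the user-level
   spellings that end up in these entries.  */
#include <altivec.h>

vector signed int
example_abs (vector signed int x)
{
  return vec_abs (x);    /* __builtin_altivec_abs_v4si, absv4si2       */
}

vector signed int
example_abss (vector signed int x)
{
  return vec_abss (x);   /* __builtin_altivec_abss_v4si, saturating    */
}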
6436
617e0e1d
DB
6437/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6438 foo (VECa). */
24408032 6439
a3170dc6 6440static struct builtin_description bdesc_1arg[] =
2212663f 6441{
617e0e1d
DB
6442 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6443 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6444 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6445 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6446 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6447 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6448 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6449 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
6450 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6451 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6452 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
6453 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6454 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6455 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6456 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6457 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6458 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 6459
58646b77
PB
6460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
6473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
6474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
6475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
6476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
6477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
6478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
6479
a3170dc6
AH
6480 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6481 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6482 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6483 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6484 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6485 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6486 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6487 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6488 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6489 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6490 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6491 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6492 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6493 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6494 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6495 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6496 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6497 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6498 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6499 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6500 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6501 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6502 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6503 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6504 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 6505 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
6506 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6507 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6508 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6509 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
6510
6511 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 6512 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
2212663f
DB
6513};
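/* Editorial example (not part of rs6000.c): the CODE_FOR_nothing entries
   above (__builtin_vec_abs, __builtin_vec_round, ...) are the overloaded
   forms; they carry no insn pattern because the front end resolves them to a
   type-specific builtin before this file ever expands them.  A small usage
   sketch, assuming <altivec.h> and -maltivec.  */
#include <altivec.h>

vector float
example_rounding (vector float x)
{
  vector float r = vec_round (x);   /* resolves to vrfin                */
  vector float t = vec_trunc (x);   /* resolves to vrfiz (ftruncv4sf2)  */
  return vec_add (r, t);
}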
6514
6515static rtx
a2369ed3 6516rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
2212663f
DB
6517{
6518 rtx pat;
6519 tree arg0 = TREE_VALUE (arglist);
6520 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6521 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6522 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6523
0559cc77
DE
6524 if (icode == CODE_FOR_nothing)
6525 /* Builtin not supported on this processor. */
6526 return 0;
6527
20e26713
AH
 6528 /* If we got invalid arguments, bail out before generating bad rtl. */
6529 if (arg0 == error_mark_node)
9a171fcd 6530 return const0_rtx;
20e26713 6531
0559cc77
DE
6532 if (icode == CODE_FOR_altivec_vspltisb
6533 || icode == CODE_FOR_altivec_vspltish
6534 || icode == CODE_FOR_altivec_vspltisw
6535 || icode == CODE_FOR_spe_evsplatfi
6536 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
6537 {
6538 /* Only allow 5-bit *signed* literals. */
b44140e7 6539 if (GET_CODE (op0) != CONST_INT
afca671b
DP
6540 || INTVAL (op0) > 15
6541 || INTVAL (op0) < -16)
b44140e7
AH
6542 {
6543 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 6544 return const0_rtx;
b44140e7 6545 }
b44140e7
AH
6546 }
6547
c62f2db5 6548 if (target == 0
2212663f
DB
6549 || GET_MODE (target) != tmode
6550 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6551 target = gen_reg_rtx (tmode);
6552
6553 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6554 op0 = copy_to_mode_reg (mode0, op0);
6555
6556 pat = GEN_FCN (icode) (target, op0);
6557 if (! pat)
6558 return 0;
6559 emit_insn (pat);
0ac081f6 6560
2212663f
DB
6561 return target;
6562}
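/* Editorial example (not part of rs6000.c): the splat-immediate check above
   rejects anything outside the signed 5-bit range -16..15 at expansion time.
   Usage sketch, assuming <altivec.h> and -maltivec.  */
#include <altivec.h>

vector signed char
example_splat_ok (void)
{
  return vec_splat_s8 (15);          /* in range; expands to vspltisb  */
}

/* vec_splat_s8 (16) would be rejected with
   "argument 1 must be a 5-bit signed literal".  */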
ae4b4a02 6563
100c4561 6564static rtx
a2369ed3 6565altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
100c4561
AH
6566{
6567 rtx pat, scratch1, scratch2;
6568 tree arg0 = TREE_VALUE (arglist);
6569 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6570 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6571 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6572
6573 /* If we have invalid arguments, bail out before generating bad rtl. */
6574 if (arg0 == error_mark_node)
9a171fcd 6575 return const0_rtx;
100c4561
AH
6576
6577 if (target == 0
6578 || GET_MODE (target) != tmode
6579 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6580 target = gen_reg_rtx (tmode);
6581
6582 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6583 op0 = copy_to_mode_reg (mode0, op0);
6584
6585 scratch1 = gen_reg_rtx (mode0);
6586 scratch2 = gen_reg_rtx (mode0);
6587
6588 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6589 if (! pat)
6590 return 0;
6591 emit_insn (pat);
6592
6593 return target;
6594}
6595
0ac081f6 6596static rtx
a2369ed3 6597rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
0ac081f6
AH
6598{
6599 rtx pat;
6600 tree arg0 = TREE_VALUE (arglist);
6601 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6602 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6603 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6604 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6605 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6606 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6607
0559cc77
DE
6608 if (icode == CODE_FOR_nothing)
6609 /* Builtin not supported on this processor. */
6610 return 0;
6611
20e26713
AH
 6612 /* If we got invalid arguments, bail out before generating bad rtl. */
6613 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6614 return const0_rtx;
20e26713 6615
0559cc77
DE
6616 if (icode == CODE_FOR_altivec_vcfux
6617 || icode == CODE_FOR_altivec_vcfsx
6618 || icode == CODE_FOR_altivec_vctsxs
6619 || icode == CODE_FOR_altivec_vctuxs
6620 || icode == CODE_FOR_altivec_vspltb
6621 || icode == CODE_FOR_altivec_vsplth
6622 || icode == CODE_FOR_altivec_vspltw
6623 || icode == CODE_FOR_spe_evaddiw
6624 || icode == CODE_FOR_spe_evldd
6625 || icode == CODE_FOR_spe_evldh
6626 || icode == CODE_FOR_spe_evldw
6627 || icode == CODE_FOR_spe_evlhhesplat
6628 || icode == CODE_FOR_spe_evlhhossplat
6629 || icode == CODE_FOR_spe_evlhhousplat
6630 || icode == CODE_FOR_spe_evlwhe
6631 || icode == CODE_FOR_spe_evlwhos
6632 || icode == CODE_FOR_spe_evlwhou
6633 || icode == CODE_FOR_spe_evlwhsplat
6634 || icode == CODE_FOR_spe_evlwwsplat
6635 || icode == CODE_FOR_spe_evrlwi
6636 || icode == CODE_FOR_spe_evslwi
6637 || icode == CODE_FOR_spe_evsrwis
f5119d10 6638 || icode == CODE_FOR_spe_evsubifw
0559cc77 6639 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
6640 {
6641 /* Only allow 5-bit unsigned literals. */
8bb418a3 6642 STRIP_NOPS (arg1);
b44140e7
AH
6643 if (TREE_CODE (arg1) != INTEGER_CST
6644 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6645 {
6646 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 6647 return const0_rtx;
b44140e7 6648 }
b44140e7
AH
6649 }
6650
c62f2db5 6651 if (target == 0
0ac081f6
AH
6652 || GET_MODE (target) != tmode
6653 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6654 target = gen_reg_rtx (tmode);
6655
6656 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6657 op0 = copy_to_mode_reg (mode0, op0);
6658 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6659 op1 = copy_to_mode_reg (mode1, op1);
6660
6661 pat = GEN_FCN (icode) (target, op0, op1);
6662 if (! pat)
6663 return 0;
6664 emit_insn (pat);
6665
6666 return target;
6667}
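/* Editorial example (not part of rs6000.c): the icode list above covers the
   binary builtins whose second operand must be a 5-bit unsigned literal --
   splat element numbers, immediate shift and rotate counts, the
   vcfux/vcfsx/vctsxs/vctuxs scale factors and the SPE immediate forms.
   Usage sketch, assuming <altivec.h> and -maltivec.  */
#include <altivec.h>

vector float
example_scale (vector signed int x)
{
  return vec_ctf (x, 31);            /* scale factor 0..31 is accepted        */
}

vector signed int
example_splat (vector signed int x)
{
  return vec_splat (x, 3);           /* element number, also a 5-bit literal  */
}

/* vec_ctf (x, 32) would be rejected with
   "argument 2 must be a 5-bit unsigned literal".  */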
6525c0e7 6668
ae4b4a02 6669static rtx
f676971a 6670altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
a2369ed3 6671 tree arglist, rtx target)
ae4b4a02
AH
6672{
6673 rtx pat, scratch;
6674 tree cr6_form = TREE_VALUE (arglist);
6675 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6676 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6677 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6678 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6679 enum machine_mode tmode = SImode;
6680 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6681 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6682 int cr6_form_int;
6683
6684 if (TREE_CODE (cr6_form) != INTEGER_CST)
6685 {
6686 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 6687 return const0_rtx;
ae4b4a02
AH
6688 }
6689 else
6690 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6691
37409796 6692 gcc_assert (mode0 == mode1);
ae4b4a02
AH
6693
6694 /* If we have invalid arguments, bail out before generating bad rtl. */
6695 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 6696 return const0_rtx;
ae4b4a02
AH
6697
6698 if (target == 0
6699 || GET_MODE (target) != tmode
6700 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6701 target = gen_reg_rtx (tmode);
6702
6703 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6704 op0 = copy_to_mode_reg (mode0, op0);
6705 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6706 op1 = copy_to_mode_reg (mode1, op1);
6707
6708 scratch = gen_reg_rtx (mode0);
6709
6710 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 6711 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
6712 if (! pat)
6713 return 0;
6714 emit_insn (pat);
6715
6716 /* The vec_any* and vec_all* predicates use the same opcodes for two
6717 different operations, but the bits in CR6 will be different
6718 depending on what information we want. So we have to play tricks
6719 with CR6 to get the right bits out.
6720
6721 If you think this is disgusting, look at the specs for the
6722 AltiVec predicates. */
6723
c4ad648e
AM
6724 switch (cr6_form_int)
6725 {
6726 case 0:
6727 emit_insn (gen_cr6_test_for_zero (target));
6728 break;
6729 case 1:
6730 emit_insn (gen_cr6_test_for_zero_reverse (target));
6731 break;
6732 case 2:
6733 emit_insn (gen_cr6_test_for_lt (target));
6734 break;
6735 case 3:
6736 emit_insn (gen_cr6_test_for_lt_reverse (target));
6737 break;
6738 default:
6739 error ("argument 1 of __builtin_altivec_predicate is out of range");
6740 break;
6741 }
ae4b4a02
AH
6742
6743 return target;
6744}
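/* Editorial example (not part of rs6000.c): the cr6_form argument is
   supplied by the vec_all_* and vec_any_* forms; both issue the same
   dot-form compare (vcmpequw. and friends) and differ only in which CR6
   test the switch above emits.  Usage sketch, assuming <altivec.h> and
   -maltivec.  */
#include <altivec.h>

int
example_all_equal (vector unsigned int a, vector unsigned int b)
{
  return vec_all_eq (a, b);   /* vcmpequw. + "all elements true" CR6 test  */
}

int
example_any_gt (vector signed int a, vector signed int b)
{
  return vec_any_gt (a, b);   /* vcmpgtsw. + the complementary CR6 test    */
}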
6745
b4a62fa0 6746static rtx
38f391a5 6747altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
b4a62fa0
SB
6748{
6749 rtx pat, addr;
6750 tree arg0 = TREE_VALUE (arglist);
6751 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6752 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6753 enum machine_mode mode0 = Pmode;
6754 enum machine_mode mode1 = Pmode;
6755 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6756 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6757
6758 if (icode == CODE_FOR_nothing)
6759 /* Builtin not supported on this processor. */
6760 return 0;
6761
 6762 /* If we got invalid arguments, bail out before generating bad rtl. */
6763 if (arg0 == error_mark_node || arg1 == error_mark_node)
6764 return const0_rtx;
6765
6766 if (target == 0
6767 || GET_MODE (target) != tmode
6768 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6769 target = gen_reg_rtx (tmode);
6770
f676971a 6771 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
6772
6773 if (op0 == const0_rtx)
6774 {
6775 addr = gen_rtx_MEM (tmode, op1);
6776 }
6777 else
6778 {
6779 op0 = copy_to_mode_reg (mode0, op0);
6780 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6781 }
6782
6783 pat = GEN_FCN (icode) (target, addr);
6784
6785 if (! pat)
6786 return 0;
6787 emit_insn (pat);
6788
6789 return target;
6790}
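/* Editorial example (not part of rs6000.c): the load expander above builds
   either a (mem reg) or a (mem (plus reg reg)) address from its two
   arguments, matching the lvx addressing modes.  Usage sketch, assuming
   <altivec.h> and -maltivec; the offset-plus-pointer form of vec_ld is
   exactly the op0/op1 pair handled above.  */
#include <altivec.h>

vector float
example_load (const float *p)
{
  vector float lo = vec_ld (0, p);    /* op0 == const0_rtx: plain (mem reg)    */
  vector float hi = vec_ld (16, p);   /* nonzero offset: (mem (plus reg reg))  */
  return vec_add (lo, hi);
}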
6791
61bea3b0
AH
6792static rtx
6793spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6794{
6795 tree arg0 = TREE_VALUE (arglist);
6796 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6797 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6798 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6799 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6800 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6801 rtx pat;
6802 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6803 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6804 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6805
 6806 /* Invalid arguments. Bail out before doing anything stupid! */
6807 if (arg0 == error_mark_node
6808 || arg1 == error_mark_node
6809 || arg2 == error_mark_node)
6810 return const0_rtx;
6811
6812 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6813 op0 = copy_to_mode_reg (mode2, op0);
6814 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6815 op1 = copy_to_mode_reg (mode0, op1);
6816 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6817 op2 = copy_to_mode_reg (mode1, op2);
6818
6819 pat = GEN_FCN (icode) (op1, op2, op0);
6820 if (pat)
6821 emit_insn (pat);
6822 return NULL_RTX;
6823}
6824
6525c0e7 6825static rtx
a2369ed3 6826altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6525c0e7
AH
6827{
6828 tree arg0 = TREE_VALUE (arglist);
6829 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6830 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6831 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6832 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6833 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
b4a62fa0
SB
6834 rtx pat, addr;
6835 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6836 enum machine_mode mode1 = Pmode;
6837 enum machine_mode mode2 = Pmode;
6525c0e7
AH
6838
 6839 /* Invalid arguments. Bail out before doing anything stupid! */
6840 if (arg0 == error_mark_node
6841 || arg1 == error_mark_node
6842 || arg2 == error_mark_node)
9a171fcd 6843 return const0_rtx;
6525c0e7 6844
b4a62fa0
SB
6845 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6846 op0 = copy_to_mode_reg (tmode, op0);
6847
f676971a 6848 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
6849
6850 if (op1 == const0_rtx)
6851 {
6852 addr = gen_rtx_MEM (tmode, op2);
6853 }
6854 else
6855 {
6856 op1 = copy_to_mode_reg (mode1, op1);
6857 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6858 }
6525c0e7 6859
b4a62fa0 6860 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
6861 if (pat)
6862 emit_insn (pat);
6863 return NULL_RTX;
6864}
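/* Editorial example (not part of rs6000.c): the store expander mirrors the
   load case -- the value is forced into a vector-mode register and the last
   two arguments form the reg or reg+reg address.  Usage sketch, assuming
   <altivec.h> and -maltivec.  */
#include <altivec.h>

void
example_store (vector float v, float *p)
{
  vec_st (v, 0, p);     /* address is just the pointer            */
  vec_st (v, 16, p);    /* pointer plus offset, reg+reg address   */
}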
6865
2212663f 6866static rtx
a2369ed3 6867rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
2212663f
DB
6868{
6869 rtx pat;
6870 tree arg0 = TREE_VALUE (arglist);
6871 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6872 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6873 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6874 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6875 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6876 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6877 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6878 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6879 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 6880
774b5662
DE
6881 if (icode == CODE_FOR_nothing)
6882 /* Builtin not supported on this processor. */
6883 return 0;
6884
20e26713
AH
 6885 /* If we got invalid arguments, bail out before generating bad rtl. */
6886 if (arg0 == error_mark_node
6887 || arg1 == error_mark_node
6888 || arg2 == error_mark_node)
9a171fcd 6889 return const0_rtx;
20e26713 6890
aba5fb01
NS
6891 if (icode == CODE_FOR_altivec_vsldoi_v4sf
6892 || icode == CODE_FOR_altivec_vsldoi_v4si
6893 || icode == CODE_FOR_altivec_vsldoi_v8hi
6894 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
6895 {
6896 /* Only allow 4-bit unsigned literals. */
8bb418a3 6897 STRIP_NOPS (arg2);
b44140e7
AH
6898 if (TREE_CODE (arg2) != INTEGER_CST
6899 || TREE_INT_CST_LOW (arg2) & ~0xf)
6900 {
6901 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 6902 return const0_rtx;
b44140e7 6903 }
b44140e7
AH
6904 }
6905
c62f2db5 6906 if (target == 0
2212663f
DB
6907 || GET_MODE (target) != tmode
6908 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6909 target = gen_reg_rtx (tmode);
6910
6911 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6912 op0 = copy_to_mode_reg (mode0, op0);
6913 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6914 op1 = copy_to_mode_reg (mode1, op1);
6915 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6916 op2 = copy_to_mode_reg (mode2, op2);
6917
6918 pat = GEN_FCN (icode) (target, op0, op1, op2);
6919 if (! pat)
6920 return 0;
6921 emit_insn (pat);
6922
6923 return target;
6924}
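/* Editorial example (not part of rs6000.c): for the vsldoi patterns the
   third operand is a byte shift count, hence the 4-bit (0..15) check above.
   Usage sketch, assuming <altivec.h> and -maltivec.  */
#include <altivec.h>

vector unsigned char
example_shift_concat (vector unsigned char a, vector unsigned char b)
{
  return vec_sld (a, b, 4);   /* shift counts 0..15 are accepted  */
}

/* vec_sld (a, b, 16) would be rejected with
   "argument 3 must be a 4-bit unsigned literal".  */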
92898235 6925
3a9b8c7e 6926/* Expand the lvx builtins. */
0ac081f6 6927static rtx
a2369ed3 6928altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 6929{
0ac081f6
AH
6930 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6931 tree arglist = TREE_OPERAND (exp, 1);
0ac081f6 6932 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
6933 tree arg0;
6934 enum machine_mode tmode, mode0;
7c3abc73 6935 rtx pat, op0;
3a9b8c7e 6936 enum insn_code icode;
92898235 6937
0ac081f6
AH
6938 switch (fcode)
6939 {
f18c054f 6940 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 6941 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 6942 break;
f18c054f 6943 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 6944 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
6945 break;
6946 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 6947 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
6948 break;
6949 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 6950 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
6951 break;
6952 default:
6953 *expandedp = false;
6954 return NULL_RTX;
6955 }
0ac081f6 6956
3a9b8c7e 6957 *expandedp = true;
f18c054f 6958
3a9b8c7e
AH
6959 arg0 = TREE_VALUE (arglist);
6960 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6961 tmode = insn_data[icode].operand[0].mode;
6962 mode0 = insn_data[icode].operand[1].mode;
f18c054f 6963
3a9b8c7e
AH
6964 if (target == 0
6965 || GET_MODE (target) != tmode
6966 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6967 target = gen_reg_rtx (tmode);
24408032 6968
3a9b8c7e
AH
6969 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6970 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 6971
3a9b8c7e
AH
6972 pat = GEN_FCN (icode) (target, op0);
6973 if (! pat)
6974 return 0;
6975 emit_insn (pat);
6976 return target;
6977}
f18c054f 6978
3a9b8c7e
AH
6979/* Expand the stvx builtins. */
6980static rtx
f676971a 6981altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 6982 bool *expandedp)
3a9b8c7e
AH
6983{
6984 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6985 tree arglist = TREE_OPERAND (exp, 1);
6986 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6987 tree arg0, arg1;
6988 enum machine_mode mode0, mode1;
7c3abc73 6989 rtx pat, op0, op1;
3a9b8c7e 6990 enum insn_code icode;
f18c054f 6991
3a9b8c7e
AH
6992 switch (fcode)
6993 {
6994 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 6995 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
6996 break;
6997 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 6998 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
6999 break;
7000 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7001 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7002 break;
7003 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7004 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7005 break;
7006 default:
7007 *expandedp = false;
7008 return NULL_RTX;
7009 }
24408032 7010
3a9b8c7e
AH
7011 arg0 = TREE_VALUE (arglist);
7012 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7013 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7014 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7015 mode0 = insn_data[icode].operand[0].mode;
7016 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7017
3a9b8c7e
AH
7018 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7019 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7020 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7021 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7022
3a9b8c7e
AH
7023 pat = GEN_FCN (icode) (op0, op1);
7024 if (pat)
7025 emit_insn (pat);
f18c054f 7026
3a9b8c7e
AH
7027 *expandedp = true;
7028 return NULL_RTX;
7029}
f18c054f 7030
3a9b8c7e
AH
7031/* Expand the dst builtins. */
7032static rtx
f676971a 7033altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7034 bool *expandedp)
3a9b8c7e
AH
7035{
7036 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7037 tree arglist = TREE_OPERAND (exp, 1);
7038 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7039 tree arg0, arg1, arg2;
7040 enum machine_mode mode0, mode1, mode2;
7c3abc73 7041 rtx pat, op0, op1, op2;
3a9b8c7e 7042 struct builtin_description *d;
a3170dc6 7043 size_t i;
f18c054f 7044
3a9b8c7e 7045 *expandedp = false;
f18c054f 7046
3a9b8c7e
AH
7047 /* Handle DST variants. */
7048 d = (struct builtin_description *) bdesc_dst;
7049 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7050 if (d->code == fcode)
7051 {
7052 arg0 = TREE_VALUE (arglist);
7053 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7054 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7055 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7056 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7057 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7058 mode0 = insn_data[d->icode].operand[0].mode;
7059 mode1 = insn_data[d->icode].operand[1].mode;
7060 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7061
3a9b8c7e
AH
7062 /* Invalid arguments, bail out before generating bad rtl. */
7063 if (arg0 == error_mark_node
7064 || arg1 == error_mark_node
7065 || arg2 == error_mark_node)
7066 return const0_rtx;
f18c054f 7067
86e7df90 7068 *expandedp = true;
8bb418a3 7069 STRIP_NOPS (arg2);
3a9b8c7e
AH
7070 if (TREE_CODE (arg2) != INTEGER_CST
7071 || TREE_INT_CST_LOW (arg2) & ~0x3)
7072 {
9e637a26 7073 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7074 return const0_rtx;
7075 }
f18c054f 7076
3a9b8c7e 7077 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7078 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7079 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7080 op1 = copy_to_mode_reg (mode1, op1);
24408032 7081
3a9b8c7e
AH
7082 pat = GEN_FCN (d->icode) (op0, op1, op2);
7083 if (pat != 0)
7084 emit_insn (pat);
f18c054f 7085
3a9b8c7e
AH
7086 return NULL_RTX;
7087 }
f18c054f 7088
3a9b8c7e
AH
7089 return NULL_RTX;
7090}
24408032 7091
7a4eca66
DE
7092/* Expand vec_init builtin. */
7093static rtx
7094altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
7095{
7096 enum machine_mode tmode = TYPE_MODE (type);
7097 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7098 int i, n_elt = GET_MODE_NUNITS (tmode);
7099 rtvec v = rtvec_alloc (n_elt);
7100
7101 gcc_assert (VECTOR_MODE_P (tmode));
7102
7103 for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
7104 {
7105 rtx x = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7106 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7107 }
7108
7109 gcc_assert (arglist == NULL);
7110
7111 if (!target || !register_operand (target, tmode))
7112 target = gen_reg_rtx (tmode);
7113
7114 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7115 return target;
7116}
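/* Illustrative sketch, not from the original source: a user-level call
   such as

     v = __builtin_vec_init_v4si (1, 2, 3, 4);

   (the builtin registered further below in altivec_init_builtins) arrives
   here with a four-element ARGLIST; each argument is expanded, converted
   to the vector's element mode with gen_lowpart, and the resulting
   PARALLEL is handed to rs6000_expand_vector_init.  */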
7117
7118/* Return the integer constant in ARG. Constrain it to be in the range
7119 of the subparts of VEC_TYPE; issue an error if not. */
7120
7121static int
7122get_element_number (tree vec_type, tree arg)
7123{
7124 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7125
7126 if (!host_integerp (arg, 1)
7127 || (elt = tree_low_cst (arg, 1), elt > max))
7128 {
7129 error ("selector must be an integer constant in the range 0..%wi", max);
7130 return 0;
7131 }
7132
7133 return elt;
7134}
7135
7136/* Expand vec_set builtin. */
7137static rtx
7138altivec_expand_vec_set_builtin (tree arglist)
7139{
7140 enum machine_mode tmode, mode1;
7141 tree arg0, arg1, arg2;
7142 int elt;
7143 rtx op0, op1;
7144
7145 arg0 = TREE_VALUE (arglist);
7146 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7147 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7148
7149 tmode = TYPE_MODE (TREE_TYPE (arg0));
7150 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7151 gcc_assert (VECTOR_MODE_P (tmode));
7152
7153 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7154 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7155 elt = get_element_number (TREE_TYPE (arg0), arg2);
7156
7157 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7158 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7159
7160 op0 = force_reg (tmode, op0);
7161 op1 = force_reg (mode1, op1);
7162
7163 rs6000_expand_vector_set (op0, op1, elt);
7164
7165 return op0;
7166}
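/* Illustrative sketch, not from the original source: a call such as

     v = __builtin_vec_set_v4si (v, 42, 3);

   (registered below under MASK_ALTIVEC) lands here; the element value is
   converted to the element mode if needed, both operands are forced into
   registers, and rs6000_expand_vector_set rewrites element 3 in place, so
   the updated vector register (op0) is returned as the result.  */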
7167
7168/* Expand vec_ext builtin. */
7169static rtx
7170altivec_expand_vec_ext_builtin (tree arglist, rtx target)
7171{
7172 enum machine_mode tmode, mode0;
7173 tree arg0, arg1;
7174 int elt;
7175 rtx op0;
7176
7177 arg0 = TREE_VALUE (arglist);
7178 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7179
7180 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7181 elt = get_element_number (TREE_TYPE (arg0), arg1);
7182
7183 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7184 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7185 gcc_assert (VECTOR_MODE_P (mode0));
7186
7187 op0 = force_reg (mode0, op0);
7188
7189 if (optimize || !target || !register_operand (target, tmode))
7190 target = gen_reg_rtx (tmode);
7191
7192 rs6000_expand_vector_extract (target, op0, elt);
7193
7194 return target;
7195}
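/* Illustrative sketch, not from the original source: a call such as

     int x = __builtin_vec_ext_v4si (v, 2);

   is expanded here; get_element_number rejects any selector that is not
   an integer literal in the range 0..3 for V4SI, and
   rs6000_expand_vector_extract copies the chosen element into TARGET.  */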
7196
3a9b8c7e
AH
7197/* Expand the builtin in EXP and store the result in TARGET. Store
7198 true in *EXPANDEDP if we found a builtin to expand. */
7199static rtx
a2369ed3 7200altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e
AH
7201{
7202 struct builtin_description *d;
7203 struct builtin_description_predicates *dp;
7204 size_t i;
7205 enum insn_code icode;
7206 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7207 tree arglist = TREE_OPERAND (exp, 1);
7c3abc73
AH
7208 tree arg0;
7209 rtx op0, pat;
7210 enum machine_mode tmode, mode0;
3a9b8c7e 7211 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7212
58646b77
PB
7213 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7214 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7215 {
7216 *expandedp = true;
ea40ba9c 7217 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7218 return const0_rtx;
7219 }
7220
3a9b8c7e
AH
7221 target = altivec_expand_ld_builtin (exp, target, expandedp);
7222 if (*expandedp)
7223 return target;
0ac081f6 7224
3a9b8c7e
AH
7225 target = altivec_expand_st_builtin (exp, target, expandedp);
7226 if (*expandedp)
7227 return target;
7228
7229 target = altivec_expand_dst_builtin (exp, target, expandedp);
7230 if (*expandedp)
7231 return target;
7232
7233 *expandedp = true;
95385cbb 7234
3a9b8c7e
AH
7235 switch (fcode)
7236 {
6525c0e7
AH
7237 case ALTIVEC_BUILTIN_STVX:
7238 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
7239 case ALTIVEC_BUILTIN_STVEBX:
7240 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
7241 case ALTIVEC_BUILTIN_STVEHX:
7242 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
7243 case ALTIVEC_BUILTIN_STVEWX:
7244 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
7245 case ALTIVEC_BUILTIN_STVXL:
7246 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3a9b8c7e 7247
95385cbb
AH
7248 case ALTIVEC_BUILTIN_MFVSCR:
7249 icode = CODE_FOR_altivec_mfvscr;
7250 tmode = insn_data[icode].operand[0].mode;
7251
7252 if (target == 0
7253 || GET_MODE (target) != tmode
7254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7255 target = gen_reg_rtx (tmode);
f676971a 7256
95385cbb 7257 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7258 if (! pat)
7259 return 0;
7260 emit_insn (pat);
95385cbb
AH
7261 return target;
7262
7263 case ALTIVEC_BUILTIN_MTVSCR:
7264 icode = CODE_FOR_altivec_mtvscr;
7265 arg0 = TREE_VALUE (arglist);
7266 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7267 mode0 = insn_data[icode].operand[0].mode;
7268
 7269 /* If we got invalid arguments, bail out before generating bad rtl. */
7270 if (arg0 == error_mark_node)
9a171fcd 7271 return const0_rtx;
95385cbb
AH
7272
7273 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7274 op0 = copy_to_mode_reg (mode0, op0);
7275
7276 pat = GEN_FCN (icode) (op0);
7277 if (pat)
7278 emit_insn (pat);
7279 return NULL_RTX;
3a9b8c7e 7280
95385cbb
AH
7281 case ALTIVEC_BUILTIN_DSSALL:
7282 emit_insn (gen_altivec_dssall ());
7283 return NULL_RTX;
7284
7285 case ALTIVEC_BUILTIN_DSS:
7286 icode = CODE_FOR_altivec_dss;
7287 arg0 = TREE_VALUE (arglist);
8bb418a3 7288 STRIP_NOPS (arg0);
95385cbb
AH
7289 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7290 mode0 = insn_data[icode].operand[0].mode;
7291
 7292 /* If we got invalid arguments, bail out before generating bad rtl. */
7293 if (arg0 == error_mark_node)
9a171fcd 7294 return const0_rtx;
95385cbb 7295
b44140e7
AH
7296 if (TREE_CODE (arg0) != INTEGER_CST
7297 || TREE_INT_CST_LOW (arg0) & ~0x3)
7298 {
7299 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7300 return const0_rtx;
b44140e7
AH
7301 }
7302
95385cbb
AH
7303 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7304 op0 = copy_to_mode_reg (mode0, op0);
7305
7306 emit_insn (gen_altivec_dss (op0));
0ac081f6 7307 return NULL_RTX;
7a4eca66
DE
7308
7309 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7310 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7311 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7312 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
7313 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);
7314
7315 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7316 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7317 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7318 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
7319 return altivec_expand_vec_set_builtin (arglist);
7320
7321 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7322 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7323 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7324 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
7325 return altivec_expand_vec_ext_builtin (arglist, target);
7326
7327 default:
7328 break;
 7329 /* Fall through to the abs, predicate and load expanders below. */
0ac081f6 7330 }
24408032 7331
100c4561
AH
7332 /* Expand abs* operations. */
7333 d = (struct builtin_description *) bdesc_abs;
ca7558fc 7334 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561
AH
7335 if (d->code == fcode)
7336 return altivec_expand_abs_builtin (d->icode, arglist, target);
7337
ae4b4a02
AH
7338 /* Expand the AltiVec predicates. */
7339 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
ca7558fc 7340 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 7341 if (dp->code == fcode)
c4ad648e
AM
7342 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
7343 arglist, target);
ae4b4a02 7344
6525c0e7
AH
 7345 /* LV* are funky.  We initialize them differently. */
7346 switch (fcode)
7347 {
7348 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 7349 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
c4ad648e 7350 arglist, target);
6525c0e7 7351 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 7352 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
c4ad648e 7353 arglist, target);
6525c0e7 7354 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 7355 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
c4ad648e 7356 arglist, target);
6525c0e7 7357 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 7358 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
c4ad648e 7359 arglist, target);
6525c0e7 7360 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 7361 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
c4ad648e 7362 arglist, target);
6525c0e7 7363 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 7364 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
c4ad648e 7365 arglist, target);
6525c0e7 7366 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 7367 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
c4ad648e 7368 arglist, target);
6525c0e7
AH
7369 default:
7370 break;
 7371 /* Not handled here; fall through and report that no builtin was expanded. */
7372 }
95385cbb 7373
92898235 7374 *expandedp = false;
0ac081f6
AH
7375 return NULL_RTX;
7376}
7377
a3170dc6
AH
7378/* Binops that need to be initialized manually, but can be expanded
7379 automagically by rs6000_expand_binop_builtin. */
7380static struct builtin_description bdesc_2arg_spe[] =
7381{
7382 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7383 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7384 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7385 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7386 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7387 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7388 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7389 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7390 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7391 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7392 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7393 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7394 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7395 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7396 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7397 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7398 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7399 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7400 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7401 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7402 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7403 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7404};
7405
7406/* Expand the builtin in EXP and store the result in TARGET. Store
7407 true in *EXPANDEDP if we found a builtin to expand.
7408
7409 This expands the SPE builtins that are not simple unary and binary
7410 operations. */
7411static rtx
a2369ed3 7412spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6
AH
7413{
7414 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7415 tree arglist = TREE_OPERAND (exp, 1);
7416 tree arg1, arg0;
7417 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7418 enum insn_code icode;
7419 enum machine_mode tmode, mode0;
7420 rtx pat, op0;
7421 struct builtin_description *d;
7422 size_t i;
7423
7424 *expandedp = true;
7425
7426 /* Syntax check for a 5-bit unsigned immediate. */
7427 switch (fcode)
7428 {
7429 case SPE_BUILTIN_EVSTDD:
7430 case SPE_BUILTIN_EVSTDH:
7431 case SPE_BUILTIN_EVSTDW:
7432 case SPE_BUILTIN_EVSTWHE:
7433 case SPE_BUILTIN_EVSTWHO:
7434 case SPE_BUILTIN_EVSTWWE:
7435 case SPE_BUILTIN_EVSTWWO:
7436 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7437 if (TREE_CODE (arg1) != INTEGER_CST
7438 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7439 {
7440 error ("argument 2 must be a 5-bit unsigned literal");
7441 return const0_rtx;
7442 }
7443 break;
7444 default:
7445 break;
7446 }
7447
00332c9f
AH
7448 /* The evsplat*i instructions are not quite generic. */
7449 switch (fcode)
7450 {
7451 case SPE_BUILTIN_EVSPLATFI:
7452 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
7453 arglist, target);
7454 case SPE_BUILTIN_EVSPLATI:
7455 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
7456 arglist, target);
7457 default:
7458 break;
7459 }
7460
a3170dc6
AH
7461 d = (struct builtin_description *) bdesc_2arg_spe;
7462 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7463 if (d->code == fcode)
7464 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7465
7466 d = (struct builtin_description *) bdesc_spe_predicates;
7467 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7468 if (d->code == fcode)
7469 return spe_expand_predicate_builtin (d->icode, arglist, target);
7470
7471 d = (struct builtin_description *) bdesc_spe_evsel;
7472 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7473 if (d->code == fcode)
7474 return spe_expand_evsel_builtin (d->icode, arglist, target);
7475
7476 switch (fcode)
7477 {
7478 case SPE_BUILTIN_EVSTDDX:
61bea3b0 7479 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
a3170dc6 7480 case SPE_BUILTIN_EVSTDHX:
61bea3b0 7481 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
a3170dc6 7482 case SPE_BUILTIN_EVSTDWX:
61bea3b0 7483 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
a3170dc6 7484 case SPE_BUILTIN_EVSTWHEX:
61bea3b0 7485 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
a3170dc6 7486 case SPE_BUILTIN_EVSTWHOX:
61bea3b0 7487 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
a3170dc6 7488 case SPE_BUILTIN_EVSTWWEX:
61bea3b0 7489 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
a3170dc6 7490 case SPE_BUILTIN_EVSTWWOX:
61bea3b0 7491 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
a3170dc6 7492 case SPE_BUILTIN_EVSTDD:
61bea3b0 7493 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
a3170dc6 7494 case SPE_BUILTIN_EVSTDH:
61bea3b0 7495 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
a3170dc6 7496 case SPE_BUILTIN_EVSTDW:
61bea3b0 7497 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
a3170dc6 7498 case SPE_BUILTIN_EVSTWHE:
61bea3b0 7499 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
a3170dc6 7500 case SPE_BUILTIN_EVSTWHO:
61bea3b0 7501 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
a3170dc6 7502 case SPE_BUILTIN_EVSTWWE:
61bea3b0 7503 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
a3170dc6 7504 case SPE_BUILTIN_EVSTWWO:
61bea3b0 7505 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
a3170dc6
AH
7506 case SPE_BUILTIN_MFSPEFSCR:
7507 icode = CODE_FOR_spe_mfspefscr;
7508 tmode = insn_data[icode].operand[0].mode;
7509
7510 if (target == 0
7511 || GET_MODE (target) != tmode
7512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7513 target = gen_reg_rtx (tmode);
f676971a 7514
a3170dc6
AH
7515 pat = GEN_FCN (icode) (target);
7516 if (! pat)
7517 return 0;
7518 emit_insn (pat);
7519 return target;
7520 case SPE_BUILTIN_MTSPEFSCR:
7521 icode = CODE_FOR_spe_mtspefscr;
7522 arg0 = TREE_VALUE (arglist);
7523 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7524 mode0 = insn_data[icode].operand[0].mode;
7525
7526 if (arg0 == error_mark_node)
7527 return const0_rtx;
7528
7529 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7530 op0 = copy_to_mode_reg (mode0, op0);
7531
7532 pat = GEN_FCN (icode) (op0);
7533 if (pat)
7534 emit_insn (pat);
7535 return NULL_RTX;
7536 default:
7537 break;
7538 }
7539
7540 *expandedp = false;
7541 return NULL_RTX;
7542}
7543
7544static rtx
a2369ed3 7545spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7546{
7547 rtx pat, scratch, tmp;
7548 tree form = TREE_VALUE (arglist);
7549 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
7550 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7551 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7552 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7553 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7554 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7555 int form_int;
7556 enum rtx_code code;
7557
7558 if (TREE_CODE (form) != INTEGER_CST)
7559 {
7560 error ("argument 1 of __builtin_spe_predicate must be a constant");
7561 return const0_rtx;
7562 }
7563 else
7564 form_int = TREE_INT_CST_LOW (form);
7565
37409796 7566 gcc_assert (mode0 == mode1);
a3170dc6
AH
7567
7568 if (arg0 == error_mark_node || arg1 == error_mark_node)
7569 return const0_rtx;
7570
7571 if (target == 0
7572 || GET_MODE (target) != SImode
7573 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7574 target = gen_reg_rtx (SImode);
7575
7576 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7577 op0 = copy_to_mode_reg (mode0, op0);
7578 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7579 op1 = copy_to_mode_reg (mode1, op1);
7580
7581 scratch = gen_reg_rtx (CCmode);
7582
7583 pat = GEN_FCN (icode) (scratch, op0, op1);
7584 if (! pat)
7585 return const0_rtx;
7586 emit_insn (pat);
7587
7588 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7589 _lower_. We use one compare, but look in different bits of the
7590 CR for each variant.
7591
7592 There are 2 elements in each SPE simd type (upper/lower). The CR
7593 bits are set as follows:
7594
7595 BIT0 | BIT 1 | BIT 2 | BIT 3
7596 U | L | (U | L) | (U & L)
7597
7598 So, for an "all" relationship, BIT 3 would be set.
7599 For an "any" relationship, BIT 2 would be set. Etc.
7600
7601 Following traditional nomenclature, these bits map to:
7602
7603 BIT0 | BIT 1 | BIT 2 | BIT 3
7604 LT | GT | EQ | OV
7605
 7606 Later, we will generate rtl to look in the OV (all), EQ (any), LT (upper) or GT (lower) bit.
7607 */
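/* Worked example (illustrative, not from the original source): for the
   "any" form (form_int == 1) the code below emits one vector compare into
   the CCmode scratch register and then materializes BIT 2 -- the EQ bit,
   i.e. (U | L) -- by moving (eq:SI (reg:CC scratch) (const_int 0)) into
   TARGET.  */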
7608
7609 switch (form_int)
7610 {
7611 /* All variant. OV bit. */
7612 case 0:
7613 /* We need to get to the OV bit, which is the ORDERED bit. We
7614 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 7615 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
7616 So let's just use another pattern. */
7617 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7618 return target;
7619 /* Any variant. EQ bit. */
7620 case 1:
7621 code = EQ;
7622 break;
7623 /* Upper variant. LT bit. */
7624 case 2:
7625 code = LT;
7626 break;
7627 /* Lower variant. GT bit. */
7628 case 3:
7629 code = GT;
7630 break;
7631 default:
7632 error ("argument 1 of __builtin_spe_predicate is out of range");
7633 return const0_rtx;
7634 }
7635
7636 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7637 emit_move_insn (target, tmp);
7638
7639 return target;
7640}
7641
7642/* The evsel builtins look like this:
7643
7644 e = __builtin_spe_evsel_OP (a, b, c, d);
7645
7646 and work like this:
7647
7648 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7649 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7650*/
7651
7652static rtx
a2369ed3 7653spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
a3170dc6
AH
7654{
7655 rtx pat, scratch;
7656 tree arg0 = TREE_VALUE (arglist);
7657 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7658 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7659 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7660 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7661 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7662 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7663 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7664 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7665 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7666
37409796 7667 gcc_assert (mode0 == mode1);
a3170dc6
AH
7668
7669 if (arg0 == error_mark_node || arg1 == error_mark_node
7670 || arg2 == error_mark_node || arg3 == error_mark_node)
7671 return const0_rtx;
7672
7673 if (target == 0
7674 || GET_MODE (target) != mode0
7675 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7676 target = gen_reg_rtx (mode0);
7677
7678 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7679 op0 = copy_to_mode_reg (mode0, op0);
7680 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7681 op1 = copy_to_mode_reg (mode0, op1);
7682 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7683 op2 = copy_to_mode_reg (mode0, op2);
7684 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7685 op3 = copy_to_mode_reg (mode0, op3);
7686
7687 /* Generate the compare. */
7688 scratch = gen_reg_rtx (CCmode);
7689 pat = GEN_FCN (icode) (scratch, op0, op1);
7690 if (! pat)
7691 return const0_rtx;
7692 emit_insn (pat);
7693
7694 if (mode0 == V2SImode)
7695 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7696 else
7697 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7698
7699 return target;
7700}
7701
0ac081f6
AH
7702/* Expand an expression EXP that calls a built-in function,
7703 with result going to TARGET if that's convenient
7704 (and in mode MODE if that's convenient).
7705 SUBTARGET may be used as the target for computing one of EXP's operands.
7706 IGNORE is nonzero if the value is to be ignored. */
7707
7708static rtx
a2369ed3 7709rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
7710 enum machine_mode mode ATTRIBUTE_UNUSED,
7711 int ignore ATTRIBUTE_UNUSED)
0ac081f6 7712{
92898235
AH
7713 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7714 tree arglist = TREE_OPERAND (exp, 1);
7715 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7716 struct builtin_description *d;
7717 size_t i;
7718 rtx ret;
7719 bool success;
f676971a 7720
7ccf35ed
DN
7721 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
7722 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7723 {
7724 int icode = (int) CODE_FOR_altivec_lvsr;
7725 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7726 enum machine_mode mode = insn_data[icode].operand[1].mode;
7727 tree arg;
7728 rtx op, addr, pat;
7729
37409796 7730 gcc_assert (TARGET_ALTIVEC);
7ccf35ed
DN
7731
7732 arg = TREE_VALUE (arglist);
37409796 7733 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
7734 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
7735 addr = memory_address (mode, op);
7736 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
7737 op = addr;
7738 else
7739 {
 7740 /* For the load case we need to negate the address. */
7741 op = gen_reg_rtx (GET_MODE (addr));
7742 emit_insn (gen_rtx_SET (VOIDmode, op,
7743 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 7744 }
7ccf35ed
DN
7745 op = gen_rtx_MEM (mode, op);
7746
7747 if (target == 0
7748 || GET_MODE (target) != tmode
7749 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7750 target = gen_reg_rtx (tmode);
7751
7752 /*pat = gen_altivec_lvsr (target, op);*/
7753 pat = GEN_FCN (icode) (target, op);
7754 if (!pat)
7755 return 0;
7756 emit_insn (pat);
7757
7758 return target;
7759 }
7760
0ac081f6 7761 if (TARGET_ALTIVEC)
92898235
AH
7762 {
7763 ret = altivec_expand_builtin (exp, target, &success);
7764
a3170dc6
AH
7765 if (success)
7766 return ret;
7767 }
7768 if (TARGET_SPE)
7769 {
7770 ret = spe_expand_builtin (exp, target, &success);
7771
92898235
AH
7772 if (success)
7773 return ret;
7774 }
7775
37409796 7776 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 7777
37409796
NS
7778 /* Handle simple unary operations. */
7779 d = (struct builtin_description *) bdesc_1arg;
7780 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7781 if (d->code == fcode)
7782 return rs6000_expand_unop_builtin (d->icode, arglist, target);
bb8df8a6 7783
37409796
NS
7784 /* Handle simple binary operations. */
7785 d = (struct builtin_description *) bdesc_2arg;
7786 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7787 if (d->code == fcode)
7788 return rs6000_expand_binop_builtin (d->icode, arglist, target);
0ac081f6 7789
37409796
NS
7790 /* Handle simple ternary operations. */
7791 d = (struct builtin_description *) bdesc_3arg;
7792 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7793 if (d->code == fcode)
7794 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
bb8df8a6 7795
37409796 7796 gcc_unreachable ();
0ac081f6
AH
7797}
7798
7c62e993
PB
7799static tree
7800build_opaque_vector_type (tree node, int nunits)
7801{
7802 node = copy_node (node);
7803 TYPE_MAIN_VARIANT (node) = node;
7804 return build_vector_type (node, nunits);
7805}
7806
0ac081f6 7807static void
863d938c 7808rs6000_init_builtins (void)
0ac081f6 7809{
4a5eab38
PB
7810 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7811 V2SF_type_node = build_vector_type (float_type_node, 2);
7812 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7813 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7814 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 7815 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
7816 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7817
7818 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7819 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7820 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
7821
7c62e993
PB
7822 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7823 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 7824 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 7825 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 7826
8bb418a3
ZL
7827 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7828 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7829 'vector unsigned short'. */
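/* Illustrative sketch (an assumption, not from the original source): the
   distinct type copies let C++ overloads such as

     void f (__vector __bool char);
     void f (__vector unsigned char);

   denote two different functions; if bool_char_type_node merely aliased
   unsigned_intQI_type_node, the two declarations would collide.  */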
7830
8dd16ecc
NS
7831 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
7832 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
7833 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
7834 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 7835
58646b77
PB
7836 long_integer_type_internal_node = long_integer_type_node;
7837 long_unsigned_type_internal_node = long_unsigned_type_node;
7838 intQI_type_internal_node = intQI_type_node;
7839 uintQI_type_internal_node = unsigned_intQI_type_node;
7840 intHI_type_internal_node = intHI_type_node;
7841 uintHI_type_internal_node = unsigned_intHI_type_node;
7842 intSI_type_internal_node = intSI_type_node;
7843 uintSI_type_internal_node = unsigned_intSI_type_node;
7844 float_type_internal_node = float_type_node;
7845 void_type_internal_node = void_type_node;
7846
8bb418a3
ZL
7847 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7848 get_identifier ("__bool char"),
7849 bool_char_type_node));
7850 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7851 get_identifier ("__bool short"),
7852 bool_short_type_node));
7853 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7854 get_identifier ("__bool int"),
7855 bool_int_type_node));
7856 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7857 get_identifier ("__pixel"),
7858 pixel_type_node));
7859
4a5eab38
PB
7860 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7861 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7862 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7863 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
7864
7865 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7866 get_identifier ("__vector unsigned char"),
7867 unsigned_V16QI_type_node));
7868 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7869 get_identifier ("__vector signed char"),
7870 V16QI_type_node));
7871 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7872 get_identifier ("__vector __bool char"),
7873 bool_V16QI_type_node));
7874
7875 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7876 get_identifier ("__vector unsigned short"),
7877 unsigned_V8HI_type_node));
7878 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7879 get_identifier ("__vector signed short"),
7880 V8HI_type_node));
7881 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7882 get_identifier ("__vector __bool short"),
7883 bool_V8HI_type_node));
7884
7885 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7886 get_identifier ("__vector unsigned int"),
7887 unsigned_V4SI_type_node));
7888 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7889 get_identifier ("__vector signed int"),
7890 V4SI_type_node));
7891 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7892 get_identifier ("__vector __bool int"),
7893 bool_V4SI_type_node));
7894
7895 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7896 get_identifier ("__vector float"),
7897 V4SF_type_node));
7898 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7899 get_identifier ("__vector __pixel"),
7900 pixel_V8HI_type_node));
7901
a3170dc6 7902 if (TARGET_SPE)
3fdaa45a 7903 spe_init_builtins ();
0ac081f6
AH
7904 if (TARGET_ALTIVEC)
7905 altivec_init_builtins ();
0559cc77
DE
7906 if (TARGET_ALTIVEC || TARGET_SPE)
7907 rs6000_common_init_builtins ();
0ac081f6
AH
7908}
7909
a3170dc6
AH
7910/* Search through a set of builtins and enable the mask bits.
7911 DESC is an array of builtins.
b6d08ca1 7912 SIZE is the total number of builtins.
a3170dc6
AH
7913 START is the builtin enum at which to start.
7914 END is the builtin enum at which to end. */
0ac081f6 7915static void
a2369ed3 7916enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 7917 enum rs6000_builtins start,
a2369ed3 7918 enum rs6000_builtins end)
a3170dc6
AH
7919{
7920 int i;
7921
7922 for (i = 0; i < size; ++i)
7923 if (desc[i].code == start)
7924 break;
7925
7926 if (i == size)
7927 return;
7928
7929 for (; i < size; ++i)
7930 {
7931 /* Flip all the bits on. */
7932 desc[i].mask = target_flags;
7933 if (desc[i].code == end)
7934 break;
7935 }
7936}
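/* Usage sketch (illustrative; it mirrors the calls made in
   spe_init_builtins below):

     enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
                               ARRAY_SIZE (bdesc_2arg),
                               SPE_BUILTIN_EVADDW, SPE_BUILTIN_EVXOR);

   Note that the loop above assumes the builtins from START to END form a
   contiguous, in-order run within DESC: it scans forward for START and
   stops flipping mask bits as soon as END is seen.  */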
7937
7938static void
863d938c 7939spe_init_builtins (void)
0ac081f6 7940{
a3170dc6
AH
7941 tree endlink = void_list_node;
7942 tree puint_type_node = build_pointer_type (unsigned_type_node);
7943 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 7944 struct builtin_description *d;
0ac081f6
AH
7945 size_t i;
7946
a3170dc6
AH
7947 tree v2si_ftype_4_v2si
7948 = build_function_type
3fdaa45a
AH
7949 (opaque_V2SI_type_node,
7950 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7951 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7952 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7953 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7954 endlink)))));
7955
7956 tree v2sf_ftype_4_v2sf
7957 = build_function_type
3fdaa45a
AH
7958 (opaque_V2SF_type_node,
7959 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7960 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7961 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7962 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
7963 endlink)))));
7964
7965 tree int_ftype_int_v2si_v2si
7966 = build_function_type
7967 (integer_type_node,
7968 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
7969 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7970 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7971 endlink))));
7972
7973 tree int_ftype_int_v2sf_v2sf
7974 = build_function_type
7975 (integer_type_node,
7976 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
7977 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7978 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
7979 endlink))));
7980
7981 tree void_ftype_v2si_puint_int
7982 = build_function_type (void_type_node,
3fdaa45a 7983 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7984 tree_cons (NULL_TREE, puint_type_node,
7985 tree_cons (NULL_TREE,
7986 integer_type_node,
7987 endlink))));
7988
7989 tree void_ftype_v2si_puint_char
7990 = build_function_type (void_type_node,
3fdaa45a 7991 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
7992 tree_cons (NULL_TREE, puint_type_node,
7993 tree_cons (NULL_TREE,
7994 char_type_node,
7995 endlink))));
7996
7997 tree void_ftype_v2si_pv2si_int
7998 = build_function_type (void_type_node,
3fdaa45a 7999 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8000 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8001 tree_cons (NULL_TREE,
8002 integer_type_node,
8003 endlink))));
8004
8005 tree void_ftype_v2si_pv2si_char
8006 = build_function_type (void_type_node,
3fdaa45a 8007 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8008 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8009 tree_cons (NULL_TREE,
8010 char_type_node,
8011 endlink))));
8012
8013 tree void_ftype_int
8014 = build_function_type (void_type_node,
8015 tree_cons (NULL_TREE, integer_type_node, endlink));
8016
8017 tree int_ftype_void
36e8d515 8018 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8019
8020 tree v2si_ftype_pv2si_int
3fdaa45a 8021 = build_function_type (opaque_V2SI_type_node,
6035d635 8022 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8023 tree_cons (NULL_TREE, integer_type_node,
8024 endlink)));
8025
8026 tree v2si_ftype_puint_int
3fdaa45a 8027 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8028 tree_cons (NULL_TREE, puint_type_node,
8029 tree_cons (NULL_TREE, integer_type_node,
8030 endlink)));
8031
8032 tree v2si_ftype_pushort_int
3fdaa45a 8033 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8034 tree_cons (NULL_TREE, pushort_type_node,
8035 tree_cons (NULL_TREE, integer_type_node,
8036 endlink)));
8037
00332c9f
AH
8038 tree v2si_ftype_signed_char
8039 = build_function_type (opaque_V2SI_type_node,
8040 tree_cons (NULL_TREE, signed_char_type_node,
8041 endlink));
8042
a3170dc6
AH
8043 /* The initialization of the simple binary and unary builtins is
8044 done in rs6000_common_init_builtins, but we have to enable the
8045 mask bits here manually because we have run out of `target_flags'
8046 bits. We really need to redesign this mask business. */
8047
8048 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8049 ARRAY_SIZE (bdesc_2arg),
8050 SPE_BUILTIN_EVADDW,
8051 SPE_BUILTIN_EVXOR);
8052 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8053 ARRAY_SIZE (bdesc_1arg),
8054 SPE_BUILTIN_EVABS,
8055 SPE_BUILTIN_EVSUBFUSIAAW);
8056 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8057 ARRAY_SIZE (bdesc_spe_predicates),
8058 SPE_BUILTIN_EVCMPEQ,
8059 SPE_BUILTIN_EVFSTSTLT);
8060 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8061 ARRAY_SIZE (bdesc_spe_evsel),
8062 SPE_BUILTIN_EVSEL_CMPGTS,
8063 SPE_BUILTIN_EVSEL_FSTSTEQ);
8064
36252949
AH
8065 (*lang_hooks.decls.pushdecl)
8066 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8067 opaque_V2SI_type_node));
8068
a3170dc6 8069 /* Initialize irregular SPE builtins. */
f676971a 8070
a3170dc6
AH
8071 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8072 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8073 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8074 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8075 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8076 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8077 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8078 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8079 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8080 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8081 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8082 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8083 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8084 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8085 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8086 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8087 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8088 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8089
8090 /* Loads. */
8091 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8092 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8093 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8094 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8095 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8096 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8097 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8098 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8099 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8100 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8101 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8102 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8103 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8104 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8105 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8106 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8107 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8108 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8109 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8110 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8111 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8112 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8113
8114 /* Predicates. */
8115 d = (struct builtin_description *) bdesc_spe_predicates;
8116 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8117 {
8118 tree type;
8119
8120 switch (insn_data[d->icode].operand[1].mode)
8121 {
8122 case V2SImode:
8123 type = int_ftype_int_v2si_v2si;
8124 break;
8125 case V2SFmode:
8126 type = int_ftype_int_v2sf_v2sf;
8127 break;
8128 default:
37409796 8129 gcc_unreachable ();
a3170dc6
AH
8130 }
8131
8132 def_builtin (d->mask, d->name, type, d->code);
8133 }
8134
8135 /* Evsel predicates. */
8136 d = (struct builtin_description *) bdesc_spe_evsel;
8137 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8138 {
8139 tree type;
8140
8141 switch (insn_data[d->icode].operand[1].mode)
8142 {
8143 case V2SImode:
8144 type = v2si_ftype_4_v2si;
8145 break;
8146 case V2SFmode:
8147 type = v2sf_ftype_4_v2sf;
8148 break;
8149 default:
37409796 8150 gcc_unreachable ();
a3170dc6
AH
8151 }
8152
8153 def_builtin (d->mask, d->name, type, d->code);
8154 }
8155}
8156
8157static void
863d938c 8158altivec_init_builtins (void)
a3170dc6
AH
8159{
8160 struct builtin_description *d;
8161 struct builtin_description_predicates *dp;
8162 size_t i;
7a4eca66
DE
8163 tree ftype;
8164
a3170dc6
AH
8165 tree pfloat_type_node = build_pointer_type (float_type_node);
8166 tree pint_type_node = build_pointer_type (integer_type_node);
8167 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8168 tree pchar_type_node = build_pointer_type (char_type_node);
8169
8170 tree pvoid_type_node = build_pointer_type (void_type_node);
8171
0dbc3651
ZW
8172 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8173 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8174 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8175 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8176
8177 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8178
58646b77
PB
8179 tree int_ftype_opaque
8180 = build_function_type_list (integer_type_node,
8181 opaque_V4SI_type_node, NULL_TREE);
8182
8183 tree opaque_ftype_opaque_int
8184 = build_function_type_list (opaque_V4SI_type_node,
8185 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8186 tree opaque_ftype_opaque_opaque_int
8187 = build_function_type_list (opaque_V4SI_type_node,
8188 opaque_V4SI_type_node, opaque_V4SI_type_node,
8189 integer_type_node, NULL_TREE);
8190 tree int_ftype_int_opaque_opaque
8191 = build_function_type_list (integer_type_node,
8192 integer_type_node, opaque_V4SI_type_node,
8193 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8194 tree int_ftype_int_v4si_v4si
8195 = build_function_type_list (integer_type_node,
8196 integer_type_node, V4SI_type_node,
8197 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8198 tree v4sf_ftype_pcfloat
8199 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8200 tree void_ftype_pfloat_v4sf
b4de2f7d 8201 = build_function_type_list (void_type_node,
a3170dc6 8202 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8203 tree v4si_ftype_pcint
8204 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8205 tree void_ftype_pint_v4si
b4de2f7d
AH
8206 = build_function_type_list (void_type_node,
8207 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8208 tree v8hi_ftype_pcshort
8209 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8210 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8211 = build_function_type_list (void_type_node,
8212 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8213 tree v16qi_ftype_pcchar
8214 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8215 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8216 = build_function_type_list (void_type_node,
8217 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8218 tree void_ftype_v4si
b4de2f7d 8219 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8220 tree v8hi_ftype_void
8221 = build_function_type (V8HI_type_node, void_list_node);
8222 tree void_ftype_void
8223 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8224 tree void_ftype_int
8225 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8226
58646b77
PB
8227 tree opaque_ftype_long_pcvoid
8228 = build_function_type_list (opaque_V4SI_type_node,
8229 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8230 tree v16qi_ftype_long_pcvoid
a3170dc6 8231 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8232 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8233 tree v8hi_ftype_long_pcvoid
a3170dc6 8234 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8235 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8236 tree v4si_ftype_long_pcvoid
a3170dc6 8237 = build_function_type_list (V4SI_type_node,
b4a62fa0 8238 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8239
58646b77
PB
8240 tree void_ftype_opaque_long_pvoid
8241 = build_function_type_list (void_type_node,
8242 opaque_V4SI_type_node, long_integer_type_node,
8243 pvoid_type_node, NULL_TREE);
b4a62fa0 8244 tree void_ftype_v4si_long_pvoid
b4de2f7d 8245 = build_function_type_list (void_type_node,
b4a62fa0 8246 V4SI_type_node, long_integer_type_node,
b4de2f7d 8247 pvoid_type_node, NULL_TREE);
b4a62fa0 8248 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8249 = build_function_type_list (void_type_node,
b4a62fa0 8250 V16QI_type_node, long_integer_type_node,
b4de2f7d 8251 pvoid_type_node, NULL_TREE);
b4a62fa0 8252 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8253 = build_function_type_list (void_type_node,
b4a62fa0 8254 V8HI_type_node, long_integer_type_node,
b4de2f7d 8255 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8256 tree int_ftype_int_v8hi_v8hi
8257 = build_function_type_list (integer_type_node,
8258 integer_type_node, V8HI_type_node,
8259 V8HI_type_node, NULL_TREE);
8260 tree int_ftype_int_v16qi_v16qi
8261 = build_function_type_list (integer_type_node,
8262 integer_type_node, V16QI_type_node,
8263 V16QI_type_node, NULL_TREE);
8264 tree int_ftype_int_v4sf_v4sf
8265 = build_function_type_list (integer_type_node,
8266 integer_type_node, V4SF_type_node,
8267 V4SF_type_node, NULL_TREE);
8268 tree v4si_ftype_v4si
8269 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8270 tree v8hi_ftype_v8hi
8271 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8272 tree v16qi_ftype_v16qi
8273 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8274 tree v4sf_ftype_v4sf
8275 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8276 tree void_ftype_pcvoid_int_int
a3170dc6 8277 = build_function_type_list (void_type_node,
0dbc3651 8278 pcvoid_type_node, integer_type_node,
8bb418a3 8279 integer_type_node, NULL_TREE);
8bb418a3 8280
0dbc3651
ZW
8281 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8282 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8283 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8284 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8285 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8286 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8287 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8288 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8289 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8290 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8291 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8292 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8293 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8294 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8295 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8296 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
8297 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8298 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8299 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 8300 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
8301 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8302 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8303 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8304 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8305 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8306 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8307 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8308 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8309 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8310 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8311 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8312 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
8313 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8314 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8315 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8316 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8317 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8318 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8319 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8320 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8321 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8322 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8323 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8324 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8325 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8326 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8327
8328 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8329
8330 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8331 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8332 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8333 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8334 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8335 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8336 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8337 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8338 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8339 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 8340
a3170dc6
AH
8341 /* Add the DST variants. */
8342 d = (struct builtin_description *) bdesc_dst;
8343 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 8344 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
8345
8346 /* Initialize the predicates. */
8347 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8348 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8349 {
8350 enum machine_mode mode1;
8351 tree type;
58646b77
PB
8352 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8353 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 8354
58646b77
PB
8355 if (is_overloaded)
8356 mode1 = VOIDmode;
8357 else
8358 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
8359
8360 switch (mode1)
8361 {
58646b77
PB
8362 case VOIDmode:
8363 type = int_ftype_int_opaque_opaque;
8364 break;
a3170dc6
AH
8365 case V4SImode:
8366 type = int_ftype_int_v4si_v4si;
8367 break;
8368 case V8HImode:
8369 type = int_ftype_int_v8hi_v8hi;
8370 break;
8371 case V16QImode:
8372 type = int_ftype_int_v16qi_v16qi;
8373 break;
8374 case V4SFmode:
8375 type = int_ftype_int_v4sf_v4sf;
8376 break;
8377 default:
37409796 8378 gcc_unreachable ();
a3170dc6 8379 }
f676971a 8380
a3170dc6
AH
8381 def_builtin (dp->mask, dp->name, type, dp->code);
8382 }
8383
8384 /* Initialize the abs* operators. */
8385 d = (struct builtin_description *) bdesc_abs;
8386 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8387 {
8388 enum machine_mode mode0;
8389 tree type;
8390
8391 mode0 = insn_data[d->icode].operand[0].mode;
8392
8393 switch (mode0)
8394 {
8395 case V4SImode:
8396 type = v4si_ftype_v4si;
8397 break;
8398 case V8HImode:
8399 type = v8hi_ftype_v8hi;
8400 break;
8401 case V16QImode:
8402 type = v16qi_ftype_v16qi;
8403 break;
8404 case V4SFmode:
8405 type = v4sf_ftype_v4sf;
8406 break;
8407 default:
37409796 8408 gcc_unreachable ();
a3170dc6 8409 }
f676971a 8410
a3170dc6
AH
8411 def_builtin (d->mask, d->name, type, d->code);
8412 }
7ccf35ed 8413
13c62176
DN
8414 if (TARGET_ALTIVEC)
8415 {
8416 tree decl;
8417
8418 /* Initialize target builtin that implements
8419 targetm.vectorize.builtin_mask_for_load. */
8420
8421 decl = lang_hooks.builtin_function ("__builtin_altivec_mask_for_load",
8bb46326
DN
8422 v16qi_ftype_long_pcvoid,
8423 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
8424 BUILT_IN_MD, NULL,
8425 tree_cons (get_identifier ("const"),
8426 NULL_TREE, NULL_TREE));
13c62176
DN
8427 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8428 altivec_builtin_mask_for_load = decl;
13c62176 8429 }
7a4eca66
DE
8430
8431 /* Access to the vec_init patterns. */
8432 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8433 integer_type_node, integer_type_node,
8434 integer_type_node, NULL_TREE);
8435 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8436 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8437
8438 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8439 short_integer_type_node,
8440 short_integer_type_node,
8441 short_integer_type_node,
8442 short_integer_type_node,
8443 short_integer_type_node,
8444 short_integer_type_node,
8445 short_integer_type_node, NULL_TREE);
8446 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8447 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8448
8449 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8450 char_type_node, char_type_node,
8451 char_type_node, char_type_node,
8452 char_type_node, char_type_node,
8453 char_type_node, char_type_node,
8454 char_type_node, char_type_node,
8455 char_type_node, char_type_node,
8456 char_type_node, char_type_node,
8457 char_type_node, NULL_TREE);
8458 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8459 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8460
8461 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8462 float_type_node, float_type_node,
8463 float_type_node, NULL_TREE);
8464 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
8465 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
8466
8467 /* Access to the vec_set patterns. */
8468 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
8469 intSI_type_node,
8470 integer_type_node, NULL_TREE);
8471 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
8472 ALTIVEC_BUILTIN_VEC_SET_V4SI);
8473
8474 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
8475 intHI_type_node,
8476 integer_type_node, NULL_TREE);
8477 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
8478 ALTIVEC_BUILTIN_VEC_SET_V8HI);
8479
8480 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
8481 intQI_type_node,
8482 integer_type_node, NULL_TREE);
8483 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
8484 ALTIVEC_BUILTIN_VEC_SET_V16QI);
8485
8486 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
8487 float_type_node,
8488 integer_type_node, NULL_TREE);
8489 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
8490 ALTIVEC_BUILTIN_VEC_SET_V4SF);
8491
8492 /* Access to the vec_extract patterns. */
8493 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
8494 integer_type_node, NULL_TREE);
8495 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
8496 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
8497
8498 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
8499 integer_type_node, NULL_TREE);
8500 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
8501 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
8502
8503 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
8504 integer_type_node, NULL_TREE);
8505 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
8506 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
8507
8508 ftype = build_function_type_list (float_type_node, V4SF_type_node,
8509 integer_type_node, NULL_TREE);
8510 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
8511 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
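  /* Illustrative sketch (not part of the original comments): the three
     families registered above give front ends direct access to the
     vec_init, vec_set and vec_extract patterns, roughly

         v = __builtin_vec_init_v4si (a, b, c, d);   four ints build a V4SI
         v = __builtin_vec_set_v4si (v, x, 2);       replace element 2
         e = __builtin_vec_ext_v4si (v, 3);          read element 3

     where v is assumed to have the V4SImode vector type and the final
     integer argument selects the element.  */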
a3170dc6
AH
8512}
8513
8514static void
863d938c 8515rs6000_common_init_builtins (void)
a3170dc6
AH
8516{
8517 struct builtin_description *d;
8518 size_t i;
8519
8520 tree v4sf_ftype_v4sf_v4sf_v16qi
8521 = build_function_type_list (V4SF_type_node,
8522 V4SF_type_node, V4SF_type_node,
8523 V16QI_type_node, NULL_TREE);
8524 tree v4si_ftype_v4si_v4si_v16qi
8525 = build_function_type_list (V4SI_type_node,
8526 V4SI_type_node, V4SI_type_node,
8527 V16QI_type_node, NULL_TREE);
8528 tree v8hi_ftype_v8hi_v8hi_v16qi
8529 = build_function_type_list (V8HI_type_node,
8530 V8HI_type_node, V8HI_type_node,
8531 V16QI_type_node, NULL_TREE);
8532 tree v16qi_ftype_v16qi_v16qi_v16qi
8533 = build_function_type_list (V16QI_type_node,
8534 V16QI_type_node, V16QI_type_node,
8535 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
8536 tree v4si_ftype_int
8537 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
8538 tree v8hi_ftype_int
8539 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
8540 tree v16qi_ftype_int
8541 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
8542 tree v8hi_ftype_v16qi
8543 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
8544 tree v4sf_ftype_v4sf
8545 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8546
8547 tree v2si_ftype_v2si_v2si
2abe3e28
AH
8548 = build_function_type_list (opaque_V2SI_type_node,
8549 opaque_V2SI_type_node,
8550 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8551
8552 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
8553 = build_function_type_list (opaque_V2SF_type_node,
8554 opaque_V2SF_type_node,
8555 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8556
8557 tree v2si_ftype_int_int
2abe3e28 8558 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8559 integer_type_node, integer_type_node,
8560 NULL_TREE);
8561
58646b77
PB
8562 tree opaque_ftype_opaque
8563 = build_function_type_list (opaque_V4SI_type_node,
8564 opaque_V4SI_type_node, NULL_TREE);
8565
a3170dc6 8566 tree v2si_ftype_v2si
2abe3e28
AH
8567 = build_function_type_list (opaque_V2SI_type_node,
8568 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8569
8570 tree v2sf_ftype_v2sf
2abe3e28
AH
8571 = build_function_type_list (opaque_V2SF_type_node,
8572 opaque_V2SF_type_node, NULL_TREE);
f676971a 8573
a3170dc6 8574 tree v2sf_ftype_v2si
2abe3e28
AH
8575 = build_function_type_list (opaque_V2SF_type_node,
8576 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
8577
8578 tree v2si_ftype_v2sf
2abe3e28
AH
8579 = build_function_type_list (opaque_V2SI_type_node,
8580 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
8581
8582 tree v2si_ftype_v2si_char
2abe3e28
AH
8583 = build_function_type_list (opaque_V2SI_type_node,
8584 opaque_V2SI_type_node,
8585 char_type_node, NULL_TREE);
a3170dc6
AH
8586
8587 tree v2si_ftype_int_char
2abe3e28 8588 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
8589 integer_type_node, char_type_node, NULL_TREE);
8590
8591 tree v2si_ftype_char
2abe3e28
AH
8592 = build_function_type_list (opaque_V2SI_type_node,
8593 char_type_node, NULL_TREE);
a3170dc6
AH
8594
8595 tree int_ftype_int_int
8596 = build_function_type_list (integer_type_node,
8597 integer_type_node, integer_type_node,
8598 NULL_TREE);
95385cbb 8599
58646b77
PB
8600 tree opaque_ftype_opaque_opaque
8601 = build_function_type_list (opaque_V4SI_type_node,
8602 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 8603 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
8604 = build_function_type_list (V4SI_type_node,
8605 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 8606 tree v4sf_ftype_v4si_int
b4de2f7d 8607 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
8608 V4SI_type_node, integer_type_node, NULL_TREE);
8609 tree v4si_ftype_v4sf_int
b4de2f7d 8610 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8611 V4SF_type_node, integer_type_node, NULL_TREE);
8612 tree v4si_ftype_v4si_int
b4de2f7d 8613 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
8614 V4SI_type_node, integer_type_node, NULL_TREE);
8615 tree v8hi_ftype_v8hi_int
b4de2f7d 8616 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
8617 V8HI_type_node, integer_type_node, NULL_TREE);
8618 tree v16qi_ftype_v16qi_int
b4de2f7d 8619 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
8620 V16QI_type_node, integer_type_node, NULL_TREE);
8621 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
8622 = build_function_type_list (V16QI_type_node,
8623 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
8624 integer_type_node, NULL_TREE);
8625 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
8626 = build_function_type_list (V8HI_type_node,
8627 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
8628 integer_type_node, NULL_TREE);
8629 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
8630 = build_function_type_list (V4SI_type_node,
8631 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
8632 integer_type_node, NULL_TREE);
8633 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
8634 = build_function_type_list (V4SF_type_node,
8635 V4SF_type_node, V4SF_type_node,
b9e4e5d1 8636 integer_type_node, NULL_TREE);
0ac081f6 8637 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
8638 = build_function_type_list (V4SF_type_node,
8639 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
8640 tree opaque_ftype_opaque_opaque_opaque
8641 = build_function_type_list (opaque_V4SI_type_node,
8642 opaque_V4SI_type_node, opaque_V4SI_type_node,
8643 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 8644 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
8645 = build_function_type_list (V4SF_type_node,
8646 V4SF_type_node, V4SF_type_node,
8647 V4SI_type_node, NULL_TREE);
2212663f 8648 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
8649 = build_function_type_list (V4SF_type_node,
8650 V4SF_type_node, V4SF_type_node,
8651 V4SF_type_node, NULL_TREE);
f676971a 8652 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
8653 = build_function_type_list (V4SI_type_node,
8654 V4SI_type_node, V4SI_type_node,
8655 V4SI_type_node, NULL_TREE);
0ac081f6 8656 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
8657 = build_function_type_list (V8HI_type_node,
8658 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 8659 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
8660 = build_function_type_list (V8HI_type_node,
8661 V8HI_type_node, V8HI_type_node,
8662 V8HI_type_node, NULL_TREE);
c4ad648e 8663 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
8664 = build_function_type_list (V4SI_type_node,
8665 V8HI_type_node, V8HI_type_node,
8666 V4SI_type_node, NULL_TREE);
c4ad648e 8667 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
8668 = build_function_type_list (V4SI_type_node,
8669 V16QI_type_node, V16QI_type_node,
8670 V4SI_type_node, NULL_TREE);
0ac081f6 8671 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
8672 = build_function_type_list (V16QI_type_node,
8673 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8674 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
8675 = build_function_type_list (V4SI_type_node,
8676 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 8677 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
8678 = build_function_type_list (V8HI_type_node,
8679 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8680 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
8681 = build_function_type_list (V4SI_type_node,
8682 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8683 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
8684 = build_function_type_list (V8HI_type_node,
8685 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 8686 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
8687 = build_function_type_list (V16QI_type_node,
8688 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8689 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
8690 = build_function_type_list (V4SI_type_node,
8691 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 8692 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
8693 = build_function_type_list (V4SI_type_node,
8694 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8695 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
8696 = build_function_type_list (V4SI_type_node,
8697 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8698 tree v4si_ftype_v8hi
8699 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
8700 tree int_ftype_v4si_v4si
8701 = build_function_type_list (integer_type_node,
8702 V4SI_type_node, V4SI_type_node, NULL_TREE);
8703 tree int_ftype_v4sf_v4sf
8704 = build_function_type_list (integer_type_node,
8705 V4SF_type_node, V4SF_type_node, NULL_TREE);
8706 tree int_ftype_v16qi_v16qi
8707 = build_function_type_list (integer_type_node,
8708 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 8709 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
8710 = build_function_type_list (integer_type_node,
8711 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 8712
6f317ef3 8713 /* Add the simple ternary operators. */
2212663f 8714 d = (struct builtin_description *) bdesc_3arg;
ca7558fc 8715 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 8716 {
2212663f
DB
8717 enum machine_mode mode0, mode1, mode2, mode3;
8718 tree type;
58646b77
PB
8719 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8720 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 8721
58646b77
PB
8722 if (is_overloaded)
8723 {
8724 mode0 = VOIDmode;
8725 mode1 = VOIDmode;
8726 mode2 = VOIDmode;
8727 mode3 = VOIDmode;
8728 }
8729 else
8730 {
8731 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8732 continue;
f676971a 8733
58646b77
PB
8734 mode0 = insn_data[d->icode].operand[0].mode;
8735 mode1 = insn_data[d->icode].operand[1].mode;
8736 mode2 = insn_data[d->icode].operand[2].mode;
8737 mode3 = insn_data[d->icode].operand[3].mode;
8738 }
bb8df8a6 8739
2212663f
DB
8740 /* When all four are of the same mode. */
8741 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
8742 {
8743 switch (mode0)
8744 {
58646b77
PB
8745 case VOIDmode:
8746 type = opaque_ftype_opaque_opaque_opaque;
8747 break;
617e0e1d
DB
8748 case V4SImode:
8749 type = v4si_ftype_v4si_v4si_v4si;
8750 break;
2212663f
DB
8751 case V4SFmode:
8752 type = v4sf_ftype_v4sf_v4sf_v4sf;
8753 break;
8754 case V8HImode:
8755 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 8756 break;
2212663f
DB
8757 case V16QImode:
8758 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8759 break;
2212663f 8760 default:
37409796 8761 gcc_unreachable ();
2212663f
DB
8762 }
8763 }
8764 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 8765 {
2212663f
DB
8766 switch (mode0)
8767 {
8768 case V4SImode:
8769 type = v4si_ftype_v4si_v4si_v16qi;
8770 break;
8771 case V4SFmode:
8772 type = v4sf_ftype_v4sf_v4sf_v16qi;
8773 break;
8774 case V8HImode:
8775 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 8776 break;
2212663f
DB
8777 case V16QImode:
8778 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 8779 break;
2212663f 8780 default:
37409796 8781 gcc_unreachable ();
2212663f
DB
8782 }
8783 }
f676971a 8784 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 8785 && mode3 == V4SImode)
24408032 8786 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 8787 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 8788 && mode3 == V4SImode)
24408032 8789 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 8790 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 8791 && mode3 == V4SImode)
24408032
AH
8792 type = v4sf_ftype_v4sf_v4sf_v4si;
8793
8794 /* vchar, vchar, vchar, 4 bit literal. */
8795 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8796 && mode3 == QImode)
b9e4e5d1 8797 type = v16qi_ftype_v16qi_v16qi_int;
24408032
AH
8798
8799 /* vshort, vshort, vshort, 4 bit literal. */
8800 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8801 && mode3 == QImode)
b9e4e5d1 8802 type = v8hi_ftype_v8hi_v8hi_int;
24408032
AH
8803
8804 /* vint, vint, vint, 4 bit literal. */
8805 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8806 && mode3 == QImode)
b9e4e5d1 8807 type = v4si_ftype_v4si_v4si_int;
24408032
AH
8808
8809 /* vfloat, vfloat, vfloat, 4 bit literal. */
8810 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8811 && mode3 == QImode)
b9e4e5d1 8812 type = v4sf_ftype_v4sf_v4sf_int;
24408032 8813
2212663f 8814 else
37409796 8815 gcc_unreachable ();
2212663f
DB
8816
8817 def_builtin (d->mask, d->name, type, d->code);
8818 }
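  /* Illustrative example (the actual entries live in bdesc_3arg): a
     vperm-style entry whose insn operands are V4SF, V4SF, V4SF, V16QI
     falls into the "mode3 == V16QImode" arm above and is registered with
     v4sf_ftype_v4sf_v4sf_v16qi, while an entry such as a fused
     multiply-add whose four operands are all V4SF falls into the
     all-same-mode arm and gets v4sf_ftype_v4sf_v4sf_v4sf.  */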
8819
0ac081f6 8820 /* Add the simple binary operators. */
00b960c7 8821 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 8822 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
8823 {
8824 enum machine_mode mode0, mode1, mode2;
8825 tree type;
58646b77
PB
8826 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8827 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 8828
58646b77
PB
8829 if (is_overloaded)
8830 {
8831 mode0 = VOIDmode;
8832 mode1 = VOIDmode;
8833 mode2 = VOIDmode;
8834 }
8835 else
bb8df8a6 8836 {
58646b77
PB
8837 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8838 continue;
f676971a 8839
58646b77
PB
8840 mode0 = insn_data[d->icode].operand[0].mode;
8841 mode1 = insn_data[d->icode].operand[1].mode;
8842 mode2 = insn_data[d->icode].operand[2].mode;
8843 }
0ac081f6
AH
8844
8845 /* When all three operands are of the same mode. */
8846 if (mode0 == mode1 && mode1 == mode2)
8847 {
8848 switch (mode0)
8849 {
58646b77
PB
8850 case VOIDmode:
8851 type = opaque_ftype_opaque_opaque;
8852 break;
0ac081f6
AH
8853 case V4SFmode:
8854 type = v4sf_ftype_v4sf_v4sf;
8855 break;
8856 case V4SImode:
8857 type = v4si_ftype_v4si_v4si;
8858 break;
8859 case V16QImode:
8860 type = v16qi_ftype_v16qi_v16qi;
8861 break;
8862 case V8HImode:
8863 type = v8hi_ftype_v8hi_v8hi;
8864 break;
a3170dc6
AH
8865 case V2SImode:
8866 type = v2si_ftype_v2si_v2si;
8867 break;
8868 case V2SFmode:
8869 type = v2sf_ftype_v2sf_v2sf;
8870 break;
8871 case SImode:
8872 type = int_ftype_int_int;
8873 break;
0ac081f6 8874 default:
37409796 8875 gcc_unreachable ();
0ac081f6
AH
8876 }
8877 }
8878
8879 /* A few other combos we really don't want to do manually. */
8880
8881 /* vint, vfloat, vfloat. */
8882 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8883 type = v4si_ftype_v4sf_v4sf;
8884
8885 /* vshort, vchar, vchar. */
8886 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8887 type = v8hi_ftype_v16qi_v16qi;
8888
8889 /* vint, vshort, vshort. */
8890 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8891 type = v4si_ftype_v8hi_v8hi;
8892
8893 /* vshort, vint, vint. */
8894 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8895 type = v8hi_ftype_v4si_v4si;
8896
8897 /* vchar, vshort, vshort. */
8898 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8899 type = v16qi_ftype_v8hi_v8hi;
8900
8901 /* vint, vchar, vint. */
8902 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8903 type = v4si_ftype_v16qi_v4si;
8904
fa066a23
AH
8905 /* vint, vchar, vchar. */
8906 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8907 type = v4si_ftype_v16qi_v16qi;
8908
0ac081f6
AH
8909 /* vint, vshort, vint. */
8910 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8911 type = v4si_ftype_v8hi_v4si;
f676971a 8912
2212663f
DB
8913 /* vint, vint, 5 bit literal. */
8914 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 8915 type = v4si_ftype_v4si_int;
f676971a 8916
2212663f
DB
8917 /* vshort, vshort, 5 bit literal. */
8918 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 8919 type = v8hi_ftype_v8hi_int;
f676971a 8920
2212663f
DB
8921 /* vchar, vchar, 5 bit literal. */
8922 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 8923 type = v16qi_ftype_v16qi_int;
0ac081f6 8924
617e0e1d
DB
8925 /* vfloat, vint, 5 bit literal. */
8926 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 8927 type = v4sf_ftype_v4si_int;
f676971a 8928
617e0e1d
DB
8929 /* vint, vfloat, 5 bit literal. */
8930 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 8931 type = v4si_ftype_v4sf_int;
617e0e1d 8932
a3170dc6
AH
8933 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8934 type = v2si_ftype_int_int;
8935
8936 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8937 type = v2si_ftype_v2si_char;
8938
8939 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8940 type = v2si_ftype_int_char;
8941
37409796 8942 else
0ac081f6 8943 {
37409796
NS
8944 /* int, x, x. */
8945 gcc_assert (mode0 == SImode);
0ac081f6
AH
8946 switch (mode1)
8947 {
8948 case V4SImode:
8949 type = int_ftype_v4si_v4si;
8950 break;
8951 case V4SFmode:
8952 type = int_ftype_v4sf_v4sf;
8953 break;
8954 case V16QImode:
8955 type = int_ftype_v16qi_v16qi;
8956 break;
8957 case V8HImode:
8958 type = int_ftype_v8hi_v8hi;
8959 break;
8960 default:
37409796 8961 gcc_unreachable ();
0ac081f6
AH
8962 }
8963 }
8964
2212663f
DB
8965 def_builtin (d->mask, d->name, type, d->code);
8966 }
24408032 8967
2212663f
DB
8968 /* Add the simple unary operators. */
8969 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 8970 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
8971 {
8972 enum machine_mode mode0, mode1;
8973 tree type;
58646b77
PB
8974 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8975 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
8976
8977 if (is_overloaded)
8978 {
8979 mode0 = VOIDmode;
8980 mode1 = VOIDmode;
8981 }
8982 else
8983 {
8984 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8985 continue;
bb8df8a6 8986
58646b77
PB
8987 mode0 = insn_data[d->icode].operand[0].mode;
8988 mode1 = insn_data[d->icode].operand[1].mode;
8989 }
2212663f
DB
8990
8991 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 8992 type = v4si_ftype_int;
2212663f 8993 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 8994 type = v8hi_ftype_int;
2212663f 8995 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 8996 type = v16qi_ftype_int;
58646b77
PB
8997 else if (mode0 == VOIDmode && mode1 == VOIDmode)
8998 type = opaque_ftype_opaque;
617e0e1d
DB
8999 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9000 type = v4sf_ftype_v4sf;
20e26713
AH
9001 else if (mode0 == V8HImode && mode1 == V16QImode)
9002 type = v8hi_ftype_v16qi;
9003 else if (mode0 == V4SImode && mode1 == V8HImode)
9004 type = v4si_ftype_v8hi;
a3170dc6
AH
9005 else if (mode0 == V2SImode && mode1 == V2SImode)
9006 type = v2si_ftype_v2si;
9007 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9008 type = v2sf_ftype_v2sf;
9009 else if (mode0 == V2SFmode && mode1 == V2SImode)
9010 type = v2sf_ftype_v2si;
9011 else if (mode0 == V2SImode && mode1 == V2SFmode)
9012 type = v2si_ftype_v2sf;
9013 else if (mode0 == V2SImode && mode1 == QImode)
9014 type = v2si_ftype_char;
2212663f 9015 else
37409796 9016 gcc_unreachable ();
2212663f 9017
0ac081f6
AH
9018 def_builtin (d->mask, d->name, type, d->code);
9019 }
9020}
9021
c15c90bb
ZW
9022static void
9023rs6000_init_libfuncs (void)
9024{
9025 if (!TARGET_HARD_FLOAT)
9026 return;
9027
c9034561 9028 if (DEFAULT_ABI != ABI_V4)
c15c90bb 9029 {
c9034561 9030 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
c15c90bb 9031 {
c9034561 9032 /* AIX library routines for float->int conversion. */
85363ca0
ZW
9033 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9034 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
4274207b
DE
9035 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9036 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
c15c90bb
ZW
9037 }
9038
98c41d98
DE
9039 /* AIX/Darwin/64-bit Linux quad floating point routines. */
9040 if (!TARGET_XL_COMPAT)
9041 {
9042 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9043 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9044 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9045 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
9046 }
9047 else
9048 {
9049 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9050 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9051 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9052 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9053 }
c15c90bb 9054 }
c9034561 9055 else
c15c90bb 9056 {
c9034561 9057 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
9058
9059 set_optab_libfunc (add_optab, TFmode, "_q_add");
9060 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9061 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9062 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9063 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9064 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9065 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9066
c9034561
ZW
9067 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9068 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9069 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9070 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9071 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9072 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9073
85363ca0
ZW
9074 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9075 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9076 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9077 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9078 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9079 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9080 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 9081 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
9082 }
9083}
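/* Illustrative effect (assuming long double maps to TFmode, i.e. the
   128-bit IBM format, on the target): under the 32-bit SVR4 ABI a
   long-double addition is emitted as a call to "_q_add", while on
   AIX, Darwin or 64-bit Linux the same operation calls "__gcc_qadd"
   (or "_xlqadd" when TARGET_XL_COMPAT is set).  That is all
   set_optab_libfunc does here: it records which library routine the
   middle end should call when no TFmode add pattern is available.  */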
fba73eb1
DE
9084
9085\f
9086/* Expand a block clear operation, and return 1 if successful. Return 0
9087 if we should let the compiler generate normal code.
9088
9089 operands[0] is the destination
9090 operands[1] is the length
57e84f18 9091 operands[3] is the alignment */
fba73eb1
DE
9092
9093int
9094expand_block_clear (rtx operands[])
9095{
9096 rtx orig_dest = operands[0];
9097 rtx bytes_rtx = operands[1];
57e84f18 9098 rtx align_rtx = operands[3];
5514620a
GK
9099 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9100 HOST_WIDE_INT align;
9101 HOST_WIDE_INT bytes;
fba73eb1
DE
9102 int offset;
9103 int clear_bytes;
5514620a 9104 int clear_step;
fba73eb1
DE
9105
 9106 /* If this is not a fixed size clear, just let the compiler generate normal code (e.g. a memset call). */
9107 if (! constp)
9108 return 0;
9109
37409796
NS
9110 /* This must be a fixed size alignment */
9111 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9112 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9113
9114 /* Anything to clear? */
9115 bytes = INTVAL (bytes_rtx);
9116 if (bytes <= 0)
9117 return 1;
9118
5514620a
GK
9119 /* Use the builtin memset after a point, to avoid huge code bloat.
9120 When optimize_size, avoid any significant code bloat; calling
9121 memset is about 4 instructions, so allow for one instruction to
9122 load zero and three to do clearing. */
9123 if (TARGET_ALTIVEC && align >= 128)
9124 clear_step = 16;
9125 else if (TARGET_POWERPC64 && align >= 32)
9126 clear_step = 8;
9127 else
9128 clear_step = 4;
fba73eb1 9129
5514620a
GK
9130 if (optimize_size && bytes > 3 * clear_step)
9131 return 0;
9132 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9133 return 0;
9134
9135 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9136 {
fba73eb1
DE
9137 enum machine_mode mode = BLKmode;
9138 rtx dest;
f676971a 9139
5514620a
GK
9140 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9141 {
9142 clear_bytes = 16;
9143 mode = V4SImode;
9144 }
9145 else if (bytes >= 8 && TARGET_POWERPC64
9146 /* 64-bit loads and stores require word-aligned
9147 displacements. */
9148 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9149 {
9150 clear_bytes = 8;
9151 mode = DImode;
fba73eb1 9152 }
5514620a 9153 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
9154 { /* move 4 bytes */
9155 clear_bytes = 4;
9156 mode = SImode;
fba73eb1 9157 }
ec53fc93 9158 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
9159 { /* move 2 bytes */
9160 clear_bytes = 2;
9161 mode = HImode;
fba73eb1
DE
9162 }
9163 else /* move 1 byte at a time */
9164 {
9165 clear_bytes = 1;
9166 mode = QImode;
fba73eb1 9167 }
f676971a 9168
fba73eb1 9169 dest = adjust_address (orig_dest, mode, offset);
f676971a 9170
5514620a 9171 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9172 }
9173
9174 return 1;
9175}
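/* Worked example (illustrative): clearing 20 bytes of 16-byte-aligned
   memory on an AltiVec target, not optimizing for size, uses
   clear_step = 16; since 20 <= 8 * 16 the expander keeps the clear
   inline and emits one V4SImode store of zero (16 bytes) followed by
   one SImode store for the remaining 4 bytes.  Note that ALIGN here is
   measured in bits, so 16-byte alignment appears as 128.  */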
9176
35aff10b 9177\f
7e69e155
MM
9178/* Expand a block move operation, and return 1 if successful. Return 0
9179 if we should let the compiler generate normal code.
9180
9181 operands[0] is the destination
9182 operands[1] is the source
9183 operands[2] is the length
9184 operands[3] is the alignment */
9185
3933e0e1
MM
9186#define MAX_MOVE_REG 4
9187
7e69e155 9188int
a2369ed3 9189expand_block_move (rtx operands[])
7e69e155 9190{
b6c9286a
MM
9191 rtx orig_dest = operands[0];
9192 rtx orig_src = operands[1];
7e69e155 9193 rtx bytes_rtx = operands[2];
7e69e155 9194 rtx align_rtx = operands[3];
3933e0e1 9195 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9196 int align;
3933e0e1
MM
9197 int bytes;
9198 int offset;
7e69e155 9199 int move_bytes;
cabfd258
GK
9200 rtx stores[MAX_MOVE_REG];
9201 int num_reg = 0;
7e69e155 9202
3933e0e1 9203 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9204 if (! constp)
3933e0e1
MM
9205 return 0;
9206
37409796
NS
9207 /* This must be a fixed size alignment */
9208 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9209 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9210
7e69e155 9211 /* Anything to move? */
3933e0e1
MM
9212 bytes = INTVAL (bytes_rtx);
9213 if (bytes <= 0)
7e69e155
MM
9214 return 1;
9215
ea9982a8 9216 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9217 reg_parm_stack_space. */
ea9982a8 9218 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9219 return 0;
9220
cabfd258 9221 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9222 {
cabfd258 9223 union {
70128ad9 9224 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9225 rtx (*mov) (rtx, rtx);
cabfd258
GK
9226 } gen_func;
9227 enum machine_mode mode = BLKmode;
9228 rtx src, dest;
f676971a 9229
5514620a
GK
9230 /* Altivec first, since it will be faster than a string move
9231 when it applies, and usually not significantly larger. */
9232 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9233 {
9234 move_bytes = 16;
9235 mode = V4SImode;
9236 gen_func.mov = gen_movv4si;
9237 }
9238 else if (TARGET_STRING
cabfd258
GK
9239 && bytes > 24 /* move up to 32 bytes at a time */
9240 && ! fixed_regs[5]
9241 && ! fixed_regs[6]
9242 && ! fixed_regs[7]
9243 && ! fixed_regs[8]
9244 && ! fixed_regs[9]
9245 && ! fixed_regs[10]
9246 && ! fixed_regs[11]
9247 && ! fixed_regs[12])
7e69e155 9248 {
cabfd258 9249 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9250 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9251 }
9252 else if (TARGET_STRING
9253 && bytes > 16 /* move up to 24 bytes at a time */
9254 && ! fixed_regs[5]
9255 && ! fixed_regs[6]
9256 && ! fixed_regs[7]
9257 && ! fixed_regs[8]
9258 && ! fixed_regs[9]
9259 && ! fixed_regs[10])
9260 {
9261 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 9262 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
9263 }
9264 else if (TARGET_STRING
9265 && bytes > 8 /* move up to 16 bytes at a time */
9266 && ! fixed_regs[5]
9267 && ! fixed_regs[6]
9268 && ! fixed_regs[7]
9269 && ! fixed_regs[8])
9270 {
9271 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 9272 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
9273 }
9274 else if (bytes >= 8 && TARGET_POWERPC64
9275 /* 64-bit loads and stores require word-aligned
9276 displacements. */
fba73eb1 9277 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
9278 {
9279 move_bytes = 8;
9280 mode = DImode;
9281 gen_func.mov = gen_movdi;
9282 }
9283 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9284 { /* move up to 8 bytes at a time */
9285 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 9286 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 9287 }
cd7d9ca4 9288 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
9289 { /* move 4 bytes */
9290 move_bytes = 4;
9291 mode = SImode;
9292 gen_func.mov = gen_movsi;
9293 }
ec53fc93 9294 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
9295 { /* move 2 bytes */
9296 move_bytes = 2;
9297 mode = HImode;
9298 gen_func.mov = gen_movhi;
9299 }
9300 else if (TARGET_STRING && bytes > 1)
9301 { /* move up to 4 bytes at a time */
9302 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 9303 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
9304 }
9305 else /* move 1 byte at a time */
9306 {
9307 move_bytes = 1;
9308 mode = QImode;
9309 gen_func.mov = gen_movqi;
9310 }
f676971a 9311
cabfd258
GK
9312 src = adjust_address (orig_src, mode, offset);
9313 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
9314
9315 if (mode != BLKmode)
cabfd258
GK
9316 {
9317 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 9318
cabfd258
GK
9319 emit_insn ((*gen_func.mov) (tmp_reg, src));
9320 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 9321 }
3933e0e1 9322
cabfd258
GK
9323 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9324 {
9325 int i;
9326 for (i = 0; i < num_reg; i++)
9327 emit_insn (stores[i]);
9328 num_reg = 0;
9329 }
35aff10b 9330
cabfd258 9331 if (mode == BLKmode)
7e69e155 9332 {
70128ad9 9333 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
9334 patterns require zero offset. */
9335 if (!REG_P (XEXP (src, 0)))
b6c9286a 9336 {
cabfd258
GK
9337 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9338 src = replace_equiv_address (src, src_reg);
b6c9286a 9339 }
cabfd258 9340 set_mem_size (src, GEN_INT (move_bytes));
f676971a 9341
cabfd258 9342 if (!REG_P (XEXP (dest, 0)))
3933e0e1 9343 {
cabfd258
GK
9344 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9345 dest = replace_equiv_address (dest, dest_reg);
7e69e155 9346 }
cabfd258 9347 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 9348
70128ad9 9349 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
9350 GEN_INT (move_bytes & 31),
9351 align_rtx));
7e69e155 9352 }
7e69e155
MM
9353 }
9354
9355 return 1;
9356}
9357
d62294f5 9358\f
9caa3eb2
DE
9359/* Return a string to perform a load_multiple operation.
9360 operands[0] is the vector.
9361 operands[1] is the source address.
9362 operands[2] is the first destination register. */
9363
9364const char *
a2369ed3 9365rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
9366{
9367 /* We have to handle the case where the pseudo used to contain the address
9368 is assigned to one of the output registers. */
9369 int i, j;
9370 int words = XVECLEN (operands[0], 0);
9371 rtx xop[10];
9372
9373 if (XVECLEN (operands[0], 0) == 1)
9374 return "{l|lwz} %2,0(%1)";
9375
9376 for (i = 0; i < words; i++)
9377 if (refers_to_regno_p (REGNO (operands[2]) + i,
9378 REGNO (operands[2]) + i + 1, operands[1], 0))
9379 {
9380 if (i == words-1)
9381 {
9382 xop[0] = GEN_INT (4 * (words-1));
9383 xop[1] = operands[1];
9384 xop[2] = operands[2];
9385 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9386 return "";
9387 }
9388 else if (i == 0)
9389 {
9390 xop[0] = GEN_INT (4 * (words-1));
9391 xop[1] = operands[1];
9392 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9393 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9394 return "";
9395 }
9396 else
9397 {
9398 for (j = 0; j < words; j++)
9399 if (j != i)
9400 {
9401 xop[0] = GEN_INT (j * 4);
9402 xop[1] = operands[1];
9403 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9404 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9405 }
9406 xop[0] = GEN_INT (i * 4);
9407 xop[1] = operands[1];
9408 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9409 return "";
9410 }
9411 }
9412
9413 return "{lsi|lswi} %2,%1,%N0";
9414}
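/* Illustrative example (not from the original comments): for a
   three-word load in which the address register is not also one of the
   destination registers, the fall-through template above emits a single
   "{lsi|lswi} %2,%1,%N0".  If the address register happens to be the
   last destination register, the first branch instead loads the first
   two words with lswi and then fetches the final word, which overwrites
   the address, with a trailing lwz.  */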
9415
9878760c 9416\f
a4f6c312
SS
9417/* A validation routine: say whether CODE, a condition code, and MODE
9418 match. The other alternatives either don't make sense or should
9419 never be generated. */
39a10a29 9420
48d72335 9421void
a2369ed3 9422validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 9423{
37409796
NS
9424 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9425 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9426 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
9427
9428 /* These don't make sense. */
37409796
NS
9429 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9430 || mode != CCUNSmode);
39a10a29 9431
37409796
NS
9432 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9433 || mode == CCUNSmode);
39a10a29 9434
37409796
NS
9435 gcc_assert (mode == CCFPmode
9436 || (code != ORDERED && code != UNORDERED
9437 && code != UNEQ && code != LTGT
9438 && code != UNGT && code != UNLT
9439 && code != UNGE && code != UNLE));
f676971a
EC
9440
9441 /* These should never be generated except for
bc9ec0e0 9442 flag_finite_math_only. */
37409796
NS
9443 gcc_assert (mode != CCFPmode
9444 || flag_finite_math_only
9445 || (code != LE && code != GE
9446 && code != UNEQ && code != LTGT
9447 && code != UNGT && code != UNLT));
39a10a29
GK
9448
9449 /* These are invalid; the information is not there. */
37409796 9450 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
9451}
9452
9878760c
RK
9453\f
 9454/* Return 1 if ANDOP is a mask with no bits set outside the mask
 9455 required to convert the result of a rotate insn into a shift
 9456 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
9457
9458int
a2369ed3 9459includes_lshift_p (rtx shiftop, rtx andop)
9878760c 9460{
e2c953b6
DE
9461 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9462
9463 shift_mask <<= INTVAL (shiftop);
9878760c 9464
b1765bde 9465 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
9466}
9467
9468/* Similar, but for right shift. */
9469
9470int
a2369ed3 9471includes_rshift_p (rtx shiftop, rtx andop)
9878760c 9472{
a7653a2c 9473 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
9474
9475 shift_mask >>= INTVAL (shiftop);
9476
b1765bde 9477 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
9478}
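/* Worked example (illustrative): includes_lshift_p with SHIFTOP = 3
   builds shift_mask = ~0 << 3, so the only bits outside the mask are
   bits 2..0.  An ANDOP of 0xfffffff8 has none of those bits set and is
   accepted (return 1): a rotate left by 3 followed by that AND is the
   same as a single shift left by 3.  An ANDOP of 0xffffffff is
   rejected, because bits 2..0 would survive the rotate but not the
   shift.  includes_rshift_p is the mirror image, with the mask shifted
   right instead of left.  */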
9479
c5059423
AM
9480/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9481 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 9482 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
9483
9484int
a2369ed3 9485includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 9486{
c5059423
AM
9487 if (GET_CODE (andop) == CONST_INT)
9488 {
02071907 9489 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 9490
c5059423 9491 c = INTVAL (andop);
02071907 9492 if (c == 0 || c == ~0)
c5059423 9493 return 0;
e2c953b6 9494
02071907 9495 shift_mask = ~0;
c5059423
AM
9496 shift_mask <<= INTVAL (shiftop);
9497
b6d08ca1 9498 /* Find the least significant one bit. */
c5059423
AM
9499 lsb = c & -c;
9500
9501 /* It must coincide with the LSB of the shift mask. */
9502 if (-lsb != shift_mask)
9503 return 0;
e2c953b6 9504
c5059423
AM
9505 /* Invert to look for the next transition (if any). */
9506 c = ~c;
9507
9508 /* Remove the low group of ones (originally low group of zeros). */
9509 c &= -lsb;
9510
9511 /* Again find the lsb, and check we have all 1's above. */
9512 lsb = c & -c;
9513 return c == -lsb;
9514 }
9515 else if (GET_CODE (andop) == CONST_DOUBLE
9516 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9517 {
02071907
AM
9518 HOST_WIDE_INT low, high, lsb;
9519 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
9520
9521 low = CONST_DOUBLE_LOW (andop);
9522 if (HOST_BITS_PER_WIDE_INT < 64)
9523 high = CONST_DOUBLE_HIGH (andop);
9524
9525 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 9526 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
9527 return 0;
9528
9529 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9530 {
02071907 9531 shift_mask_high = ~0;
c5059423
AM
9532 if (INTVAL (shiftop) > 32)
9533 shift_mask_high <<= INTVAL (shiftop) - 32;
9534
9535 lsb = high & -high;
9536
9537 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9538 return 0;
9539
9540 high = ~high;
9541 high &= -lsb;
9542
9543 lsb = high & -high;
9544 return high == -lsb;
9545 }
9546
02071907 9547 shift_mask_low = ~0;
c5059423
AM
9548 shift_mask_low <<= INTVAL (shiftop);
9549
9550 lsb = low & -low;
9551
9552 if (-lsb != shift_mask_low)
9553 return 0;
9554
9555 if (HOST_BITS_PER_WIDE_INT < 64)
9556 high = ~high;
9557 low = ~low;
9558 low &= -lsb;
9559
9560 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9561 {
9562 lsb = high & -high;
9563 return high == -lsb;
9564 }
9565
9566 lsb = low & -low;
9567 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9568 }
9569 else
9570 return 0;
9571}
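/* Worked example (illustrative): with SHIFTOP = 8, an ANDOP of 0xff00
   has exactly eight low-order 0's, then eight 1's, then 0's, so the
   function returns 1 and the rotate-and-AND can be done with a single
   rldic.  With SHIFTOP = 4 the same ANDOP is rejected, because the
   lowest 1 bit of the mask (bit value 0x100) does not coincide with
   the lowest bit of ~0 << 4.  */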
e2c953b6 9572
c5059423
AM
9573/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9574 to perform a left shift. It must have SHIFTOP or more least
c1207243 9575 significant 0's, with the remainder of the word 1's. */
e2c953b6 9576
c5059423 9577int
a2369ed3 9578includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 9579{
e2c953b6 9580 if (GET_CODE (andop) == CONST_INT)
c5059423 9581 {
02071907 9582 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 9583
02071907 9584 shift_mask = ~0;
c5059423
AM
9585 shift_mask <<= INTVAL (shiftop);
9586 c = INTVAL (andop);
9587
c1207243 9588 /* Find the least significant one bit. */
c5059423
AM
9589 lsb = c & -c;
9590
9591 /* It must be covered by the shift mask.
a4f6c312 9592 This test also rejects c == 0. */
c5059423
AM
9593 if ((lsb & shift_mask) == 0)
9594 return 0;
9595
9596 /* Check we have all 1's above the transition, and reject all 1's. */
9597 return c == -lsb && lsb != 1;
9598 }
9599 else if (GET_CODE (andop) == CONST_DOUBLE
9600 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9601 {
02071907 9602 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
9603
9604 low = CONST_DOUBLE_LOW (andop);
9605
9606 if (HOST_BITS_PER_WIDE_INT < 64)
9607 {
02071907 9608 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
9609
9610 high = CONST_DOUBLE_HIGH (andop);
9611
9612 if (low == 0)
9613 {
02071907 9614 shift_mask_high = ~0;
c5059423
AM
9615 if (INTVAL (shiftop) > 32)
9616 shift_mask_high <<= INTVAL (shiftop) - 32;
9617
9618 lsb = high & -high;
9619
9620 if ((lsb & shift_mask_high) == 0)
9621 return 0;
9622
9623 return high == -lsb;
9624 }
9625 if (high != ~0)
9626 return 0;
9627 }
9628
02071907 9629 shift_mask_low = ~0;
c5059423
AM
9630 shift_mask_low <<= INTVAL (shiftop);
9631
9632 lsb = low & -low;
9633
9634 if ((lsb & shift_mask_low) == 0)
9635 return 0;
9636
9637 return low == -lsb && lsb != 1;
9638 }
e2c953b6 9639 else
c5059423 9640 return 0;
9878760c 9641}
35068b43 9642
11ac38b2
DE
9643/* Return 1 if operands will generate a valid arguments to rlwimi
9644instruction for insert with right shift in 64-bit mode. The mask may
9645not start on the first bit or stop on the last bit because wrap-around
9646effects of instruction do not correspond to semantics of RTL insn. */
9647
9648int
9649insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9650{
9651 if (INTVAL (startop) < 64
9652 && INTVAL (startop) > 32
9653 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9654 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9655 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9656 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9657 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9658 return 1;
9659
9660 return 0;
9661}
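/* Worked example (illustrative): SIZEOP = 16, STARTOP = 40, SHIFTOP = 8
   passes every test above (40 lies strictly between 32 and 64,
   16 + 40 = 56 lies strictly between 33 and 64, 56 + 8 = 64 lies in
   [64, 96), and 64 - 8 >= 16), so the insert can be done with rlwimi
   and the function returns 1.  */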
9662
35068b43 9663/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 9664 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
9665
9666int
a2369ed3 9667registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
9668{
9669 /* We might have been passed a SUBREG. */
f676971a 9670 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 9671 return 0;
f676971a 9672
90f81f99
AP
9673 /* We might have been passed non floating point registers. */
9674 if (!FP_REGNO_P (REGNO (reg1))
9675 || !FP_REGNO_P (REGNO (reg2)))
9676 return 0;
35068b43
RK
9677
9678 return (REGNO (reg1) == REGNO (reg2) - 1);
9679}
9680
a4f6c312
SS
9681/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9682 addr1 and addr2 must be in consecutive memory locations
9683 (addr2 == addr1 + 8). */
35068b43
RK
9684
9685int
90f81f99 9686mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 9687{
90f81f99 9688 rtx addr1, addr2;
bb8df8a6
EC
9689 unsigned int reg1, reg2;
9690 int offset1, offset2;
35068b43 9691
90f81f99
AP
9692 /* The mems cannot be volatile. */
9693 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9694 return 0;
f676971a 9695
90f81f99
AP
9696 addr1 = XEXP (mem1, 0);
9697 addr2 = XEXP (mem2, 0);
9698
35068b43
RK
9699 /* Extract an offset (if used) from the first addr. */
9700 if (GET_CODE (addr1) == PLUS)
9701 {
9702 /* If not a REG, return zero. */
9703 if (GET_CODE (XEXP (addr1, 0)) != REG)
9704 return 0;
9705 else
9706 {
c4ad648e 9707 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
9708 /* The offset must be constant! */
9709 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
9710 return 0;
9711 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
9712 }
9713 }
9714 else if (GET_CODE (addr1) != REG)
9715 return 0;
9716 else
9717 {
9718 reg1 = REGNO (addr1);
9719 /* This was a simple (mem (reg)) expression. Offset is 0. */
9720 offset1 = 0;
9721 }
9722
bb8df8a6
EC
9723 /* And now for the second addr. */
9724 if (GET_CODE (addr2) == PLUS)
9725 {
9726 /* If not a REG, return zero. */
9727 if (GET_CODE (XEXP (addr2, 0)) != REG)
9728 return 0;
9729 else
9730 {
9731 reg2 = REGNO (XEXP (addr2, 0));
9732 /* The offset must be constant. */
9733 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9734 return 0;
9735 offset2 = INTVAL (XEXP (addr2, 1));
9736 }
9737 }
9738 else if (GET_CODE (addr2) != REG)
35068b43 9739 return 0;
bb8df8a6
EC
9740 else
9741 {
9742 reg2 = REGNO (addr2);
9743 /* This was a simple (mem (reg)) expression. Offset is 0. */
9744 offset2 = 0;
9745 }
35068b43 9746
bb8df8a6
EC
9747 /* Both of these must have the same base register. */
9748 if (reg1 != reg2)
35068b43
RK
9749 return 0;
9750
9751 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 9752 if (offset2 != offset1 + 8)
35068b43
RK
9753 return 0;
9754
9755 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9756 instructions. */
9757 return 1;
9758}
9878760c
RK
9759\f
9760/* Return the register class of a scratch register needed to copy IN into
9761 or out of a register in CLASS in MODE. If it can be done directly,
9762 NO_REGS is returned. */
9763
9764enum reg_class
3c4774e0
R
9765rs6000_secondary_reload_class (enum reg_class class,
9766 enum machine_mode mode ATTRIBUTE_UNUSED,
9767 rtx in)
9878760c 9768{
5accd822 9769 int regno;
9878760c 9770
ab82a49f
AP
9771 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9772#if TARGET_MACHO
c4ad648e 9773 && MACHOPIC_INDIRECT
ab82a49f 9774#endif
c4ad648e 9775 ))
46fad5b7
DJ
9776 {
9777 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
9778 other than BASE_REGS for TARGET_ELF. So indicate that a
9779 register from BASE_REGS is needed as an intermediate
9780 register.
f676971a 9781
46fad5b7
DJ
9782 On Darwin, pic addresses require a load from memory, which
9783 needs a base register. */
9784 if (class != BASE_REGS
c4ad648e
AM
9785 && (GET_CODE (in) == SYMBOL_REF
9786 || GET_CODE (in) == HIGH
9787 || GET_CODE (in) == LABEL_REF
9788 || GET_CODE (in) == CONST))
9789 return BASE_REGS;
46fad5b7 9790 }
e7b7998a 9791
5accd822
DE
9792 if (GET_CODE (in) == REG)
9793 {
9794 regno = REGNO (in);
9795 if (regno >= FIRST_PSEUDO_REGISTER)
9796 {
9797 regno = true_regnum (in);
9798 if (regno >= FIRST_PSEUDO_REGISTER)
9799 regno = -1;
9800 }
9801 }
9802 else if (GET_CODE (in) == SUBREG)
9803 {
9804 regno = true_regnum (in);
9805 if (regno >= FIRST_PSEUDO_REGISTER)
9806 regno = -1;
9807 }
9808 else
9809 regno = -1;
9810
9878760c
RK
9811 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9812 into anything. */
9813 if (class == GENERAL_REGS || class == BASE_REGS
9814 || (regno >= 0 && INT_REGNO_P (regno)))
9815 return NO_REGS;
9816
9817 /* Constants, memory, and FP registers can go into FP registers. */
9818 if ((regno == -1 || FP_REGNO_P (regno))
9819 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9820 return NO_REGS;
9821
0ac081f6
AH
9822 /* Memory, and AltiVec registers can go into AltiVec registers. */
9823 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9824 && class == ALTIVEC_REGS)
9825 return NO_REGS;
9826
9878760c
RK
9827 /* We can copy among the CR registers. */
9828 if ((class == CR_REGS || class == CR0_REGS)
9829 && regno >= 0 && CR_REGNO_P (regno))
9830 return NO_REGS;
9831
9832 /* Otherwise, we need GENERAL_REGS. */
9833 return GENERAL_REGS;
9834}
9835\f
9836/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 9837 know this is a valid comparison.
9878760c
RK
9838
9839 SCC_P is 1 if this is for an scc. That means that %D will have been
9840 used instead of %C, so the bits will be in different places.
9841
b4ac57ab 9842 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
9843
9844int
a2369ed3 9845ccr_bit (rtx op, int scc_p)
9878760c
RK
9846{
9847 enum rtx_code code = GET_CODE (op);
9848 enum machine_mode cc_mode;
9849 int cc_regnum;
9850 int base_bit;
9ebbca7d 9851 rtx reg;
9878760c 9852
ec8e098d 9853 if (!COMPARISON_P (op))
9878760c
RK
9854 return -1;
9855
9ebbca7d
GK
9856 reg = XEXP (op, 0);
9857
37409796 9858 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
9859
9860 cc_mode = GET_MODE (reg);
9861 cc_regnum = REGNO (reg);
9862 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 9863
39a10a29 9864 validate_condition_mode (code, cc_mode);
c5defebb 9865
b7053a3f
GK
9866 /* When generating a sCOND operation, only positive conditions are
9867 allowed. */
37409796
NS
9868 gcc_assert (!scc_p
9869 || code == EQ || code == GT || code == LT || code == UNORDERED
9870 || code == GTU || code == LTU);
f676971a 9871
9878760c
RK
9872 switch (code)
9873 {
9874 case NE:
9875 return scc_p ? base_bit + 3 : base_bit + 2;
9876 case EQ:
9877 return base_bit + 2;
1c882ea4 9878 case GT: case GTU: case UNLE:
9878760c 9879 return base_bit + 1;
1c882ea4 9880 case LT: case LTU: case UNGE:
9878760c 9881 return base_bit;
1c882ea4
GK
9882 case ORDERED: case UNORDERED:
9883 return base_bit + 3;
9878760c
RK
9884
9885 case GE: case GEU:
39a10a29 9886 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
9887 unordered position. So test that bit. For integer, this is ! LT
9888 unless this is an scc insn. */
39a10a29 9889 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
9890
9891 case LE: case LEU:
39a10a29 9892 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 9893
9878760c 9894 default:
37409796 9895 gcc_unreachable ();
9878760c
RK
9896 }
9897}
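/* Worked example (illustrative): for a GT comparison whose first
   operand is CR field 6 (hard register CR0_REGNO + 6), base_bit is
   4 * 6 = 24 and ccr_bit returns 25 for an ordinary branch
   (scc_p == 0).  For an scc sequence testing GE, the cror has already
   placed the result in the "unordered" position, so ccr_bit returns
   base_bit + 3, i.e. 27.  */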
1ff7789b 9898\f
8d30c4ee 9899/* Return the GOT register. */
1ff7789b 9900
9390387d 9901rtx
a2369ed3 9902rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 9903{
a4f6c312
SS
9904 /* The second flow pass currently (June 1999) can't update
9905 regs_ever_live without disturbing other parts of the compiler, so
9906 update it here to make the prolog/epilogue code happy. */
1db02437
FS
9907 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9908 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
1ff7789b 9909
8d30c4ee 9910 current_function_uses_pic_offset_table = 1;
3cb999d8 9911
1ff7789b
MM
9912 return pic_offset_table_rtx;
9913}
a7df97e6 9914\f
e2500fed
GK
9915/* Function to init struct machine_function.
9916 This will be called, via a pointer variable,
9917 from push_function_context. */
a7df97e6 9918
e2500fed 9919static struct machine_function *
863d938c 9920rs6000_init_machine_status (void)
a7df97e6 9921{
e2500fed 9922 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 9923}
9878760c 9924\f
0ba1b2ff
AM
9925/* These macros test for integers and extract the low-order bits. */
9926#define INT_P(X) \
9927((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9928 && GET_MODE (X) == VOIDmode)
9929
9930#define INT_LOWPART(X) \
9931 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9932
9933int
a2369ed3 9934extract_MB (rtx op)
0ba1b2ff
AM
9935{
9936 int i;
9937 unsigned long val = INT_LOWPART (op);
9938
9939 /* If the high bit is zero, the value is the first 1 bit we find
9940 from the left. */
9941 if ((val & 0x80000000) == 0)
9942 {
37409796 9943 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
9944
9945 i = 1;
9946 while (((val <<= 1) & 0x80000000) == 0)
9947 ++i;
9948 return i;
9949 }
9950
9951 /* If the high bit is set and the low bit is not, or the mask is all
9952 1's, the value is zero. */
9953 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9954 return 0;
9955
9956 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9957 from the right. */
9958 i = 31;
9959 while (((val >>= 1) & 1) != 0)
9960 --i;
9961
9962 return i;
9963}
9964
9965int
a2369ed3 9966extract_ME (rtx op)
0ba1b2ff
AM
9967{
9968 int i;
9969 unsigned long val = INT_LOWPART (op);
9970
9971 /* If the low bit is zero, the value is the first 1 bit we find from
9972 the right. */
9973 if ((val & 1) == 0)
9974 {
37409796 9975 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
9976
9977 i = 30;
9978 while (((val >>= 1) & 1) == 0)
9979 --i;
9980
9981 return i;
9982 }
9983
9984 /* If the low bit is set and the high bit is not, or the mask is all
9985 1's, the value is 31. */
9986 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9987 return 31;
9988
9989 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9990 from the left. */
9991 i = 0;
9992 while (((val <<= 1) & 0x80000000) != 0)
9993 ++i;
9994
9995 return i;
9996}
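/* Worked examples (illustrative), numbering bits with bit 0 as the most
   significant, as rlwinm does: for the mask 0x0ffffff0 (1's in bits
   4..27) extract_MB returns 4 and extract_ME returns 27.  For the
   wrap-around mask 0xf000000f (1's in bits 28..31 and 0..3) extract_MB
   returns 28 and extract_ME returns 3, which is exactly the MB/ME pair
   that makes rlwinm produce that mask.  */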
9997
c4501e62
JJ
9998/* Locate some local-dynamic symbol still in use by this function
9999 so that we can print its name in some tls_ld pattern. */
10000
10001static const char *
863d938c 10002rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
10003{
10004 rtx insn;
10005
10006 if (cfun->machine->some_ld_name)
10007 return cfun->machine->some_ld_name;
10008
10009 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10010 if (INSN_P (insn)
10011 && for_each_rtx (&PATTERN (insn),
10012 rs6000_get_some_local_dynamic_name_1, 0))
10013 return cfun->machine->some_ld_name;
10014
37409796 10015 gcc_unreachable ();
c4501e62
JJ
10016}
10017
10018/* Helper function for rs6000_get_some_local_dynamic_name. */
10019
10020static int
a2369ed3 10021rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
10022{
10023 rtx x = *px;
10024
10025 if (GET_CODE (x) == SYMBOL_REF)
10026 {
10027 const char *str = XSTR (x, 0);
10028 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10029 {
10030 cfun->machine->some_ld_name = str;
10031 return 1;
10032 }
10033 }
10034
10035 return 0;
10036}
10037
85b776df
AM
10038/* Write out a function code label. */
10039
10040void
10041rs6000_output_function_entry (FILE *file, const char *fname)
10042{
10043 if (fname[0] != '.')
10044 {
10045 switch (DEFAULT_ABI)
10046 {
10047 default:
37409796 10048 gcc_unreachable ();
85b776df
AM
10049
10050 case ABI_AIX:
10051 if (DOT_SYMBOLS)
10052 putc ('.', file);
10053 else
10054 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10055 break;
10056
10057 case ABI_V4:
10058 case ABI_DARWIN:
10059 break;
10060 }
10061 }
10062 if (TARGET_AIX)
10063 RS6000_OUTPUT_BASENAME (file, fname);
10064 else
10065 assemble_name (file, fname);
10066}
10067
9878760c
RK
10068/* Print an operand. Recognize special options, documented below. */
10069
38c1f2d7 10070#if TARGET_ELF
d9407988 10071#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10072#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10073#else
10074#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10075#define SMALL_DATA_REG 0
ba5e43aa
MM
10076#endif
10077
9878760c 10078void
a2369ed3 10079print_operand (FILE *file, rtx x, int code)
9878760c
RK
10080{
10081 int i;
a260abc9 10082 HOST_WIDE_INT val;
0ba1b2ff 10083 unsigned HOST_WIDE_INT uval;
9878760c
RK
10084
10085 switch (code)
10086 {
a8b3aeda 10087 case '.':
a85d226b
RK
10088 /* Write out an instruction after the call which may be replaced
10089 with glue code by the loader. This depends on the AIX version. */
10090 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10091 return;
10092
81eace42
GK
10093 /* %a is output_address. */
10094
9854d9ed
RK
10095 case 'A':
10096 /* If X is a constant integer whose low-order 5 bits are zero,
10097 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10098 in the AIX assembler where "sri" with a zero shift count
20e26713 10099 writes a trash instruction. */
9854d9ed 10100 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10101 putc ('l', file);
9854d9ed 10102 else
76229ac8 10103 putc ('r', file);
9854d9ed
RK
10104 return;
10105
10106 case 'b':
e2c953b6
DE
10107 /* If constant, low-order 16 bits of constant, unsigned.
10108 Otherwise, write normally. */
10109 if (INT_P (x))
10110 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10111 else
10112 print_operand (file, x, 0);
cad12a8d
RK
10113 return;
10114
a260abc9
DE
10115 case 'B':
10116 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10117 for 64-bit mask direction. */
9390387d 10118 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10119 return;
a260abc9 10120
81eace42
GK
10121 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10122 output_operand. */
10123
423c1189
AH
10124 case 'c':
10125 /* X is a CR register. Print the number of the GT bit of the CR. */
10126 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10127	output_operand_lossage ("invalid %%c value");
10128 else
10129 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10130 return;
10131
10132 case 'D':
6b1fedc3 10133 /* Like 'J' but get to the EQ bit. */
37409796 10134 gcc_assert (GET_CODE (x) == REG);
423c1189 10135
6b1fedc3
AH
10136 /* Bit 1 is EQ bit. */
10137 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
423c1189 10138
64022b5d 10139 fprintf (file, "%d", i);
423c1189
AH
10140 return;
10141
9854d9ed 10142 case 'E':
39a10a29 10143 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
10144 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10145 output_operand_lossage ("invalid %%E value");
78fbdbf7 10146 else
39a10a29 10147 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10148 return;
9854d9ed
RK
10149
10150 case 'f':
10151 /* X is a CR register. Print the shift count needed to move it
10152 to the high-order four bits. */
10153 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10154 output_operand_lossage ("invalid %%f value");
10155 else
9ebbca7d 10156 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10157 return;
10158
10159 case 'F':
10160 /* Similar, but print the count for the rotate in the opposite
10161 direction. */
10162 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10163 output_operand_lossage ("invalid %%F value");
10164 else
9ebbca7d 10165 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10166 return;
10167
10168 case 'G':
10169 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10170 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10171 if (GET_CODE (x) != CONST_INT)
10172 output_operand_lossage ("invalid %%G value");
10173 else if (INTVAL (x) >= 0)
76229ac8 10174 putc ('z', file);
9854d9ed 10175 else
76229ac8 10176 putc ('m', file);
9854d9ed 10177 return;
e2c953b6 10178
9878760c 10179 case 'h':
a4f6c312
SS
10180 /* If constant, output low-order five bits. Otherwise, write
10181 normally. */
9878760c 10182 if (INT_P (x))
5f59ecb7 10183 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10184 else
10185 print_operand (file, x, 0);
10186 return;
10187
64305719 10188 case 'H':
a4f6c312
SS
10189 /* If constant, output low-order six bits. Otherwise, write
10190 normally. */
64305719 10191 if (INT_P (x))
5f59ecb7 10192 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10193 else
10194 print_operand (file, x, 0);
10195 return;
10196
9854d9ed
RK
10197 case 'I':
10198 /* Print `i' if this is a constant, else nothing. */
9878760c 10199 if (INT_P (x))
76229ac8 10200 putc ('i', file);
9878760c
RK
10201 return;
10202
9854d9ed
RK
10203 case 'j':
10204 /* Write the bit number in CCR for jump. */
10205 i = ccr_bit (x, 0);
10206 if (i == -1)
10207 output_operand_lossage ("invalid %%j code");
9878760c 10208 else
9854d9ed 10209 fprintf (file, "%d", i);
9878760c
RK
10210 return;
10211
9854d9ed
RK
10212 case 'J':
10213 /* Similar, but add one for shift count in rlinm for scc and pass
10214 scc flag to `ccr_bit'. */
10215 i = ccr_bit (x, 1);
10216 if (i == -1)
10217 output_operand_lossage ("invalid %%J code");
10218 else
a0466a68
RK
10219 /* If we want bit 31, write a shift count of zero, not 32. */
10220 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10221 return;
10222
9854d9ed
RK
10223 case 'k':
10224 /* X must be a constant. Write the 1's complement of the
10225 constant. */
9878760c 10226 if (! INT_P (x))
9854d9ed 10227 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10228 else
10229 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10230 return;
10231
81eace42 10232 case 'K':
9ebbca7d
GK
10233 /* X must be a symbolic constant on ELF. Write an
10234 expression suitable for an 'addi' that adds in the low 16
10235 bits of the MEM. */
10236 if (GET_CODE (x) != CONST)
10237 {
10238 print_operand_address (file, x);
10239 fputs ("@l", file);
10240 }
10241 else
10242 {
10243 if (GET_CODE (XEXP (x, 0)) != PLUS
10244 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10245 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10246 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10247 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10248 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10249 fputs ("@l", file);
ed8d2920
MM
10250 /* For GNU as, there must be a non-alphanumeric character
10251 between 'l' and the number. The '-' is added by
10252 print_operand() already. */
10253 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10254 fputs ("+", file);
9ebbca7d
GK
10255 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10256 }
81eace42
GK
10257 return;
10258
10259 /* %l is output_asm_label. */
9ebbca7d 10260
9854d9ed
RK
10261 case 'L':
10262 /* Write second word of DImode or DFmode reference. Works on register
10263 or non-indexed memory only. */
10264 if (GET_CODE (x) == REG)
fb5c67a7 10265 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
10266 else if (GET_CODE (x) == MEM)
10267 {
10268 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 10269 we have already done it, we can just use an offset of word. */
9854d9ed
RK
10270 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10271 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
10272 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10273 UNITS_PER_WORD));
9854d9ed 10274 else
d7624dc0
RK
10275 output_address (XEXP (adjust_address_nv (x, SImode,
10276 UNITS_PER_WORD),
10277 0));
ed8908e7 10278
ba5e43aa 10279 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10280 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10281 reg_names[SMALL_DATA_REG]);
9854d9ed 10282 }
9878760c 10283 return;
f676971a 10284
9878760c
RK
10285 case 'm':
10286 /* MB value for a mask operand. */
b1765bde 10287 if (! mask_operand (x, SImode))
9878760c
RK
10288 output_operand_lossage ("invalid %%m value");
10289
0ba1b2ff 10290 fprintf (file, "%d", extract_MB (x));
9878760c
RK
10291 return;
10292
10293 case 'M':
10294 /* ME value for a mask operand. */
b1765bde 10295 if (! mask_operand (x, SImode))
a260abc9 10296 output_operand_lossage ("invalid %%M value");
9878760c 10297
0ba1b2ff 10298 fprintf (file, "%d", extract_ME (x));
9878760c
RK
10299 return;
10300
81eace42
GK
10301 /* %n outputs the negative of its operand. */
10302
9878760c
RK
10303 case 'N':
10304 /* Write the number of elements in the vector times 4. */
10305 if (GET_CODE (x) != PARALLEL)
10306 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
10307 else
10308 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
10309 return;
10310
10311 case 'O':
10312 /* Similar, but subtract 1 first. */
10313 if (GET_CODE (x) != PARALLEL)
1427100a 10314 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
10315 else
10316 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
10317 return;
10318
9854d9ed
RK
10319 case 'p':
10320 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10321 if (! INT_P (x)
2bfcf297 10322 || INT_LOWPART (x) < 0
9854d9ed
RK
10323 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10324 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
10325 else
10326 fprintf (file, "%d", i);
9854d9ed
RK
10327 return;
10328
9878760c
RK
10329 case 'P':
10330 /* The operand must be an indirect memory reference. The result
8bb418a3 10331 is the register name. */
9878760c
RK
10332 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10333 || REGNO (XEXP (x, 0)) >= 32)
10334 output_operand_lossage ("invalid %%P value");
e2c953b6 10335 else
fb5c67a7 10336 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
10337 return;
10338
dfbdccdb
GK
10339 case 'q':
10340 /* This outputs the logical code corresponding to a boolean
10341 expression. The expression may have one or both operands
39a10a29 10342 negated (if one, only the first one). For condition register
c4ad648e
AM
10343 logical operations, it will also treat the negated
10344 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 10345 {
63bc1d05 10346 const char *const *t = 0;
dfbdccdb
GK
10347 const char *s;
10348 enum rtx_code code = GET_CODE (x);
10349 static const char * const tbl[3][3] = {
10350 { "and", "andc", "nor" },
10351 { "or", "orc", "nand" },
10352 { "xor", "eqv", "xor" } };
10353
10354 if (code == AND)
10355 t = tbl[0];
10356 else if (code == IOR)
10357 t = tbl[1];
10358 else if (code == XOR)
10359 t = tbl[2];
10360 else
10361 output_operand_lossage ("invalid %%q value");
10362
10363 if (GET_CODE (XEXP (x, 0)) != NOT)
10364 s = t[0];
10365 else
10366 {
10367 if (GET_CODE (XEXP (x, 1)) == NOT)
10368 s = t[2];
10369 else
10370 s = t[1];
10371 }
f676971a 10372
dfbdccdb
GK
10373 fputs (s, file);
10374 }
10375 return;
10376
2c4a9cff
DE
10377 case 'Q':
10378 if (TARGET_MFCRF)
3b6ce0af 10379 fputc (',', file);
5efb1046 10380 /* FALLTHRU */
2c4a9cff
DE
10381 else
10382 return;
10383
9854d9ed
RK
10384 case 'R':
10385 /* X is a CR register. Print the mask for `mtcrf'. */
10386 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10387 output_operand_lossage ("invalid %%R value");
10388 else
9ebbca7d 10389 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 10390 return;
9854d9ed
RK
10391
10392 case 's':
10393 /* Low 5 bits of 32 - value */
10394 if (! INT_P (x))
10395 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
10396 else
10397 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 10398 return;
9854d9ed 10399
a260abc9 10400 case 'S':
0ba1b2ff 10401 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
10402 CONST_INT 32-bit mask is considered sign-extended so any
10403 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 10404 if (! mask64_operand (x, DImode))
a260abc9
DE
10405 output_operand_lossage ("invalid %%S value");
10406
0ba1b2ff 10407 uval = INT_LOWPART (x);
a260abc9 10408
0ba1b2ff 10409 if (uval & 1) /* Clear Left */
a260abc9 10410 {
f099d360
GK
10411#if HOST_BITS_PER_WIDE_INT > 64
10412 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10413#endif
0ba1b2ff 10414 i = 64;
a260abc9 10415 }
0ba1b2ff 10416 else /* Clear Right */
a260abc9 10417 {
0ba1b2ff 10418 uval = ~uval;
f099d360
GK
10419#if HOST_BITS_PER_WIDE_INT > 64
10420 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10421#endif
0ba1b2ff 10422 i = 63;
a260abc9 10423 }
0ba1b2ff
AM
10424 while (uval != 0)
10425 --i, uval >>= 1;
37409796 10426 gcc_assert (i >= 0);
0ba1b2ff
AM
10427 fprintf (file, "%d", i);
10428 return;
a260abc9 10429
a3170dc6
AH
10430 case 't':
10431 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 10432 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
10433
10434 /* Bit 3 is OV bit. */
10435 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10436
10437 /* If we want bit 31, write a shift count of zero, not 32. */
10438 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10439 return;
10440
cccf3bdc
DE
10441 case 'T':
10442 /* Print the symbolic name of a branch target register. */
10443 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10444 && REGNO (x) != COUNT_REGISTER_REGNUM))
10445 output_operand_lossage ("invalid %%T value");
e2c953b6 10446 else if (REGNO (x) == LINK_REGISTER_REGNUM)
cccf3bdc
DE
10447 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10448 else
10449 fputs ("ctr", file);
10450 return;
10451
9854d9ed 10452 case 'u':
802a0058 10453 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
10454 if (! INT_P (x))
10455 output_operand_lossage ("invalid %%u value");
e2c953b6 10456 else
f676971a 10457 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 10458 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
10459 return;
10460
802a0058
MM
10461 case 'v':
10462 /* High-order 16 bits of constant for use in signed operand. */
10463 if (! INT_P (x))
10464 output_operand_lossage ("invalid %%v value");
e2c953b6 10465 else
134c32f6
DE
10466 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10467 (INT_LOWPART (x) >> 16) & 0xffff);
10468 return;
802a0058 10469
9854d9ed
RK
10470 case 'U':
10471 /* Print `u' if this has an auto-increment or auto-decrement. */
10472 if (GET_CODE (x) == MEM
10473 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10474 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
76229ac8 10475 putc ('u', file);
9854d9ed 10476 return;
9878760c 10477
e0cd0770
JC
10478 case 'V':
10479 /* Print the trap code for this operand. */
10480 switch (GET_CODE (x))
10481 {
10482 case EQ:
10483 fputs ("eq", file); /* 4 */
10484 break;
10485 case NE:
10486 fputs ("ne", file); /* 24 */
10487 break;
10488 case LT:
10489 fputs ("lt", file); /* 16 */
10490 break;
10491 case LE:
10492 fputs ("le", file); /* 20 */
10493 break;
10494 case GT:
10495 fputs ("gt", file); /* 8 */
10496 break;
10497 case GE:
10498 fputs ("ge", file); /* 12 */
10499 break;
10500 case LTU:
10501 fputs ("llt", file); /* 2 */
10502 break;
10503 case LEU:
10504 fputs ("lle", file); /* 6 */
10505 break;
10506 case GTU:
10507 fputs ("lgt", file); /* 1 */
10508 break;
10509 case GEU:
10510 fputs ("lge", file); /* 5 */
10511 break;
10512 default:
37409796 10513 gcc_unreachable ();
e0cd0770
JC
10514 }
10515 break;
10516
9854d9ed
RK
10517 case 'w':
10518 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10519 normally. */
10520 if (INT_P (x))
f676971a 10521 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 10522 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
10523 else
10524 print_operand (file, x, 0);
9878760c
RK
10525 return;
10526
9854d9ed 10527 case 'W':
e2c953b6 10528 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
10529 val = (GET_CODE (x) == CONST_INT
10530 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10531
10532 if (val < 0)
10533 i = -1;
9854d9ed 10534 else
e2c953b6
DE
10535 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10536 if ((val <<= 1) < 0)
10537 break;
10538
10539#if HOST_BITS_PER_WIDE_INT == 32
10540 if (GET_CODE (x) == CONST_INT && i >= 0)
10541 i += 32; /* zero-extend high-part was all 0's */
10542 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10543 {
10544 val = CONST_DOUBLE_LOW (x);
10545
37409796
NS
10546 gcc_assert (val);
10547 if (val < 0)
e2c953b6
DE
10548 --i;
10549 else
10550 for ( ; i < 64; i++)
10551 if ((val <<= 1) < 0)
10552 break;
10553 }
10554#endif
10555
10556 fprintf (file, "%d", i + 1);
9854d9ed 10557 return;
9878760c 10558
9854d9ed
RK
10559 case 'X':
10560 if (GET_CODE (x) == MEM
4d588c14 10561 && legitimate_indexed_address_p (XEXP (x, 0), 0))
76229ac8 10562 putc ('x', file);
9854d9ed 10563 return;
9878760c 10564
9854d9ed
RK
10565 case 'Y':
10566 /* Like 'L', for third word of TImode */
10567 if (GET_CODE (x) == REG)
fb5c67a7 10568 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 10569 else if (GET_CODE (x) == MEM)
9878760c 10570 {
9854d9ed
RK
10571 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10572 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10573 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 10574 else
d7624dc0 10575 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 10576 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10577 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10578 reg_names[SMALL_DATA_REG]);
9878760c
RK
10579 }
10580 return;
f676971a 10581
9878760c 10582 case 'z':
b4ac57ab
RS
10583 /* X is a SYMBOL_REF. Write out the name preceded by a
10584 period and without any trailing data in brackets. Used for function
4d30c363
MM
10585 names. If we are configured for System V (or the embedded ABI) on
10586 the PowerPC, do not emit the period, since those systems do not use
10587 TOCs and the like. */
37409796 10588 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 10589
c4ad648e
AM
10590 /* Mark the decl as referenced so that cgraph will output the
10591 function. */
9bf6462a 10592 if (SYMBOL_REF_DECL (x))
c4ad648e 10593 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 10594
85b776df 10595 /* For macho, check to see if we need a stub. */
f9da97f0
AP
10596 if (TARGET_MACHO)
10597 {
10598 const char *name = XSTR (x, 0);
a031e781 10599#if TARGET_MACHO
3b48085e 10600 if (MACHOPIC_INDIRECT
11abc112
MM
10601 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10602 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
10603#endif
10604 assemble_name (file, name);
10605 }
85b776df 10606 else if (!DOT_SYMBOLS)
9739c90c 10607 assemble_name (file, XSTR (x, 0));
85b776df
AM
10608 else
10609 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
10610 return;
10611
9854d9ed
RK
10612 case 'Z':
10613 /* Like 'L', for last word of TImode. */
10614 if (GET_CODE (x) == REG)
fb5c67a7 10615 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
10616 else if (GET_CODE (x) == MEM)
10617 {
10618 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10619 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 10620 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 10621 else
d7624dc0 10622 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 10623 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10624 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10625 reg_names[SMALL_DATA_REG]);
9854d9ed 10626 }
5c23c401 10627 return;
0ac081f6 10628
a3170dc6 10629 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
10630 case 'y':
10631 {
10632 rtx tmp;
10633
37409796 10634 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
10635
10636 tmp = XEXP (x, 0);
10637
993f19a8 10638 if (TARGET_E500)
a3170dc6
AH
10639 {
10640 /* Handle [reg]. */
10641 if (GET_CODE (tmp) == REG)
10642 {
10643 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10644 break;
10645 }
10646 /* Handle [reg+UIMM]. */
10647 else if (GET_CODE (tmp) == PLUS &&
10648 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10649 {
10650 int x;
10651
37409796 10652 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
10653
10654 x = INTVAL (XEXP (tmp, 1));
10655 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10656 break;
10657 }
10658
10659 /* Fall through. Must be [reg+reg]. */
10660 }
850e8d3d
DN
10661 if (TARGET_ALTIVEC
10662 && GET_CODE (tmp) == AND
10663 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
10664 && INTVAL (XEXP (tmp, 1)) == -16)
10665 tmp = XEXP (tmp, 0);
0ac081f6 10666 if (GET_CODE (tmp) == REG)
c62f2db5 10667 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 10668 else
0ac081f6 10669 {
37409796
NS
10670 gcc_assert (GET_CODE (tmp) == PLUS
10671 && GET_CODE (XEXP (tmp, 1)) == REG);
bb8df8a6 10672
0ac081f6
AH
10673 if (REGNO (XEXP (tmp, 0)) == 0)
10674 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10675 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10676 else
10677 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10678 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10679 }
0ac081f6
AH
10680 break;
10681 }
f676971a 10682
9878760c
RK
10683 case 0:
10684 if (GET_CODE (x) == REG)
10685 fprintf (file, "%s", reg_names[REGNO (x)]);
10686 else if (GET_CODE (x) == MEM)
10687 {
10688 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10689 know the width from the mode. */
10690 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
10691 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10692 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10693 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
10694 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10695 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 10696 else
a54d04b7 10697 output_address (XEXP (x, 0));
9878760c
RK
10698 }
10699 else
a54d04b7 10700 output_addr_const (file, x);
a85d226b 10701 return;
9878760c 10702
c4501e62
JJ
10703 case '&':
10704 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10705 return;
10706
9878760c
RK
10707 default:
10708 output_operand_lossage ("invalid %%xn code");
10709 }
10710}

/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
           || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
        fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
                 reg_names[SMALL_DATA_REG]);
      else
        gcc_assert (!TARGET_TOC);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      if (REGNO (XEXP (x, 0)) == 0)
        fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
                 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
        fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
                 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
             INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
        {
          rtx contains_minus = XEXP (x, 1);
          rtx minus, symref;
          const char *name;

          /* Find the (minus (sym) (toc)) buried in X, and temporarily
             turn it into (sym) for output_addr_const.  */
          while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
            contains_minus = XEXP (contains_minus, 0);

          minus = XEXP (contains_minus, 0);
          symref = XEXP (minus, 0);
          XEXP (contains_minus, 0) = symref;
          if (TARGET_ELF)
            {
              char *newname;

              name = XSTR (symref, 0);
              newname = alloca (strlen (name) + sizeof ("@toc"));
              strcpy (newname, name);
              strcat (newname, "@toc");
              XSTR (symref, 0) = newname;
            }
          output_addr_const (file, XEXP (x, 1));
          if (TARGET_ELF)
            XSTR (symref, 0) = name;
          XEXP (contains_minus, 0) = minus;
        }
      else
        output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    gcc_unreachable ();
}
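
/* Illustrative examples, not part of the original file, assuming the default
   numeric register names: (reg 3) prints as "0(3)"; (plus (reg 3)
   (const_int 8)) prints as "8(3)"; and on ELF a (lo_sum (reg 3)
   (symbol_ref "x")) address prints as "x@l(3)", i.e. the low 16 bits of the
   symbol's address added to r3.  */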

/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
         in the .fixup section.  */
      if (TARGET_RELOCATABLE
          && !in_toc_section ()
          && !in_text_section ()
          && !in_unlikely_text_section ()
          && !recurse
          && GET_CODE (x) != CONST_INT
          && GET_CODE (x) != CONST_DOUBLE
          && CONSTANT_P (x))
        {
          char buf[256];

          recurse = 1;
          ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
          fixuplabelno++;
          ASM_OUTPUT_LABEL (asm_out_file, buf);
          fprintf (asm_out_file, "\t.long\t(");
          output_addr_const (asm_out_file, x);
          fprintf (asm_out_file, ")@fixup\n");
          fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
          ASM_OUTPUT_ALIGN (asm_out_file, 2);
          fprintf (asm_out_file, "\t.long\t");
          assemble_name (asm_out_file, buf);
          fprintf (asm_out_file, "\n\t.previous\n");
          recurse = 0;
          return true;
        }
      /* Remove initial .'s to turn a -mcall-aixdesc function
         address into the address of the descriptor, not the function
         itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
               && XSTR (x, 0)[0] == '.'
               && DEFAULT_ABI == ABI_AIX)
        {
          const char *name = XSTR (x, 0);
          while (*name == '.')
            name++;

          fprintf (asm_out_file, "\t.long\t%s\n", name);
          return true;
        }
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}

#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX
      && DOT_SYMBOLS
      && TREE_CODE (decl) == FUNCTION_DECL)
    {
      static const char * const visibility_types[] = {
        NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
              (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif

enum rtx_code
rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
{
  /* Reversal of FP compares takes care -- an ordered compare
     becomes an unordered compare and vice versa.  */
  if (mode == CCFPmode
      && (!flag_finite_math_only
          || code == UNLT || code == UNLE || code == UNGT || code == UNGE
          || code == UNEQ || code == LTGT))
    return reverse_condition_maybe_unordered (code);
  else
    return reverse_condition (code);
}
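
/* Illustrative example, not part of the original file: when NaNs must be
   honored, rs6000_reverse_condition (CCFPmode, GE) returns UNLT rather than
   LT, so the reversed branch still fires for unordered operands; for the
   integer CC modes it simply falls back to reverse_condition.  */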
10904
39a10a29
GK
10905/* Generate a compare for CODE. Return a brand-new rtx that
10906 represents the result of the compare. */
a4f6c312 10907
39a10a29 10908static rtx
a2369ed3 10909rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
10910{
10911 enum machine_mode comp_mode;
10912 rtx compare_result;
10913
10914 if (rs6000_compare_fp_p)
10915 comp_mode = CCFPmode;
10916 else if (code == GTU || code == LTU
c4ad648e 10917 || code == GEU || code == LEU)
39a10a29 10918 comp_mode = CCUNSmode;
60934f9c
NS
10919 else if ((code == EQ || code == NE)
10920 && GET_CODE (rs6000_compare_op0) == SUBREG
10921 && GET_CODE (rs6000_compare_op1) == SUBREG
10922 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
10923 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
10924 /* These are unsigned values, perhaps there will be a later
10925 ordering compare that can be shared with this one.
10926 Unfortunately we cannot detect the signedness of the operands
10927 for non-subregs. */
10928 comp_mode = CCUNSmode;
39a10a29
GK
10929 else
10930 comp_mode = CCmode;
10931
10932 /* First, the compare. */
10933 compare_result = gen_reg_rtx (comp_mode);
a3170dc6
AH
10934
10935 /* SPE FP compare instructions on the GPRs. Yuck! */
993f19a8
AH
10936 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10937 && rs6000_compare_fp_p)
a3170dc6 10938 {
64022b5d 10939 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
10940 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
10941
10942 if (op_mode == VOIDmode)
10943 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 10944
423c1189
AH
10945 /* Note: The E500 comparison instructions set the GT bit (x +
10946 1), on success. This explains the mess. */
10947
a3170dc6
AH
10948 switch (code)
10949 {
423c1189 10950 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
10951 switch (op_mode)
10952 {
10953 case SFmode:
10954 cmp = flag_unsafe_math_optimizations
10955 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10956 rs6000_compare_op1)
10957 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10958 rs6000_compare_op1);
10959 break;
10960
10961 case DFmode:
10962 cmp = flag_unsafe_math_optimizations
10963 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
10964 rs6000_compare_op1)
10965 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
10966 rs6000_compare_op1);
10967 break;
10968
10969 default:
10970 gcc_unreachable ();
10971 }
a3170dc6 10972 break;
bb8df8a6 10973
423c1189 10974 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
10975 switch (op_mode)
10976 {
10977 case SFmode:
10978 cmp = flag_unsafe_math_optimizations
10979 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10980 rs6000_compare_op1)
10981 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10982 rs6000_compare_op1);
10983 break;
bb8df8a6 10984
37409796
NS
10985 case DFmode:
10986 cmp = flag_unsafe_math_optimizations
10987 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
10988 rs6000_compare_op1)
10989 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
10990 rs6000_compare_op1);
10991 break;
10992
10993 default:
10994 gcc_unreachable ();
10995 }
a3170dc6 10996 break;
bb8df8a6 10997
423c1189 10998 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
10999 switch (op_mode)
11000 {
11001 case SFmode:
11002 cmp = flag_unsafe_math_optimizations
11003 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11004 rs6000_compare_op1)
11005 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11006 rs6000_compare_op1);
11007 break;
bb8df8a6 11008
37409796
NS
11009 case DFmode:
11010 cmp = flag_unsafe_math_optimizations
11011 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11012 rs6000_compare_op1)
11013 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11014 rs6000_compare_op1);
11015 break;
11016
11017 default:
11018 gcc_unreachable ();
11019 }
a3170dc6 11020 break;
4d4cbc0e 11021 default:
37409796 11022 gcc_unreachable ();
a3170dc6
AH
11023 }
11024
11025 /* Synthesize LE and GE from LT/GT || EQ. */
11026 if (code == LE || code == GE || code == LEU || code == GEU)
11027 {
a3170dc6
AH
11028 emit_insn (cmp);
11029
11030 switch (code)
11031 {
11032 case LE: code = LT; break;
11033 case GE: code = GT; break;
11034 case LEU: code = LT; break;
11035 case GEU: code = GT; break;
37409796 11036 default: gcc_unreachable ();
a3170dc6
AH
11037 }
11038
a3170dc6
AH
11039 compare_result2 = gen_reg_rtx (CCFPmode);
11040
11041 /* Do the EQ. */
37409796
NS
11042 switch (op_mode)
11043 {
11044 case SFmode:
11045 cmp = flag_unsafe_math_optimizations
11046 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11047 rs6000_compare_op1)
11048 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11049 rs6000_compare_op1);
11050 break;
11051
11052 case DFmode:
11053 cmp = flag_unsafe_math_optimizations
11054 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11055 rs6000_compare_op1)
11056 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11057 rs6000_compare_op1);
11058 break;
11059
11060 default:
11061 gcc_unreachable ();
11062 }
a3170dc6
AH
11063 emit_insn (cmp);
11064
a3170dc6 11065 /* OR them together. */
64022b5d
AH
11066 or_result = gen_reg_rtx (CCFPmode);
11067 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11068 compare_result2);
a3170dc6
AH
11069 compare_result = or_result;
11070 code = EQ;
11071 }
11072 else
11073 {
a3170dc6 11074 if (code == NE || code == LTGT)
a3170dc6 11075 code = NE;
423c1189
AH
11076 else
11077 code = EQ;
a3170dc6
AH
11078 }
11079
11080 emit_insn (cmp);
11081 }
11082 else
de17c25f
DE
11083 {
11084 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11085 CLOBBERs to match cmptf_internal2 pattern. */
11086 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11087 && GET_MODE (rs6000_compare_op0) == TFmode
11088 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
11089 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11090 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11091 gen_rtvec (9,
11092 gen_rtx_SET (VOIDmode,
11093 compare_result,
11094 gen_rtx_COMPARE (comp_mode,
11095 rs6000_compare_op0,
11096 rs6000_compare_op1)),
11097 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11098 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11099 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11100 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11101 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11102 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11103 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11104 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11105 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11106 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11107 {
11108 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11109 comp_mode = CCEQmode;
11110 compare_result = gen_reg_rtx (CCEQmode);
11111 if (TARGET_64BIT)
11112 emit_insn (gen_stack_protect_testdi (compare_result,
11113 rs6000_compare_op0, op1));
11114 else
11115 emit_insn (gen_stack_protect_testsi (compare_result,
11116 rs6000_compare_op0, op1));
11117 }
de17c25f
DE
11118 else
11119 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11120 gen_rtx_COMPARE (comp_mode,
11121 rs6000_compare_op0,
11122 rs6000_compare_op1)));
11123 }
f676971a 11124
ca5adc63 11125 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11126 under flag_finite_math_only we don't bother. */
39a10a29 11127 if (rs6000_compare_fp_p
e7108df9
DE
11128 && !flag_finite_math_only
11129 && !(TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
39a10a29
GK
11130 && (code == LE || code == GE
11131 || code == UNEQ || code == LTGT
11132 || code == UNGT || code == UNLT))
11133 {
11134 enum rtx_code or1, or2;
11135 rtx or1_rtx, or2_rtx, compare2_rtx;
11136 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11137
39a10a29
GK
11138 switch (code)
11139 {
11140 case LE: or1 = LT; or2 = EQ; break;
11141 case GE: or1 = GT; or2 = EQ; break;
11142 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11143 case LTGT: or1 = LT; or2 = GT; break;
11144 case UNGT: or1 = UNORDERED; or2 = GT; break;
11145 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11146 default: gcc_unreachable ();
39a10a29
GK
11147 }
11148 validate_condition_mode (or1, comp_mode);
11149 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11150 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11151 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11152 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11153 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11154 const_true_rtx);
11155 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11156
11157 compare_result = or_result;
11158 code = EQ;
11159 }
11160
11161 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11162
1c563bed 11163 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11164}


/* Emit the RTL for an sCOND pattern.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      gcc_assert (cond_code == NE || cond_code == EQ);

      if (cond_code == NE)
        emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
                                     SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}

/* Emit a branch of kind CODE to location LOC.  */

void
rs6000_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx condition_rtx, loc_ref;

  condition_rtx = rs6000_generate_compare (code);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                                     loc_ref, pc_rtx)));
}

/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
         becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
        code = reverse_condition_maybe_unordered (code);
      else
        code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
         to the GT bit.  */
      switch (code)
        {
        case EQ:
          /* Opposite of GT.  */
          code = GT;
          break;

        case NE:
          code = UNLE;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
         we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      gcc_unreachable ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
         cpus as static prediction overrides processor dynamic
         prediction.  For older cpus we may as well always hint, but
         assume not taken for branches that are very close to 50% as a
         mispredicted taken branch is more expensive than a
         mispredicted not-taken branch.  */
      if (rs6000_always_hint
          || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
        {
          if (abs (prob) > REG_BR_PROB_BASE / 20
              && ((prob > 0) ^ need_longbranch))
            pred = "+";
          else
            pred = "-";
        }
    }

  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
         unconditional branch to go the distance.  */
      if (need_longbranch)
        s += sprintf (s, ",$+8\n\tb %s", label);
      else
        s += sprintf (s, ",%s", label);
    }

  return string;
}
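
/* Illustrative example, not part of the original file; the label ".L23" is
   hypothetical.  For an in-range EQ branch on cr0 that the probability note
   marks as strongly taken, the routine above builds a string along the lines
   of "{beq|beq+} 0,.L23" (old/new mnemonic spellings in braces, numeric CR
   names assumed); when the target is out of range it instead emits the
   reversed short branch over an unconditional "b .L23".  */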

/* Return the string to flip the GT bit on a CR.  */
char *
output_e500_flip_gt_bit (rtx dst, rtx src)
{
  static char string[64];
  int a, b;

  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
              && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));

  /* GT bit.  */
  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
  b = 4 * (REGNO (src) - CR0_REGNO) + 1;

  sprintf (string, "crnot %d,%d", a, b);
  return string;
}

/* Return insn index for the vector compare instruction for given CODE,
   and DEST_MODE, OP_MODE.  Return INSN_NOT_AVAILABLE if valid insn is
   not available.  */

static int
get_vec_cmp_insn (enum rtx_code code,
                  enum machine_mode dest_mode,
                  enum machine_mode op_mode)
{
  if (!TARGET_ALTIVEC)
    return INSN_NOT_AVAILABLE;

  switch (code)
    {
    case EQ:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPEQUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPEQUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPEQUW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPEQFP;
      break;
    case GE:
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPGEFP;
    case GT:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPGTSB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPGTSH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPGTSW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPGTFP;
      break;
    case GTU:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPGTUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPGTUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPGTUW;
      break;
    default:
      break;
    }
  return INSN_NOT_AVAILABLE;
}

/* Emit vector compare for operands OP0 and OP1 using code RCODE.
   DMODE is expected destination mode.  This is a recursive function.  */

static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
                            rtx op0, rtx op1,
                            enum machine_mode dmode)
{
  int vec_cmp_insn;
  rtx mask;
  enum machine_mode dest_mode;
  enum machine_mode op_mode = GET_MODE (op1);

  gcc_assert (TARGET_ALTIVEC);
  gcc_assert (GET_MODE (op0) == GET_MODE (op1));

  /* Floating point vector compare instructions use destination V4SImode.
     Move destination to appropriate mode later.  */
  if (dmode == V4SFmode)
    dest_mode = V4SImode;
  else
    dest_mode = dmode;

  mask = gen_reg_rtx (dest_mode);
  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);

  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
    {
      bool swap_operands = false;
      bool try_again = false;
      switch (rcode)
        {
        case LT:
          rcode = GT;
          swap_operands = true;
          try_again = true;
          break;
        case LTU:
          rcode = GTU;
          swap_operands = true;
          try_again = true;
          break;
        case NE:
          /* Treat A != B as ~(A==B).  */
          {
            enum insn_code nor_code;
            rtx eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
                                                     dest_mode);

            nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (nor_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));

            if (dmode != dest_mode)
              {
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        case GE:
        case GEU:
        case LE:
        case LEU:
          /* Try GT/GTU/LT/LTU OR EQ */
          {
            rtx c_rtx, eq_rtx;
            enum insn_code ior_code;
            enum rtx_code new_code;

            switch (rcode)
              {
              case GE:
                new_code = GT;
                break;

              case GEU:
                new_code = GTU;
                break;

              case LE:
                new_code = LT;
                break;

              case LEU:
                new_code = LTU;
                break;

              default:
                gcc_unreachable ();
              }

            c_rtx = rs6000_emit_vector_compare (new_code,
                                                op0, op1, dest_mode);
            eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
                                                 dest_mode);

            ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (ior_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
            if (dmode != dest_mode)
              {
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        default:
          gcc_unreachable ();
        }

      if (try_again)
        {
          vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
          /* You only get two chances.  */
          gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
        }

      if (swap_operands)
        {
          rtx tmp;
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
    }

  emit_insn (gen_rtx_SET (VOIDmode, mask,
                          gen_rtx_UNSPEC (dest_mode,
                                          gen_rtvec (2, op0, op1),
                                          vec_cmp_insn)));
  if (dmode != dest_mode)
    {
      rtx temp = gen_reg_rtx (dest_mode);
      convert_move (temp, mask, 0);
      return temp;
    }
  return mask;
}

/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
   if no valid insn exists for the given mode.  */

static int
get_vsel_insn (enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      return UNSPEC_VSEL4SI;
      break;
    case V4SFmode:
      return UNSPEC_VSEL4SF;
      break;
    case V8HImode:
      return UNSPEC_VSEL8HI;
      break;
    case V16QImode:
      return UNSPEC_VSEL16QI;
      break;
    default:
      return INSN_NOT_AVAILABLE;
      break;
    }
  return INSN_NOT_AVAILABLE;
}

/* Emit vector select insn where DEST is destination using
   operands OP1, OP2 and MASK.  */

static void
rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
{
  rtx t, temp;
  enum machine_mode dest_mode = GET_MODE (dest);
  int vsel_insn_index = get_vsel_insn (GET_MODE (dest));

  temp = gen_reg_rtx (dest_mode);

  /* For each vector element, select op1 when mask is 1 otherwise
     select op2.  */
  t = gen_rtx_SET (VOIDmode, temp,
                   gen_rtx_UNSPEC (dest_mode,
                                   gen_rtvec (3, op2, op1, mask),
                                   vsel_insn_index));
  emit_insn (t);
  emit_move_insn (dest, temp);
  return;
}

/* Emit vector conditional expression.
   DEST is destination.  OP1 and OP2 are two VEC_COND_EXPR operands.
   CC_OP0 and CC_OP1 are the two operands for the relation operation COND.  */

int
rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
                              rtx cond, rtx cc_op0, rtx cc_op1)
{
  enum machine_mode dest_mode = GET_MODE (dest);
  enum rtx_code rcode = GET_CODE (cond);
  rtx mask;

  if (!TARGET_ALTIVEC)
    return 0;

  /* Get the vector mask for the given relational operations.  */
  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);

  rs6000_emit_vector_select (dest, op1, op2, mask);

  return 1;
}
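
/* Illustrative example, not part of the original file: for a V4SImode
   VEC_COND_EXPR computing "a < b ? x : y", there is no AltiVec "less than"
   compare, so rs6000_emit_vector_compare flips the code to GT and swaps the
   operands (effectively vcmpgtsw of b against a), and rs6000_emit_vector_select
   then uses that mask with vsel to pick elements of x where the relation held
   and elements of y elsewhere.  */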
11665
50a0b056
GK
11666/* Emit a conditional move: move TRUE_COND to DEST if OP of the
11667 operands of the last comparison is nonzero/true, FALSE_COND if it
11668 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 11669
50a0b056 11670int
a2369ed3 11671rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
11672{
11673 enum rtx_code code = GET_CODE (op);
11674 rtx op0 = rs6000_compare_op0;
11675 rtx op1 = rs6000_compare_op1;
11676 REAL_VALUE_TYPE c1;
3148ad6d
DJ
11677 enum machine_mode compare_mode = GET_MODE (op0);
11678 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 11679 rtx temp;
add2402e 11680 bool is_against_zero;
50a0b056 11681
a3c9585f 11682 /* These modes should always match. */
a3170dc6
AH
11683 if (GET_MODE (op1) != compare_mode
11684 /* In the isel case however, we can use a compare immediate, so
11685 op1 may be a small constant. */
11686 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 11687 return 0;
178c3eff 11688 if (GET_MODE (true_cond) != result_mode)
3148ad6d 11689 return 0;
178c3eff 11690 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
11691 return 0;
11692
50a0b056 11693 /* First, work out if the hardware can do this at all, or
a3c9585f 11694 if it's too slow.... */
50a0b056 11695 if (! rs6000_compare_fp_p)
a3170dc6
AH
11696 {
11697 if (TARGET_ISEL)
11698 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
11699 return 0;
11700 }
fef98bf2 11701 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 11702 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 11703 return 0;
50a0b056 11704
add2402e 11705 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 11706
add2402e
GK
11707 /* A floating-point subtract might overflow, underflow, or produce
11708 an inexact result, thus changing the floating-point flags, so it
11709 can't be generated if we care about that. It's safe if one side
11710 of the construct is zero, since then no subtract will be
11711 generated. */
ebb109ad 11712 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
11713 && flag_trapping_math && ! is_against_zero)
11714 return 0;
11715
50a0b056
GK
11716 /* Eliminate half of the comparisons by switching operands, this
11717 makes the remaining code simpler. */
11718 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 11719 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
11720 {
11721 code = reverse_condition_maybe_unordered (code);
11722 temp = true_cond;
11723 true_cond = false_cond;
11724 false_cond = temp;
11725 }
11726
11727 /* UNEQ and LTGT take four instructions for a comparison with zero,
11728 it'll probably be faster to use a branch here too. */
bc9ec0e0 11729 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 11730 return 0;
f676971a 11731
50a0b056
GK
11732 if (GET_CODE (op1) == CONST_DOUBLE)
11733 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 11734
b6d08ca1 11735 /* We're going to try to implement comparisons by performing
50a0b056
GK
11736 a subtract, then comparing against zero. Unfortunately,
11737 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 11738 know that the operand is finite and the comparison
50a0b056 11739 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 11740 if (HONOR_INFINITIES (compare_mode)
50a0b056 11741 && code != GT && code != UNGE
045572c7 11742 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
11743 /* Constructs of the form (a OP b ? a : b) are safe. */
11744 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 11745 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
11746 && ! rtx_equal_p (op1, true_cond))))
11747 return 0;
add2402e 11748
50a0b056
GK
11749 /* At this point we know we can use fsel. */
11750
11751 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
11752 if (! is_against_zero)
11753 {
11754 temp = gen_reg_rtx (compare_mode);
11755 emit_insn (gen_rtx_SET (VOIDmode, temp,
11756 gen_rtx_MINUS (compare_mode, op0, op1)));
11757 op0 = temp;
11758 op1 = CONST0_RTX (compare_mode);
11759 }
50a0b056
GK
11760
11761 /* If we don't care about NaNs we can reduce some of the comparisons
11762 down to faster ones. */
bc9ec0e0 11763 if (! HONOR_NANS (compare_mode))
50a0b056
GK
11764 switch (code)
11765 {
11766 case GT:
11767 code = LE;
11768 temp = true_cond;
11769 true_cond = false_cond;
11770 false_cond = temp;
11771 break;
11772 case UNGE:
11773 code = GE;
11774 break;
11775 case UNEQ:
11776 code = EQ;
11777 break;
11778 default:
11779 break;
11780 }
11781
11782 /* Now, reduce everything down to a GE. */
11783 switch (code)
11784 {
11785 case GE:
11786 break;
11787
11788 case LE:
3148ad6d
DJ
11789 temp = gen_reg_rtx (compare_mode);
11790 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11791 op0 = temp;
11792 break;
11793
11794 case ORDERED:
3148ad6d
DJ
11795 temp = gen_reg_rtx (compare_mode);
11796 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
11797 op0 = temp;
11798 break;
11799
11800 case EQ:
3148ad6d 11801 temp = gen_reg_rtx (compare_mode);
f676971a 11802 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
11803 gen_rtx_NEG (compare_mode,
11804 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
11805 op0 = temp;
11806 break;
11807
11808 case UNGE:
bc9ec0e0 11809 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 11810 temp = gen_reg_rtx (result_mode);
50a0b056 11811 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 11812 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11813 gen_rtx_GE (VOIDmode,
11814 op0, op1),
11815 true_cond, false_cond)));
bc9ec0e0
GK
11816 false_cond = true_cond;
11817 true_cond = temp;
50a0b056 11818
3148ad6d
DJ
11819 temp = gen_reg_rtx (compare_mode);
11820 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11821 op0 = temp;
11822 break;
11823
11824 case GT:
bc9ec0e0 11825 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 11826 temp = gen_reg_rtx (result_mode);
50a0b056 11827 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 11828 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11829 gen_rtx_GE (VOIDmode,
11830 op0, op1),
11831 true_cond, false_cond)));
bc9ec0e0
GK
11832 true_cond = false_cond;
11833 false_cond = temp;
50a0b056 11834
3148ad6d
DJ
11835 temp = gen_reg_rtx (compare_mode);
11836 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
11837 op0 = temp;
11838 break;
11839
11840 default:
37409796 11841 gcc_unreachable ();
50a0b056
GK
11842 }
11843
11844 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 11845 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
11846 gen_rtx_GE (VOIDmode,
11847 op0, op1),
11848 true_cond, false_cond)));
11849 return 1;
11850}
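/* Illustrative sketch, not part of the original source: once the code
   above has reduced the comparison to GE against zero, a conditional
   move such as

       dest = (a >= b) ? x : y;

   comes out as a subtract feeding a single fsel, roughly

       fsub  fT, fA, fB         # fT = a - b
       fsel  fD, fT, fX, fY     # fD = (fT >= 0.0) ? fX : fY

   Register names here are placeholders, not the operands the compiler
   actually allocates.  */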
11851
a3170dc6
AH
11852/* Same as above, but for ints (isel). */
11853
11854static int
a2369ed3 11855rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
11856{
11857 rtx condition_rtx, cr;
11858
 11859 /* All isel implementations thus far are 32-bit. */
11860 if (GET_MODE (rs6000_compare_op0) != SImode)
11861 return 0;
11862
11863 /* We still have to do the compare, because isel doesn't do a
11864 compare, it just looks at the CRx bits set by a previous compare
11865 instruction. */
11866 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11867 cr = XEXP (condition_rtx, 0);
11868
11869 if (GET_MODE (cr) == CCmode)
11870 emit_insn (gen_isel_signed (dest, condition_rtx,
11871 true_cond, false_cond, cr));
11872 else
11873 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11874 true_cond, false_cond, cr));
11875
11876 return 1;
11877}
11878
11879const char *
a2369ed3 11880output_isel (rtx *operands)
a3170dc6
AH
11881{
11882 enum rtx_code code;
11883
11884 code = GET_CODE (operands[1]);
11885 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11886 {
11887 PUT_CODE (operands[1], reverse_condition (code));
11888 return "isel %0,%3,%2,%j1";
11889 }
11890 else
11891 return "isel %0,%2,%3,%j1";
11892}
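/* Illustrative note, not part of the original source: isel selects on a
   single CR bit,

       isel  rD, rA, rB, BC     # rD = CR[BC] ? rA : rB

   so only conditions that map directly onto one bit (LT, GT, EQ and
   their unsigned forms) can be printed as-is.  For GE, GEU, LE, LEU and
   NE, output_isel above reverses the condition and swaps the two value
   operands, which is why the first template exchanges %2 and %3.  */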
11893
50a0b056 11894void
a2369ed3 11895rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
11896{
11897 enum machine_mode mode = GET_MODE (op0);
5dc8d536 11898 enum rtx_code c;
50a0b056 11899 rtx target;
5dc8d536
AH
11900
11901 if (code == SMAX || code == SMIN)
11902 c = GE;
11903 else
11904 c = GEU;
11905
50a0b056 11906 if (code == SMAX || code == UMAX)
f676971a 11907 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
11908 op0, op1, mode, 0);
11909 else
f676971a 11910 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 11911 op1, op0, mode, 0);
37409796 11912 gcc_assert (target);
50a0b056
GK
11913 if (target != dest)
11914 emit_move_insn (dest, target);
11915}
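/* Illustrative note, not part of the original source: with c set to GE
   (or GEU), the calls above expand, for example,

       smax (a, b)  ->  dest = (a >= b) ? a : b
       smin (a, b)  ->  dest = (a >= b) ? b : a

   so the min/max patterns simply reuse whatever conditional-move
   support (fsel or isel) the target provides.  */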
46c07df8 11916
915167f5
GK
11917/* Emit instructions to perform a load-reserved/store-conditional operation.
11918 The operation performed is an atomic
11919 (set M (CODE:MODE M OP))
11920 If not NULL, BEFORE is atomically set to M before the operation, and
11921 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 11922 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
11923 Either OP or M may be wrapped in a NOT operation. */
11924
11925void
11926rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
11927 rtx m, rtx op, rtx before_param, rtx after_param,
11928 bool sync_p)
11929{
11930 enum machine_mode used_mode;
11931 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
11932 rtx used_m;
11933 rtvec vec;
11934 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
11935 rtx shift = NULL_RTX;
bb8df8a6 11936
915167f5
GK
11937 if (sync_p)
11938 emit_insn (gen_memory_barrier ());
bb8df8a6 11939
915167f5
GK
11940 if (GET_CODE (m) == NOT)
11941 used_m = XEXP (m, 0);
11942 else
11943 used_m = m;
11944
11945 /* If this is smaller than SImode, we'll have to use SImode with
11946 adjustments. */
11947 if (mode == QImode || mode == HImode)
11948 {
11949 rtx newop, oldop;
11950
11951 if (MEM_ALIGN (used_m) >= 32)
11952 {
11953 int ishift = 0;
11954 if (BYTES_BIG_ENDIAN)
11955 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 11956
915167f5
GK
11957 shift = GEN_INT (ishift);
11958 }
11959 else
11960 {
11961 rtx addrSI, aligned_addr;
a9c9d3fa 11962 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 11963
915167f5
GK
11964 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
11965 XEXP (used_m, 0)));
11966 shift = gen_reg_rtx (SImode);
11967
11968 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
11969 GEN_INT (shift_mask)));
11970 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
11971
11972 aligned_addr = expand_binop (Pmode, and_optab,
11973 XEXP (used_m, 0),
11974 GEN_INT (-4), NULL_RTX,
11975 1, OPTAB_LIB_WIDEN);
11976 used_m = change_address (used_m, SImode, aligned_addr);
11977 set_mem_align (used_m, 32);
11978 /* It's safe to keep the old alias set of USED_M, because
11979 the operation is atomic and only affects the original
11980 USED_M. */
11981 if (GET_CODE (m) == NOT)
11982 m = gen_rtx_NOT (SImode, used_m);
11983 else
11984 m = used_m;
11985 }
11986
11987 if (GET_CODE (op) == NOT)
11988 {
11989 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
11990 oldop = gen_rtx_NOT (SImode, oldop);
11991 }
11992 else
11993 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 11994
915167f5
GK
11995 switch (code)
11996 {
11997 case IOR:
11998 case XOR:
11999 newop = expand_binop (SImode, and_optab,
12000 oldop, GEN_INT (imask), NULL_RTX,
12001 1, OPTAB_LIB_WIDEN);
12002 emit_insn (gen_ashlsi3 (newop, newop, shift));
12003 break;
12004
12005 case AND:
12006 newop = expand_binop (SImode, ior_optab,
12007 oldop, GEN_INT (~imask), NULL_RTX,
12008 1, OPTAB_LIB_WIDEN);
a9c9d3fa 12009 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
12010 break;
12011
12012 case PLUS:
9f0076e5 12013 case MINUS:
915167f5
GK
12014 {
12015 rtx mask;
bb8df8a6 12016
915167f5
GK
12017 newop = expand_binop (SImode, and_optab,
12018 oldop, GEN_INT (imask), NULL_RTX,
12019 1, OPTAB_LIB_WIDEN);
12020 emit_insn (gen_ashlsi3 (newop, newop, shift));
12021
12022 mask = gen_reg_rtx (SImode);
12023 emit_move_insn (mask, GEN_INT (imask));
12024 emit_insn (gen_ashlsi3 (mask, mask, shift));
12025
9f0076e5
DE
12026 if (code == PLUS)
12027 newop = gen_rtx_PLUS (SImode, m, newop);
12028 else
12029 newop = gen_rtx_MINUS (SImode, m, newop);
12030 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
12031 newop = gen_rtx_IOR (SImode, newop,
12032 gen_rtx_AND (SImode,
12033 gen_rtx_NOT (SImode, mask),
12034 m));
12035 break;
12036 }
12037
12038 default:
12039 gcc_unreachable ();
12040 }
12041
a9c9d3fa
GK
12042 if (GET_CODE (m) == NOT)
12043 {
12044 rtx mask, xorm;
12045
12046 mask = gen_reg_rtx (SImode);
12047 emit_move_insn (mask, GEN_INT (imask));
12048 emit_insn (gen_ashlsi3 (mask, mask, shift));
12049
12050 xorm = gen_rtx_XOR (SImode, used_m, mask);
12051 /* Depending on the value of 'op', the XOR or the operation might
 12052 be simplified away. */
12053 newop = simplify_gen_binary (code, SImode, xorm, newop);
12054 }
915167f5
GK
12055 op = newop;
12056 used_mode = SImode;
12057 before = gen_reg_rtx (used_mode);
12058 after = gen_reg_rtx (used_mode);
12059 }
12060 else
12061 {
12062 used_mode = mode;
12063 before = before_param;
12064 after = after_param;
12065
12066 if (before == NULL_RTX)
12067 before = gen_reg_rtx (used_mode);
12068 if (after == NULL_RTX)
12069 after = gen_reg_rtx (used_mode);
12070 }
bb8df8a6 12071
9f0076e5
DE
12072 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12073 && used_mode != mode)
915167f5
GK
12074 the_op = op; /* Computed above. */
12075 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12076 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12077 else
12078 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12079
12080 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12081 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12082 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12083 gen_rtx_UNSPEC (used_mode,
12084 gen_rtvec (1, the_op),
12085 UNSPEC_SYNC_OP));
915167f5
GK
12086 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12087
9f0076e5 12088 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12089 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12090 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12091 else
12092 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12093 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12094
12095 /* Shift and mask the return values properly. */
12096 if (used_mode != mode && before_param)
12097 {
12098 emit_insn (gen_lshrsi3 (before, before, shift));
12099 convert_move (before_param, before, 1);
12100 }
12101
12102 if (used_mode != mode && after_param)
12103 {
12104 emit_insn (gen_lshrsi3 (after, after, shift));
12105 convert_move (after_param, after, 1);
12106 }
12107
12108 /* The previous sequence will end with a branch that's dependent on
12109 the conditional store, so placing an isync will ensure that no
 12110 other instructions (especially load or store instructions)
12111 can start before the atomic operation completes. */
12112 if (sync_p)
12113 emit_insn (gen_isync ());
12114}
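/* Illustrative sketch, not part of the original source: for a QImode or
   HImode operand the function above rewrites the operation on the
   aligned SImode word that contains it.  In rough C terms:

       word  = *(int *) ((uintptr_t) p & -4);   -- the aligned containing word
       shift = bit position of the subword within that word;
       mask  = (0xff or 0xffff) << shift;
       word  = (op (word) & mask) | (word & ~mask);

   so the reservation covers the whole word while only the selected byte
   or halfword is modified.  The variable names are placeholders.  */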
12115
b52110d4
DE
12116/* A subroutine of the atomic operation splitters. Jump to LABEL if
12117 COND is true. Mark the jump as unlikely to be taken. */
12118
12119static void
12120emit_unlikely_jump (rtx cond, rtx label)
12121{
12122 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12123 rtx x;
12124
12125 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12126 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12127 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12128}
12129
12130/* A subroutine of the atomic operation splitters. Emit a load-locked
12131 instruction in MODE. */
12132
12133static void
12134emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12135{
12136 rtx (*fn) (rtx, rtx) = NULL;
12137 if (mode == SImode)
12138 fn = gen_load_locked_si;
12139 else if (mode == DImode)
12140 fn = gen_load_locked_di;
12141 emit_insn (fn (reg, mem));
12142}
12143
12144/* A subroutine of the atomic operation splitters. Emit a store-conditional
12145 instruction in MODE. */
12146
12147static void
12148emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12149{
12150 rtx (*fn) (rtx, rtx, rtx) = NULL;
12151 if (mode == SImode)
12152 fn = gen_store_conditional_si;
12153 else if (mode == DImode)
12154 fn = gen_store_conditional_di;
12155
9f0076e5 12156 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12157 if (PPC405_ERRATUM77)
12158 emit_insn (gen_memory_barrier ());
12159
12160 emit_insn (fn (res, mem, val));
12161}
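/* Illustrative note, not part of the original source: load_locked_si and
   store_conditional_si correspond to the lwarx and stwcx. instructions,
   and the DImode variants to ldarx and stdcx.  The PPC405_ERRATUM77
   check above simply places a sync ahead of the store-conditional on
   affected parts, as the comment says.  */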
12162
9f0076e5 12163/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 12164 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12165 operand of the binary operator. BEFORE and AFTER are optional locations to
 12166 operand of the binary operator. BEFORE and AFTER are optional locations to
 12166 return the value of MEM either before or after the operation. SCRATCH is
12167 a scratch register. */
12168
12169void
12170rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12171 rtx before, rtx after, rtx scratch)
12172{
12173 enum machine_mode mode = GET_MODE (mem);
12174 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12175
12176 emit_insn (gen_memory_barrier ());
12177
12178 label = gen_label_rtx ();
12179 emit_label (label);
12180 label = gen_rtx_LABEL_REF (VOIDmode, label);
12181
12182 if (before == NULL_RTX)
12183 before = scratch;
12184 emit_load_locked (mode, before, mem);
12185
12186 if (code == NOT)
12187 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12188 else if (code == AND)
12189 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12190 else
12191 x = gen_rtx_fmt_ee (code, mode, before, val);
12192
12193 if (after != NULL_RTX)
12194 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12195 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12196
12197 emit_store_conditional (mode, cond, mem, scratch);
12198
12199 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12200 emit_unlikely_jump (x, label);
12201
12202 emit_insn (gen_isync ());
12203}
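/* Illustrative sketch, not part of the original source: for an SImode
   PLUS the splitter above produces a sequence along the lines of

       sync                     # barrier before the operation
   1:  lwarx   rT, 0, rMEM      # load and reserve
       add     rS, rT, rVAL     # apply the operation
       stwcx.  rS, 0, rMEM      # store iff the reservation still holds
       bne-    1b               # lost the reservation: retry (unlikely)
       isync                    # order everything after the loop

   with ldarx/stdcx. used for DImode.  Register names are placeholders.  */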
12204
b52110d4
DE
12205/* Expand an atomic compare and swap operation. MEM is the memory on which
12206 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12207 value to be stored. SCRATCH is a scratch GPR. */
12208
12209void
12210rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12211 rtx scratch)
12212{
12213 enum machine_mode mode = GET_MODE (mem);
12214 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12215
12216 emit_insn (gen_memory_barrier ());
12217
12218 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12219 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12220 emit_label (XEXP (label1, 0));
12221
12222 emit_load_locked (mode, retval, mem);
12223
12224 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12225 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12226
12227 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12228 emit_unlikely_jump (x, label2);
12229
12230 emit_move_insn (scratch, newval);
12231 emit_store_conditional (mode, cond, mem, scratch);
12232
12233 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12234 emit_unlikely_jump (x, label1);
12235
12236 emit_insn (gen_isync ());
12237 emit_label (XEXP (label2, 0));
12238}
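/* Illustrative sketch, not part of the original source: the
   compare-and-swap splitter above corresponds roughly to

       sync
   1:  lwarx   rRET, 0, rMEM
       cmpw    rRET, rOLD
       bne-    2f               # current value differs: give up
       mr      rS, rNEW
       stwcx.  rS, 0, rMEM
       bne-    1b               # lost the reservation: retry
       isync
   2:

   so RETVAL always holds the value that was actually observed in MEM.
   Register names are placeholders.  */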
12239
12240/* Expand an atomic test and set operation. MEM is the memory on which
12241 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12242
12243void
12244rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12245{
12246 enum machine_mode mode = GET_MODE (mem);
12247 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12248
12249 emit_insn (gen_memory_barrier ());
12250
12251 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12252 emit_label (XEXP (label, 0));
12253
12254 emit_load_locked (mode, retval, mem);
12255 emit_move_insn (scratch, val);
12256 emit_store_conditional (mode, cond, mem, scratch);
12257
12258 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12259 emit_unlikely_jump (x, label);
12260
12261 emit_insn (gen_isync ());
12262}
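/* Illustrative note, not part of the original source: the lock
   test-and-set splitter is the unconditional form of the loop above;
   it load-reserves the old value into RETVAL, tries to store VAL with
   the store-conditional, retries until that succeeds, and finishes
   with an isync.  */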
12263
12264 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
12265 multi-register moves. It will emit at most one instruction for
12266 each register that is accessed; that is, it won't emit li/lis pairs
12267 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12268 register. */
46c07df8 12269
46c07df8 12270void
a9baceb1 12271rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 12272{
a9baceb1
GK
12273 /* The register number of the first register being moved. */
12274 int reg;
12275 /* The mode that is to be moved. */
12276 enum machine_mode mode;
12277 /* The mode that the move is being done in, and its size. */
12278 enum machine_mode reg_mode;
12279 int reg_mode_size;
12280 /* The number of registers that will be moved. */
12281 int nregs;
12282
12283 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12284 mode = GET_MODE (dst);
c8b622ff 12285 nregs = hard_regno_nregs[reg][mode];
a9baceb1
GK
12286 if (FP_REGNO_P (reg))
12287 reg_mode = DFmode;
12288 else if (ALTIVEC_REGNO_P (reg))
12289 reg_mode = V16QImode;
12290 else
12291 reg_mode = word_mode;
12292 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 12293
37409796 12294 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 12295
a9baceb1
GK
12296 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12297 {
12298 /* Move register range backwards, if we might have destructive
12299 overlap. */
12300 int i;
12301 for (i = nregs - 1; i >= 0; i--)
f676971a 12302 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
12303 simplify_gen_subreg (reg_mode, dst, mode,
12304 i * reg_mode_size),
12305 simplify_gen_subreg (reg_mode, src, mode,
12306 i * reg_mode_size)));
12307 }
46c07df8
HP
12308 else
12309 {
a9baceb1
GK
12310 int i;
12311 int j = -1;
12312 bool used_update = false;
46c07df8 12313
c1e55850 12314 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
12315 {
12316 rtx breg;
3a1f863f 12317
a9baceb1
GK
12318 if (GET_CODE (XEXP (src, 0)) == PRE_INC
12319 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
12320 {
12321 rtx delta_rtx;
a9baceb1 12322 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
12323 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
12324 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
12325 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
12326 emit_insn (TARGET_32BIT
12327 ? gen_addsi3 (breg, breg, delta_rtx)
12328 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12329 src = replace_equiv_address (src, breg);
3a1f863f 12330 }
c1e55850
GK
12331 else if (! offsettable_memref_p (src))
12332 {
13e2e16e 12333 rtx basereg;
c1e55850
GK
12334 basereg = gen_rtx_REG (Pmode, reg);
12335 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 12336 src = replace_equiv_address (src, basereg);
c1e55850 12337 }
3a1f863f 12338
0423421f
AM
12339 breg = XEXP (src, 0);
12340 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
12341 breg = XEXP (breg, 0);
12342
12343 /* If the base register we are using to address memory is
12344 also a destination reg, then change that register last. */
12345 if (REG_P (breg)
12346 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
12347 && REGNO (breg) < REGNO (dst) + nregs)
12348 j = REGNO (breg) - REGNO (dst);
c4ad648e 12349 }
46c07df8 12350
a9baceb1 12351 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
12352 {
12353 rtx breg;
12354
a9baceb1
GK
12355 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
12356 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
12357 {
12358 rtx delta_rtx;
a9baceb1 12359 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
12360 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
12361 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
12362 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
12363
12364 /* We have to update the breg before doing the store.
12365 Use store with update, if available. */
12366
12367 if (TARGET_UPDATE)
12368 {
a9baceb1 12369 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
12370 emit_insn (TARGET_32BIT
12371 ? (TARGET_POWERPC64
12372 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
12373 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
12374 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 12375 used_update = true;
3a1f863f
DE
12376 }
12377 else
a9baceb1
GK
12378 emit_insn (TARGET_32BIT
12379 ? gen_addsi3 (breg, breg, delta_rtx)
12380 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 12381 dst = replace_equiv_address (dst, breg);
3a1f863f 12382 }
37409796
NS
12383 else
12384 gcc_assert (offsettable_memref_p (dst));
3a1f863f
DE
12385 }
12386
46c07df8 12387 for (i = 0; i < nregs; i++)
f676971a 12388 {
3a1f863f
DE
12389 /* Calculate index to next subword. */
12390 ++j;
f676971a 12391 if (j == nregs)
3a1f863f 12392 j = 0;
46c07df8 12393
112cdef5 12394 /* If the compiler has already emitted a move of the first word
a9baceb1 12395 by store with update, there is no need to do anything. */
3a1f863f 12396 if (j == 0 && used_update)
a9baceb1 12397 continue;
f676971a 12398
a9baceb1
GK
12399 emit_insn (gen_rtx_SET (VOIDmode,
12400 simplify_gen_subreg (reg_mode, dst, mode,
12401 j * reg_mode_size),
12402 simplify_gen_subreg (reg_mode, src, mode,
12403 j * reg_mode_size)));
3a1f863f 12404 }
46c07df8
HP
12405 }
12406}
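/* Illustrative example, not part of the original source: on a 32-bit
   target a DImode register-to-register move such as

       (set (reg:DI 3) (reg:DI 5))

   is split by the code above into two word-mode moves,

       (set (reg:SI 3) (reg:SI 5))
       (set (reg:SI 4) (reg:SI 6))

   and the subwords are walked in whichever order avoids clobbering a
   destination or base register before it has been read.  */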
12407
12a4e8c5 12408\f
a4f6c312
SS
12409/* This page contains routines that are used to determine what the
12410 function prologue and epilogue code will do and write them out. */
9878760c 12411
a4f6c312
SS
12412/* Return the first fixed-point register that is required to be
12413 saved. 32 if none. */
9878760c
RK
12414
12415int
863d938c 12416first_reg_to_save (void)
9878760c
RK
12417{
12418 int first_reg;
12419
12420 /* Find lowest numbered live register. */
12421 for (first_reg = 13; first_reg <= 31; first_reg++)
f676971a 12422 if (regs_ever_live[first_reg]
a38d360d 12423 && (! call_used_regs[first_reg]
1db02437 12424 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 12425 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
12426 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
12427 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
12428 break;
12429
ee890fe2 12430#if TARGET_MACHO
93638d7a
AM
12431 if (flag_pic
12432 && current_function_uses_pic_offset_table
12433 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 12434 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
12435#endif
12436
9878760c
RK
12437 return first_reg;
12438}
12439
12440/* Similar, for FP regs. */
12441
12442int
863d938c 12443first_fp_reg_to_save (void)
9878760c
RK
12444{
12445 int first_reg;
12446
12447 /* Find lowest numbered live register. */
12448 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
12449 if (regs_ever_live[first_reg])
12450 break;
12451
12452 return first_reg;
12453}
00b960c7
AH
12454
12455/* Similar, for AltiVec regs. */
12456
12457static int
863d938c 12458first_altivec_reg_to_save (void)
00b960c7
AH
12459{
12460 int i;
12461
12462 /* Stack frame remains as is unless we are in AltiVec ABI. */
12463 if (! TARGET_ALTIVEC_ABI)
12464 return LAST_ALTIVEC_REGNO + 1;
12465
12466 /* Find lowest numbered live register. */
12467 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
12468 if (regs_ever_live[i])
12469 break;
12470
12471 return i;
12472}
12473
12474/* Return a 32-bit mask of the AltiVec registers we need to set in
12475 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
12476 the 32-bit word is 0. */
12477
12478static unsigned int
863d938c 12479compute_vrsave_mask (void)
00b960c7
AH
12480{
12481 unsigned int i, mask = 0;
12482
12483 /* First, find out if we use _any_ altivec registers. */
12484 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12485 if (regs_ever_live[i])
12486 mask |= ALTIVEC_REG_BIT (i);
12487
12488 if (mask == 0)
12489 return mask;
12490
00b960c7
AH
12491 /* Next, remove the argument registers from the set. These must
12492 be in the VRSAVE mask set by the caller, so we don't need to add
12493 them in again. More importantly, the mask we compute here is
12494 used to generate CLOBBERs in the set_vrsave insn, and we do not
12495 wish the argument registers to die. */
a6cf80f2 12496 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
12497 mask &= ~ALTIVEC_REG_BIT (i);
12498
12499 /* Similarly, remove the return value from the set. */
12500 {
12501 bool yes = false;
12502 diddle_return_value (is_altivec_return_reg, &yes);
12503 if (yes)
12504 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
12505 }
12506
12507 return mask;
12508}
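/* Illustrative example, not part of the original source: the mask is
   laid out MSB-first, so V0 corresponds to bit 0x80000000 and V31 to
   bit 0x00000001.  If, say, only V20 is live (and it is neither an
   argument register nor the return register), the function returns
   0x80000000 >> 20, i.e. 0x00000800.  */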
12509
d62294f5 12510/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
12511 size of prologues/epilogues by calling our own save/restore-the-world
12512 routines. */
d62294f5
FJ
12513
12514static void
f57fe068
AM
12515compute_save_world_info (rs6000_stack_t *info_ptr)
12516{
12517 info_ptr->world_save_p = 1;
12518 info_ptr->world_save_p
12519 = (WORLD_SAVE_P (info_ptr)
12520 && DEFAULT_ABI == ABI_DARWIN
12521 && ! (current_function_calls_setjmp && flag_exceptions)
12522 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
12523 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
12524 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
12525 && info_ptr->cr_save_p);
f676971a 12526
d62294f5
FJ
12527 /* This will not work in conjunction with sibcalls. Make sure there
12528 are none. (This check is expensive, but seldom executed.) */
f57fe068 12529 if (WORLD_SAVE_P (info_ptr))
f676971a 12530 {
d62294f5
FJ
12531 rtx insn;
12532 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
12533 if ( GET_CODE (insn) == CALL_INSN
12534 && SIBLING_CALL_P (insn))
12535 {
12536 info_ptr->world_save_p = 0;
12537 break;
12538 }
d62294f5 12539 }
f676971a 12540
f57fe068 12541 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
12542 {
12543 /* Even if we're not touching VRsave, make sure there's room on the
12544 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 12545 will attempt to save it. */
d62294f5
FJ
12546 info_ptr->vrsave_size = 4;
12547
12548 /* "Save" the VRsave register too if we're saving the world. */
12549 if (info_ptr->vrsave_mask == 0)
c4ad648e 12550 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
12551
12552 /* Because the Darwin register save/restore routines only handle
c4ad648e 12553 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 12554 check. */
37409796
NS
12555 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
12556 && (info_ptr->first_altivec_reg_save
12557 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 12558 }
f676971a 12559 return;
d62294f5
FJ
12560}
12561
12562
00b960c7 12563static void
a2369ed3 12564is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
12565{
12566 bool *yes = (bool *) xyes;
12567 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
12568 *yes = true;
12569}
12570
4697a36c
MM
12571\f
12572/* Calculate the stack information for the current function. This is
12573 complicated by having two separate calling sequences, the AIX calling
12574 sequence and the V.4 calling sequence.
12575
592696dd 12576 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 12577 32-bit 64-bit
4697a36c 12578 SP----> +---------------------------------------+
a260abc9 12579 | back chain to caller | 0 0
4697a36c 12580 +---------------------------------------+
a260abc9 12581 | saved CR | 4 8 (8-11)
4697a36c 12582 +---------------------------------------+
a260abc9 12583 | saved LR | 8 16
4697a36c 12584 +---------------------------------------+
a260abc9 12585 | reserved for compilers | 12 24
4697a36c 12586 +---------------------------------------+
a260abc9 12587 | reserved for binders | 16 32
4697a36c 12588 +---------------------------------------+
a260abc9 12589 | saved TOC pointer | 20 40
4697a36c 12590 +---------------------------------------+
a260abc9 12591 | Parameter save area (P) | 24 48
4697a36c 12592 +---------------------------------------+
a260abc9 12593 | Alloca space (A) | 24+P etc.
802a0058 12594 +---------------------------------------+
a7df97e6 12595 | Local variable space (L) | 24+P+A
4697a36c 12596 +---------------------------------------+
a7df97e6 12597 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 12598 +---------------------------------------+
00b960c7
AH
12599 | Save area for AltiVec registers (W) | 24+P+A+L+X
12600 +---------------------------------------+
12601 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12602 +---------------------------------------+
12603 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 12604 +---------------------------------------+
00b960c7
AH
 12605 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
 12606 +---------------------------------------+
 12607 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
12608 +---------------------------------------+
12609 old SP->| back chain to caller's caller |
12610 +---------------------------------------+
12611
5376a30c
KR
12612 The required alignment for AIX configurations is two words (i.e., 8
12613 or 16 bytes).
12614
12615
4697a36c
MM
12616 V.4 stack frames look like:
12617
12618 SP----> +---------------------------------------+
12619 | back chain to caller | 0
12620 +---------------------------------------+
5eb387b8 12621 | caller's saved LR | 4
4697a36c
MM
12622 +---------------------------------------+
12623 | Parameter save area (P) | 8
12624 +---------------------------------------+
a7df97e6 12625 | Alloca space (A) | 8+P
f676971a 12626 +---------------------------------------+
a7df97e6 12627 | Varargs save area (V) | 8+P+A
f676971a 12628 +---------------------------------------+
a7df97e6 12629 | Local variable space (L) | 8+P+A+V
f676971a 12630 +---------------------------------------+
a7df97e6 12631 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 12632 +---------------------------------------+
00b960c7
AH
12633 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12634 +---------------------------------------+
12635 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12636 +---------------------------------------+
12637 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12638 +---------------------------------------+
c4ad648e
AM
12639 | SPE: area for 64-bit GP registers |
12640 +---------------------------------------+
12641 | SPE alignment padding |
12642 +---------------------------------------+
00b960c7 12643 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 12644 +---------------------------------------+
00b960c7 12645 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 12646 +---------------------------------------+
00b960c7 12647 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
12648 +---------------------------------------+
12649 old SP->| back chain to caller's caller |
12650 +---------------------------------------+
b6c9286a 12651
5376a30c
KR
12652 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12653 given. (But note below and in sysv4.h that we require only 8 and
12654 may round up the size of our stack frame anyways. The historical
12655 reason is early versions of powerpc-linux which didn't properly
12656 align the stack at program startup. A happy side-effect is that
12657 -mno-eabi libraries can be used with -meabi programs.)
12658
50d440bc 12659 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
12660 the stack alignment requirements may differ. If -mno-eabi is not
12661 given, the required stack alignment is 8 bytes; if -mno-eabi is
12662 given, the required alignment is 16 bytes. (But see V.4 comment
12663 above.) */
4697a36c 12664
61b2fbe7
MM
12665#ifndef ABI_STACK_BOUNDARY
12666#define ABI_STACK_BOUNDARY STACK_BOUNDARY
12667#endif
12668
d1d0c603 12669static rs6000_stack_t *
863d938c 12670rs6000_stack_info (void)
4697a36c
MM
12671{
12672 static rs6000_stack_t info, zero_info;
12673 rs6000_stack_t *info_ptr = &info;
327e5343 12674 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 12675 int ehrd_size;
64045029 12676 int save_align;
44688022 12677 HOST_WIDE_INT non_fixed_size;
4697a36c 12678
a4f6c312 12679 /* Zero all fields portably. */
4697a36c
MM
12680 info = zero_info;
12681
c19de7aa
AH
12682 if (TARGET_SPE)
12683 {
12684 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 12685 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
12686 cfun->machine->insn_chain_scanned_p
12687 = spe_func_has_64bit_regs_p () + 1;
12688 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
12689 }
12690
a4f6c312 12691 /* Select which calling sequence. */
178274da 12692 info_ptr->abi = DEFAULT_ABI;
9878760c 12693
a4f6c312 12694 /* Calculate which registers need to be saved & save area size. */
4697a36c 12695 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 12696 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 12697 even if it currently looks like we won't. */
2bfcf297 12698 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
12699 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
12700 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
12701 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
12702 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
12703 else
12704 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 12705
a3170dc6
AH
12706 /* For the SPE, we have an additional upper 32-bits on each GPR.
12707 Ideally we should save the entire 64-bits only when the upper
12708 half is used in SIMD instructions. Since we only record
12709 registers live (not the size they are used in), this proves
12710 difficult because we'd have to traverse the instruction chain at
12711 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
 12712 so we opt to always save the GPRs in 64-bits if even one register
12713 gets used in 64-bits. Otherwise, all the registers in the frame
12714 get saved in 32-bits.
a3170dc6 12715
c19de7aa 12716 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 12717 traditional GP save area will be empty. */
c19de7aa 12718 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
12719 info_ptr->gp_size = 0;
12720
4697a36c
MM
12721 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
12722 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
12723
00b960c7
AH
12724 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
12725 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
12726 - info_ptr->first_altivec_reg_save);
12727
592696dd 12728 /* Does this function call anything? */
71f123ca
FS
12729 info_ptr->calls_p = (! current_function_is_leaf
12730 || cfun->machine->ra_needs_full_frame);
b6c9286a 12731
a4f6c312 12732 /* Determine if we need to save the link register. */
71f123ca 12733 if (rs6000_ra_ever_killed ()
ffcfcb5f
AM
12734 || (DEFAULT_ABI == ABI_AIX
12735 && current_function_profile
12736 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
12737#ifdef TARGET_RELOCATABLE
12738 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
12739#endif
12740 || (info_ptr->first_fp_reg_save != 64
12741 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 12742 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 12743 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
4697a36c
MM
12744 || info_ptr->calls_p)
12745 {
12746 info_ptr->lr_save_p = 1;
9ebbca7d 12747 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
4697a36c
MM
12748 }
12749
9ebbca7d 12750 /* Determine if we need to save the condition code registers. */
f676971a 12751 if (regs_ever_live[CR2_REGNO]
9ebbca7d
GK
12752 || regs_ever_live[CR3_REGNO]
12753 || regs_ever_live[CR4_REGNO])
4697a36c
MM
12754 {
12755 info_ptr->cr_save_p = 1;
178274da 12756 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
12757 info_ptr->cr_size = reg_size;
12758 }
12759
83720594
RH
12760 /* If the current function calls __builtin_eh_return, then we need
12761 to allocate stack space for registers that will hold data for
12762 the exception handler. */
12763 if (current_function_calls_eh_return)
12764 {
12765 unsigned int i;
12766 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
12767 continue;
a3170dc6
AH
12768
12769 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
12770 ehrd_size = i * (TARGET_SPE_ABI
12771 && info_ptr->spe_64bit_regs_used != 0
12772 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
12773 }
12774 else
12775 ehrd_size = 0;
12776
592696dd 12777 /* Determine various sizes. */
4697a36c
MM
12778 info_ptr->reg_size = reg_size;
12779 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 12780 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 12781 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 12782 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
12783 if (FRAME_GROWS_DOWNWARD)
12784 info_ptr->vars_size
5b667039
JJ
12785 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
12786 + info_ptr->parm_size,
7d5175e1 12787 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
12788 - (info_ptr->fixed_size + info_ptr->vars_size
12789 + info_ptr->parm_size);
00b960c7 12790
c19de7aa 12791 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
12792 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
12793 else
12794 info_ptr->spe_gp_size = 0;
12795
4d774ff8
HP
12796 if (TARGET_ALTIVEC_ABI)
12797 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 12798 else
4d774ff8
HP
12799 info_ptr->vrsave_mask = 0;
12800
12801 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
12802 info_ptr->vrsave_size = 4;
12803 else
12804 info_ptr->vrsave_size = 0;
b6c9286a 12805
d62294f5
FJ
12806 compute_save_world_info (info_ptr);
12807
592696dd 12808 /* Calculate the offsets. */
178274da 12809 switch (DEFAULT_ABI)
4697a36c 12810 {
b6c9286a 12811 case ABI_NONE:
24d304eb 12812 default:
37409796 12813 gcc_unreachable ();
b6c9286a
MM
12814
12815 case ABI_AIX:
ee890fe2 12816 case ABI_DARWIN:
b6c9286a
MM
12817 info_ptr->fp_save_offset = - info_ptr->fp_size;
12818 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
12819
12820 if (TARGET_ALTIVEC_ABI)
12821 {
12822 info_ptr->vrsave_save_offset
12823 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
12824
12825 /* Align stack so vector save area is on a quadword boundary. */
12826 if (info_ptr->altivec_size != 0)
12827 info_ptr->altivec_padding_size
12828 = 16 - (-info_ptr->vrsave_save_offset % 16);
12829 else
12830 info_ptr->altivec_padding_size = 0;
12831
12832 info_ptr->altivec_save_offset
12833 = info_ptr->vrsave_save_offset
12834 - info_ptr->altivec_padding_size
12835 - info_ptr->altivec_size;
12836
12837 /* Adjust for AltiVec case. */
12838 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
12839 }
12840 else
12841 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
12842 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
12843 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
12844 break;
12845
12846 case ABI_V4:
b6c9286a
MM
12847 info_ptr->fp_save_offset = - info_ptr->fp_size;
12848 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 12849 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 12850
c19de7aa 12851 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
12852 {
12853 /* Align stack so SPE GPR save area is aligned on a
12854 double-word boundary. */
12855 if (info_ptr->spe_gp_size != 0)
12856 info_ptr->spe_padding_size
12857 = 8 - (-info_ptr->cr_save_offset % 8);
12858 else
12859 info_ptr->spe_padding_size = 0;
12860
12861 info_ptr->spe_gp_save_offset
12862 = info_ptr->cr_save_offset
12863 - info_ptr->spe_padding_size
12864 - info_ptr->spe_gp_size;
12865
12866 /* Adjust for SPE case. */
12867 info_ptr->toc_save_offset
12868 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
12869 }
a3170dc6 12870 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
12871 {
12872 info_ptr->vrsave_save_offset
12873 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
12874
12875 /* Align stack so vector save area is on a quadword boundary. */
12876 if (info_ptr->altivec_size != 0)
12877 info_ptr->altivec_padding_size
12878 = 16 - (-info_ptr->vrsave_save_offset % 16);
12879 else
12880 info_ptr->altivec_padding_size = 0;
12881
12882 info_ptr->altivec_save_offset
12883 = info_ptr->vrsave_save_offset
12884 - info_ptr->altivec_padding_size
12885 - info_ptr->altivec_size;
12886
12887 /* Adjust for AltiVec case. */
12888 info_ptr->toc_save_offset
12889 = info_ptr->altivec_save_offset - info_ptr->toc_size;
12890 }
12891 else
12892 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
83720594 12893 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
b6c9286a
MM
12894 info_ptr->lr_save_offset = reg_size;
12895 break;
4697a36c
MM
12896 }
12897
64045029 12898 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
12899 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
12900 + info_ptr->gp_size
12901 + info_ptr->altivec_size
12902 + info_ptr->altivec_padding_size
a3170dc6
AH
12903 + info_ptr->spe_gp_size
12904 + info_ptr->spe_padding_size
00b960c7
AH
12905 + ehrd_size
12906 + info_ptr->cr_size
12907 + info_ptr->lr_size
12908 + info_ptr->vrsave_size
12909 + info_ptr->toc_size,
64045029 12910 save_align);
00b960c7 12911
44688022 12912 non_fixed_size = (info_ptr->vars_size
ff381587 12913 + info_ptr->parm_size
5b667039 12914 + info_ptr->save_size);
ff381587 12915
44688022
AM
12916 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
12917 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
12918
12919 /* Determine if we need to allocate any stack frame:
12920
a4f6c312
SS
12921 For AIX we need to push the stack if a frame pointer is needed
12922 (because the stack might be dynamically adjusted), if we are
12923 debugging, if we make calls, or if the sum of fp_save, gp_save,
12924 and local variables are more than the space needed to save all
12925 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12926 + 18*8 = 288 (GPR13 reserved).
ff381587 12927
a4f6c312
SS
12928 For V.4 we don't have the stack cushion that AIX uses, but assume
12929 that the debugger can handle stackless frames. */
ff381587
MM
12930
12931 if (info_ptr->calls_p)
12932 info_ptr->push_p = 1;
12933
178274da 12934 else if (DEFAULT_ABI == ABI_V4)
44688022 12935 info_ptr->push_p = non_fixed_size != 0;
ff381587 12936
178274da
AM
12937 else if (frame_pointer_needed)
12938 info_ptr->push_p = 1;
12939
12940 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
12941 info_ptr->push_p = 1;
12942
ff381587 12943 else
44688022 12944 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 12945
a4f6c312 12946 /* Zero offsets if we're not saving those registers. */
8dda1a21 12947 if (info_ptr->fp_size == 0)
4697a36c
MM
12948 info_ptr->fp_save_offset = 0;
12949
8dda1a21 12950 if (info_ptr->gp_size == 0)
4697a36c
MM
12951 info_ptr->gp_save_offset = 0;
12952
00b960c7
AH
12953 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
12954 info_ptr->altivec_save_offset = 0;
12955
12956 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
12957 info_ptr->vrsave_save_offset = 0;
12958
c19de7aa
AH
12959 if (! TARGET_SPE_ABI
12960 || info_ptr->spe_64bit_regs_used == 0
12961 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
12962 info_ptr->spe_gp_save_offset = 0;
12963
c81fc13e 12964 if (! info_ptr->lr_save_p)
4697a36c
MM
12965 info_ptr->lr_save_offset = 0;
12966
c81fc13e 12967 if (! info_ptr->cr_save_p)
4697a36c
MM
12968 info_ptr->cr_save_offset = 0;
12969
c81fc13e 12970 if (! info_ptr->toc_save_p)
b6c9286a
MM
12971 info_ptr->toc_save_offset = 0;
12972
4697a36c
MM
12973 return info_ptr;
12974}
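/* Illustrative example, not part of the original source: for a 32-bit
   AIX/Darwin function that saves only r30, r31 and the link register,
   the computation above gives roughly

       gp_size        = 4 * (32 - 30) = 8
       gp_save_offset = fp_save_offset - gp_size = -8
       lr_save_offset = 2 * reg_size = 8   (the slot in the fixed header)

   before save_size and total_size are rounded up to the ABI stack
   alignment.  These numbers assume no FPR, AltiVec or SPE saves.  */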
12975
c19de7aa
AH
12976/* Return true if the current function uses any GPRs in 64-bit SIMD
12977 mode. */
12978
12979static bool
863d938c 12980spe_func_has_64bit_regs_p (void)
c19de7aa
AH
12981{
12982 rtx insns, insn;
12983
12984 /* Functions that save and restore all the call-saved registers will
12985 need to save/restore the registers in 64-bits. */
12986 if (current_function_calls_eh_return
12987 || current_function_calls_setjmp
12988 || current_function_has_nonlocal_goto)
12989 return true;
12990
12991 insns = get_insns ();
12992
12993 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
12994 {
12995 if (INSN_P (insn))
12996 {
12997 rtx i;
12998
b5a5beb9
AH
12999 /* FIXME: This should be implemented with attributes...
13000
13001 (set_attr "spe64" "true")....then,
13002 if (get_spe64(insn)) return true;
13003
13004 It's the only reliable way to do the stuff below. */
13005
c19de7aa 13006 i = PATTERN (insn);
f82f556d
AH
13007 if (GET_CODE (i) == SET)
13008 {
13009 enum machine_mode mode = GET_MODE (SET_SRC (i));
13010
13011 if (SPE_VECTOR_MODE (mode))
13012 return true;
13013 if (TARGET_E500_DOUBLE && mode == DFmode)
13014 return true;
13015 }
c19de7aa
AH
13016 }
13017 }
13018
13019 return false;
13020}
13021
d1d0c603 13022static void
a2369ed3 13023debug_stack_info (rs6000_stack_t *info)
9878760c 13024{
d330fd93 13025 const char *abi_string;
24d304eb 13026
c81fc13e 13027 if (! info)
4697a36c
MM
13028 info = rs6000_stack_info ();
13029
13030 fprintf (stderr, "\nStack information for function %s:\n",
13031 ((current_function_decl && DECL_NAME (current_function_decl))
13032 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13033 : "<unknown>"));
13034
24d304eb
RK
13035 switch (info->abi)
13036 {
b6c9286a
MM
13037 default: abi_string = "Unknown"; break;
13038 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 13039 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 13040 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 13041 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
13042 }
13043
13044 fprintf (stderr, "\tABI = %5s\n", abi_string);
13045
00b960c7
AH
13046 if (TARGET_ALTIVEC_ABI)
13047 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13048
a3170dc6
AH
13049 if (TARGET_SPE_ABI)
13050 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13051
4697a36c
MM
13052 if (info->first_gp_reg_save != 32)
13053 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13054
13055 if (info->first_fp_reg_save != 64)
13056 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 13057
00b960c7
AH
13058 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13059 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13060 info->first_altivec_reg_save);
13061
4697a36c
MM
13062 if (info->lr_save_p)
13063 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 13064
4697a36c
MM
13065 if (info->cr_save_p)
13066 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13067
b6c9286a
MM
13068 if (info->toc_save_p)
13069 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
13070
00b960c7
AH
13071 if (info->vrsave_mask)
13072 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13073
4697a36c
MM
13074 if (info->push_p)
13075 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13076
13077 if (info->calls_p)
13078 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13079
4697a36c
MM
13080 if (info->gp_save_offset)
13081 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13082
13083 if (info->fp_save_offset)
13084 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13085
00b960c7
AH
13086 if (info->altivec_save_offset)
13087 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13088 info->altivec_save_offset);
13089
a3170dc6
AH
13090 if (info->spe_gp_save_offset)
13091 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13092 info->spe_gp_save_offset);
13093
00b960c7
AH
13094 if (info->vrsave_save_offset)
13095 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13096 info->vrsave_save_offset);
13097
4697a36c
MM
13098 if (info->lr_save_offset)
13099 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13100
13101 if (info->cr_save_offset)
13102 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13103
b6c9286a
MM
13104 if (info->toc_save_offset)
13105 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
13106
4697a36c
MM
13107 if (info->varargs_save_offset)
13108 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13109
13110 if (info->total_size)
d1d0c603
JJ
13111 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13112 info->total_size);
4697a36c 13113
4697a36c 13114 if (info->vars_size)
d1d0c603
JJ
13115 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13116 info->vars_size);
4697a36c
MM
13117
13118 if (info->parm_size)
13119 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13120
13121 if (info->fixed_size)
13122 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13123
13124 if (info->gp_size)
13125 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13126
a3170dc6
AH
13127 if (info->spe_gp_size)
13128 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13129
4697a36c
MM
13130 if (info->fp_size)
13131 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13132
00b960c7
AH
13133 if (info->altivec_size)
13134 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13135
13136 if (info->vrsave_size)
13137 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13138
13139 if (info->altivec_padding_size)
13140 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13141 info->altivec_padding_size);
13142
a3170dc6
AH
13143 if (info->spe_padding_size)
13144 fprintf (stderr, "\tspe_padding_size = %5d\n",
13145 info->spe_padding_size);
13146
a4f6c312 13147 if (info->lr_size)
ed947a96 13148 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
b6c9286a 13149
4697a36c
MM
13150 if (info->cr_size)
13151 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13152
a4f6c312 13153 if (info->toc_size)
b6c9286a
MM
13154 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
13155
4697a36c
MM
13156 if (info->save_size)
13157 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13158
13159 if (info->reg_size != 4)
13160 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13161
13162 fprintf (stderr, "\n");
9878760c 13163}
71f123ca
FS
13164
13165rtx
a2369ed3 13166rs6000_return_addr (int count, rtx frame)
71f123ca 13167{
a4f6c312
SS
13168 /* Currently we don't optimize very well between prolog and body
13169 code and for PIC code the code can be actually quite bad, so
13170 don't try to be too clever here. */
f1384257 13171 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
13172 {
13173 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
13174
13175 return
13176 gen_rtx_MEM
13177 (Pmode,
13178 memory_address
13179 (Pmode,
13180 plus_constant (copy_to_reg
13181 (gen_rtx_MEM (Pmode,
13182 memory_address (Pmode, frame))),
13183 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
13184 }
13185
8c29550d 13186 cfun->machine->ra_need_lr = 1;
9e2f7ec7 13187 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
71f123ca
FS
13188}
13189
5e1bf043
DJ
13190/* Say whether a function is a candidate for sibcall handling or not.
13191 We do not allow indirect calls to be optimized into sibling calls.
13192 Also, we can't do it if there are any vector parameters; there's
13193 nowhere to put the VRsave code so it works; note that functions with
13194 vector parameters are required to have a prototype, so the argument
13195 type info must be available here. (The tail recursion case can work
13196 with vector parameters, but there's no way to distinguish here.) */
4977bab6 13197static bool
a2369ed3 13198rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
13199{
13200 tree type;
4977bab6 13201 if (decl)
5e1bf043
DJ
13202 {
13203 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 13204 {
4977bab6 13205 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
13206 type; type = TREE_CHAIN (type))
13207 {
c15b529f 13208 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 13209 return false;
5e1bf043 13210 }
c4ad648e 13211 }
5e1bf043 13212 if (DEFAULT_ABI == ABI_DARWIN
4977bab6 13213 || (*targetm.binds_local_p) (decl))
2bcc50d0 13214 {
4977bab6 13215 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
13216
13217 if (!lookup_attribute ("longcall", attr_list)
13218 || lookup_attribute ("shortcall", attr_list))
4977bab6 13219 return true;
2bcc50d0 13220 }
5e1bf043 13221 }
4977bab6 13222 return false;
5e1bf043
DJ
13223}
13224
e7e64a25
AS
 13225/* Return NULL if INSN is valid within a low-overhead loop.
13226 Otherwise return why doloop cannot be applied.
9419649c
DE
13227 PowerPC uses the COUNT register for branch on table instructions. */
13228
e7e64a25
AS
13229static const char *
13230rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
13231{
13232 if (CALL_P (insn))
e7e64a25 13233 return "Function call in the loop.";
9419649c
DE
13234
13235 if (JUMP_P (insn)
13236 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13237 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 13238 return "Computed branch in the loop.";
9419649c 13239
e7e64a25 13240 return NULL;
9419649c
DE
13241}
13242
71f123ca 13243static int
863d938c 13244rs6000_ra_ever_killed (void)
71f123ca
FS
13245{
13246 rtx top;
5e1bf043
DJ
13247 rtx reg;
13248 rtx insn;
71f123ca 13249
dd292d0a 13250 if (current_function_is_thunk)
71f123ca 13251 return 0;
eb0424da 13252
36f7e964
AH
13253 /* regs_ever_live has LR marked as used if any sibcalls are present,
13254 but this should not force saving and restoring in the
13255 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 13256 clobbers LR, so that is inappropriate. */
36f7e964 13257
5e1bf043
DJ
13258 /* Also, the prologue can generate a store into LR that
13259 doesn't really count, like this:
36f7e964 13260
5e1bf043
DJ
13261 move LR->R0
13262 bcl to set PIC register
13263 move LR->R31
13264 move R0->LR
36f7e964
AH
13265
13266 When we're called from the epilogue, we need to avoid counting
13267 this as a store. */
f676971a 13268
71f123ca
FS
13269 push_topmost_sequence ();
13270 top = get_insns ();
13271 pop_topmost_sequence ();
5e1bf043 13272 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
71f123ca 13273
5e1bf043
DJ
13274 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13275 {
13276 if (INSN_P (insn))
13277 {
13278 if (FIND_REG_INC_NOTE (insn, reg))
13279 return 1;
f676971a 13280 else if (GET_CODE (insn) == CALL_INSN
c15b529f 13281 && !SIBLING_CALL_P (insn))
5e1bf043 13282 return 1;
36f7e964
AH
13283 else if (set_of (reg, insn) != NULL_RTX
13284 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
13285 return 1;
13286 }
13287 }
13288 return 0;
71f123ca 13289}
4697a36c 13290\f
8cd8f856
GK
13291/* Add a REG_MAYBE_DEAD note to the insn. */
13292static void
a2369ed3 13293rs6000_maybe_dead (rtx insn)
8cd8f856
GK
13294{
13295 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13296 const0_rtx,
13297 REG_NOTES (insn));
13298}
13299
9ebbca7d 13300/* Emit instructions needed to load the TOC register.
c7ca610e 13301 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 13302 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
13303
13304void
a2369ed3 13305rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 13306{
027fbf43 13307 rtx dest, insn;
1db02437 13308 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 13309
7f970b70 13310 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 13311 {
7f970b70
AM
13312 char buf[30];
13313 rtx lab, tmp1, tmp2, got, tempLR;
13314
13315 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13316 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13317 if (flag_pic == 2)
13318 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
13319 else
13320 got = rs6000_got_sym ();
13321 tmp1 = tmp2 = dest;
13322 if (!fromprolog)
13323 {
13324 tmp1 = gen_reg_rtx (Pmode);
13325 tmp2 = gen_reg_rtx (Pmode);
13326 }
13327 tempLR = (fromprolog
13328 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13329 : gen_reg_rtx (Pmode));
13330 insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
13331 if (fromprolog)
13332 rs6000_maybe_dead (insn);
13333 insn = emit_move_insn (tmp1, tempLR);
13334 if (fromprolog)
13335 rs6000_maybe_dead (insn);
13336 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
13337 if (fromprolog)
13338 rs6000_maybe_dead (insn);
13339 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
13340 if (fromprolog)
13341 rs6000_maybe_dead (insn);
13342 }
13343 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
13344 {
13345 rtx tempLR = (fromprolog
13346 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13347 : gen_reg_rtx (Pmode));
13348
13349 insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
027fbf43
JJ
13350 if (fromprolog)
13351 rs6000_maybe_dead (insn);
7f970b70 13352 insn = emit_move_insn (dest, tempLR);
027fbf43
JJ
13353 if (fromprolog)
13354 rs6000_maybe_dead (insn);
20b71b17
AM
13355 }
13356 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
13357 {
13358 char buf[30];
13359 rtx tempLR = (fromprolog
13360 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
13361 : gen_reg_rtx (Pmode));
13362 rtx temp0 = (fromprolog
13363 ? gen_rtx_REG (Pmode, 0)
13364 : gen_reg_rtx (Pmode));
20b71b17 13365
20b71b17
AM
13366 if (fromprolog)
13367 {
ccbca5e4 13368 rtx symF, symL;
38c1f2d7 13369
20b71b17
AM
13370 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13371 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 13372
20b71b17
AM
13373 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
13374 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13375
13376 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
13377 symF)));
13378 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
13379 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
13380 symL,
13381 symF)));
9ebbca7d
GK
13382 }
13383 else
20b71b17
AM
13384 {
13385 rtx tocsym;
20b71b17
AM
13386
13387 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
ccbca5e4 13388 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
027fbf43
JJ
13389 emit_move_insn (dest, tempLR);
13390 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 13391 }
027fbf43
JJ
13392 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
13393 if (fromprolog)
13394 rs6000_maybe_dead (insn);
9ebbca7d 13395 }
20b71b17
AM
13396 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
13397 {
13398 /* This is for AIX code running in non-PIC ELF32. */
13399 char buf[30];
13400 rtx realsym;
13401 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13402 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
13403
027fbf43
JJ
13404 insn = emit_insn (gen_elf_high (dest, realsym));
13405 if (fromprolog)
13406 rs6000_maybe_dead (insn);
13407 insn = emit_insn (gen_elf_low (dest, dest, realsym));
13408 if (fromprolog)
13409 rs6000_maybe_dead (insn);
20b71b17 13410 }
37409796 13411 else
9ebbca7d 13412 {
37409796 13413 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 13414
9ebbca7d 13415 if (TARGET_32BIT)
027fbf43 13416 insn = emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 13417 else
027fbf43
JJ
13418 insn = emit_insn (gen_load_toc_aix_di (dest));
13419 if (fromprolog)
13420 rs6000_maybe_dead (insn);
9ebbca7d
GK
13421 }
13422}
13423
d1d0c603
JJ
13424/* Emit instructions to restore the link register after determining where
13425 its value has been stored. */
13426
13427void
13428rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
13429{
13430 rs6000_stack_t *info = rs6000_stack_info ();
13431 rtx operands[2];
13432
13433 operands[0] = source;
13434 operands[1] = scratch;
13435
13436 if (info->lr_save_p)
13437 {
13438 rtx frame_rtx = stack_pointer_rtx;
13439 HOST_WIDE_INT sp_offset = 0;
13440 rtx tmp;
13441
13442 if (frame_pointer_needed
13443 || current_function_calls_alloca
13444 || info->total_size > 32767)
13445 {
8308679f
DE
13446 tmp = gen_rtx_MEM (Pmode, frame_rtx);
13447 MEM_NOTRAP_P (tmp) = 1;
13448 set_mem_alias_set (tmp, rs6000_sr_alias_set);
13449 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
13450 frame_rtx = operands[1];
13451 }
13452 else if (info->push_p)
13453 sp_offset = info->total_size;
13454
13455 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
13456 tmp = gen_rtx_MEM (Pmode, tmp);
8308679f
DE
13457 MEM_NOTRAP_P (tmp) = 1;
13458 set_mem_alias_set (tmp, rs6000_sr_alias_set);
d1d0c603
JJ
13459 emit_move_insn (tmp, operands[0]);
13460 }
13461 else
13462 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
13463}
13464
f103e34d
GK
13465static GTY(()) int set = -1;
13466
f676971a 13467int
863d938c 13468get_TOC_alias_set (void)
9ebbca7d 13469{
f103e34d
GK
13470 if (set == -1)
13471 set = new_alias_set ();
13472 return set;
f676971a 13473}
9ebbca7d 13474
c1207243 13475/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
13476 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
13477 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 13478#if TARGET_ELF
3c9eb5f4 13479static int
f676971a 13480uses_TOC (void)
9ebbca7d 13481{
c4501e62 13482 rtx insn;
38c1f2d7 13483
c4501e62
JJ
13484 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
13485 if (INSN_P (insn))
13486 {
13487 rtx pat = PATTERN (insn);
13488 int i;
9ebbca7d 13489
f676971a 13490 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
13491 for (i = 0; i < XVECLEN (pat, 0); i++)
13492 {
13493 rtx sub = XVECEXP (pat, 0, i);
13494 if (GET_CODE (sub) == USE)
13495 {
13496 sub = XEXP (sub, 0);
13497 if (GET_CODE (sub) == UNSPEC
13498 && XINT (sub, 1) == UNSPEC_TOC)
13499 return 1;
13500 }
13501 }
13502 }
13503 return 0;
9ebbca7d 13504}
c954844a 13505#endif
38c1f2d7 13506
9ebbca7d 13507rtx
f676971a 13508create_TOC_reference (rtx symbol)
9ebbca7d 13509{
f676971a 13510 return gen_rtx_PLUS (Pmode,
a8a05998 13511 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
13512 gen_rtx_CONST (Pmode,
13513 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 13514 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 13515}
38c1f2d7 13516
fc4767bb
JJ
/* If _Unwind_* has been called from within the same module, the TOC
   register is not guaranteed to have been saved to 40(1) on function
   entry.  Save it there in that case.  */
c7ca610e 13520
9ebbca7d 13521void
863d938c 13522rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
13523{
13524 rtx mem;
13525 rtx stack_top = gen_reg_rtx (Pmode);
13526 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
13527 rtx opcode = gen_reg_rtx (SImode);
13528 rtx tocompare = gen_reg_rtx (SImode);
13529 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 13530
8308679f 13531 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
13532 emit_move_insn (stack_top, mem);
13533
8308679f
DE
13534 mem = gen_frame_mem (Pmode,
13535 gen_rtx_PLUS (Pmode, stack_top,
13536 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 13537 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
13538 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
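  /* 0x80410014 is the encoding of "lwz 2,20(1)" and 0xE8410028 that of
     "ld 2,40(1)" -- the TOC-restore instruction the linker places after
     an intermodule call.  If the word at the return address matches,
     the caller's TOC was already saved to its slot by the call
     sequence, and the store below is skipped.  */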
13539 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 13540 : 0xE8410028, SImode));
9ebbca7d 13541
fc4767bb 13542 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 13543 SImode, NULL_RTX, NULL_RTX,
fc4767bb 13544 no_toc_save_needed);
9ebbca7d 13545
8308679f
DE
13546 mem = gen_frame_mem (Pmode,
13547 gen_rtx_PLUS (Pmode, stack_top,
13548 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
13549 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
13550 emit_label (no_toc_save_needed);
9ebbca7d 13551}
38c1f2d7 13552\f
ba4828e0
RK
13553/* This ties together stack memory (MEM with an alias set of
13554 rs6000_sr_alias_set) and the change to the stack pointer. */
13555
9ebbca7d 13556static void
863d938c 13557rs6000_emit_stack_tie (void)
9ebbca7d 13558{
ba4828e0
RK
13559 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
13560
13561 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
13562 emit_insn (gen_stack_tie (mem));
13563}
38c1f2d7 13564
9ebbca7d
GK
13565/* Emit the correct code for allocating stack space, as insns.
13566 If COPY_R12, make sure a copy of the old frame is left in r12.
13567 The generated code may use hard register 0 as a temporary. */
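/* Illustrative only: on a 32-bit target with TARGET_UPDATE, a frame
   that fits in 16 bits is typically allocated with a single

	stwu 1,-SIZE(1)

   which decrements r1 and stores the back chain in one instruction;
   larger frames first load -SIZE into r0 and use stwux instead.  */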
13568
13569static void
a2369ed3 13570rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 13571{
9ebbca7d
GK
13572 rtx insn;
13573 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13574 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
13575 rtx todec = gen_int_mode (-size, Pmode);
13576
13577 if (INTVAL (todec) != -size)
13578 {
d4ee4d25 13579 warning (0, "stack frame too large");
61168ff1
RS
13580 emit_insn (gen_trap ());
13581 return;
13582 }
a157febd
GK
13583
13584 if (current_function_limit_stack)
13585 {
13586 if (REG_P (stack_limit_rtx)
f676971a 13587 && REGNO (stack_limit_rtx) > 1
a157febd
GK
13588 && REGNO (stack_limit_rtx) <= 31)
13589 {
5b71a4e7 13590 emit_insn (TARGET_32BIT
9ebbca7d
GK
13591 ? gen_addsi3 (tmp_reg,
13592 stack_limit_rtx,
13593 GEN_INT (size))
13594 : gen_adddi3 (tmp_reg,
13595 stack_limit_rtx,
13596 GEN_INT (size)));
5b71a4e7 13597
9ebbca7d
GK
13598 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13599 const0_rtx));
a157febd
GK
13600 }
13601 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 13602 && TARGET_32BIT
f607bc57 13603 && DEFAULT_ABI == ABI_V4)
a157febd 13604 {
9ebbca7d 13605 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
13606 gen_rtx_PLUS (Pmode,
13607 stack_limit_rtx,
9ebbca7d 13608 GEN_INT (size)));
5b71a4e7 13609
9ebbca7d
GK
13610 emit_insn (gen_elf_high (tmp_reg, toload));
13611 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
13612 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
13613 const0_rtx));
a157febd
GK
13614 }
13615 else
d4ee4d25 13616 warning (0, "stack limit expression is not supported");
a157febd
GK
13617 }
13618
9ebbca7d
GK
13619 if (copy_r12 || ! TARGET_UPDATE)
13620 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
13621
38c1f2d7
MM
13622 if (TARGET_UPDATE)
13623 {
9ebbca7d 13624 if (size > 32767)
38c1f2d7 13625 {
9ebbca7d 13626 /* Need a note here so that try_split doesn't get confused. */
9390387d 13627 if (get_last_insn () == NULL_RTX)
2e040219 13628 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
13629 insn = emit_move_insn (tmp_reg, todec);
13630 try_split (PATTERN (insn), insn, 0);
13631 todec = tmp_reg;
38c1f2d7 13632 }
5b71a4e7
DE
13633
13634 insn = emit_insn (TARGET_32BIT
13635 ? gen_movsi_update (stack_reg, stack_reg,
13636 todec, stack_reg)
c4ad648e 13637 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 13638 todec, stack_reg));
38c1f2d7
MM
13639 }
13640 else
13641 {
5b71a4e7
DE
13642 insn = emit_insn (TARGET_32BIT
13643 ? gen_addsi3 (stack_reg, stack_reg, todec)
13644 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
13645 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
13646 gen_rtx_REG (Pmode, 12));
13647 }
f676971a 13648
9ebbca7d 13649 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 13650 REG_NOTES (insn) =
9ebbca7d 13651 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 13652 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
13653 gen_rtx_PLUS (Pmode, stack_reg,
13654 GEN_INT (-size))),
13655 REG_NOTES (insn));
13656}
13657
a4f6c312
SS
13658/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13659 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13660 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13661 deduce these equivalences by itself so it wasn't necessary to hold
13662 its hand so much. */
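/* Illustrative only: if r12 holds the pre-decrement stack pointer and
   a save was emitted as

	(set (mem (plus (reg 12) (const_int -8))) (reg 31))

   the REG_FRAME_RELATED_EXPR note attached here reads

	(set (mem (plus (reg 1) (const_int SIZE-8))) (reg 31))

   i.e. the same slot rewritten against the new stack pointer, which is
   the form dwarf2out_frame_debug_expr understands.  */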
9ebbca7d
GK
13663
13664static void
f676971a 13665rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 13666 rtx reg2, rtx rreg)
9ebbca7d
GK
13667{
13668 rtx real, temp;
13669
e56c4463
JL
13670 /* copy_rtx will not make unique copies of registers, so we need to
13671 ensure we don't have unwanted sharing here. */
13672 if (reg == reg2)
13673 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13674
13675 if (reg == rreg)
13676 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
13677
9ebbca7d
GK
13678 real = copy_rtx (PATTERN (insn));
13679
89e7058f
AH
13680 if (reg2 != NULL_RTX)
13681 real = replace_rtx (real, reg2, rreg);
f676971a
EC
13682
13683 real = replace_rtx (real, reg,
9ebbca7d
GK
13684 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
13685 STACK_POINTER_REGNUM),
13686 GEN_INT (val)));
f676971a 13687
9ebbca7d
GK
13688 /* We expect that 'real' is either a SET or a PARALLEL containing
13689 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13690 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13691
13692 if (GET_CODE (real) == SET)
13693 {
13694 rtx set = real;
f676971a 13695
9ebbca7d
GK
13696 temp = simplify_rtx (SET_SRC (set));
13697 if (temp)
13698 SET_SRC (set) = temp;
13699 temp = simplify_rtx (SET_DEST (set));
13700 if (temp)
13701 SET_DEST (set) = temp;
13702 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 13703 {
9ebbca7d
GK
13704 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13705 if (temp)
13706 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 13707 }
38c1f2d7 13708 }
37409796 13709 else
9ebbca7d
GK
13710 {
13711 int i;
37409796
NS
13712
13713 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
13714 for (i = 0; i < XVECLEN (real, 0); i++)
13715 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
13716 {
13717 rtx set = XVECEXP (real, 0, i);
f676971a 13718
9ebbca7d
GK
13719 temp = simplify_rtx (SET_SRC (set));
13720 if (temp)
13721 SET_SRC (set) = temp;
13722 temp = simplify_rtx (SET_DEST (set));
13723 if (temp)
13724 SET_DEST (set) = temp;
13725 if (GET_CODE (SET_DEST (set)) == MEM)
13726 {
13727 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
13728 if (temp)
13729 XEXP (SET_DEST (set), 0) = temp;
13730 }
13731 RTX_FRAME_RELATED_P (set) = 1;
13732 }
13733 }
c19de7aa
AH
13734
13735 if (TARGET_SPE)
13736 real = spe_synthesize_frame_save (real);
13737
9ebbca7d
GK
13738 RTX_FRAME_RELATED_P (insn) = 1;
13739 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13740 real,
13741 REG_NOTES (insn));
38c1f2d7
MM
13742}
13743
c19de7aa
AH
13744/* Given an SPE frame note, return a PARALLEL of SETs with the
13745 original note, plus a synthetic register save. */
13746
13747static rtx
a2369ed3 13748spe_synthesize_frame_save (rtx real)
c19de7aa
AH
13749{
13750 rtx synth, offset, reg, real2;
13751
13752 if (GET_CODE (real) != SET
13753 || GET_MODE (SET_SRC (real)) != V2SImode)
13754 return real;
13755
  /* For the SPE, registers saved in 64 bits get a PARALLEL for their
     frame-related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */
13761
37409796
NS
13762 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
13763 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
13764 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
13765
13766 /* Transform:
13767 (set (mem (plus (reg x) (const y)))
13768 (reg z))
13769 into:
13770 (set (mem (plus (reg x) (const y+4)))
41f3a930 13771 (reg z+1200))
c19de7aa
AH
13772 */
13773
13774 real2 = copy_rtx (real);
13775 PUT_MODE (SET_DEST (real2), SImode);
13776 reg = SET_SRC (real2);
13777 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
13778 synth = copy_rtx (real2);
13779
13780 if (BYTES_BIG_ENDIAN)
13781 {
13782 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
13783 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
13784 }
13785
13786 reg = SET_SRC (synth);
41f3a930 13787
c19de7aa 13788 synth = replace_rtx (synth, reg,
41f3a930 13789 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
13790
13791 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
13792 synth = replace_rtx (synth, offset,
13793 GEN_INT (INTVAL (offset)
13794 + (BYTES_BIG_ENDIAN ? 0 : 4)));
13795
13796 RTX_FRAME_RELATED_P (synth) = 1;
13797 RTX_FRAME_RELATED_P (real2) = 1;
13798 if (BYTES_BIG_ENDIAN)
13799 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
13800 else
13801 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
13802
13803 return real;
13804}
13805
00b960c7
AH
13806/* Returns an insn that has a vrsave set operation with the
13807 appropriate CLOBBERs. */
13808
13809static rtx
a2369ed3 13810generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
13811{
13812 int nclobs, i;
13813 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 13814 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 13815
a004eb82
AH
13816 clobs[0]
13817 = gen_rtx_SET (VOIDmode,
13818 vrsave,
13819 gen_rtx_UNSPEC_VOLATILE (SImode,
13820 gen_rtvec (2, reg, vrsave),
3aca4bff 13821 UNSPECV_SET_VRSAVE));
00b960c7
AH
13822
13823 nclobs = 1;
13824
9aa86737
AH
13825 /* We need to clobber the registers in the mask so the scheduler
13826 does not move sets to VRSAVE before sets of AltiVec registers.
13827
13828 However, if the function receives nonlocal gotos, reload will set
13829 all call saved registers live. We will end up with:
13830
13831 (set (reg 999) (mem))
13832 (parallel [ (set (reg vrsave) (unspec blah))
13833 (clobber (reg 999))])
13834
13835 The clobber will cause the store into reg 999 to be dead, and
13836 flow will attempt to delete an epilogue insn. In this case, we
13837 need an unspec use/set of the register. */
00b960c7
AH
13838
13839 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 13840 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
13841 {
13842 if (!epiloguep || call_used_regs [i])
13843 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
13844 gen_rtx_REG (V4SImode, i));
13845 else
13846 {
13847 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
13848
13849 clobs[nclobs++]
a004eb82
AH
13850 = gen_rtx_SET (VOIDmode,
13851 reg,
13852 gen_rtx_UNSPEC (V4SImode,
13853 gen_rtvec (1, reg), 27));
9aa86737
AH
13854 }
13855 }
00b960c7
AH
13856
13857 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
13858
13859 for (i = 0; i < nclobs; ++i)
13860 XVECEXP (insn, 0, i) = clobs[i];
13861
13862 return insn;
13863}
13864
89e7058f
AH
13865/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13866 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13867
13868static void
f676971a 13869emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 13870 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
13871{
13872 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
13873 rtx replacea, replaceb;
13874
13875 int_rtx = GEN_INT (offset);
13876
13877 /* Some cases that need register indexed addressing. */
13878 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 13879 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
13880 || (TARGET_SPE_ABI
13881 && SPE_VECTOR_MODE (mode)
13882 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
13883 {
      /* Whoever calls us must make sure r11 is available in the
         flow path of instructions in the prologue.  */
89e7058f
AH
13886 offset_rtx = gen_rtx_REG (Pmode, 11);
13887 emit_move_insn (offset_rtx, int_rtx);
13888
13889 replacea = offset_rtx;
13890 replaceb = int_rtx;
13891 }
13892 else
13893 {
13894 offset_rtx = int_rtx;
13895 replacea = NULL_RTX;
13896 replaceb = NULL_RTX;
13897 }
13898
13899 reg = gen_rtx_REG (mode, regno);
13900 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
13901 mem = gen_rtx_MEM (mode, addr);
13902 set_mem_alias_set (mem, rs6000_sr_alias_set);
13903
13904 insn = emit_move_insn (mem, reg);
13905
13906 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
13907}
13908
a3170dc6
AH
13909/* Emit an offset memory reference suitable for a frame store, while
13910 converting to a valid addressing mode. */
13911
13912static rtx
a2369ed3 13913gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
13914{
13915 rtx int_rtx, offset_rtx;
13916
13917 int_rtx = GEN_INT (offset);
13918
4d4cbc0e
AH
13919 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
13920 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
13921 {
13922 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13923 emit_move_insn (offset_rtx, int_rtx);
13924 }
13925 else
13926 offset_rtx = int_rtx;
13927
13928 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
13929}
13930
6d0a8091
DJ
/* Look for user-defined global regs.  We should not save and restore
   these, and cannot use stmw/lmw if any of them fall within the range
   those instructions would cover.  */
13933
13934static bool
13935no_global_regs_above (int first_greg)
13936{
13937 int i;
13938 for (i = 0; i < 32 - first_greg; i++)
13939 if (global_regs[first_greg + i])
13940 return false;
13941 return true;
13942}
13943
699c914a
MS
13944#ifndef TARGET_FIX_AND_CONTINUE
13945#define TARGET_FIX_AND_CONTINUE 0
13946#endif
13947
9ebbca7d
GK
13948/* Emit function prologue as insns. */
13949
9878760c 13950void
863d938c 13951rs6000_emit_prologue (void)
9878760c 13952{
4697a36c 13953 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 13954 enum machine_mode reg_mode = Pmode;
327e5343 13955 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
13956 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
13957 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
13958 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 13959 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
13960 rtx insn;
13961 int saving_FPRs_inline;
13962 int using_store_multiple;
13963 HOST_WIDE_INT sp_offset = 0;
f676971a 13964
699c914a
MS
13965 if (TARGET_FIX_AND_CONTINUE)
13966 {
13967 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 13968 address by modifying the first 5 instructions of the function
699c914a
MS
13969 to branch to the overriding function. This is necessary to
13970 permit function pointers that point to the old function to
13971 actually forward to the new function. */
13972 emit_insn (gen_nop ());
13973 emit_insn (gen_nop ());
de2ab0ca 13974 emit_insn (gen_nop ());
699c914a
MS
13975 emit_insn (gen_nop ());
13976 emit_insn (gen_nop ());
13977 }
13978
13979 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13980 {
13981 reg_mode = V2SImode;
13982 reg_size = 8;
13983 }
a3170dc6 13984
9ebbca7d 13985 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
13986 && (!TARGET_SPE_ABI
13987 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
13988 && info->first_gp_reg_save < 31
13989 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 13990 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 13991 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 13992 || current_function_calls_eh_return
8c29550d 13993 || cfun->machine->ra_need_lr);
9ebbca7d
GK
13994
13995 /* For V.4, update stack before we do any saving and set back pointer. */
fc4767bb 13996 if (info->push_p
acd0b319
AM
13997 && (DEFAULT_ABI == ABI_V4
13998 || current_function_calls_eh_return))
9ebbca7d
GK
13999 {
14000 if (info->total_size < 32767)
14001 sp_offset = info->total_size;
14002 else
14003 frame_reg_rtx = frame_ptr_rtx;
f676971a 14004 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
14005 (frame_reg_rtx != sp_reg_rtx
14006 && (info->cr_save_p
14007 || info->lr_save_p
14008 || info->first_fp_reg_save < 64
14009 || info->first_gp_reg_save < 32
14010 )));
14011 if (frame_reg_rtx != sp_reg_rtx)
14012 rs6000_emit_stack_tie ();
14013 }
14014
d62294f5 14015 /* Handle world saves specially here. */
f57fe068 14016 if (WORLD_SAVE_P (info))
d62294f5
FJ
14017 {
14018 int i, j, sz;
14019 rtx treg;
14020 rtvec p;
14021
14022 /* save_world expects lr in r0. */
14023 if (info->lr_save_p)
c4ad648e
AM
14024 {
14025 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14026 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14027 RTX_FRAME_RELATED_P (insn) = 1;
14028 }
d62294f5
FJ
14029
14030 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 14031 assumptions about the offsets of various bits of the stack
992d08b1 14032 frame. */
37409796
NS
14033 gcc_assert (info->gp_save_offset == -220
14034 && info->fp_save_offset == -144
14035 && info->lr_save_offset == 8
14036 && info->cr_save_offset == 4
14037 && info->push_p
14038 && info->lr_save_p
14039 && (!current_function_calls_eh_return
14040 || info->ehrd_offset == -432)
14041 && info->vrsave_save_offset == -224
14042 && info->altivec_save_offset == (-224 -16 -192));
d62294f5
FJ
14043
14044 treg = gen_rtx_REG (SImode, 11);
14045 emit_move_insn (treg, GEN_INT (-info->total_size));
14046
14047 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 14048 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
14049
14050 /* Preserve CR2 for save_world prologues */
14051 sz = 6;
14052 sz += 32 - info->first_gp_reg_save;
14053 sz += 64 - info->first_fp_reg_save;
14054 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14055 p = rtvec_alloc (sz);
14056 j = 0;
14057 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
c4ad648e
AM
14058 gen_rtx_REG (Pmode,
14059 LINK_REGISTER_REGNUM));
d62294f5 14060 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14061 gen_rtx_SYMBOL_REF (Pmode,
14062 "*save_world"));
d62294f5 14063 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
14064 properly. */
14065 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14066 {
14067 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14068 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14069 GEN_INT (info->fp_save_offset
14070 + sp_offset + 8 * i));
14071 rtx mem = gen_rtx_MEM (DFmode, addr);
14072 set_mem_alias_set (mem, rs6000_sr_alias_set);
14073
14074 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14075 }
d62294f5 14076 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14077 {
14078 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14079 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14080 GEN_INT (info->altivec_save_offset
14081 + sp_offset + 16 * i));
14082 rtx mem = gen_rtx_MEM (V4SImode, addr);
14083 set_mem_alias_set (mem, rs6000_sr_alias_set);
14084
14085 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14086 }
d62294f5 14087 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14088 {
14089 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14090 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14091 GEN_INT (info->gp_save_offset
14092 + sp_offset + reg_size * i));
14093 rtx mem = gen_rtx_MEM (reg_mode, addr);
14094 set_mem_alias_set (mem, rs6000_sr_alias_set);
14095
14096 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14097 }
14098
14099 {
14100 /* CR register traditionally saved as CR2. */
14101 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14102 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14103 GEN_INT (info->cr_save_offset
14104 + sp_offset));
14105 rtx mem = gen_rtx_MEM (reg_mode, addr);
14106 set_mem_alias_set (mem, rs6000_sr_alias_set);
14107
14108 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14109 }
d62294f5
FJ
14110 /* Prevent any attempt to delete the setting of r0 and treg! */
14111 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 0));
14112 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode, treg);
14113 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode, sp_reg_rtx);
14114
14115 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14116 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
c4ad648e 14117 NULL_RTX, NULL_RTX);
d62294f5
FJ
14118
14119 if (current_function_calls_eh_return)
c4ad648e
AM
14120 {
14121 unsigned int i;
14122 for (i = 0; ; ++i)
14123 {
14124 unsigned int regno = EH_RETURN_DATA_REGNO (i);
14125 if (regno == INVALID_REGNUM)
14126 break;
14127 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14128 info->ehrd_offset + sp_offset
14129 + reg_size * (int) i,
14130 info->total_size);
14131 }
14132 }
d62294f5
FJ
14133 }
14134
9aa86737 14135 /* Save AltiVec registers if needed. */
f57fe068 14136 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9aa86737
AH
14137 {
14138 int i;
14139
      /* There should be a non-inline version of this, for when we
         are saving lots of vector registers.  */
14142 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14143 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14144 {
14145 rtx areg, savereg, mem;
14146 int offset;
14147
14148 offset = info->altivec_save_offset + sp_offset
14149 + 16 * (i - info->first_altivec_reg_save);
14150
14151 savereg = gen_rtx_REG (V4SImode, i);
14152
14153 areg = gen_rtx_REG (Pmode, 0);
14154 emit_move_insn (areg, GEN_INT (offset));
14155
14156 /* AltiVec addressing mode is [reg+reg]. */
14157 mem = gen_rtx_MEM (V4SImode,
14158 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
f676971a 14159
9aa86737
AH
14160 set_mem_alias_set (mem, rs6000_sr_alias_set);
14161
14162 insn = emit_move_insn (mem, savereg);
14163
5c242421
SB
14164 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14165 areg, GEN_INT (offset));
9aa86737
AH
14166 }
14167 }
14168
14169 /* VRSAVE is a bit vector representing which AltiVec registers
14170 are used. The OS uses this to determine which vector
14171 registers to save on a context switch. We need to save
14172 VRSAVE on the stack frame, add whatever AltiVec registers we
14173 used in this function, and do the corresponding magic in the
14174 epilogue. */
14175
4d774ff8 14176 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
bcb604b6 14177 && info->vrsave_mask != 0)
9aa86737 14178 {
a004eb82 14179 rtx reg, mem, vrsave;
9aa86737
AH
14180 int offset;
14181
eab97e44
AM
14182 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
14183 as frame_reg_rtx and r11 as the static chain pointer for
14184 nested functions. */
14185 reg = gen_rtx_REG (SImode, 0);
a004eb82 14186 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
b188f760
AH
14187 if (TARGET_MACHO)
14188 emit_insn (gen_get_vrsave_internal (reg));
14189 else
14190 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
9aa86737 14191
bcb604b6
FJ
14192 if (!WORLD_SAVE_P (info))
14193 {
14194 /* Save VRSAVE. */
14195 offset = info->vrsave_save_offset + sp_offset;
14196 mem
14197 = gen_rtx_MEM (SImode,
14198 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
14199 set_mem_alias_set (mem, rs6000_sr_alias_set);
14200 insn = emit_move_insn (mem, reg);
14201 }
9aa86737
AH
14202
14203 /* Include the registers in the mask. */
14204 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
14205
14206 insn = emit_insn (generate_set_vrsave (reg, info, 0));
14207 }
14208
9ebbca7d 14209 /* If we use the link register, get it into r0. */
f57fe068 14210 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8
GK
14211 {
14212 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14213 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14214 RTX_FRAME_RELATED_P (insn) = 1;
14215 }
9ebbca7d
GK
14216
14217 /* If we need to save CR, put it into r12. */
f57fe068 14218 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 14219 {
f8a57be8 14220 rtx set;
f676971a 14221
9ebbca7d 14222 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
14223 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14224 RTX_FRAME_RELATED_P (insn) = 1;
14225 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14226 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14227 But that's OK. All we have to do is specify that _one_ condition
14228 code register is saved in this stack slot. The thrower's epilogue
14229 will then restore all the call-saved registers.
14230 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14231 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14232 gen_rtx_REG (SImode, CR2_REGNO));
14233 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14234 set,
14235 REG_NOTES (insn));
9ebbca7d
GK
14236 }
14237
a4f6c312
SS
  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call a function.  */
f57fe068 14240 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
14241 {
14242 int i;
14243 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14244 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d 14245 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
14246 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14247 info->first_fp_reg_save + i,
14248 info->fp_save_offset + sp_offset + 8 * i,
14249 info->total_size);
9ebbca7d 14250 }
f57fe068 14251 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
14252 {
14253 int i;
14254 char rname[30];
520a57c8 14255 const char *alloc_rname;
9ebbca7d
GK
14256 rtvec p;
14257 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
14258
14259 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14260 gen_rtx_REG (Pmode,
9ebbca7d
GK
14261 LINK_REGISTER_REGNUM));
14262 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14263 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 14264 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
14265 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14266 gen_rtx_SYMBOL_REF (Pmode,
14267 alloc_rname));
14268 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14269 {
14270 rtx addr, reg, mem;
14271 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14272 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 14273 GEN_INT (info->fp_save_offset
9ebbca7d
GK
14274 + sp_offset + 8*i));
14275 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 14276 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14277
14278 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14279 }
14280 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14281 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
14282 NULL_RTX, NULL_RTX);
14283 }
b6c9286a 14284
9ebbca7d
GK
14285 /* Save GPRs. This is done as a PARALLEL if we are using
14286 the store-multiple instructions. */
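      /* Illustrative only: with -m32 this PARALLEL typically matches
	 the store-multiple pattern and becomes a single

		stmw FIRST,OFFSET(1)

	 (FIRST and OFFSET are placeholders) instead of one stw per
	 saved GPR.  */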
f57fe068 14287 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 14288 {
308c142a 14289 rtvec p;
9ebbca7d
GK
14290 int i;
14291 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
14292 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14293 {
14294 rtx addr, reg, mem;
14295 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
14296 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14297 GEN_INT (info->gp_save_offset
14298 + sp_offset
9ebbca7d
GK
14299 + reg_size * i));
14300 mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0 14301 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14302
14303 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14304 }
14305 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14306 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 14307 NULL_RTX, NULL_RTX);
b6c9286a 14308 }
f57fe068 14309 else if (!WORLD_SAVE_P (info))
b6c9286a 14310 {
9ebbca7d
GK
14311 int i;
14312 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14313 if ((regs_ever_live[info->first_gp_reg_save + i]
14314 && (!call_used_regs[info->first_gp_reg_save + i]
14315 || (i + info->first_gp_reg_save
b4db40bf
JJ
14316 == RS6000_PIC_OFFSET_TABLE_REGNUM
14317 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14318 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14319 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14320 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
a3170dc6
AH
14321 {
14322 rtx addr, reg, mem;
14323 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14324
c19de7aa 14325 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14326 {
14327 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14328 rtx b;
14329
14330 if (!SPE_CONST_OFFSET_OK (offset))
14331 {
14332 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14333 emit_move_insn (b, GEN_INT (offset));
14334 }
14335 else
14336 b = GEN_INT (offset);
14337
14338 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14339 mem = gen_rtx_MEM (V2SImode, addr);
14340 set_mem_alias_set (mem, rs6000_sr_alias_set);
14341 insn = emit_move_insn (mem, reg);
14342
14343 if (GET_CODE (b) == CONST_INT)
14344 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14345 NULL_RTX, NULL_RTX);
14346 else
14347 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14348 b, GEN_INT (offset));
14349 }
14350 else
14351 {
f676971a
EC
14352 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14353 GEN_INT (info->gp_save_offset
14354 + sp_offset
a3170dc6
AH
14355 + reg_size * i));
14356 mem = gen_rtx_MEM (reg_mode, addr);
14357 set_mem_alias_set (mem, rs6000_sr_alias_set);
14358
14359 insn = emit_move_insn (mem, reg);
f676971a 14360 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
a3170dc6
AH
14361 NULL_RTX, NULL_RTX);
14362 }
14363 }
9ebbca7d
GK
14364 }
14365
83720594
RH
14366 /* ??? There's no need to emit actual instructions here, but it's the
14367 easiest way to get the frame unwind information emitted. */
f57fe068 14368 if (!WORLD_SAVE_P (info) && current_function_calls_eh_return)
83720594 14369 {
78e1b90d
DE
14370 unsigned int i, regno;
14371
fc4767bb
JJ
14372 /* In AIX ABI we need to pretend we save r2 here. */
14373 if (TARGET_AIX)
14374 {
14375 rtx addr, reg, mem;
14376
14377 reg = gen_rtx_REG (reg_mode, 2);
14378 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14379 GEN_INT (sp_offset + 5 * reg_size));
14380 mem = gen_rtx_MEM (reg_mode, addr);
14381 set_mem_alias_set (mem, rs6000_sr_alias_set);
14382
14383 insn = emit_move_insn (mem, reg);
f676971a 14384 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
14385 NULL_RTX, NULL_RTX);
14386 PATTERN (insn) = gen_blockage ();
14387 }
14388
83720594
RH
14389 for (i = 0; ; ++i)
14390 {
83720594
RH
14391 regno = EH_RETURN_DATA_REGNO (i);
14392 if (regno == INVALID_REGNUM)
14393 break;
14394
89e7058f
AH
14395 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14396 info->ehrd_offset + sp_offset
14397 + reg_size * (int) i,
14398 info->total_size);
83720594
RH
14399 }
14400 }
14401
9ebbca7d 14402 /* Save lr if we used it. */
f57fe068 14403 if (!WORLD_SAVE_P (info) && info->lr_save_p)
9ebbca7d
GK
14404 {
14405 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14406 GEN_INT (info->lr_save_offset + sp_offset));
14407 rtx reg = gen_rtx_REG (Pmode, 0);
14408 rtx mem = gen_rtx_MEM (Pmode, addr);
14409 /* This should not be of rs6000_sr_alias_set, because of
14410 __builtin_return_address. */
f676971a 14411
9ebbca7d 14412 insn = emit_move_insn (mem, reg);
f676971a 14413 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14414 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14415 }
14416
14417 /* Save CR if we use any that must be preserved. */
f57fe068 14418 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
14419 {
14420 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14421 GEN_INT (info->cr_save_offset + sp_offset));
14422 rtx mem = gen_rtx_MEM (SImode, addr);
f8a57be8
GK
14423 /* See the large comment above about why CR2_REGNO is used. */
14424 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0
RK
14425
14426 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d
GK
14427
14428 /* If r12 was used to hold the original sp, copy cr into r0 now
14429 that it's free. */
14430 if (REGNO (frame_reg_rtx) == 12)
14431 {
f8a57be8
GK
14432 rtx set;
14433
9ebbca7d 14434 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
14435 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14436 RTX_FRAME_RELATED_P (insn) = 1;
14437 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
14438 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14439 set,
14440 REG_NOTES (insn));
f676971a 14441
9ebbca7d
GK
14442 }
14443 insn = emit_move_insn (mem, cr_save_rtx);
14444
f676971a 14445 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 14446 NULL_RTX, NULL_RTX);
9ebbca7d
GK
14447 }
14448
f676971a 14449 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 14450 for which it was done previously. */
f57fe068 14451 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 14452 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
9ebbca7d
GK
14453 rs6000_emit_allocate_stack (info->total_size, FALSE);
14454
14455 /* Set frame pointer, if needed. */
14456 if (frame_pointer_needed)
14457 {
7d5175e1 14458 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
14459 sp_reg_rtx);
14460 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 14461 }
9878760c 14462
1db02437 14463 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 14464 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
14465 || (DEFAULT_ABI == ABI_V4
14466 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
1db02437 14467 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
c4ad648e
AM
14468 {
14469 /* If emit_load_toc_table will use the link register, we need to save
14470 it. We use R12 for this purpose because emit_load_toc_table
14471 can use register 0. This allows us to use a plain 'blr' to return
14472 from the procedure more often. */
14473 int save_LR_around_toc_setup = (TARGET_ELF
14474 && DEFAULT_ABI != ABI_AIX
14475 && flag_pic
14476 && ! info->lr_save_p
14477 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
14478 if (save_LR_around_toc_setup)
14479 {
14480 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
f8a57be8 14481
c4ad648e
AM
14482 insn = emit_move_insn (frame_ptr_rtx, lr);
14483 rs6000_maybe_dead (insn);
14484 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 14485
c4ad648e 14486 rs6000_emit_load_toc_table (TRUE);
f8a57be8 14487
c4ad648e
AM
14488 insn = emit_move_insn (lr, frame_ptr_rtx);
14489 rs6000_maybe_dead (insn);
14490 RTX_FRAME_RELATED_P (insn) = 1;
14491 }
14492 else
14493 rs6000_emit_load_toc_table (TRUE);
14494 }
ee890fe2 14495
fcce224d 14496#if TARGET_MACHO
ee890fe2
SS
14497 if (DEFAULT_ABI == ABI_DARWIN
14498 && flag_pic && current_function_uses_pic_offset_table)
14499 {
f8a57be8 14500 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11abc112 14501 rtx src = machopic_function_base_sym ();
ee890fe2 14502
6d0a8091
DJ
14503 /* Save and restore LR locally around this call (in R0). */
14504 if (!info->lr_save_p)
14505 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
14506
f8a57be8 14507 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
ee890fe2 14508
f676971a 14509 insn = emit_move_insn (gen_rtx_REG (Pmode,
f8a57be8
GK
14510 RS6000_PIC_OFFSET_TABLE_REGNUM),
14511 lr);
14512 rs6000_maybe_dead (insn);
6d0a8091
DJ
14513
14514 if (!info->lr_save_p)
14515 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
ee890fe2 14516 }
fcce224d 14517#endif
9ebbca7d
GK
14518}
14519
9ebbca7d 14520/* Write function prologue. */
a4f6c312 14521
08c148a8 14522static void
f676971a 14523rs6000_output_function_prologue (FILE *file,
a2369ed3 14524 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
14525{
14526 rs6000_stack_t *info = rs6000_stack_info ();
14527
4697a36c
MM
14528 if (TARGET_DEBUG_STACK)
14529 debug_stack_info (info);
9878760c 14530
a4f6c312
SS
14531 /* Write .extern for any function we will call to save and restore
14532 fp values. */
14533 if (info->first_fp_reg_save < 64
14534 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 14535 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 14536 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
14537 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
14538 RESTORE_FP_SUFFIX);
9878760c 14539
c764f757
RK
14540 /* Write .extern for AIX common mode routines, if needed. */
14541 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
14542 {
f6709c70
JW
14543 fputs ("\t.extern __mulh\n", file);
14544 fputs ("\t.extern __mull\n", file);
14545 fputs ("\t.extern __divss\n", file);
14546 fputs ("\t.extern __divus\n", file);
14547 fputs ("\t.extern __quoss\n", file);
14548 fputs ("\t.extern __quous\n", file);
c764f757
RK
14549 common_mode_defined = 1;
14550 }
9878760c 14551
9ebbca7d 14552 if (! HAVE_prologue)
979721f8 14553 {
9ebbca7d 14554 start_sequence ();
9dda4cc8 14555
a4f6c312
SS
14556 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14557 the "toplevel" insn chain. */
2e040219 14558 emit_note (NOTE_INSN_DELETED);
9ebbca7d 14559 rs6000_emit_prologue ();
2e040219 14560 emit_note (NOTE_INSN_DELETED);
178c3eff 14561
a3c9585f 14562 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
14563 {
14564 rtx insn;
14565 unsigned addr = 0;
14566 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
14567 {
14568 INSN_ADDRESSES_NEW (insn, addr);
14569 addr += 4;
14570 }
14571 }
9dda4cc8 14572
9ebbca7d 14573 if (TARGET_DEBUG_STACK)
a4f6c312 14574 debug_rtx_list (get_insns (), 100);
c9d691e9 14575 final (get_insns (), file, FALSE);
9ebbca7d 14576 end_sequence ();
979721f8
MM
14577 }
14578
9ebbca7d
GK
14579 rs6000_pic_labelno++;
14580}
f676971a 14581
9ebbca7d 14582/* Emit function epilogue as insns.
9878760c 14583
9ebbca7d
GK
14584 At present, dwarf2out_frame_debug_expr doesn't understand
14585 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14586 anywhere in the epilogue. Most of the insns below would in any case
14587 need special notes to explain where r11 is in relation to the stack. */
9878760c 14588
9ebbca7d 14589void
a2369ed3 14590rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
14591{
14592 rs6000_stack_t *info;
14593 int restoring_FPRs_inline;
14594 int using_load_multiple;
14595 int using_mfcr_multiple;
14596 int use_backchain_to_restore_sp;
14597 int sp_offset = 0;
14598 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
14599 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 14600 enum machine_mode reg_mode = Pmode;
327e5343 14601 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14602 int i;
14603
c19de7aa
AH
14604 info = rs6000_stack_info ();
14605
14606 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14607 {
14608 reg_mode = V2SImode;
14609 reg_size = 8;
14610 }
14611
9ebbca7d 14612 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14613 && (!TARGET_SPE_ABI
14614 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14615 && info->first_gp_reg_save < 31
14616 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14617 restoring_FPRs_inline = (sibcall
83720594 14618 || current_function_calls_eh_return
9ebbca7d
GK
14619 || info->first_fp_reg_save == 64
14620 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 14621 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
14622 || current_function_calls_alloca
14623 || info->total_size > 32767);
14624 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
14625 || rs6000_cpu == PROCESSOR_PPC603
14626 || rs6000_cpu == PROCESSOR_PPC750
14627 || optimize_size);
14628
f57fe068 14629 if (WORLD_SAVE_P (info))
d62294f5
FJ
14630 {
14631 int i, j;
14632 char rname[30];
14633 const char *alloc_rname;
14634 rtvec p;
14635
      /* eh_rest_world_r10 will return to the location saved in the LR
	 stack slot (which is not likely to be our caller).
	 Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
	 rest_world is similar, except any R10 parameter is ignored.
	 The exception-handling stuff that was here in 2.95 is no
	 longer necessary.  */
d62294f5
FJ
14642
14643 p = rtvec_alloc (9
14644 + 1
f676971a 14645 + 32 - info->first_gp_reg_save
c4ad648e
AM
14646 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
14647 + 63 + 1 - info->first_fp_reg_save);
d62294f5 14648
c4ad648e
AM
14649 strcpy (rname, ((current_function_calls_eh_return) ?
14650 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
14651 alloc_rname = ggc_strdup (rname);
14652
14653 j = 0;
14654 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
14655 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14656 gen_rtx_REG (Pmode,
14657 LINK_REGISTER_REGNUM));
d62294f5 14658 RTVEC_ELT (p, j++)
c4ad648e 14659 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 14660 /* The instruction pattern requires a clobber here;
c4ad648e 14661 it is shared with the restVEC helper. */
d62294f5 14662 RTVEC_ELT (p, j++)
c4ad648e 14663 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
14664
14665 {
c4ad648e
AM
14666 /* CR register traditionally saved as CR2. */
14667 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14668 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14669 GEN_INT (info->cr_save_offset));
14670 rtx mem = gen_rtx_MEM (reg_mode, addr);
14671 set_mem_alias_set (mem, rs6000_sr_alias_set);
14672
14673 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
14674 }
14675
14676 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14677 {
14678 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14679 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14680 GEN_INT (info->gp_save_offset
14681 + reg_size * i));
14682 rtx mem = gen_rtx_MEM (reg_mode, addr);
14683 set_mem_alias_set (mem, rs6000_sr_alias_set);
14684
14685 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14686 }
d62294f5 14687 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14688 {
14689 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14690 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14691 GEN_INT (info->altivec_save_offset
14692 + 16 * i));
14693 rtx mem = gen_rtx_MEM (V4SImode, addr);
14694 set_mem_alias_set (mem, rs6000_sr_alias_set);
14695
14696 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14697 }
d62294f5 14698 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
14699 {
14700 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14701 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14702 GEN_INT (info->fp_save_offset
14703 + 8 * i));
14704 rtx mem = gen_rtx_MEM (DFmode, addr);
14705 set_mem_alias_set (mem, rs6000_sr_alias_set);
14706
14707 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
14708 }
d62294f5 14709 RTVEC_ELT (p, j++)
c4ad648e 14710 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 14711 RTVEC_ELT (p, j++)
c4ad648e 14712 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 14713 RTVEC_ELT (p, j++)
c4ad648e 14714 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 14715 RTVEC_ELT (p, j++)
c4ad648e 14716 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 14717 RTVEC_ELT (p, j++)
c4ad648e 14718 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
14719 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
14720
14721 return;
14722 }
14723
9ebbca7d
GK
14724 /* If we have a frame pointer, a call to alloca, or a large stack
14725 frame, restore the old stack pointer using the backchain. Otherwise,
14726 we know what size to update it with. */
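      /* Illustrative only: the load below is simply "lwz 1,0(1)" (or
	 "ld" on 64-bit, and into r11 first for V.4), reloading the back
	 chain word the prologue stored, so the caller's stack pointer
	 is recovered without knowing the frame size.  */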
14727 if (use_backchain_to_restore_sp)
bacbde18 14728 {
9ebbca7d
GK
14729 /* Under V.4, don't reset the stack pointer until after we're done
14730 loading the saved registers. */
f607bc57 14731 if (DEFAULT_ABI == ABI_V4)
9ebbca7d 14732 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
4697a36c 14733
9ebbca7d
GK
14734 emit_move_insn (frame_reg_rtx,
14735 gen_rtx_MEM (Pmode, sp_reg_rtx));
f676971a 14736
bacbde18 14737 }
9ebbca7d 14738 else if (info->push_p)
85638c0d 14739 {
fc4767bb
JJ
14740 if (DEFAULT_ABI == ABI_V4
14741 || current_function_calls_eh_return)
9ebbca7d
GK
14742 sp_offset = info->total_size;
14743 else
14744 {
14745 emit_insn (TARGET_32BIT
14746 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
14747 GEN_INT (info->total_size))
14748 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
14749 GEN_INT (info->total_size)));
14750 }
85638c0d 14751 }
f676971a 14752
9aa86737
AH
14753 /* Restore AltiVec registers if needed. */
14754 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
14755 {
14756 int i;
14757
14758 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
14759 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
14760 {
14761 rtx addr, areg, mem;
14762
14763 areg = gen_rtx_REG (Pmode, 0);
14764 emit_move_insn
14765 (areg, GEN_INT (info->altivec_save_offset
14766 + sp_offset
14767 + 16 * (i - info->first_altivec_reg_save)));
14768
14769 /* AltiVec addressing mode is [reg+reg]. */
14770 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
14771 mem = gen_rtx_MEM (V4SImode, addr);
14772 set_mem_alias_set (mem, rs6000_sr_alias_set);
14773
14774 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
14775 }
14776 }
14777
14778 /* Restore VRSAVE if needed. */
44688022 14779 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 14780 && info->vrsave_mask != 0)
9aa86737
AH
14781 {
14782 rtx addr, mem, reg;
14783
14784 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14785 GEN_INT (info->vrsave_save_offset + sp_offset));
14786 mem = gen_rtx_MEM (SImode, addr);
14787 set_mem_alias_set (mem, rs6000_sr_alias_set);
14788 reg = gen_rtx_REG (SImode, 12);
14789 emit_move_insn (reg, mem);
14790
14791 emit_insn (generate_set_vrsave (reg, info, 1));
14792 }
14793
9ebbca7d
GK
14794 /* Get the old lr if we saved it. */
14795 if (info->lr_save_p)
b6c9286a 14796 {
a3170dc6
AH
14797 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
14798 info->lr_save_offset + sp_offset);
ba4828e0
RK
14799
14800 set_mem_alias_set (mem, rs6000_sr_alias_set);
b6c9286a 14801
9ebbca7d 14802 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 14803 }
f676971a 14804
9ebbca7d
GK
14805 /* Get the old cr if we saved it. */
14806 if (info->cr_save_p)
14807 {
14808 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14809 GEN_INT (info->cr_save_offset + sp_offset));
14810 rtx mem = gen_rtx_MEM (SImode, addr);
ba4828e0
RK
14811
14812 set_mem_alias_set (mem, rs6000_sr_alias_set);
b6c9286a 14813
9ebbca7d
GK
14814 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
14815 }
f676971a 14816
9ebbca7d 14817 /* Set LR here to try to overlap restores below. */
4697a36c 14818 if (info->lr_save_p)
9ebbca7d
GK
14819 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
14820 gen_rtx_REG (Pmode, 0));
f676971a 14821
83720594
RH
14822 /* Load exception handler data registers, if needed. */
14823 if (current_function_calls_eh_return)
14824 {
78e1b90d
DE
14825 unsigned int i, regno;
14826
fc4767bb
JJ
14827 if (TARGET_AIX)
14828 {
14829 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14830 GEN_INT (sp_offset + 5 * reg_size));
14831 rtx mem = gen_rtx_MEM (reg_mode, addr);
14832
14833 set_mem_alias_set (mem, rs6000_sr_alias_set);
14834
14835 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
14836 }
14837
83720594
RH
14838 for (i = 0; ; ++i)
14839 {
a3170dc6 14840 rtx mem;
83720594
RH
14841
14842 regno = EH_RETURN_DATA_REGNO (i);
14843 if (regno == INVALID_REGNUM)
14844 break;
14845
a3170dc6
AH
14846 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
14847 info->ehrd_offset + sp_offset
14848 + reg_size * (int) i);
ba4828e0 14849 set_mem_alias_set (mem, rs6000_sr_alias_set);
83720594
RH
14850
14851 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
14852 }
14853 }
f676971a 14854
9ebbca7d
GK
14855 /* Restore GPRs. This is done as a PARALLEL if we are using
14856 the load-multiple instructions. */
14857 if (using_load_multiple)
979721f8 14858 {
9ebbca7d
GK
14859 rtvec p;
14860 p = rtvec_alloc (32 - info->first_gp_reg_save);
14861 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 14862 {
f676971a
EC
14863 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14864 GEN_INT (info->gp_save_offset
14865 + sp_offset
9ebbca7d
GK
14866 + reg_size * i));
14867 rtx mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0
RK
14868
14869 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14870
f676971a 14871 RTVEC_ELT (p, i) =
9ebbca7d
GK
14872 gen_rtx_SET (VOIDmode,
14873 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
14874 mem);
979721f8 14875 }
9ebbca7d 14876 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 14877 }
9ebbca7d
GK
14878 else
14879 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14880 if ((regs_ever_live[info->first_gp_reg_save + i]
14881 && (!call_used_regs[info->first_gp_reg_save + i]
14882 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
b4db40bf 14883 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14884 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14885 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14886 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9ebbca7d 14887 {
f676971a
EC
14888 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14889 GEN_INT (info->gp_save_offset
14890 + sp_offset
9ebbca7d
GK
14891 + reg_size * i));
14892 rtx mem = gen_rtx_MEM (reg_mode, addr);
ba4828e0 14893
a3170dc6 14894 /* Restore 64-bit quantities for SPE. */
c19de7aa 14895 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14896 {
14897 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14898 rtx b;
14899
14900 if (!SPE_CONST_OFFSET_OK (offset))
14901 {
14902 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14903 emit_move_insn (b, GEN_INT (offset));
14904 }
14905 else
14906 b = GEN_INT (offset);
14907
14908 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
14909 mem = gen_rtx_MEM (V2SImode, addr);
14910 }
14911
ba4828e0 14912 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14913
f676971a 14914 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 14915 info->first_gp_reg_save + i), mem);
9ebbca7d 14916 }
9878760c 14917
9ebbca7d
GK
14918 /* Restore fpr's if we need to do it without calling a function. */
14919 if (restoring_FPRs_inline)
14920 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14921 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d
GK
14922 && ! call_used_regs[info->first_fp_reg_save+i]))
14923 {
14924 rtx addr, mem;
14925 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
14926 GEN_INT (info->fp_save_offset
14927 + sp_offset
a4f6c312 14928 + 8 * i));
9ebbca7d 14929 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 14930 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 14931
f676971a 14932 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
14933 info->first_fp_reg_save + i),
14934 mem);
14935 }
8d30c4ee 14936
9ebbca7d
GK
14937 /* If we saved cr, restore it here. Just those that were used. */
14938 if (info->cr_save_p)
979721f8 14939 {
9ebbca7d 14940 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 14941 int count = 0;
f676971a 14942
9ebbca7d 14943 if (using_mfcr_multiple)
979721f8 14944 {
9ebbca7d
GK
14945 for (i = 0; i < 8; i++)
14946 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
e35b9579 14947 count++;
37409796 14948 gcc_assert (count);
e35b9579
GK
14949 }
14950
14951 if (using_mfcr_multiple && count > 1)
14952 {
14953 rtvec p;
14954 int ndx;
f676971a 14955
e35b9579 14956 p = rtvec_alloc (count);
9ebbca7d 14957
e35b9579 14958 ndx = 0;
9ebbca7d
GK
14959 for (i = 0; i < 8; i++)
14960 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
14961 {
14962 rtvec r = rtvec_alloc (2);
14963 RTVEC_ELT (r, 0) = r12_rtx;
14964 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 14965 RTVEC_ELT (p, ndx) =
f676971a 14966 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 14967 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 14968 ndx++;
9ebbca7d
GK
14969 }
14970 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 14971 gcc_assert (ndx == count);
979721f8
MM
14972 }
14973 else
9ebbca7d
GK
14974 for (i = 0; i < 8; i++)
14975 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
979721f8 14976 {
f676971a 14977 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
14978 CR0_REGNO+i),
14979 r12_rtx));
979721f8 14980 }
979721f8
MM
14981 }
14982
9ebbca7d
GK
14983 /* If this is V.4, unwind the stack pointer after all of the loads
14984 have been done. We need to emit a block here so that sched
14985 doesn't decide to move the sp change before the register restores
14986 (which may not have any obvious dependency on the stack). This
14987 doesn't hurt performance, because there is no scheduling that can
14988 be done after this point. */
fc4767bb
JJ
14989 if (DEFAULT_ABI == ABI_V4
14990 || current_function_calls_eh_return)
b6c9286a 14991 {
9ebbca7d 14992 if (frame_reg_rtx != sp_reg_rtx)
c4ad648e 14993 rs6000_emit_stack_tie ();
b6c9286a 14994
9ebbca7d 14995 if (use_backchain_to_restore_sp)
b6c9286a 14996 {
9ebbca7d 14997 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
b6c9286a 14998 }
9ebbca7d 14999 else if (sp_offset != 0)
13f1623b 15000 {
5b71a4e7 15001 emit_insn (TARGET_32BIT
9ebbca7d
GK
15002 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15003 GEN_INT (sp_offset))
15004 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15005 GEN_INT (sp_offset)));
13f1623b 15006 }
9ebbca7d 15007 }
b6c9286a 15008
83720594
RH
15009 if (current_function_calls_eh_return)
15010 {
15011 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 15012 emit_insn (TARGET_32BIT
83720594
RH
15013 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15014 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15015 }
15016
9ebbca7d
GK
15017 if (!sibcall)
15018 {
15019 rtvec p;
15020 if (! restoring_FPRs_inline)
15021 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15022 else
15023 p = rtvec_alloc (2);
b6c9286a 15024
e35b9579 15025 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
15026 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15027 gen_rtx_REG (Pmode,
9ebbca7d 15028 LINK_REGISTER_REGNUM));
9ebbca7d
GK
15029
15030 /* If we have to restore more than two FP registers, branch to the
15031 restore function. It will return to our caller. */
15032 if (! restoring_FPRs_inline)
15033 {
15034 int i;
15035 char rname[30];
520a57c8 15036 const char *alloc_rname;
979721f8 15037
f676971a 15038 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 15039 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 15040 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15041 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15042 gen_rtx_SYMBOL_REF (Pmode,
15043 alloc_rname));
b6c9286a 15044
9ebbca7d
GK
15045 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15046 {
15047 rtx addr, mem;
15048 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15049 GEN_INT (info->fp_save_offset + 8*i));
15050 mem = gen_rtx_MEM (DFmode, addr);
ba4828e0 15051 set_mem_alias_set (mem, rs6000_sr_alias_set);
9ebbca7d 15052
f676971a 15053 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
15054 gen_rtx_SET (VOIDmode,
15055 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15056 mem);
b6c9286a
MM
15057 }
15058 }
f676971a 15059
9ebbca7d 15060 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 15061 }
9878760c
RK
15062}
15063
15064/* Write function epilogue. */
15065
08c148a8 15066static void
f676971a 15067rs6000_output_function_epilogue (FILE *file,
a2369ed3 15068 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 15069{
4697a36c 15070 rs6000_stack_t *info = rs6000_stack_info ();
9878760c 15071
9ebbca7d 15072 if (! HAVE_epilogue)
9878760c 15073 {
9ebbca7d
GK
15074 rtx insn = get_last_insn ();
15075 /* If the last insn was a BARRIER, we don't have to write anything except
15076 the trace table. */
15077 if (GET_CODE (insn) == NOTE)
15078 insn = prev_nonnote_insn (insn);
15079 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15080 {
9ebbca7d
GK
15081 /* This is slightly ugly, but at least we don't have two
15082 copies of the epilogue-emitting code. */
15083 start_sequence ();
15084
15085 /* A NOTE_INSN_DELETED is supposed to be at the start
15086 and end of the "toplevel" insn chain. */
2e040219 15087 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15088 rs6000_emit_epilogue (FALSE);
2e040219 15089 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15090
a3c9585f 15091 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15092 {
15093 rtx insn;
15094 unsigned addr = 0;
15095 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15096 {
15097 INSN_ADDRESSES_NEW (insn, addr);
15098 addr += 4;
15099 }
15100 }
15101
9ebbca7d 15102 if (TARGET_DEBUG_STACK)
a4f6c312 15103 debug_rtx_list (get_insns (), 100);
c9d691e9 15104 final (get_insns (), file, FALSE);
9ebbca7d 15105 end_sequence ();
4697a36c 15106 }
9878760c 15107 }
b4ac57ab 15108
efdba735
SH
15109#if TARGET_MACHO
15110 macho_branch_islands ();
0e5da0be
GK
15111 /* Mach-O doesn't support labels at the end of objects, so if
15112 it looks like we might want one, insert a NOP. */
15113 {
15114 rtx insn = get_last_insn ();
15115 while (insn
15116 && NOTE_P (insn)
15117 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
15118 insn = PREV_INSN (insn);
f676971a
EC
15119 if (insn
15120 && (LABEL_P (insn)
0e5da0be
GK
15121 || (NOTE_P (insn)
15122 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
15123 fputs ("\tnop\n", file);
15124 }
15125#endif
15126
9b30bae2 15127 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
15128 on its format.
15129
15130 We don't output a traceback table if -finhibit-size-directive was
15131 used. The documentation for -finhibit-size-directive reads
15132 ``don't output a @code{.size} assembler directive, or anything
15133 else that would cause trouble if the function is split in the
15134 middle, and the two halves are placed at locations far apart in
15135 memory.'' The traceback table has this property, since it
15136 includes the offset from the start of the function to the
4d30c363
MM
15137 traceback table itself.
15138
15139 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 15140 different traceback table. */
57ac7be9
AM
15141 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
15142 && rs6000_traceback != traceback_none)
9b30bae2 15143 {
69c75916 15144 const char *fname = NULL;
3ac88239 15145 const char *language_string = lang_hooks.name;
6041bf2f 15146 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 15147 int i;
57ac7be9
AM
15148 int optional_tbtab;
15149
15150 if (rs6000_traceback == traceback_full)
15151 optional_tbtab = 1;
15152 else if (rs6000_traceback == traceback_part)
15153 optional_tbtab = 0;
15154 else
15155 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 15156
69c75916
AM
15157 if (optional_tbtab)
15158 {
15159 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15160 while (*fname == '.') /* V.4 encodes . in the name */
15161 fname++;
15162
15163 /* Need label immediately before tbtab, so we can compute
15164 its offset from the function start. */
15165 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15166 ASM_OUTPUT_LABEL (file, fname);
15167 }
314fc5a9
ILT
15168
15169 /* The .tbtab pseudo-op can only be used for the first eight
15170 expressions, since it can't handle the possibly variable
15171 length fields that follow. However, if you omit the optional
15172 fields, the assembler outputs zeros for all optional fields
15173 anyways, giving each variable length field is minimum length
15174 (as defined in sys/debug.h). Thus we can not use the .tbtab
15175 pseudo-op at all. */
15176
15177 /* An all-zero word flags the start of the tbtab, for debuggers
15178 that have to find it by searching forward from the entry
15179 point or from the current pc. */
19d2d16f 15180 fputs ("\t.long 0\n", file);
314fc5a9
ILT
15181
15182 /* Tbtab format type. Use format type 0. */
19d2d16f 15183 fputs ("\t.byte 0,", file);
314fc5a9 15184
5fc921c1
DE
15185 /* Language type. Unfortunately, there does not seem to be any
15186 official way to discover the language being compiled, so we
15187 use language_string.
15188 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
15189 Java is 13. Objective-C is 14. */
15190 if (! strcmp (language_string, "GNU C"))
314fc5a9 15191 i = 0;
6de9cd9a
DN
15192 else if (! strcmp (language_string, "GNU F77")
15193 || ! strcmp (language_string, "GNU F95"))
314fc5a9 15194 i = 1;
8b83775b 15195 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 15196 i = 2;
5fc921c1
DE
15197 else if (! strcmp (language_string, "GNU Ada"))
15198 i = 3;
314fc5a9
ILT
15199 else if (! strcmp (language_string, "GNU C++"))
15200 i = 9;
9517ead8
AG
15201 else if (! strcmp (language_string, "GNU Java"))
15202 i = 13;
5fc921c1
DE
15203 else if (! strcmp (language_string, "GNU Objective-C"))
15204 i = 14;
314fc5a9 15205 else
37409796 15206 gcc_unreachable ();
314fc5a9
ILT
15207 fprintf (file, "%d,", i);
15208
15209 /* 8 single bit fields: global linkage (not set for C extern linkage,
15210 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15211 from start of procedure stored in tbtab, internal function, function
15212 has controlled storage, function has no toc, function uses fp,
15213 function logs/aborts fp operations. */
15214 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
15215 fprintf (file, "%d,",
15216 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
15217
15218 /* 6 bitfields: function is interrupt handler, name present in
15219 proc table, function calls alloca, on condition directives
15220 (controls stack walks, 3 bits), saves condition reg, saves
15221 link reg. */
15222 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15223 set up as a frame pointer, even when there is no alloca call. */
15224 fprintf (file, "%d,",
6041bf2f
DE
15225 ((optional_tbtab << 6)
15226 | ((optional_tbtab & frame_pointer_needed) << 5)
15227 | (info->cr_save_p << 1)
15228 | (info->lr_save_p)));
314fc5a9 15229
6041bf2f 15230 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
15231 (6 bits). */
15232 fprintf (file, "%d,",
4697a36c 15233 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
15234
15235 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15236 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15237
6041bf2f
DE
15238 if (optional_tbtab)
15239 {
15240 /* Compute the parameter info from the function decl argument
15241 list. */
15242 tree decl;
15243 int next_parm_info_bit = 31;
314fc5a9 15244
6041bf2f
DE
15245 for (decl = DECL_ARGUMENTS (current_function_decl);
15246 decl; decl = TREE_CHAIN (decl))
15247 {
15248 rtx parameter = DECL_INCOMING_RTL (decl);
15249 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 15250
6041bf2f
DE
15251 if (GET_CODE (parameter) == REG)
15252 {
ebb109ad 15253 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
15254 {
15255 int bits;
15256
15257 float_parms++;
15258
37409796
NS
15259 switch (mode)
15260 {
15261 case SFmode:
15262 bits = 0x2;
15263 break;
15264
15265 case DFmode:
15266 case TFmode:
15267 bits = 0x3;
15268 break;
15269
15270 default:
15271 gcc_unreachable ();
15272 }
6041bf2f
DE
15273
15274 /* If only one bit will fit, don't or in this entry. */
15275 if (next_parm_info_bit > 0)
15276 parm_info |= (bits << (next_parm_info_bit - 1));
15277 next_parm_info_bit -= 2;
15278 }
15279 else
15280 {
15281 fixed_parms += ((GET_MODE_SIZE (mode)
15282 + (UNITS_PER_WORD - 1))
15283 / UNITS_PER_WORD);
15284 next_parm_info_bit -= 1;
15285 }
15286 }
15287 }
15288 }
314fc5a9
ILT
15289
15290 /* Number of fixed point parameters. */
15291 /* This is actually the number of words of fixed point parameters; thus
15292 an 8-byte struct counts as 2, and thus the maximum value is 8. */
15293 fprintf (file, "%d,", fixed_parms);
15294
15295 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15296 all on stack. */
15297 /* This is actually the number of fp registers that hold parameters;
15298 and thus the maximum value is 13. */
15299 /* Set parameters on stack bit if parameters are not in their original
15300 registers, regardless of whether they are on the stack? Xlc
15301 seems to set the bit when not optimizing. */
15302 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15303
6041bf2f
DE
15304 if (! optional_tbtab)
15305 return;
15306
314fc5a9
ILT
15307 /* Optional fields follow. Some are variable length. */
15308
15309 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15310 11 double float. */
15311 /* There is an entry for each parameter in a register, in the order that
15312 they occur in the parameter list. Any intervening arguments on the
15313 stack are ignored. If the list overflows a long (max possible length
15314 34 bits) then completely leave off all elements that don't fit. */
15315 /* Only emit this long if there was at least one parameter. */
15316 if (fixed_parms || float_parms)
15317 fprintf (file, "\t.long %d\n", parm_info);
15318
15319 /* Offset from start of code to tb table. */
19d2d16f 15320 fputs ("\t.long ", file);
314fc5a9 15321 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
15322 if (TARGET_AIX)
15323 RS6000_OUTPUT_BASENAME (file, fname);
15324 else
15325 assemble_name (file, fname);
15326 putc ('-', file);
15327 rs6000_output_function_entry (file, fname);
19d2d16f 15328 putc ('\n', file);
314fc5a9
ILT
15329
15330 /* Interrupt handler mask. */
15331 /* Omit this long, since we never set the interrupt handler bit
15332 above. */
15333
15334 /* Number of CTL (controlled storage) anchors. */
15335 /* Omit this long, since the has_ctl bit is never set above. */
15336
15337 /* Displacement into stack of each CTL anchor. */
15338 /* Omit this list of longs, because there are no CTL anchors. */
15339
15340 /* Length of function name. */
69c75916
AM
15341 if (*fname == '*')
15342 ++fname;
296b8152 15343 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
15344
15345 /* Function name. */
15346 assemble_string (fname, strlen (fname));
15347
15348 /* Register for alloca automatic storage; this is always reg 31.
15349 Only emit this if the alloca bit was set above. */
15350 if (frame_pointer_needed)
19d2d16f 15351 fputs ("\t.byte 31\n", file);
b1765bde
DE
15352
15353 fputs ("\t.align 2\n", file);
9b30bae2 15354 }
9878760c 15355}
17167fd8 15356\f
a4f6c312
SS
15357/* A C compound statement that outputs the assembler code for a thunk
15358 function, used to implement C++ virtual function calls with
15359 multiple inheritance. The thunk acts as a wrapper around a virtual
15360 function, adjusting the implicit object parameter before handing
15361 control off to the real function.
15362
15363 First, emit code to add the integer DELTA to the location that
15364 contains the incoming first argument. Assume that this argument
15365 contains a pointer, and is the one used to pass the `this' pointer
15366 in C++. This is the incoming argument *before* the function
15367 prologue, e.g. `%o0' on a sparc. The addition must preserve the
15368 values of all other incoming arguments.
17167fd8
MM
15369
15370 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
15371 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
15372 not touch the return address. Hence returning from FUNCTION will
15373 return to whoever called the current `thunk'.
17167fd8 15374
a4f6c312
SS
15375 The effect must be as if FUNCTION had been called directly with the
15376 adjusted first argument. This macro is responsible for emitting
15377 all of the code for a thunk function; output_function_prologue()
15378 and output_function_epilogue() are not invoked.
17167fd8 15379
a4f6c312
SS
15380 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
15381 been extracted from it.) It might possibly be useful on some
15382 targets, but probably not.
17167fd8 15383
a4f6c312
SS
15384 If you do not define this macro, the target-independent code in the
15385 C++ frontend will generate a less efficient heavyweight thunk that
15386 calls FUNCTION instead of jumping to it. The generic approach does
15387 not support varargs. */
17167fd8 15388
3961e8fe 15389static void
f676971a
EC
15390rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
15391 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 15392 tree function)
17167fd8 15393{
5b71a4e7 15394 rtx this, insn, funexp;
17167fd8 15395
5b71a4e7 15396 reload_completed = 1;
fe3ad572 15397 epilogue_completed = 1;
5b71a4e7 15398 no_new_pseudos = 1;
6429e3be 15399 reset_block_changes ();
56a7189a 15400
5b71a4e7 15401 /* Mark the end of the (empty) prologue. */
2e040219 15402 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 15403
5b71a4e7
DE
15404 /* Find the "this" pointer. If the function returns a structure,
15405 the structure return pointer is in r3. */
61f71b34 15406 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 15407 this = gen_rtx_REG (Pmode, 4);
56a7189a 15408 else
5b71a4e7 15409 this = gen_rtx_REG (Pmode, 3);
17167fd8 15410
5b71a4e7
DE
15411 /* Apply the constant offset, if required. */
15412 if (delta)
15413 {
15414 rtx delta_rtx = GEN_INT (delta);
15415 emit_insn (TARGET_32BIT
15416 ? gen_addsi3 (this, this, delta_rtx)
15417 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
15418 }
15419
5b71a4e7
DE
15420 /* Apply the offset from the vtable, if required. */
15421 if (vcall_offset)
17167fd8 15422 {
5b71a4e7
DE
15423 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
15424 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 15425
5b71a4e7 15426 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
15427 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
15428 {
15429 emit_insn (TARGET_32BIT
15430 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
15431 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
15432 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
15433 }
15434 else
15435 {
15436 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
15437
15438 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
15439 }
5b71a4e7
DE
15440 emit_insn (TARGET_32BIT
15441 ? gen_addsi3 (this, this, tmp)
15442 : gen_adddi3 (this, this, tmp));
17167fd8
MM
15443 }
15444
5b71a4e7
DE
15445 /* Generate a tail call to the target function. */
15446 if (!TREE_USED (function))
15447 {
15448 assemble_external (function);
15449 TREE_USED (function) = 1;
15450 }
15451 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 15452 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
15453
15454#if TARGET_MACHO
ab82a49f 15455 if (MACHOPIC_INDIRECT)
5b71a4e7 15456 funexp = machopic_indirect_call_target (funexp);
ee890fe2 15457#endif
5b71a4e7
DE
15458
15459 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 15460 generate sibcall RTL explicitly. */
5b71a4e7
DE
15461 insn = emit_call_insn (
15462 gen_rtx_PARALLEL (VOIDmode,
15463 gen_rtvec (4,
15464 gen_rtx_CALL (VOIDmode,
15465 funexp, const0_rtx),
15466 gen_rtx_USE (VOIDmode, const0_rtx),
15467 gen_rtx_USE (VOIDmode,
15468 gen_rtx_REG (SImode,
15469 LINK_REGISTER_REGNUM)),
15470 gen_rtx_RETURN (VOIDmode))));
15471 SIBLING_CALL_P (insn) = 1;
15472 emit_barrier ();
15473
15474 /* Run just enough of rest_of_compilation to get the insns emitted.
15475 There's not really enough bulk here to make other passes such as
15476 instruction scheduling worthwhile. Note that use_thunk calls
15477 assemble_start_function and assemble_end_function. */
15478 insn = get_insns ();
a2855205 15479 insn_locators_initialize ();
5b71a4e7
DE
15480 shorten_branches (insn);
15481 final_start_function (insn, file, 1);
c9d691e9 15482 final (insn, file, 1);
5b71a4e7
DE
15483 final_end_function ();
15484
15485 reload_completed = 0;
fe3ad572 15486 epilogue_completed = 0;
5b71a4e7 15487 no_new_pseudos = 0;
9ebbca7d 15488}
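/* Informal sketch of what the thunk emitted above computes (C-like
   pseudocode for illustration only, not the generated RTL; the casts
   are purely expository):

     char *p = (char *) this + delta;
     if (vcall_offset)
       p += *(ptrdiff_t *) (*(char **) p + vcall_offset);
     tail-jump to FUNCTION with p as the first argument;

   i.e. DELTA is a constant adjustment and VCALL_OFFSET indexes the
   object's vtable to pick up an additional run-time adjustment.  */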
9ebbca7d
GK
15489\f
15490/* A quick summary of the various types of 'constant-pool tables'
15491 under PowerPC:
15492
f676971a 15493 Target Flags Name One table per
9ebbca7d
GK
15494 AIX (none) AIX TOC object file
15495 AIX -mfull-toc AIX TOC object file
15496 AIX -mminimal-toc AIX minimal TOC translation unit
15497 SVR4/EABI (none) SVR4 SDATA object file
15498 SVR4/EABI -fpic SVR4 pic object file
15499 SVR4/EABI -fPIC SVR4 PIC translation unit
15500 SVR4/EABI -mrelocatable EABI TOC function
15501 SVR4/EABI -maix AIX TOC object file
f676971a 15502 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
15503 AIX minimal TOC translation unit
15504
15505 Name Reg. Set by entries contains:
15506 made by addrs? fp? sum?
15507
15508 AIX TOC 2 crt0 as Y option option
15509 AIX minimal TOC 30 prolog gcc Y Y option
15510 SVR4 SDATA 13 crt0 gcc N Y N
15511 SVR4 pic 30 prolog ld Y not yet N
15512 SVR4 PIC 30 prolog gcc Y option option
15513 EABI TOC 30 prolog gcc Y option option
15514
15515*/
15516
9ebbca7d
GK
15517/* Hash functions for the hash table. */
15518
15519static unsigned
a2369ed3 15520rs6000_hash_constant (rtx k)
9ebbca7d 15521{
46b33600
RH
15522 enum rtx_code code = GET_CODE (k);
15523 enum machine_mode mode = GET_MODE (k);
15524 unsigned result = (code << 3) ^ mode;
15525 const char *format;
15526 int flen, fidx;
f676971a 15527
46b33600
RH
15528 format = GET_RTX_FORMAT (code);
15529 flen = strlen (format);
15530 fidx = 0;
9ebbca7d 15531
46b33600
RH
15532 switch (code)
15533 {
15534 case LABEL_REF:
15535 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
15536
15537 case CONST_DOUBLE:
15538 if (mode != VOIDmode)
15539 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
15540 flen = 2;
15541 break;
15542
15543 case CODE_LABEL:
15544 fidx = 3;
15545 break;
15546
15547 default:
15548 break;
15549 }
9ebbca7d
GK
15550
15551 for (; fidx < flen; fidx++)
15552 switch (format[fidx])
15553 {
15554 case 's':
15555 {
15556 unsigned i, len;
15557 const char *str = XSTR (k, fidx);
15558 len = strlen (str);
15559 result = result * 613 + len;
15560 for (i = 0; i < len; i++)
15561 result = result * 613 + (unsigned) str[i];
17167fd8
MM
15562 break;
15563 }
9ebbca7d
GK
15564 case 'u':
15565 case 'e':
15566 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
15567 break;
15568 case 'i':
15569 case 'n':
15570 result = result * 613 + (unsigned) XINT (k, fidx);
15571 break;
15572 case 'w':
15573 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
15574 result = result * 613 + (unsigned) XWINT (k, fidx);
15575 else
15576 {
15577 size_t i;
9390387d 15578 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
15579 result = result * 613 + (unsigned) (XWINT (k, fidx)
15580 >> CHAR_BIT * i);
15581 }
15582 break;
09501938
DE
15583 case '0':
15584 break;
9ebbca7d 15585 default:
37409796 15586 gcc_unreachable ();
9ebbca7d 15587 }
46b33600 15588
9ebbca7d
GK
15589 return result;
15590}
15591
15592static unsigned
a2369ed3 15593toc_hash_function (const void *hash_entry)
9ebbca7d 15594{
f676971a 15595 const struct toc_hash_struct *thc =
a9098fd0
GK
15596 (const struct toc_hash_struct *) hash_entry;
15597 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
15598}
15599
15600/* Compare H1 and H2 for equivalence. */
15601
15602static int
a2369ed3 15603toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
15604{
15605 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
15606 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
15607
a9098fd0
GK
15608 if (((const struct toc_hash_struct *) h1)->key_mode
15609 != ((const struct toc_hash_struct *) h2)->key_mode)
15610 return 0;
15611
5692c7bc 15612 return rtx_equal_p (r1, r2);
9ebbca7d
GK
15613}
15614
28e510bd
MM
15615/* These are the names given by the C++ front-end to vtables, and
15616 vtable-like objects. Ideally, this logic should not be here;
15617 instead, there should be some programmatic way of inquiring as
15618 to whether or not an object is a vtable. */
15619
15620#define VTABLE_NAME_P(NAME) \
9390387d 15621 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
15622 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
15623 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 15624 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 15625 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
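/* Informal note: "_vt." is the old GNU v2 C++ vtable prefix, while
   "_ZTV", "_ZTT", "_ZTI" and "_ZTC" are the Itanium C++ ABI manglings
   for vtables, VTTs, typeinfo objects and construction vtables,
   respectively.  */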
28e510bd
MM
15626
15627void
a2369ed3 15628rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
15629{
15630 /* Currently C++ toc references to vtables can be emitted before it
15631 is decided whether the vtable is public or private. If this is
15632 the case, then the linker will eventually complain that there is
f676971a 15633 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
15634 we emit the TOC reference to reference the symbol and not the
15635 section. */
15636 const char *name = XSTR (x, 0);
54ee9799 15637
f676971a 15638 if (VTABLE_NAME_P (name))
54ee9799
DE
15639 {
15640 RS6000_OUTPUT_BASENAME (file, name);
15641 }
15642 else
15643 assemble_name (file, name);
28e510bd
MM
15644}
15645
a4f6c312
SS
15646/* Output a TOC entry. We derive the entry name from what is being
15647 written. */
9878760c
RK
15648
15649void
a2369ed3 15650output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
15651{
15652 char buf[256];
3cce094d 15653 const char *name = buf;
ec940faa 15654 const char *real_name;
9878760c 15655 rtx base = x;
16fdeb48 15656 HOST_WIDE_INT offset = 0;
9878760c 15657
37409796 15658 gcc_assert (!TARGET_NO_TOC);
4697a36c 15659
9ebbca7d
GK
15660 /* When the linker won't eliminate them, don't output duplicate
15661 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
15662 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15663 CODE_LABELs. */
15664 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
15665 {
15666 struct toc_hash_struct *h;
15667 void * * found;
f676971a 15668
17211ab5 15669 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 15670 time because GGC is not initialized at that point. */
17211ab5 15671 if (toc_hash_table == NULL)
f676971a 15672 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
15673 toc_hash_eq, NULL);
15674
9ebbca7d
GK
15675 h = ggc_alloc (sizeof (*h));
15676 h->key = x;
a9098fd0 15677 h->key_mode = mode;
9ebbca7d 15678 h->labelno = labelno;
f676971a 15679
9ebbca7d
GK
15680 found = htab_find_slot (toc_hash_table, h, 1);
15681 if (*found == NULL)
15682 *found = h;
f676971a 15683 else /* This is indeed a duplicate.
9ebbca7d
GK
15684 Set this label equal to that label. */
15685 {
15686 fputs ("\t.set ", file);
15687 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
15688 fprintf (file, "%d,", labelno);
15689 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 15690 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
15691 found)->labelno));
15692 return;
15693 }
15694 }
15695
15696 /* If we're going to put a double constant in the TOC, make sure it's
15697 aligned properly when strict alignment is on. */
ff1720ed
RK
15698 if (GET_CODE (x) == CONST_DOUBLE
15699 && STRICT_ALIGNMENT
a9098fd0 15700 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
15701 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
15702 ASM_OUTPUT_ALIGN (file, 3);
15703 }
15704
4977bab6 15705 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 15706
37c37a57
RK
15707 /* Handle FP constants specially. Note that if we have a minimal
15708 TOC, things we put here aren't actually in the TOC, so we can allow
15709 FP constants. */
fcce224d
DE
15710 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
15711 {
15712 REAL_VALUE_TYPE rv;
15713 long k[4];
15714
15715 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15716 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
15717
15718 if (TARGET_64BIT)
15719 {
15720 if (TARGET_MINIMAL_TOC)
15721 fputs (DOUBLE_INT_ASM_OP, file);
15722 else
15723 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15724 k[0] & 0xffffffff, k[1] & 0xffffffff,
15725 k[2] & 0xffffffff, k[3] & 0xffffffff);
15726 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
15727 k[0] & 0xffffffff, k[1] & 0xffffffff,
15728 k[2] & 0xffffffff, k[3] & 0xffffffff);
15729 return;
15730 }
15731 else
15732 {
15733 if (TARGET_MINIMAL_TOC)
15734 fputs ("\t.long ", file);
15735 else
15736 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15737 k[0] & 0xffffffff, k[1] & 0xffffffff,
15738 k[2] & 0xffffffff, k[3] & 0xffffffff);
15739 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15740 k[0] & 0xffffffff, k[1] & 0xffffffff,
15741 k[2] & 0xffffffff, k[3] & 0xffffffff);
15742 return;
15743 }
15744 }
15745 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9878760c 15746 {
042259f2
DE
15747 REAL_VALUE_TYPE rv;
15748 long k[2];
0adc764e 15749
042259f2
DE
15750 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15751 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 15752
13ded975
DE
15753 if (TARGET_64BIT)
15754 {
15755 if (TARGET_MINIMAL_TOC)
2bfcf297 15756 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 15757 else
2f0552b6
AM
15758 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15759 k[0] & 0xffffffff, k[1] & 0xffffffff);
15760 fprintf (file, "0x%lx%08lx\n",
15761 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
15762 return;
15763 }
1875cc88 15764 else
13ded975
DE
15765 {
15766 if (TARGET_MINIMAL_TOC)
2bfcf297 15767 fputs ("\t.long ", file);
13ded975 15768 else
2f0552b6
AM
15769 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
15770 k[0] & 0xffffffff, k[1] & 0xffffffff);
15771 fprintf (file, "0x%lx,0x%lx\n",
15772 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
15773 return;
15774 }
9878760c 15775 }
a9098fd0 15776 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9878760c 15777 {
042259f2
DE
15778 REAL_VALUE_TYPE rv;
15779 long l;
9878760c 15780
042259f2
DE
15781 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
15782 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
15783
31bfaa0b
DE
15784 if (TARGET_64BIT)
15785 {
15786 if (TARGET_MINIMAL_TOC)
2bfcf297 15787 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 15788 else
2f0552b6
AM
15789 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15790 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
15791 return;
15792 }
042259f2 15793 else
31bfaa0b
DE
15794 {
15795 if (TARGET_MINIMAL_TOC)
2bfcf297 15796 fputs ("\t.long ", file);
31bfaa0b 15797 else
2f0552b6
AM
15798 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
15799 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
15800 return;
15801 }
042259f2 15802 }
f176e826 15803 else if (GET_MODE (x) == VOIDmode
a9098fd0 15804 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 15805 {
e2c953b6 15806 unsigned HOST_WIDE_INT low;
042259f2
DE
15807 HOST_WIDE_INT high;
15808
15809 if (GET_CODE (x) == CONST_DOUBLE)
15810 {
15811 low = CONST_DOUBLE_LOW (x);
15812 high = CONST_DOUBLE_HIGH (x);
15813 }
15814 else
15815#if HOST_BITS_PER_WIDE_INT == 32
15816 {
15817 low = INTVAL (x);
0858c623 15818 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
15819 }
15820#else
15821 {
c4ad648e
AM
15822 low = INTVAL (x) & 0xffffffff;
15823 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
15824 }
15825#endif
9878760c 15826
a9098fd0
GK
15827 /* TOC entries are always Pmode-sized, but since this
15828 is a big-endian machine, if we're putting smaller
15829 integer constants in the TOC we have to pad them.
15830 (This is still a win over putting the constants in
15831 a separate constant pool, because then we'd have
02a4ec28
FS
15832 to have both a TOC entry _and_ the actual constant.)
15833
15834 For a 32-bit target, CONST_INT values are loaded and shifted
15835 entirely within `low' and can be stored in one TOC entry. */
15836
37409796
NS
15837 /* It would be easy to make this work, but it doesn't now. */
15838 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
15839
15840 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
15841 {
15842#if HOST_BITS_PER_WIDE_INT == 32
15843 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
15844 POINTER_SIZE, &low, &high, 0);
15845#else
15846 low |= high << 32;
15847 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
15848 high = (HOST_WIDE_INT) low >> 32;
15849 low &= 0xffffffff;
15850#endif
15851 }
a9098fd0 15852
13ded975
DE
15853 if (TARGET_64BIT)
15854 {
15855 if (TARGET_MINIMAL_TOC)
2bfcf297 15856 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 15857 else
2f0552b6
AM
15858 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
15859 (long) high & 0xffffffff, (long) low & 0xffffffff);
15860 fprintf (file, "0x%lx%08lx\n",
15861 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
15862 return;
15863 }
1875cc88 15864 else
13ded975 15865 {
02a4ec28
FS
15866 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
15867 {
15868 if (TARGET_MINIMAL_TOC)
2bfcf297 15869 fputs ("\t.long ", file);
02a4ec28 15870 else
2bfcf297 15871 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
15872 (long) high & 0xffffffff, (long) low & 0xffffffff);
15873 fprintf (file, "0x%lx,0x%lx\n",
15874 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 15875 }
13ded975 15876 else
02a4ec28
FS
15877 {
15878 if (TARGET_MINIMAL_TOC)
2bfcf297 15879 fputs ("\t.long ", file);
02a4ec28 15880 else
2f0552b6
AM
15881 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
15882 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 15883 }
13ded975
DE
15884 return;
15885 }
9878760c
RK
15886 }
15887
15888 if (GET_CODE (x) == CONST)
15889 {
37409796 15890 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 15891
9878760c
RK
15892 base = XEXP (XEXP (x, 0), 0);
15893 offset = INTVAL (XEXP (XEXP (x, 0), 1));
15894 }
f676971a 15895
37409796
NS
15896 switch (GET_CODE (base))
15897 {
15898 case SYMBOL_REF:
15899 name = XSTR (base, 0);
15900 break;
15901
15902 case LABEL_REF:
15903 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
15904 CODE_LABEL_NUMBER (XEXP (base, 0)));
15905 break;
15906
15907 case CODE_LABEL:
15908 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
15909 break;
15910
15911 default:
15912 gcc_unreachable ();
15913 }
9878760c 15914
772c5265 15915 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 15916 if (TARGET_MINIMAL_TOC)
2bfcf297 15917 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
15918 else
15919 {
b6c9286a 15920 fprintf (file, "\t.tc %s", real_name);
9878760c 15921
1875cc88 15922 if (offset < 0)
16fdeb48 15923 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 15924 else if (offset)
16fdeb48 15925 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 15926
19d2d16f 15927 fputs ("[TC],", file);
1875cc88 15928 }
581bc4de
MM
15929
15930 /* Currently C++ toc references to vtables can be emitted before it
15931 is decided whether the vtable is public or private. If this is
15932 the case, then the linker will eventually complain that there is
15933 a TOC reference to an unknown section. Thus, for vtables only,
15934 we emit the TOC reference to reference the symbol and not the
15935 section. */
28e510bd 15936 if (VTABLE_NAME_P (name))
581bc4de 15937 {
54ee9799 15938 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 15939 if (offset < 0)
16fdeb48 15940 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 15941 else if (offset > 0)
16fdeb48 15942 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
15943 }
15944 else
15945 output_addr_const (file, x);
19d2d16f 15946 putc ('\n', file);
9878760c
RK
15947}
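/* Informal example of the output above (symbol and offset chosen for
   illustration): for a SYMBOL_REF "foo" plus offset 8, with a full TOC
   on a 32-bit target, output_toc emits roughly

	.tc foo.P8[TC],foo+8

   while with -mminimal-toc the entry is just ".long foo+8" (or a
   doubleword directive on 64-bit targets).  */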
15948\f
15949/* Output an assembler pseudo-op to write an ASCII string of N characters
15950 starting at P to FILE.
15951
15952 On the RS/6000, we have to do this using the .byte operation and
15953 write out special characters outside the quoted string.
15954 Also, the assembler is broken; very long strings are truncated,
a4f6c312 15955 so we must artificially break them up early. */
9878760c
RK
15956
15957void
a2369ed3 15958output_ascii (FILE *file, const char *p, int n)
9878760c
RK
15959{
15960 char c;
15961 int i, count_string;
d330fd93
KG
15962 const char *for_string = "\t.byte \"";
15963 const char *for_decimal = "\t.byte ";
15964 const char *to_close = NULL;
9878760c
RK
15965
15966 count_string = 0;
15967 for (i = 0; i < n; i++)
15968 {
15969 c = *p++;
15970 if (c >= ' ' && c < 0177)
15971 {
15972 if (for_string)
15973 fputs (for_string, file);
15974 putc (c, file);
15975
15976 /* Write two quotes to get one. */
15977 if (c == '"')
15978 {
15979 putc (c, file);
15980 ++count_string;
15981 }
15982
15983 for_string = NULL;
15984 for_decimal = "\"\n\t.byte ";
15985 to_close = "\"\n";
15986 ++count_string;
15987
15988 if (count_string >= 512)
15989 {
15990 fputs (to_close, file);
15991
15992 for_string = "\t.byte \"";
15993 for_decimal = "\t.byte ";
15994 to_close = NULL;
15995 count_string = 0;
15996 }
15997 }
15998 else
15999 {
16000 if (for_decimal)
16001 fputs (for_decimal, file);
16002 fprintf (file, "%d", c);
16003
16004 for_string = "\n\t.byte \"";
16005 for_decimal = ", ";
16006 to_close = "\n";
16007 count_string = 0;
16008 }
16009 }
16010
16011 /* Now close the string if we have written one. Then end the line. */
16012 if (to_close)
9ebbca7d 16013 fputs (to_close, file);
9878760c
RK
16014}
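/* Informal example: output_ascii (file, "Hi\n", 3) produces

	.byte "Hi"
	.byte 10

   i.e. printable characters are grouped into a quoted .byte string and
   the newline is emitted as a separate decimal byte.  */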
16015\f
16016/* Generate a unique section name for FILENAME for a section type
16017 represented by SECTION_DESC. Output goes into BUF.
16018
16019 SECTION_DESC can be any string, as long as it is different for each
16020 possible section type.
16021
16022 We name the section in the same manner as xlc. The name begins with an
16023 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
16024 names) with the last period replaced by the string SECTION_DESC. If
16025 FILENAME does not contain a period, SECTION_DESC is appended to the end of
16026 the name. */
9878760c
RK
16027
16028void
f676971a 16029rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 16030 const char *section_desc)
9878760c 16031{
9ebbca7d 16032 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
16033 char *p;
16034 int len;
9878760c
RK
16035
16036 after_last_slash = filename;
16037 for (q = filename; *q; q++)
11e5fe42
RK
16038 {
16039 if (*q == '/')
16040 after_last_slash = q + 1;
16041 else if (*q == '.')
16042 last_period = q;
16043 }
9878760c 16044
11e5fe42 16045 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 16046 *buf = (char *) xmalloc (len);
9878760c
RK
16047
16048 p = *buf;
16049 *p++ = '_';
16050
16051 for (q = after_last_slash; *q; q++)
16052 {
11e5fe42 16053 if (q == last_period)
c4ad648e 16054 {
9878760c
RK
16055 strcpy (p, section_desc);
16056 p += strlen (section_desc);
e3981aab 16057 break;
c4ad648e 16058 }
9878760c 16059
e9a780ec 16060 else if (ISALNUM (*q))
c4ad648e 16061 *p++ = *q;
9878760c
RK
16062 }
16063
11e5fe42 16064 if (last_period == 0)
9878760c
RK
16065 strcpy (p, section_desc);
16066 else
16067 *p = '\0';
16068}
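/* Informal example (SECTION_DESC value chosen for illustration): with
   FILENAME "src/foo.c" and SECTION_DESC ".ro_" the generated name is
   "_foo.ro_"; a FILENAME with no period, such as "foo", simply gets
   SECTION_DESC appended, giving "_foo.ro_" as well.  */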
e165f3f0 16069\f
a4f6c312 16070/* Emit profile function. */
411707f4 16071
411707f4 16072void
a2369ed3 16073output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16074{
858081ad
AH
16075 /* Non-standard profiling for kernels, which just saves LR then calls
16076 _mcount without worrying about arg saves. The idea is to change
16077 the function prologue as little as possible as it isn't easy to
16078 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16079 if (TARGET_PROFILE_KERNEL)
16080 return;
16081
8480e480
CC
16082 if (DEFAULT_ABI == ABI_AIX)
16083 {
9739c90c
JJ
16084#ifndef NO_PROFILE_COUNTERS
16085# define NO_PROFILE_COUNTERS 0
16086#endif
f676971a 16087 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
16088 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16089 else
16090 {
16091 char buf[30];
16092 const char *label_name;
16093 rtx fun;
411707f4 16094
9739c90c
JJ
16095 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16096 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16097 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 16098
9739c90c
JJ
16099 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16100 fun, Pmode);
16101 }
8480e480 16102 }
ee890fe2
SS
16103 else if (DEFAULT_ABI == ABI_DARWIN)
16104 {
d5fa86ba 16105 const char *mcount_name = RS6000_MCOUNT;
ee890fe2
SS
16106 int caller_addr_regno = LINK_REGISTER_REGNUM;
16107
16108 /* Be conservative and always set this, at least for now. */
16109 current_function_uses_pic_offset_table = 1;
16110
16111#if TARGET_MACHO
16112 /* For PIC code, set up a stub and collect the caller's address
16113 from r0, which is where the prologue puts it. */
11abc112
MM
16114 if (MACHOPIC_INDIRECT
16115 && current_function_uses_pic_offset_table)
16116 caller_addr_regno = 0;
ee890fe2
SS
16117#endif
16118 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16119 0, VOIDmode, 1,
16120 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16121 }
411707f4
CC
16122}
16123
a4f6c312 16124/* Write function profiler code. */
e165f3f0
RK
16125
16126void
a2369ed3 16127output_function_profiler (FILE *file, int labelno)
e165f3f0 16128{
3daf36a4 16129 char buf[100];
e165f3f0 16130
38c1f2d7 16131 switch (DEFAULT_ABI)
3daf36a4 16132 {
38c1f2d7 16133 default:
37409796 16134 gcc_unreachable ();
38c1f2d7
MM
16135
16136 case ABI_V4:
09eeeacb
AM
16137 if (!TARGET_32BIT)
16138 {
d4ee4d25 16139 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
16140 return;
16141 }
ffcfcb5f 16142 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 16143 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
16144 if (NO_PROFILE_COUNTERS)
16145 {
16146 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16147 reg_names[0], reg_names[1]);
16148 }
16149 else if (TARGET_SECURE_PLT && flag_pic)
16150 {
16151 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16152 reg_names[0], reg_names[1]);
16153 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16154 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16155 reg_names[12], reg_names[12]);
16156 assemble_name (file, buf);
16157 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16158 assemble_name (file, buf);
16159 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16160 }
16161 else if (flag_pic == 1)
38c1f2d7 16162 {
dfdfa60f 16163 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
16164 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16165 reg_names[0], reg_names[1]);
17167fd8 16166 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 16167 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 16168 assemble_name (file, buf);
17167fd8 16169 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 16170 }
9ebbca7d 16171 else if (flag_pic > 1)
38c1f2d7 16172 {
71625f3d
AM
16173 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16174 reg_names[0], reg_names[1]);
9ebbca7d 16175 /* Now, we need to get the address of the label. */
71625f3d 16176 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 16177 assemble_name (file, buf);
9ebbca7d
GK
16178 fputs ("-.\n1:", file);
16179 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 16180 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
16181 reg_names[0], reg_names[11]);
16182 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16183 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 16184 }
38c1f2d7
MM
16185 else
16186 {
17167fd8 16187 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 16188 assemble_name (file, buf);
dfdfa60f 16189 fputs ("@ha\n", file);
71625f3d
AM
16190 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16191 reg_names[0], reg_names[1]);
a260abc9 16192 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 16193 assemble_name (file, buf);
17167fd8 16194 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
16195 }
16196
50d440bc 16197 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
16198 fprintf (file, "\tbl %s%s\n",
16199 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
16200 break;
16201
16202 case ABI_AIX:
ee890fe2 16203 case ABI_DARWIN:
ffcfcb5f
AM
16204 if (!TARGET_PROFILE_KERNEL)
16205 {
a3c9585f 16206 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
16207 }
16208 else
16209 {
37409796 16210 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
16211
16212 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16213 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16214
6de9cd9a 16215 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
16216 {
16217 asm_fprintf (file, "\tstd %s,24(%s)\n",
16218 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16219 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16220 asm_fprintf (file, "\tld %s,24(%s)\n",
16221 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16222 }
16223 else
16224 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16225 }
38c1f2d7
MM
16226 break;
16227 }
e165f3f0 16228}
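/* Informal sketch of the ABI_V4 case above, for non-PIC code with
   profile counters (new mnemonics assumed; LPn stands for the internal
   label generated for this call site):

	mflr 0
	lis 12,LPn@ha
	stw 0,4(1)
	la 0,LPn@l(12)
	bl _mcount

   i.e. the link register is saved in the caller's frame and the
   address of the count label is passed to _mcount in r0.  */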
a251ffd0 16229
b54cf83a 16230\f
b54cf83a
DE
16231/* Power4 load update and store update instructions are cracked into a
16232 load or store and an integer insn which are executed in the same cycle.
16233 Branches have their own dispatch slot which does not count against the
16234 GCC issue rate, but it changes the program flow so there are no other
16235 instructions to issue in this cycle. */
16236
16237static int
f676971a
EC
16238rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16239 int verbose ATTRIBUTE_UNUSED,
a2369ed3 16240 rtx insn, int more)
b54cf83a
DE
16241{
16242 if (GET_CODE (PATTERN (insn)) == USE
16243 || GET_CODE (PATTERN (insn)) == CLOBBER)
16244 return more;
16245
ec507f2d 16246 if (rs6000_sched_groups)
b54cf83a 16247 {
cbe26ab8 16248 if (is_microcoded_insn (insn))
c4ad648e 16249 return 0;
cbe26ab8 16250 else if (is_cracked_insn (insn))
c4ad648e 16251 return more > 2 ? more - 2 : 0;
b54cf83a 16252 }
165b263e
DE
16253
16254 return more - 1;
b54cf83a
DE
16255}
16256
a251ffd0
TG
16257/* Adjust the cost of a scheduling dependency. Return the new cost of
16258 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16259
c237e94a 16260static int
0a4f0294 16261rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0
TG
16262{
16263 if (! recog_memoized (insn))
16264 return 0;
16265
16266 if (REG_NOTE_KIND (link) != 0)
16267 return 0;
16268
16269 if (REG_NOTE_KIND (link) == 0)
16270 {
ed947a96
DJ
16271 /* Data dependency; DEP_INSN writes a register that INSN reads
16272 some cycles later. */
c9dbf840
DE
16273
16274 /* Separate a load from a narrower, dependent store. */
16275 if (rs6000_sched_groups
16276 && GET_CODE (PATTERN (insn)) == SET
16277 && GET_CODE (PATTERN (dep_insn)) == SET
16278 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16279 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16280 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16281 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16282 return cost + 14;
16283
ed947a96
DJ
16284 switch (get_attr_type (insn))
16285 {
16286 case TYPE_JMPREG:
309323c2 16287 /* Tell the first scheduling pass about the latency between
ed947a96
DJ
16288 a mtctr and bctr (and mtlr and br/blr). The first
16289 scheduling pass will not know about this latency since
16290 the mtctr instruction, which has the latency associated
16291 to it, will be generated by reload. */
309323c2 16292 return TARGET_POWER ? 5 : 4;
ed947a96
DJ
16293 case TYPE_BRANCH:
16294 /* Leave some extra cycles between a compare and its
16295 dependent branch, to inhibit expensive mispredicts. */
309323c2
DE
16296 if ((rs6000_cpu_attr == CPU_PPC603
16297 || rs6000_cpu_attr == CPU_PPC604
16298 || rs6000_cpu_attr == CPU_PPC604E
16299 || rs6000_cpu_attr == CPU_PPC620
16300 || rs6000_cpu_attr == CPU_PPC630
16301 || rs6000_cpu_attr == CPU_PPC750
16302 || rs6000_cpu_attr == CPU_PPC7400
16303 || rs6000_cpu_attr == CPU_PPC7450
ec507f2d
DE
16304 || rs6000_cpu_attr == CPU_POWER4
16305 || rs6000_cpu_attr == CPU_POWER5)
ed947a96
DJ
16306 && recog_memoized (dep_insn)
16307 && (INSN_CODE (dep_insn) >= 0)
b54cf83a
DE
16308 && (get_attr_type (dep_insn) == TYPE_CMP
16309 || get_attr_type (dep_insn) == TYPE_COMPARE
ed947a96 16310 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9259f3b0
DE
16311 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
16312 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
ed947a96 16313 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
b54cf83a
DE
16314 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
16315 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
ed947a96
DJ
16316 return cost + 2;
16317 default:
16318 break;
16319 }
a251ffd0
TG
16320 /* Fall out to return default cost. */
16321 }
16322
16323 return cost;
16324}
b6c9286a 16325
cbe26ab8 16326/* The function returns true if INSN is microcoded.
839a4992 16327 Return false otherwise. */
cbe26ab8
DN
16328
16329static bool
16330is_microcoded_insn (rtx insn)
16331{
16332 if (!insn || !INSN_P (insn)
16333 || GET_CODE (PATTERN (insn)) == USE
16334 || GET_CODE (PATTERN (insn)) == CLOBBER)
16335 return false;
16336
ec507f2d 16337 if (rs6000_sched_groups)
cbe26ab8
DN
16338 {
16339 enum attr_type type = get_attr_type (insn);
16340 if (type == TYPE_LOAD_EXT_U
16341 || type == TYPE_LOAD_EXT_UX
16342 || type == TYPE_LOAD_UX
16343 || type == TYPE_STORE_UX
16344 || type == TYPE_MFCR)
c4ad648e 16345 return true;
cbe26ab8
DN
16346 }
16347
16348 return false;
16349}
16350
5c425df5 16351/* The function returns a nonzero value if INSN can be scheduled only
cbe26ab8
DN
16352 as the first insn in a dispatch group ("dispatch-slot restricted").
16353 In this case, the returned value indicates how many dispatch slots
16354 the insn occupies (at the beginning of the group).
79ae11c4
DN
16355 Return 0 otherwise. */
16356
cbe26ab8 16357static int
79ae11c4
DN
16358is_dispatch_slot_restricted (rtx insn)
16359{
16360 enum attr_type type;
16361
ec507f2d 16362 if (!rs6000_sched_groups)
79ae11c4
DN
16363 return 0;
16364
16365 if (!insn
16366 || insn == NULL_RTX
16367 || GET_CODE (insn) == NOTE
16368 || GET_CODE (PATTERN (insn)) == USE
16369 || GET_CODE (PATTERN (insn)) == CLOBBER)
16370 return 0;
16371
16372 type = get_attr_type (insn);
16373
ec507f2d
DE
16374 switch (type)
16375 {
16376 case TYPE_MFCR:
16377 case TYPE_MFCRF:
16378 case TYPE_MTCR:
16379 case TYPE_DELAYED_CR:
16380 case TYPE_CR_LOGICAL:
16381 case TYPE_MTJMPR:
16382 case TYPE_MFJMPR:
16383 return 1;
16384 case TYPE_IDIV:
16385 case TYPE_LDIV:
16386 return 2;
b52110d4
DE
16387 case TYPE_LOAD_L:
16388 case TYPE_STORE_C:
16389 case TYPE_ISYNC:
16390 case TYPE_SYNC:
16391 return 4;
ec507f2d
DE
16392 default:
16393 if (rs6000_cpu == PROCESSOR_POWER5
16394 && is_cracked_insn (insn))
16395 return 2;
16396 return 0;
16397 }
79ae11c4
DN
16398}
16399
cbe26ab8
DN
16400/* The function returns true if INSN is cracked into 2 instructions
16401 by the processor (and therefore occupies 2 issue slots). */
16402
16403static bool
16404is_cracked_insn (rtx insn)
16405{
16406 if (!insn || !INSN_P (insn)
16407 || GET_CODE (PATTERN (insn)) == USE
16408 || GET_CODE (PATTERN (insn)) == CLOBBER)
16409 return false;
16410
ec507f2d 16411 if (rs6000_sched_groups)
cbe26ab8
DN
16412 {
16413 enum attr_type type = get_attr_type (insn);
16414 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
16415 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
16416 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
16417 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
16418 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
16419 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
16420 || type == TYPE_IDIV || type == TYPE_LDIV
16421 || type == TYPE_INSERT_WORD)
16422 return true;
cbe26ab8
DN
16423 }
16424
16425 return false;
16426}
16427
16428/* The function returns true if INSN can be issued only from
a3c9585f 16429 the branch slot. */
cbe26ab8
DN
16430
16431static bool
16432is_branch_slot_insn (rtx insn)
16433{
16434 if (!insn || !INSN_P (insn)
16435 || GET_CODE (PATTERN (insn)) == USE
16436 || GET_CODE (PATTERN (insn)) == CLOBBER)
16437 return false;
16438
ec507f2d 16439 if (rs6000_sched_groups)
cbe26ab8
DN
16440 {
16441 enum attr_type type = get_attr_type (insn);
16442 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 16443 return true;
cbe26ab8
DN
16444 return false;
16445 }
16446
16447 return false;
16448}
79ae11c4 16449
a4f6c312 16450/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
16451 priority INSN_PRIORITY (INSN). Increase the priority to execute the
16452 INSN earlier; reduce the priority to execute INSN later. Do not
a4f6c312
SS
16453 define this macro if you do not need to adjust the scheduling
16454 priorities of insns. */
bef84347 16455
c237e94a 16456static int
a2369ed3 16457rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 16458{
a4f6c312
SS
16459 /* On machines (like the 750) which have asymmetric integer units,
16460 where one integer unit can do multiplies and divides and the other
16461 can't, reduce the priority of multiply/divide so it is scheduled
16462 before other integer operations. */
bef84347
VM
16463
16464#if 0
2c3c49de 16465 if (! INSN_P (insn))
bef84347
VM
16466 return priority;
16467
16468 if (GET_CODE (PATTERN (insn)) == USE)
16469 return priority;
16470
16471 switch (rs6000_cpu_attr) {
16472 case CPU_PPC750:
16473 switch (get_attr_type (insn))
16474 {
16475 default:
16476 break;
16477
16478 case TYPE_IMUL:
16479 case TYPE_IDIV:
3cb999d8
DE
16480 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
16481 priority, priority);
bef84347
VM
16482 if (priority >= 0 && priority < 0x01000000)
16483 priority >>= 3;
16484 break;
16485 }
16486 }
16487#endif
16488
79ae11c4
DN
16489 if (is_dispatch_slot_restricted (insn)
16490 && reload_completed
f676971a 16491 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
16492 && rs6000_sched_restricted_insns_priority)
16493 {
16494
c4ad648e
AM
16495 /* Prioritize insns that can be dispatched only in the first
16496 dispatch slot. */
79ae11c4 16497 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
16498 /* Attach highest priority to insn. This means that in
16499 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 16500 precede 'priority' (critical path) considerations. */
f676971a 16501 return current_sched_info->sched_max_insns_priority;
79ae11c4 16502 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 16503 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
16504 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16505 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
16506 return (priority + 1);
16507 }
79ae11c4 16508
bef84347
VM
16509 return priority;
16510}
16511
a4f6c312
SS
16512/* Return how many instructions the machine can issue per cycle. */
16513
c237e94a 16514static int
863d938c 16515rs6000_issue_rate (void)
b6c9286a 16516{
3317bab1
DE
16517 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16518 if (!reload_completed)
16519 return 1;
16520
b6c9286a 16521 switch (rs6000_cpu_attr) {
3cb999d8
DE
16522 case CPU_RIOS1: /* ? */
16523 case CPU_RS64A:
16524 case CPU_PPC601: /* ? */
ed947a96 16525 case CPU_PPC7450:
3cb999d8 16526 return 3;
b54cf83a 16527 case CPU_PPC440:
b6c9286a 16528 case CPU_PPC603:
bef84347 16529 case CPU_PPC750:
ed947a96 16530 case CPU_PPC7400:
be12c2b0 16531 case CPU_PPC8540:
f676971a 16532 return 2;
3cb999d8 16533 case CPU_RIOS2:
b6c9286a 16534 case CPU_PPC604:
19684119 16535 case CPU_PPC604E:
b6c9286a 16536 case CPU_PPC620:
3cb999d8 16537 case CPU_PPC630:
b6c9286a 16538 return 4;
cbe26ab8 16539 case CPU_POWER4:
ec507f2d 16540 case CPU_POWER5:
cbe26ab8 16541 return 5;
b6c9286a
MM
16542 default:
16543 return 1;
16544 }
16545}
16546
be12c2b0
VM
16547/* Return how many instructions to look ahead for better insn
16548 scheduling. */
16549
16550static int
863d938c 16551rs6000_use_sched_lookahead (void)
be12c2b0
VM
16552{
16553 if (rs6000_cpu_attr == CPU_PPC8540)
16554 return 4;
16555 return 0;
16556}
16557
569fa502
DN
16558/* Determine if PAT refers to memory. */
16559
16560static bool
16561is_mem_ref (rtx pat)
16562{
16563 const char * fmt;
16564 int i, j;
16565 bool ret = false;
16566
16567 if (GET_CODE (pat) == MEM)
16568 return true;
16569
16570 /* Recursively process the pattern. */
16571 fmt = GET_RTX_FORMAT (GET_CODE (pat));
16572
16573 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
16574 {
16575 if (fmt[i] == 'e')
16576 ret |= is_mem_ref (XEXP (pat, i));
16577 else if (fmt[i] == 'E')
16578 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
16579 ret |= is_mem_ref (XVECEXP (pat, i, j));
16580 }
16581
16582 return ret;
16583}
16584
16585/* Determine if PAT is a PATTERN of a load insn. */
f676971a 16586
569fa502
DN
16587static bool
16588is_load_insn1 (rtx pat)
16589{
16590 if (!pat || pat == NULL_RTX)
16591 return false;
16592
16593 if (GET_CODE (pat) == SET)
16594 return is_mem_ref (SET_SRC (pat));
16595
16596 if (GET_CODE (pat) == PARALLEL)
16597 {
16598 int i;
16599
16600 for (i = 0; i < XVECLEN (pat, 0); i++)
16601 if (is_load_insn1 (XVECEXP (pat, 0, i)))
16602 return true;
16603 }
16604
16605 return false;
16606}
16607
16608/* Determine if INSN loads from memory. */
16609
16610static bool
16611is_load_insn (rtx insn)
16612{
16613 if (!insn || !INSN_P (insn))
16614 return false;
16615
16616 if (GET_CODE (insn) == CALL_INSN)
16617 return false;
16618
16619 return is_load_insn1 (PATTERN (insn));
16620}
16621
16622/* Determine if PAT is a PATTERN of a store insn. */
16623
16624static bool
16625is_store_insn1 (rtx pat)
16626{
16627 if (!pat || pat == NULL_RTX)
16628 return false;
16629
16630 if (GET_CODE (pat) == SET)
16631 return is_mem_ref (SET_DEST (pat));
16632
16633 if (GET_CODE (pat) == PARALLEL)
16634 {
16635 int i;
16636
16637 for (i = 0; i < XVECLEN (pat, 0); i++)
16638 if (is_store_insn1 (XVECEXP (pat, 0, i)))
16639 return true;
16640 }
16641
16642 return false;
16643}
16644
16645/* Determine if INSN stores to memory. */
16646
16647static bool
16648is_store_insn (rtx insn)
16649{
16650 if (!insn || !INSN_P (insn))
16651 return false;
16652
16653 return is_store_insn1 (PATTERN (insn));
16654}
16655
16656/* Returns whether the dependence between INSN and NEXT is considered
16657 costly by the given target. */
16658
16659static bool
c4ad648e
AM
16660rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost,
16661 int distance)
f676971a 16662{
aabcd309 16663 /* If the flag is not enabled, no dependence is considered costly;
f676971a 16664 allow all dependent insns in the same group.
569fa502
DN
16665 This is the most aggressive option. */
16666 if (rs6000_sched_costly_dep == no_dep_costly)
16667 return false;
16668
f676971a 16669 /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
16670 do not allow dependent instructions in the same group.
16671 This is the most conservative option. */
16672 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 16673 return true;
569fa502 16674
f676971a
EC
16675 if (rs6000_sched_costly_dep == store_to_load_dep_costly
16676 && is_load_insn (next)
569fa502
DN
16677 && is_store_insn (insn))
16678 /* Prevent load after store in the same group. */
16679 return true;
16680
16681 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 16682 && is_load_insn (next)
569fa502
DN
16683 && is_store_insn (insn)
16684 && (!link || (int) REG_NOTE_KIND (link) == 0))
c4ad648e
AM
16685 /* Prevent load after store in the same group if it is a true
16686 dependence. */
569fa502 16687 return true;
f676971a
EC
16688
16689 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
16690 and will not be scheduled in the same group. */
16691 if (rs6000_sched_costly_dep <= max_dep_latency
16692 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
16693 return true;
16694
16695 return false;
16696}
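/* Illustrative example (a sketch; the mnemonics and costs are assumed,
   not taken from a real schedule): with a numeric setting such as
   rs6000_sched_costly_dep == 3,

     stw r9,0(r3)      <- insn (store)
     lwz r10,0(r3)     <- next (load, true dependence on the store)

   if insn_cost () reports cost == 4 and distance == 0, then
   (cost - distance) >= 3 and the dependence is flagged costly, so the
   two insns are kept in separate dispatch groups.  The symbolic settings
   work the same way: store_to_load_dep_costly flags every store->load
   pair, true_store_to_load_dep_costly only those whose REG_NOTE_KIND
   is 0 (a true dependence).  */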
16697
f676971a 16698/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
16699 skipping any "non-active" insns - insns that will not actually occupy
16700 an issue slot. Return NULL_RTX if such an insn is not found. */
16701
16702static rtx
16703get_next_active_insn (rtx insn, rtx tail)
16704{
f489aff8 16705 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
16706 return NULL_RTX;
16707
f489aff8 16708 while (1)
cbe26ab8 16709 {
f489aff8
AM
16710 insn = NEXT_INSN (insn);
16711 if (insn == NULL_RTX || insn == tail)
16712 return NULL_RTX;
cbe26ab8 16713
f489aff8
AM
16714 if (CALL_P (insn)
16715 || JUMP_P (insn)
16716 || (NONJUMP_INSN_P (insn)
16717 && GET_CODE (PATTERN (insn)) != USE
16718 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 16719 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
16720 break;
16721 }
16722 return insn;
cbe26ab8
DN
16723}
16724
839a4992 16725/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
16726 of group WHICH_GROUP.
16727
16728 If WHICH_GROUP == current_group, this function will return true if INSN
16729 causes the termination of the current group (i.e., the dispatch group to
16730 which INSN belongs). This means that INSN will be the last insn in the
16731 group it belongs to.
16732
16733 If WHICH_GROUP == previous_group, this function will return true if INSN
16734 causes the termination of the previous group (i.e., the dispatch group that
16735 precedes the group to which INSN belongs). This means that INSN will be
16736 the first insn in the group it belongs to. */
16737
16738static bool
16739insn_terminates_group_p (rtx insn, enum group_termination which_group)
16740{
16741 enum attr_type type;
16742
16743 if (! insn)
16744 return false;
569fa502 16745
cbe26ab8
DN
16746 type = get_attr_type (insn);
16747
16748 if (is_microcoded_insn (insn))
16749 return true;
16750
16751 if (which_group == current_group)
16752 {
16753 if (is_branch_slot_insn (insn))
c4ad648e 16754 return true;
cbe26ab8
DN
16755 return false;
16756 }
16757 else if (which_group == previous_group)
16758 {
16759 if (is_dispatch_slot_restricted (insn))
c4ad648e 16760 return true;
cbe26ab8
DN
16761 return false;
16762 }
16763
16764 return false;
16765}
16766
839a4992 16767/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
16768 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16769
16770static bool
16771is_costly_group (rtx *group_insns, rtx next_insn)
16772{
16773 int i;
16774 rtx link;
16775 int cost;
16776 int issue_rate = rs6000_issue_rate ();
16777
16778 for (i = 0; i < issue_rate; i++)
16779 {
16780 rtx insn = group_insns[i];
16781 if (!insn)
c4ad648e 16782 continue;
cbe26ab8 16783 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
c4ad648e
AM
16784 {
16785 rtx next = XEXP (link, 0);
16786 if (next == next_insn)
16787 {
16788 cost = insn_cost (insn, link, next_insn);
16789 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
16790 return true;
16791 }
16792 }
cbe26ab8
DN
16793 }
16794
16795 return false;
16796}
16797
f676971a 16798/* Helper for redefine_groups.
cbe26ab8
DN
16799 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16800 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16801 to keep it "far" (in a separate group) from GROUP_INSNS, following
16802 one of the following schemes, depending on the value of the flag
16803 -minsert-sched-nops = X:
16804 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 16805 in order to force NEXT_INSN into a separate group.
f676971a
EC
16806 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16807 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
16808 insertion (whether a group has just ended, how many vacant issue slots remain in the
16809 last group, and how many dispatch groups were encountered so far). */
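/* Illustrative example (assuming issue_rate == 5, as on POWER4/5, and
   that NEXT_INSN has a costly dependence on GROUP_INSNS): under scheme
   (1), if three issue slots are still vacant and NEXT_INSN is not a
   branch, two nops (can_issue_more - 1) are emitted so NEXT_INSN lands
   in the next group; if NEXT_INSN is a branch, three nops are emitted
   and the branch is forced into the new group.  Under scheme (2) with
   -minsert-sched-nops=2, exactly two nops are emitted regardless.  */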
16810
f676971a 16811static int
c4ad648e
AM
16812force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
16813 rtx next_insn, bool *group_end, int can_issue_more,
16814 int *group_count)
cbe26ab8
DN
16815{
16816 rtx nop;
16817 bool force;
16818 int issue_rate = rs6000_issue_rate ();
16819 bool end = *group_end;
16820 int i;
16821
16822 if (next_insn == NULL_RTX)
16823 return can_issue_more;
16824
16825 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
16826 return can_issue_more;
16827
16828 force = is_costly_group (group_insns, next_insn);
16829 if (!force)
16830 return can_issue_more;
16831
16832 if (sched_verbose > 6)
16833 fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 16834 *group_count, can_issue_more);
cbe26ab8
DN
16835
16836 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
16837 {
16838 if (*group_end)
c4ad648e 16839 can_issue_more = 0;
cbe26ab8
DN
16840
16841 /* Since only a branch can be issued in the last issue_slot, it is
16842 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16843 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
16844 in this case the last nop will start a new group and the branch
16845 will be forced to the new group. */
cbe26ab8 16846 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 16847 can_issue_more--;
cbe26ab8
DN
16848
16849 while (can_issue_more > 0)
c4ad648e 16850 {
9390387d 16851 nop = gen_nop ();
c4ad648e
AM
16852 emit_insn_before (nop, next_insn);
16853 can_issue_more--;
16854 }
cbe26ab8
DN
16855
16856 *group_end = true;
16857 return 0;
f676971a 16858 }
cbe26ab8
DN
16859
16860 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
16861 {
16862 int n_nops = rs6000_sched_insert_nops;
16863
f676971a 16864 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 16865 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 16866 if (can_issue_more == 0)
c4ad648e 16867 can_issue_more = issue_rate;
cbe26ab8
DN
16868 can_issue_more--;
16869 if (can_issue_more == 0)
c4ad648e
AM
16870 {
16871 can_issue_more = issue_rate - 1;
16872 (*group_count)++;
16873 end = true;
16874 for (i = 0; i < issue_rate; i++)
16875 {
16876 group_insns[i] = 0;
16877 }
16878 }
cbe26ab8
DN
16879
16880 while (n_nops > 0)
c4ad648e
AM
16881 {
16882 nop = gen_nop ();
16883 emit_insn_before (nop, next_insn);
16884 if (can_issue_more == issue_rate - 1) /* new group begins */
16885 end = false;
16886 can_issue_more--;
16887 if (can_issue_more == 0)
16888 {
16889 can_issue_more = issue_rate - 1;
16890 (*group_count)++;
16891 end = true;
16892 for (i = 0; i < issue_rate; i++)
16893 {
16894 group_insns[i] = 0;
16895 }
16896 }
16897 n_nops--;
16898 }
cbe26ab8
DN
16899
16900 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 16901 can_issue_more++;
cbe26ab8 16902
c4ad648e
AM
16903 /* Is next_insn going to start a new group? */
16904 *group_end
16905 = (end
cbe26ab8
DN
16906 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16907 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16908 || (can_issue_more < issue_rate &&
c4ad648e 16909 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 16910 if (*group_end && end)
c4ad648e 16911 (*group_count)--;
cbe26ab8
DN
16912
16913 if (sched_verbose > 6)
c4ad648e
AM
16914 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
16915 *group_count, can_issue_more);
f676971a
EC
16916 return can_issue_more;
16917 }
cbe26ab8
DN
16918
16919 return can_issue_more;
16920}
16921
16922/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 16923 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
16924 form in practice. It tries to achieve this synchronization by forcing the
16925 estimated processor grouping on the compiler (as opposed to the function
16926 'pad_groups', which tries to force the scheduler's grouping on the processor).
16927
16928 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16929 examines the (estimated) dispatch groups that will be formed by the processor
16930 dispatcher. It marks these group boundaries to reflect the estimated
16931 processor grouping, overriding the grouping that the scheduler had marked.
16932 Depending on the value of the flag '-minsert-sched-nops' this function can
16933 force certain insns into separate groups or force a certain distance between
16934 them by inserting nops, for example, if there exists a "costly dependence"
16935 between the insns.
16936
16937 The function estimates the group boundaries that the processor will form as
0fa2e4df 16938 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
16939 each insn. A subsequent insn will start a new group if one of the following
16940 4 cases applies:
16941 - no more vacant issue slots remain in the current dispatch group.
16942 - only the last issue slot, which is the branch slot, is vacant, but the next
16943 insn is not a branch.
16944 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
16945 which means that a cracked insn (which occupies two issue slots) can't be
16946 issued in this group.
f676971a 16947 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
16948 start a new group. */
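/* Illustrative example (assuming issue_rate == 5, as on POWER4/5):
   if four non-branch insns have been placed in the current group, only
   the branch slot remains vacant; when the next active insn is, say, an
   add rather than a branch, the second case above applies, the add is
   estimated to start a new group, and it is marked below by giving it
   TImode.  Similarly, if only two slots remain and the next insn is
   cracked (it needs two issue slots), the third case applies.  */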
16949
16950static int
16951redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
16952{
16953 rtx insn, next_insn;
16954 int issue_rate;
16955 int can_issue_more;
16956 int slot, i;
16957 bool group_end;
16958 int group_count = 0;
16959 rtx *group_insns;
16960
16961 /* Initialize. */
16962 issue_rate = rs6000_issue_rate ();
16963 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 16964 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
16965 {
16966 group_insns[i] = 0;
16967 }
16968 can_issue_more = issue_rate;
16969 slot = 0;
16970 insn = get_next_active_insn (prev_head_insn, tail);
16971 group_end = false;
16972
16973 while (insn != NULL_RTX)
16974 {
16975 slot = (issue_rate - can_issue_more);
16976 group_insns[slot] = insn;
16977 can_issue_more =
c4ad648e 16978 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 16979 if (insn_terminates_group_p (insn, current_group))
c4ad648e 16980 can_issue_more = 0;
cbe26ab8
DN
16981
16982 next_insn = get_next_active_insn (insn, tail);
16983 if (next_insn == NULL_RTX)
c4ad648e 16984 return group_count + 1;
cbe26ab8 16985
c4ad648e
AM
16986 /* Is next_insn going to start a new group? */
16987 group_end
16988 = (can_issue_more == 0
16989 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
16990 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
16991 || (can_issue_more < issue_rate &&
16992 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 16993
f676971a 16994 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
16995 next_insn, &group_end, can_issue_more,
16996 &group_count);
cbe26ab8
DN
16997
16998 if (group_end)
c4ad648e
AM
16999 {
17000 group_count++;
17001 can_issue_more = 0;
17002 for (i = 0; i < issue_rate; i++)
17003 {
17004 group_insns[i] = 0;
17005 }
17006 }
cbe26ab8
DN
17007
17008 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 17009 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 17010 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 17011 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
17012
17013 insn = next_insn;
17014 if (can_issue_more == 0)
c4ad648e
AM
17015 can_issue_more = issue_rate;
17016 } /* while */
cbe26ab8
DN
17017
17018 return group_count;
17019}
17020
17021/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
17022 dispatch group boundaries that the scheduler had marked. Pad with nops
17023 any dispatch groups which have vacant issue slots, in order to force the
17024 scheduler's grouping on the processor dispatcher. The function
17025 returns the number of dispatch groups found. */
17026
17027static int
17028pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
17029{
17030 rtx insn, next_insn;
17031 rtx nop;
17032 int issue_rate;
17033 int can_issue_more;
17034 int group_end;
17035 int group_count = 0;
17036
17037 /* Initialize issue_rate. */
17038 issue_rate = rs6000_issue_rate ();
17039 can_issue_more = issue_rate;
17040
17041 insn = get_next_active_insn (prev_head_insn, tail);
17042 next_insn = get_next_active_insn (insn, tail);
17043
17044 while (insn != NULL_RTX)
17045 {
17046 can_issue_more =
17047 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
17048
17049 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
17050
17051 if (next_insn == NULL_RTX)
c4ad648e 17052 break;
cbe26ab8
DN
17053
17054 if (group_end)
c4ad648e
AM
17055 {
17056 /* If the scheduler had marked group termination at this location
17057 (between insn and next_insn), and neither insn nor next_insn will
17058 force group termination, pad the group with nops to force group
17059 termination. */
17060 if (can_issue_more
17061 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
17062 && !insn_terminates_group_p (insn, current_group)
17063 && !insn_terminates_group_p (next_insn, previous_group))
17064 {
9390387d 17065 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
17066 can_issue_more--;
17067
17068 while (can_issue_more)
17069 {
17070 nop = gen_nop ();
17071 emit_insn_before (nop, next_insn);
17072 can_issue_more--;
17073 }
17074 }
17075
17076 can_issue_more = issue_rate;
17077 group_count++;
17078 }
cbe26ab8
DN
17079
17080 insn = next_insn;
17081 next_insn = get_next_active_insn (insn, tail);
17082 }
17083
17084 return group_count;
17085}
17086
17087/* The following function is called at the end of scheduling BB.
17088 After reload, it inserts nops to enforce insn group bundling. */
17089
17090static void
38f391a5 17091rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
17092{
17093 int n_groups;
17094
17095 if (sched_verbose)
17096 fprintf (dump, "=== Finishing schedule.\n");
17097
ec507f2d 17098 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
17099 {
17100 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 17101 return;
cbe26ab8
DN
17102
17103 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
17104 n_groups = pad_groups (dump, sched_verbose,
17105 current_sched_info->prev_head,
17106 current_sched_info->next_tail);
cbe26ab8 17107 else
c4ad648e
AM
17108 n_groups = redefine_groups (dump, sched_verbose,
17109 current_sched_info->prev_head,
17110 current_sched_info->next_tail);
cbe26ab8
DN
17111
17112 if (sched_verbose >= 6)
17113 {
17114 fprintf (dump, "ngroups = %d\n", n_groups);
17115 print_rtl (dump, current_sched_info->prev_head);
17116 fprintf (dump, "Done finish_sched\n");
17117 }
17118 }
17119}
b6c9286a 17120\f
b6c9286a
MM
17121/* Length in units of the trampoline for entering a nested function. */
17122
17123int
863d938c 17124rs6000_trampoline_size (void)
b6c9286a
MM
17125{
17126 int ret = 0;
17127
17128 switch (DEFAULT_ABI)
17129 {
17130 default:
37409796 17131 gcc_unreachable ();
b6c9286a
MM
17132
17133 case ABI_AIX:
8f802bfb 17134 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
17135 break;
17136
4dabc42d 17137 case ABI_DARWIN:
b6c9286a 17138 case ABI_V4:
03a7e1a5 17139 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 17140 break;
b6c9286a
MM
17141 }
17142
17143 return ret;
17144}
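/* Illustrative breakdown: for 32-bit AIX the 12 bytes are the 3-word
   function descriptor (code address, TOC pointer, static chain) that
   rs6000_initialize_trampoline below stores at offsets 0, 4 and 8; the
   64-bit figure of 24 is the same three fields at double-word size.
   The V.4/eabi/Darwin figures are the buffer handed to
   __trampoline_setup, which does the real work (see below).  */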
17145
17146/* Emit RTL insns to initialize the variable parts of a trampoline.
17147 FNADDR is an RTX for the address of the function's pure code.
17148 CXT is an RTX for the static chain value for the function. */
17149
17150void
a2369ed3 17151rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 17152{
ac2a93a1 17153 enum machine_mode pmode = Pmode;
8bd04c56
MM
17154 int regsize = (TARGET_32BIT) ? 4 : 8;
17155 rtx ctx_reg = force_reg (pmode, cxt);
b6c9286a
MM
17156
17157 switch (DEFAULT_ABI)
17158 {
17159 default:
37409796 17160 gcc_unreachable ();
b6c9286a 17161
8bd04c56 17162/* Macros to shorten the code expansions below. */
39403d82 17163#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
c5c76735
JL
17164#define MEM_PLUS(addr,offset) \
17165 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
7c59dc5d 17166
b6c9286a
MM
17167 /* Under AIX, just build the 3-word function descriptor. */
17168 case ABI_AIX:
8bd04c56
MM
17169 {
17170 rtx fn_reg = gen_reg_rtx (pmode);
17171 rtx toc_reg = gen_reg_rtx (pmode);
17172 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 17173 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
17174 emit_move_insn (MEM_DEREF (addr), fn_reg);
17175 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
17176 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
17177 }
b6c9286a
MM
17178 break;
17179
4dabc42d
TC
17180 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
17181 case ABI_DARWIN:
b6c9286a 17182 case ABI_V4:
39403d82 17183 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
eaf1bcf1
MM
17184 FALSE, VOIDmode, 4,
17185 addr, pmode,
17186 GEN_INT (rs6000_trampoline_size ()), SImode,
17187 fnaddr, pmode,
17188 ctx_reg, pmode);
b6c9286a 17189 break;
b6c9286a
MM
17190 }
17191
17192 return;
17193}
7509c759
MM
17194
17195\f
91d231cb 17196/* Table of valid machine attributes. */
a4f6c312 17197
91d231cb 17198const struct attribute_spec rs6000_attribute_table[] =
7509c759 17199{
91d231cb 17200 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 17201 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
17202 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
17203 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
005c1a13
GK
17204#ifdef SUBTARGET_ATTRIBUTE_TABLE
17205 SUBTARGET_ATTRIBUTE_TABLE,
17206#endif
a5c76ee6 17207 { NULL, 0, 0, false, false, false, NULL }
91d231cb 17208};
7509c759 17209
8bb418a3
ZL
17210/* Handle the "altivec" attribute. The attribute may have
17211 arguments as follows:
f676971a 17212
8bb418a3
ZL
17213 __attribute__((altivec(vector__)))
17214 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
17215 __attribute__((altivec(bool__))) (always followed by 'unsigned')
17216
17217 and may appear more than once (e.g., 'vector bool char') in a
17218 given declaration. */
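/* For example, the source-level type `vector unsigned int' reaches this
   handler as `unsigned int' carrying altivec(vector__) and is rewritten
   to unsigned_V4SI_type_node, while `vector bool int' also carries
   altivec(bool__) and ends up as bool_V4SI_type_node; the 'b' case
   below accepts both SImode and V4SImode, so the two attributes may be
   processed in either order.  (Illustrative walk-through of the switch
   below.)  */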
17219
17220static tree
f90ac3f0
UP
17221rs6000_handle_altivec_attribute (tree *node,
17222 tree name ATTRIBUTE_UNUSED,
17223 tree args,
8bb418a3
ZL
17224 int flags ATTRIBUTE_UNUSED,
17225 bool *no_add_attrs)
17226{
17227 tree type = *node, result = NULL_TREE;
17228 enum machine_mode mode;
17229 int unsigned_p;
17230 char altivec_type
17231 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
17232 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
17233 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 17234 : '?');
8bb418a3
ZL
17235
17236 while (POINTER_TYPE_P (type)
17237 || TREE_CODE (type) == FUNCTION_TYPE
17238 || TREE_CODE (type) == METHOD_TYPE
17239 || TREE_CODE (type) == ARRAY_TYPE)
17240 type = TREE_TYPE (type);
17241
17242 mode = TYPE_MODE (type);
17243
f90ac3f0
UP
17244 /* Check for invalid AltiVec type qualifiers. */
17245 if (type == long_unsigned_type_node || type == long_integer_type_node)
17246 {
17247 if (TARGET_64BIT)
17248 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
17249 else if (rs6000_warn_altivec_long)
d4ee4d25 17250 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
17251 }
17252 else if (type == long_long_unsigned_type_node
17253 || type == long_long_integer_type_node)
17254 error ("use of %<long long%> in AltiVec types is invalid");
17255 else if (type == double_type_node)
17256 error ("use of %<double%> in AltiVec types is invalid");
17257 else if (type == long_double_type_node)
17258 error ("use of %<long double%> in AltiVec types is invalid");
17259 else if (type == boolean_type_node)
17260 error ("use of boolean types in AltiVec types is invalid");
17261 else if (TREE_CODE (type) == COMPLEX_TYPE)
17262 error ("use of %<complex%> in AltiVec types is invalid");
8bb418a3
ZL
17263
17264 switch (altivec_type)
17265 {
17266 case 'v':
8df83eae 17267 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
17268 switch (mode)
17269 {
c4ad648e
AM
17270 case SImode:
17271 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
17272 break;
17273 case HImode:
17274 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
17275 break;
17276 case QImode:
17277 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
17278 break;
17279 case SFmode: result = V4SF_type_node; break;
17280 /* If the user says 'vector int bool', we may be handed the 'bool'
17281 attribute _before_ the 'vector' attribute, and so select the
17282 proper type in the 'b' case below. */
17283 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
17284 result = type;
17285 default: break;
8bb418a3
ZL
17286 }
17287 break;
17288 case 'b':
17289 switch (mode)
17290 {
c4ad648e
AM
17291 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
17292 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
17293 case QImode: case V16QImode: result = bool_V16QI_type_node;
17294 default: break;
8bb418a3
ZL
17295 }
17296 break;
17297 case 'p':
17298 switch (mode)
17299 {
c4ad648e
AM
17300 case V8HImode: result = pixel_V8HI_type_node;
17301 default: break;
8bb418a3
ZL
17302 }
17303 default: break;
17304 }
17305
7958a2a6
FJ
17306 if (result && result != type && TYPE_READONLY (type))
17307 result = build_qualified_type (result, TYPE_QUAL_CONST);
17308
8bb418a3
ZL
17309 *no_add_attrs = true; /* No need to hang on to the attribute. */
17310
f90ac3f0 17311 if (result)
8bb418a3
ZL
17312 *node = reconstruct_complex_type (*node, result);
17313
17314 return NULL_TREE;
17315}
17316
f18eca82
ZL
17317/* AltiVec defines four built-in scalar types that serve as vector
17318 elements; we must teach the compiler how to mangle them. */
17319
17320static const char *
17321rs6000_mangle_fundamental_type (tree type)
17322{
17323 if (type == bool_char_type_node) return "U6__boolc";
17324 if (type == bool_short_type_node) return "U6__bools";
17325 if (type == pixel_type_node) return "u7__pixel";
17326 if (type == bool_int_type_node) return "U6__booli";
17327
17328 /* For all other types, use normal C++ mangling. */
17329 return NULL;
17330}
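/* For instance, a C++ parameter of type `vector pixel' contributes the
   vendor-extended element mangling "u7__pixel" to the function's mangled
   name, and `vector bool int' contributes "U6__booli"; plain `vector int'
   falls through to the generic C++ mangling (illustrative; the
   surrounding mangling is produced by the generic C++ code).  */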
17331
a5c76ee6
ZW
17332/* Handle a "longcall" or "shortcall" attribute; arguments as in
17333 struct attribute_spec.handler. */
a4f6c312 17334
91d231cb 17335static tree
f676971a
EC
17336rs6000_handle_longcall_attribute (tree *node, tree name,
17337 tree args ATTRIBUTE_UNUSED,
17338 int flags ATTRIBUTE_UNUSED,
a2369ed3 17339 bool *no_add_attrs)
91d231cb
JM
17340{
17341 if (TREE_CODE (*node) != FUNCTION_TYPE
17342 && TREE_CODE (*node) != FIELD_DECL
17343 && TREE_CODE (*node) != TYPE_DECL)
17344 {
5c498b10 17345 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
17346 IDENTIFIER_POINTER (name));
17347 *no_add_attrs = true;
17348 }
6a4cee5f 17349
91d231cb 17350 return NULL_TREE;
7509c759
MM
17351}
17352
a5c76ee6
ZW
17353/* Set longcall attributes on all functions declared when
17354 rs6000_default_long_calls is true. */
17355static void
a2369ed3 17356rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
17357{
17358 if (rs6000_default_long_calls
17359 && (TREE_CODE (type) == FUNCTION_TYPE
17360 || TREE_CODE (type) == METHOD_TYPE))
17361 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
17362 NULL_TREE,
17363 TYPE_ATTRIBUTES (type));
17364}
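/* Illustrative usage (hypothetical declaration): a call can be forced
   through a register, and hence freed from the direct-branch
   displacement limit, with

     void far_away (void) __attribute__ ((longcall));

   and when -mlongcall makes this the default (rs6000_default_long_calls),
   an individual function can opt back out with
   __attribute__ ((shortcall)).  */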
17365
3cb999d8
DE
17366/* Return a reference suitable for calling a function with the
17367 longcall attribute. */
a4f6c312 17368
9390387d 17369rtx
a2369ed3 17370rs6000_longcall_ref (rtx call_ref)
6a4cee5f 17371{
d330fd93 17372 const char *call_name;
6a4cee5f
MM
17373 tree node;
17374
17375 if (GET_CODE (call_ref) != SYMBOL_REF)
17376 return call_ref;
17377
17378 /* System V adds '.' to the internal name, so skip them. */
17379 call_name = XSTR (call_ref, 0);
17380 if (*call_name == '.')
17381 {
17382 while (*call_name == '.')
17383 call_name++;
17384
17385 node = get_identifier (call_name);
39403d82 17386 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
17387 }
17388
17389 return force_reg (Pmode, call_ref);
17390}
7509c759 17391\f
b64a1b53
RH
17392#ifdef USING_ELFOS_H
17393
7509c759
MM
17394/* A C statement or statements to switch to the appropriate section
17395 for output of RTX in mode MODE. You can assume that RTX is some
17396 kind of constant in RTL. The argument MODE is redundant except in
17397 the case of a `const_int' rtx. Select the section by calling
17398 `text_section' or one of the alternatives for other sections.
17399
17400 Do not define this macro if you put all constants in the read-only
17401 data section. */
17402
b64a1b53 17403static void
f676971a 17404rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 17405 unsigned HOST_WIDE_INT align)
7509c759 17406{
a9098fd0 17407 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
7509c759 17408 toc_section ();
7509c759 17409 else
b64a1b53 17410 default_elf_select_rtx_section (mode, x, align);
7509c759
MM
17411}
17412
17413/* A C statement or statements to switch to the appropriate
17414 section for output of DECL. DECL is either a `VAR_DECL' node
17415 or a constant of some sort. RELOC indicates whether forming
17416 the initial value of DECL requires link-time relocations. */
17417
ae46c4e0 17418static void
f676971a 17419rs6000_elf_select_section (tree decl, int reloc,
a2369ed3 17420 unsigned HOST_WIDE_INT align)
7509c759 17421{
f1384257
AM
17422 /* Pretend that we're always building for a shared library when
17423 ABI_AIX, because otherwise we end up with dynamic relocations
17424 in read-only sections. This happens for function pointers,
17425 references to vtables in typeinfo, and probably other cases. */
0e5dbd9b
DE
17426 default_elf_select_section_1 (decl, reloc, align,
17427 flag_pic || DEFAULT_ABI == ABI_AIX);
63019373
GK
17428}
17429
17430/* A C statement to build up a unique section name, expressed as a
17431 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
17432 RELOC indicates whether the initial value of EXP requires
17433 link-time relocations. If you do not define this macro, GCC will use
17434 the symbol name prefixed by `.' as the section name. Note - this
f5143c46 17435 macro can now be called for uninitialized data items as well as
4912a07c 17436 initialized data and functions. */
63019373 17437
ae46c4e0 17438static void
a2369ed3 17439rs6000_elf_unique_section (tree decl, int reloc)
63019373 17440{
f1384257
AM
17441 /* As above, pretend that we're always building for a shared library
17442 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
0e5dbd9b
DE
17443 default_unique_section_1 (decl, reloc,
17444 flag_pic || DEFAULT_ABI == ABI_AIX);
7509c759 17445}
d9407988 17446\f
d1908feb
JJ
17447/* For a SYMBOL_REF, set generic flags and then perform some
17448 target-specific processing.
17449
d1908feb
JJ
17450 When the AIX ABI is requested on a non-AIX system, replace the
17451 function name with the real name (with a leading .) rather than the
17452 function descriptor name. This saves a lot of overriding code to
17453 read the prefixes. */
d9407988 17454
fb49053f 17455static void
a2369ed3 17456rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 17457{
d1908feb 17458 default_encode_section_info (decl, rtl, first);
b2003250 17459
d1908feb
JJ
17460 if (first
17461 && TREE_CODE (decl) == FUNCTION_DECL
17462 && !TARGET_AIX
17463 && DEFAULT_ABI == ABI_AIX)
d9407988 17464 {
c6a2438a 17465 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
17466 size_t len = strlen (XSTR (sym_ref, 0));
17467 char *str = alloca (len + 2);
17468 str[0] = '.';
17469 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
17470 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 17471 }
d9407988
MM
17472}
17473
0e5dbd9b 17474static bool
a2369ed3 17475rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
17476{
17477 if (rs6000_sdata == SDATA_NONE)
17478 return false;
17479
7482ad25
AF
17480 /* We want to merge strings, so we never consider them small data. */
17481 if (TREE_CODE (decl) == STRING_CST)
17482 return false;
17483
17484 /* Functions are never in the small data area. */
17485 if (TREE_CODE (decl) == FUNCTION_DECL)
17486 return false;
17487
0e5dbd9b
DE
17488 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
17489 {
17490 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
17491 if (strcmp (section, ".sdata") == 0
17492 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
17493 || strcmp (section, ".sbss") == 0
17494 || strcmp (section, ".sbss2") == 0
17495 || strcmp (section, ".PPC.EMB.sdata0") == 0
17496 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
17497 return true;
17498 }
17499 else
17500 {
17501 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
17502
17503 if (size > 0
307b599c 17504 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
17505 /* If it's not public, and we're not going to reference it there,
17506 there's no need to put it in the small data section. */
0e5dbd9b
DE
17507 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
17508 return true;
17509 }
17510
17511 return false;
17512}
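/* Illustrative example (hypothetical declaration): with small data
   enabled and -G 8 (g_switch_value == 8), a global such as
   `int counters[2]' (8 bytes) is placed in the small data area, whereas
   string constants and functions never are, per the checks above.  */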
17513
b91da81f 17514#endif /* USING_ELFOS_H */
000034eb 17515
a6c2a102 17516\f
000034eb 17517/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
17518 ADDR can be effectively incremented by incrementing REG.
17519
17520 r0 is special and we must not select it as an address
17521 register by this routine since our caller will try to
17522 increment the returned register via an "la" instruction. */
000034eb 17523
9390387d 17524rtx
a2369ed3 17525find_addr_reg (rtx addr)
000034eb
DE
17526{
17527 while (GET_CODE (addr) == PLUS)
17528 {
02441cd6
JL
17529 if (GET_CODE (XEXP (addr, 0)) == REG
17530 && REGNO (XEXP (addr, 0)) != 0)
000034eb 17531 addr = XEXP (addr, 0);
02441cd6
JL
17532 else if (GET_CODE (XEXP (addr, 1)) == REG
17533 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
17534 addr = XEXP (addr, 1);
17535 else if (CONSTANT_P (XEXP (addr, 0)))
17536 addr = XEXP (addr, 1);
17537 else if (CONSTANT_P (XEXP (addr, 1)))
17538 addr = XEXP (addr, 0);
17539 else
37409796 17540 gcc_unreachable ();
000034eb 17541 }
37409796
NS
17542 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
17543 return addr;
000034eb
DE
17544}
17545
a6c2a102 17546void
a2369ed3 17547rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
17548{
17549 fatal_insn ("bad address", op);
17550}
c8023011 17551
ee890fe2
SS
17552#if TARGET_MACHO
17553
efdba735 17554static tree branch_island_list = 0;
ee890fe2 17555
efdba735
SH
17556/* Remember to generate a branch island for far calls to the given
17557 function. */
ee890fe2 17558
f676971a 17559static void
c4ad648e
AM
17560add_compiler_branch_island (tree label_name, tree function_name,
17561 int line_number)
ee890fe2 17562{
efdba735 17563 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 17564 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
17565 TREE_CHAIN (branch_island) = branch_island_list;
17566 branch_island_list = branch_island;
ee890fe2
SS
17567}
17568
efdba735
SH
17569#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
17570#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
17571#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
17572 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 17573
efdba735
SH
17574/* Generate far-jump branch islands for everything on the
17575 branch_island_list. Invoked immediately after the last instruction
17576 of the epilogue has been emitted; the branch-islands must be
17577 appended to, and contiguous with, the function body. Mach-O stubs
17578 are generated in machopic_output_stub(). */
ee890fe2 17579
efdba735
SH
17580static void
17581macho_branch_islands (void)
17582{
17583 char tmp_buf[512];
17584 tree branch_island;
17585
17586 for (branch_island = branch_island_list;
17587 branch_island;
17588 branch_island = TREE_CHAIN (branch_island))
17589 {
17590 const char *label =
17591 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
17592 const char *name =
11abc112 17593 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
17594 char name_buf[512];
17595 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17596 if (name[0] == '*' || name[0] == '&')
17597 strcpy (name_buf, name+1);
17598 else
17599 {
17600 name_buf[0] = '_';
17601 strcpy (name_buf+1, name);
17602 }
17603 strcpy (tmp_buf, "\n");
17604 strcat (tmp_buf, label);
ee890fe2 17605#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17606 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17607 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17608#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
17609 if (flag_pic)
17610 {
17611 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
17612 strcat (tmp_buf, label);
17613 strcat (tmp_buf, "_pic\n");
17614 strcat (tmp_buf, label);
17615 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 17616
efdba735
SH
17617 strcat (tmp_buf, "\taddis r11,r11,ha16(");
17618 strcat (tmp_buf, name_buf);
17619 strcat (tmp_buf, " - ");
17620 strcat (tmp_buf, label);
17621 strcat (tmp_buf, "_pic)\n");
f676971a 17622
efdba735 17623 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 17624
efdba735
SH
17625 strcat (tmp_buf, "\taddi r12,r11,lo16(");
17626 strcat (tmp_buf, name_buf);
17627 strcat (tmp_buf, " - ");
17628 strcat (tmp_buf, label);
17629 strcat (tmp_buf, "_pic)\n");
f676971a 17630
efdba735
SH
17631 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
17632 }
17633 else
17634 {
17635 strcat (tmp_buf, ":\nlis r12,hi16(");
17636 strcat (tmp_buf, name_buf);
17637 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
17638 strcat (tmp_buf, name_buf);
17639 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
17640 }
17641 output_asm_insn (tmp_buf, 0);
ee890fe2 17642#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 17643 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 17644 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 17645#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 17646 }
ee890fe2 17647
efdba735 17648 branch_island_list = 0;
ee890fe2
SS
17649}
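/* Illustrative non-PIC expansion (hypothetical names): for a far call to
   _foo recorded with label L42, the island emitted above is roughly

     L42:
         lis   r12,hi16(_foo)
         ori   r12,r12,lo16(_foo)
         mtctr r12
         bctr
 */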
17650
17651/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
17652 already there or not. */
17653
efdba735 17654static int
a2369ed3 17655no_previous_def (tree function_name)
ee890fe2 17656{
efdba735
SH
17657 tree branch_island;
17658 for (branch_island = branch_island_list;
17659 branch_island;
17660 branch_island = TREE_CHAIN (branch_island))
17661 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
17662 return 0;
17663 return 1;
17664}
17665
17666/* GET_PREV_LABEL gets the label name from the previous definition of
17667 the function. */
17668
efdba735 17669static tree
a2369ed3 17670get_prev_label (tree function_name)
ee890fe2 17671{
efdba735
SH
17672 tree branch_island;
17673 for (branch_island = branch_island_list;
17674 branch_island;
17675 branch_island = TREE_CHAIN (branch_island))
17676 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
17677 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
17678 return 0;
17679}
17680
17681/* INSN is either a function call or a millicode call. It may have an
f676971a 17682 unconditional jump in its delay slot.
ee890fe2
SS
17683
17684 CALL_DEST is the routine we are calling. */
17685
17686char *
c4ad648e
AM
17687output_call (rtx insn, rtx *operands, int dest_operand_number,
17688 int cookie_operand_number)
ee890fe2
SS
17689{
17690 static char buf[256];
efdba735
SH
17691 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
17692 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
17693 {
17694 tree labelname;
efdba735 17695 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 17696
ee890fe2
SS
17697 if (no_previous_def (funname))
17698 {
308c142a 17699 int line_number = 0;
ee890fe2
SS
17700 rtx label_rtx = gen_label_rtx ();
17701 char *label_buf, temp_buf[256];
17702 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
17703 CODE_LABEL_NUMBER (label_rtx));
17704 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
17705 labelname = get_identifier (label_buf);
17706 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
17707 if (insn)
17708 line_number = NOTE_LINE_NUMBER (insn);
efdba735 17709 add_compiler_branch_island (labelname, funname, line_number);
ee890fe2
SS
17710 }
17711 else
17712 labelname = get_prev_label (funname);
17713
efdba735
SH
17714 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17715 instruction will reach 'foo', otherwise link as 'bl L42'".
17716 "L42" should be a 'branch island', that will do a far jump to
17717 'foo'. Branch islands are generated in
17718 macho_branch_islands(). */
ee890fe2 17719 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 17720 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
17721 }
17722 else
efdba735
SH
17723 sprintf (buf, "bl %%z%d", dest_operand_number);
17724 return buf;
ee890fe2
SS
17725}
17726
ee890fe2
SS
17727/* Generate PIC and indirect symbol stubs. */
17728
17729void
a2369ed3 17730machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
17731{
17732 unsigned int length;
a4f6c312
SS
17733 char *symbol_name, *lazy_ptr_name;
17734 char *local_label_0;
ee890fe2
SS
17735 static int label = 0;
17736
df56a27f 17737 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 17738 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 17739
ee890fe2 17740
ee890fe2
SS
17741 length = strlen (symb);
17742 symbol_name = alloca (length + 32);
17743 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
17744
17745 lazy_ptr_name = alloca (length + 32);
17746 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
17747
ee890fe2 17748 if (flag_pic == 2)
d3c300d2 17749 machopic_picsymbol_stub1_section ();
ee890fe2 17750 else
d3c300d2 17751 machopic_symbol_stub1_section ();
ee890fe2
SS
17752
17753 if (flag_pic == 2)
17754 {
d974312d
DJ
17755 fprintf (file, "\t.align 5\n");
17756
17757 fprintf (file, "%s:\n", stub);
17758 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17759
876455fa 17760 label++;
89da1f32 17761 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 17762 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 17763
ee890fe2
SS
17764 fprintf (file, "\tmflr r0\n");
17765 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
17766 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
17767 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
17768 lazy_ptr_name, local_label_0);
17769 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
17770 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
17771 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
17772 lazy_ptr_name, local_label_0);
17773 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
17774 fprintf (file, "\tbctr\n");
17775 }
17776 else
d974312d
DJ
17777 {
17778 fprintf (file, "\t.align 4\n");
17779
17780 fprintf (file, "%s:\n", stub);
17781 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
17782
17783 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
17784 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
17785 (TARGET_64BIT ? "ldu" : "lwzu"),
17786 lazy_ptr_name);
d974312d
DJ
17787 fprintf (file, "\tmtctr r12\n");
17788 fprintf (file, "\tbctr\n");
17789 }
f676971a 17790
ee890fe2
SS
17791 machopic_lazy_symbol_ptr_section ();
17792 fprintf (file, "%s:\n", lazy_ptr_name);
17793 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
17794 fprintf (file, "%sdyld_stub_binding_helper\n",
17795 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
17796}
17797
17798/* Legitimize PIC addresses. If the address is already
17799 position-independent, we return ORIG. Newly generated
17800 position-independent addresses go into a reg. This is REG if
17801 nonzero; otherwise we allocate register(s) as necessary. */
17802
9390387d 17803#define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
17804
17805rtx
f676971a 17806rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 17807 rtx reg)
ee890fe2
SS
17808{
17809 rtx base, offset;
17810
17811 if (reg == NULL && ! reload_in_progress && ! reload_completed)
17812 reg = gen_reg_rtx (Pmode);
17813
17814 if (GET_CODE (orig) == CONST)
17815 {
37409796
NS
17816 rtx reg_temp;
17817
ee890fe2
SS
17818 if (GET_CODE (XEXP (orig, 0)) == PLUS
17819 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
17820 return orig;
17821
37409796 17822 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 17823
37409796
NS
17824 /* Use a different reg for the intermediate value, as
17825 it will be marked UNCHANGING. */
17826 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
17827 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
17828 Pmode, reg_temp);
17829 offset =
17830 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
17831 Pmode, reg);
bb8df8a6 17832
ee890fe2
SS
17833 if (GET_CODE (offset) == CONST_INT)
17834 {
17835 if (SMALL_INT (offset))
ed8908e7 17836 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
17837 else if (! reload_in_progress && ! reload_completed)
17838 offset = force_reg (Pmode, offset);
17839 else
c859cda6
DJ
17840 {
17841 rtx mem = force_const_mem (Pmode, orig);
17842 return machopic_legitimize_pic_address (mem, Pmode, reg);
17843 }
ee890fe2 17844 }
f1c25d3b 17845 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
17846 }
17847
17848 /* Fall back on generic machopic code. */
17849 return machopic_legitimize_pic_address (orig, mode, reg);
17850}
17851
17852/* This is just a placeholder to make linking work without having to
17853 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17854 ever needed for Darwin (not too likely!) this would have to get a
17855 real definition. */
17856
17857void
863d938c 17858toc_section (void)
ee890fe2
SS
17859{
17860}
17861
c4e18b1c
GK
17862/* Output a .machine directive for the Darwin assembler, and call
17863 the generic start_file routine. */
17864
17865static void
17866rs6000_darwin_file_start (void)
17867{
94ff898d 17868 static const struct
c4e18b1c
GK
17869 {
17870 const char *arg;
17871 const char *name;
17872 int if_set;
17873 } mapping[] = {
55dbfb48 17874 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
17875 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
17876 { "power4", "ppc970", 0 },
17877 { "G5", "ppc970", 0 },
17878 { "7450", "ppc7450", 0 },
17879 { "7400", "ppc7400", MASK_ALTIVEC },
17880 { "G4", "ppc7400", 0 },
17881 { "750", "ppc750", 0 },
17882 { "740", "ppc750", 0 },
17883 { "G3", "ppc750", 0 },
17884 { "604e", "ppc604e", 0 },
17885 { "604", "ppc604", 0 },
17886 { "603e", "ppc603", 0 },
17887 { "603", "ppc603", 0 },
17888 { "601", "ppc601", 0 },
17889 { NULL, "ppc", 0 } };
17890 const char *cpu_id = "";
17891 size_t i;
94ff898d 17892
9390387d 17893 rs6000_file_start ();
c4e18b1c
GK
17894
17895 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17896 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
17897 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
17898 && rs6000_select[i].string[0] != '\0')
17899 cpu_id = rs6000_select[i].string;
17900
17901 /* Look through the mapping array. Pick the first name that either
17902 matches the argument, has a bit set in IF_SET that is also set
17903 in the target flags, or has a NULL name. */
17904
17905 i = 0;
17906 while (mapping[i].arg != NULL
17907 && strcmp (mapping[i].arg, cpu_id) != 0
17908 && (mapping[i].if_set & target_flags) == 0)
17909 i++;
17910
17911 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
17912}
17913
ee890fe2 17914#endif /* TARGET_MACHO */
7c262518
RH
17915
17916#if TARGET_ELF
17917static unsigned int
a2369ed3 17918rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
7c262518 17919{
1ff8f81a
AM
17920 return default_section_type_flags_1 (decl, name, reloc,
17921 flag_pic || DEFAULT_ABI == ABI_AIX);
7c262518 17922}
d9f6800d
RH
17923
17924/* Record an element in the table of global constructors. SYMBOL is
17925 a SYMBOL_REF of the function to be called; PRIORITY is a number
17926 between 0 and MAX_INIT_PRIORITY.
17927
17928 This differs from default_named_section_asm_out_constructor in
17929 that we have special handling for -mrelocatable. */
17930
17931static void
a2369ed3 17932rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
17933{
17934 const char *section = ".ctors";
17935 char buf[16];
17936
17937 if (priority != DEFAULT_INIT_PRIORITY)
17938 {
17939 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
17940 /* Invert the numbering so the linker puts us in the proper
17941 order; constructors are run from right to left, and the
17942 linker sorts in increasing order. */
17943 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
17944 section = buf;
17945 }
17946
715bdd29
RH
17947 named_section_flags (section, SECTION_WRITE);
17948 assemble_align (POINTER_SIZE);
d9f6800d
RH
17949
17950 if (TARGET_RELOCATABLE)
17951 {
17952 fputs ("\t.long (", asm_out_file);
17953 output_addr_const (asm_out_file, symbol);
17954 fputs (")@fixup\n", asm_out_file);
17955 }
17956 else
c8af3574 17957 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
17958}
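/* Illustrative example (assuming MAX_INIT_PRIORITY == 65535): a
   constructor registered with priority 101 is placed in section
   ".ctors.65434", so the linker's increasing sort reproduces the
   right-to-left execution order noted above; with -mrelocatable the
   entry is emitted as ".long (symbol)@fixup" instead of a plain
   pointer.  */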
17959
17960static void
a2369ed3 17961rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
17962{
17963 const char *section = ".dtors";
17964 char buf[16];
17965
17966 if (priority != DEFAULT_INIT_PRIORITY)
17967 {
17968 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
17969 /* Invert the numbering so the linker puts us in the proper
17970 order; constructors are run from right to left, and the
17971 linker sorts in increasing order. */
17972 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
17973 section = buf;
17974 }
17975
715bdd29
RH
17976 named_section_flags (section, SECTION_WRITE);
17977 assemble_align (POINTER_SIZE);
d9f6800d
RH
17978
17979 if (TARGET_RELOCATABLE)
17980 {
17981 fputs ("\t.long (", asm_out_file);
17982 output_addr_const (asm_out_file, symbol);
17983 fputs (")@fixup\n", asm_out_file);
17984 }
17985 else
c8af3574 17986 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 17987}
9739c90c
JJ
17988
17989void
a2369ed3 17990rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
17991{
17992 if (TARGET_64BIT)
17993 {
17994 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
17995 ASM_OUTPUT_LABEL (file, name);
17996 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
17997 rs6000_output_function_entry (file, name);
17998 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
17999 if (DOT_SYMBOLS)
9739c90c 18000 {
85b776df 18001 fputs ("\t.size\t", file);
9739c90c 18002 assemble_name (file, name);
85b776df
AM
18003 fputs (",24\n\t.type\t.", file);
18004 assemble_name (file, name);
18005 fputs (",@function\n", file);
18006 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
18007 {
18008 fputs ("\t.globl\t.", file);
18009 assemble_name (file, name);
18010 putc ('\n', file);
18011 }
9739c90c 18012 }
85b776df
AM
18013 else
18014 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 18015 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
18016 rs6000_output_function_entry (file, name);
18017 fputs (":\n", file);
9739c90c
JJ
18018 return;
18019 }
18020
18021 if (TARGET_RELOCATABLE
7f970b70 18022 && !TARGET_SECURE_PLT
9739c90c 18023 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 18024 && uses_TOC ())
9739c90c
JJ
18025 {
18026 char buf[256];
18027
18028 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
18029
18030 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
18031 fprintf (file, "\t.long ");
18032 assemble_name (file, buf);
18033 putc ('-', file);
18034 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
18035 assemble_name (file, buf);
18036 putc ('\n', file);
18037 }
18038
18039 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
18040 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
18041
18042 if (DEFAULT_ABI == ABI_AIX)
18043 {
18044 const char *desc_name, *orig_name;
18045
18046 orig_name = (*targetm.strip_name_encoding) (name);
18047 desc_name = orig_name;
18048 while (*desc_name == '.')
18049 desc_name++;
18050
18051 if (TREE_PUBLIC (decl))
18052 fprintf (file, "\t.globl %s\n", desc_name);
18053
18054 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18055 fprintf (file, "%s:\n", desc_name);
18056 fprintf (file, "\t.long %s\n", orig_name);
18057 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
18058 if (DEFAULT_ABI == ABI_AIX)
18059 fputs ("\t.long 0\n", file);
18060 fprintf (file, "\t.previous\n");
18061 }
18062 ASM_OUTPUT_LABEL (file, name);
18063}
1334b570
AM
18064
18065static void
18066rs6000_elf_end_indicate_exec_stack (void)
18067{
18068 if (TARGET_32BIT)
18069 file_end_indicate_exec_stack ();
18070}
7c262518
RH
18071#endif
18072
cbaaba19 18073#if TARGET_XCOFF
7c262518 18074static void
a2369ed3 18075rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
18076{
18077 fputs (GLOBAL_ASM_OP, stream);
18078 RS6000_OUTPUT_BASENAME (stream, name);
18079 putc ('\n', stream);
18080}
18081
18082static void
c18a5b6c
MM
18083rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
18084 tree decl ATTRIBUTE_UNUSED)
7c262518 18085{
0e5dbd9b
DE
18086 int smclass;
18087 static const char * const suffix[3] = { "PR", "RO", "RW" };
18088
18089 if (flags & SECTION_CODE)
18090 smclass = 0;
18091 else if (flags & SECTION_WRITE)
18092 smclass = 2;
18093 else
18094 smclass = 1;
18095
5b5198f7 18096 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 18097 (flags & SECTION_CODE) ? "." : "",
5b5198f7 18098 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 18099}
ae46c4e0
RH
18100
18101static void
f676971a 18102rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 18103 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 18104{
5add3202 18105 if (decl_readonly_section_1 (decl, reloc, 1))
ae46c4e0 18106 {
0e5dbd9b 18107 if (TREE_PUBLIC (decl))
c4ad648e 18108 read_only_data_section ();
ae46c4e0 18109 else
c4ad648e 18110 read_only_private_data_section ();
ae46c4e0
RH
18111 }
18112 else
18113 {
0e5dbd9b 18114 if (TREE_PUBLIC (decl))
c4ad648e 18115 data_section ();
ae46c4e0 18116 else
c4ad648e 18117 private_data_section ();
ae46c4e0
RH
18118 }
18119}
18120
18121static void
a2369ed3 18122rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
18123{
18124 const char *name;
ae46c4e0 18125
5b5198f7
DE
18126 /* Use select_section for private and uninitialized data. */
18127 if (!TREE_PUBLIC (decl)
18128 || DECL_COMMON (decl)
0e5dbd9b
DE
18129 || DECL_INITIAL (decl) == NULL_TREE
18130 || DECL_INITIAL (decl) == error_mark_node
18131 || (flag_zero_initialized_in_bss
18132 && initializer_zerop (DECL_INITIAL (decl))))
18133 return;
18134
18135 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
18136 name = (*targetm.strip_name_encoding) (name);
18137 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 18138}
b64a1b53 18139
fb49053f
RH
18140/* Select section for constant in constant pool.
18141
18142 On RS/6000, all constants are in the private read-only data area.
18143 However, if this is being placed in the TOC it must be output as a
18144 toc entry. */
18145
b64a1b53 18146static void
f676971a 18147rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 18148 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
18149{
18150 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
18151 toc_section ();
18152 else
18153 read_only_private_data_section ();
18154}
772c5265
RH
18155
18156/* Remove any trailing [DS] or the like from the symbol name. */
18157
18158static const char *
a2369ed3 18159rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
18160{
18161 size_t len;
18162 if (*name == '*')
18163 name++;
18164 len = strlen (name);
18165 if (name[len - 1] == ']')
18166 return ggc_alloc_string (name, len - 4);
18167 else
18168 return name;
18169}
18170
5add3202
DE
18171/* Section attributes. AIX is always PIC. */
18172
18173static unsigned int
a2369ed3 18174rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 18175{
5b5198f7
DE
18176 unsigned int align;
18177 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
18178
18179 /* Align to at least UNIT size. */
18180 if (flags & SECTION_CODE)
18181 align = MIN_UNITS_PER_WORD;
18182 else
18183 /* Increase alignment of large objects if not already stricter. */
18184 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
18185 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
18186 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
18187
18188 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 18189}
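/* Worked example (illustrative, assuming MIN_UNITS_PER_WORD is 4 and
   UNITS_PER_FP_WORD is 8): a 16-byte global array declared with 4-byte
   alignment is larger than MIN_UNITS_PER_WORD, so align becomes
   MAX (4, 8) = 8 and exact_log2 (8) = 3 is stored in the SECTION_ENTSIZE
   bits of the returned flags; a plain 4-byte int keeps MAX (4, 4) = 4,
   stored as 2.  */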
a5fe455b 18190
1bc7c5b6
ZW
18191/* Output at beginning of assembler file.
18192
18193 Initialize the section names for the RS/6000 at this point.
18194
18195 Specify filename, including full path, to assembler.
18196
18197 We want to go into the TOC section so at least one .toc will be emitted.
18198 Also, in order to output proper .bs/.es pairs, we need at least one static
18199 [RW] section emitted.
18200
18201 Finally, declare mcount when profiling to make the assembler happy. */
18202
18203static void
863d938c 18204rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
18205{
18206 rs6000_gen_section_name (&xcoff_bss_section_name,
18207 main_input_filename, ".bss_");
18208 rs6000_gen_section_name (&xcoff_private_data_section_name,
18209 main_input_filename, ".rw_");
18210 rs6000_gen_section_name (&xcoff_read_only_section_name,
18211 main_input_filename, ".ro_");
18212
18213 fputs ("\t.file\t", asm_out_file);
18214 output_quoted_string (asm_out_file, main_input_filename);
18215 fputc ('\n', asm_out_file);
1bc7c5b6
ZW
18216 if (write_symbols != NO_DEBUG)
18217 private_data_section ();
18218 text_section ();
18219 if (profile_flag)
18220 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
18221 rs6000_file_start ();
18222}
18223
a5fe455b
ZW
18224/* Output at end of assembler file.
18225 On the RS/6000, referencing data should automatically pull in text. */
18226
18227static void
863d938c 18228rs6000_xcoff_file_end (void)
a5fe455b
ZW
18229{
18230 text_section ();
18231 fputs ("_section_.text:\n", asm_out_file);
18232 data_section ();
18233 fputs (TARGET_32BIT
18234 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
18235 asm_out_file);
18236}
f1384257 18237#endif /* TARGET_XCOFF */
0e5dbd9b 18238
3c50106f
RH
18239/* Compute a (partial) cost for rtx X. Return true if the complete
18240 cost has been computed, and false if subexpressions should be
18241 scanned. In either case, *TOTAL contains the cost result. */
18242
18243static bool
1494c534 18244rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 18245{
f0517163
RS
18246 enum machine_mode mode = GET_MODE (x);
18247
3c50106f
RH
18248 switch (code)
18249 {
30a555d9 18250 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 18251 case CONST_INT:
066cd967
DE
18252 if (((outer_code == SET
18253 || outer_code == PLUS
18254 || outer_code == MINUS)
18255 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18256 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
066cd967
DE
18257 || (outer_code == AND
18258 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
22e54023
DE
18259 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18260 mode == SImode ? 'L' : 'J'))
1990cd79
AM
18261 || mask_operand (x, mode)
18262 || (mode == DImode
18263 && mask64_operand (x, DImode))))
22e54023
DE
18264 || ((outer_code == IOR || outer_code == XOR)
18265 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18266 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18267 mode == SImode ? 'L' : 'J'))))
066cd967
DE
18268 || outer_code == ASHIFT
18269 || outer_code == ASHIFTRT
18270 || outer_code == LSHIFTRT
18271 || outer_code == ROTATE
18272 || outer_code == ROTATERT
d5861a7a 18273 || outer_code == ZERO_EXTRACT
066cd967
DE
18274 || (outer_code == MULT
18275 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
22e54023
DE
18276 || ((outer_code == DIV || outer_code == UDIV
18277 || outer_code == MOD || outer_code == UMOD)
18278 && exact_log2 (INTVAL (x)) >= 0)
066cd967
DE
18279 || (outer_code == COMPARE
18280 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
22e54023
DE
18281 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')))
18282 || (outer_code == EQ
18283 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
18284 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18285 || (CONST_OK_FOR_LETTER_P (INTVAL (x),
18286 mode == SImode ? 'L' : 'J'))))
18287 || (outer_code == GTU
18288 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
18289 || (outer_code == LTU
18290 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'P')))
066cd967
DE
18291 {
18292 *total = 0;
18293 return true;
18294 }
18295 else if ((outer_code == PLUS
4ae234b0 18296 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 18297 || (outer_code == MINUS
4ae234b0 18298 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
18299 || ((outer_code == SET
18300 || outer_code == IOR
18301 || outer_code == XOR)
18302 && (INTVAL (x)
18303 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
18304 {
18305 *total = COSTS_N_INSNS (1);
18306 return true;
18307 }
18308 /* FALLTHRU */
18309
18310 case CONST_DOUBLE:
18311 if (mode == DImode
18312 && ((outer_code == AND
18313 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
18314 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
1990cd79
AM
18315 || mask_operand (x, DImode)
18316 || mask64_operand (x, DImode)))
066cd967
DE
18317 || ((outer_code == IOR || outer_code == XOR)
18318 && CONST_DOUBLE_HIGH (x) == 0
18319 && (CONST_DOUBLE_LOW (x)
18320 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
18321 {
18322 *total = 0;
18323 return true;
18324 }
18325 else if (mode == DImode
18326 && (outer_code == SET
18327 || outer_code == IOR
18328 || outer_code == XOR)
18329 && CONST_DOUBLE_HIGH (x) == 0)
18330 {
18331 *total = COSTS_N_INSNS (1);
18332 return true;
18333 }
18334 /* FALLTHRU */
18335
3c50106f 18336 case CONST:
066cd967 18337 case HIGH:
3c50106f 18338 case SYMBOL_REF:
066cd967
DE
18339 case MEM:
18340 /* When optimizing for size, a MEM should be slightly more expensive
18341 than generating its address, e.g., (plus (reg) (const)).
c112cf2b 18342 L1 cache latency is about two instructions. */
066cd967 18343 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
18344 return true;
18345
30a555d9
DE
18346 case LABEL_REF:
18347 *total = 0;
18348 return true;
18349
3c50106f 18350 case PLUS:
f0517163 18351 if (mode == DFmode)
066cd967
DE
18352 {
18353 if (GET_CODE (XEXP (x, 0)) == MULT)
18354 {
18355 /* FNMA accounted in outer NEG. */
18356 if (outer_code == NEG)
18357 *total = rs6000_cost->dmul - rs6000_cost->fp;
18358 else
18359 *total = rs6000_cost->dmul;
18360 }
18361 else
18362 *total = rs6000_cost->fp;
18363 }
f0517163 18364 else if (mode == SFmode)
066cd967
DE
18365 {
18366 /* FNMA accounted in outer NEG. */
18367 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18368 *total = 0;
18369 else
18370 *total = rs6000_cost->fp;
18371 }
f0517163 18372 else
066cd967
DE
18373 *total = COSTS_N_INSNS (1);
18374 return false;
3c50106f 18375
52190329 18376 case MINUS:
f0517163 18377 if (mode == DFmode)
066cd967
DE
18378 {
18379 if (GET_CODE (XEXP (x, 0)) == MULT)
18380 {
18381 /* FNMA accounted in outer NEG. */
18382 if (outer_code == NEG)
18383 *total = 0;
18384 else
18385 *total = rs6000_cost->dmul;
18386 }
18387 else
18388 *total = rs6000_cost->fp;
18389 }
f0517163 18390 else if (mode == SFmode)
066cd967
DE
18391 {
18392 /* FNMA accounted in outer NEG. */
18393 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
18394 *total = 0;
18395 else
18396 *total = rs6000_cost->fp;
18397 }
f0517163 18398 else
c4ad648e 18399 *total = COSTS_N_INSNS (1);
066cd967 18400 return false;
3c50106f
RH
18401
18402 case MULT:
c9dbf840
DE
18403 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18404 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x, 1)), 'I'))
3c50106f 18405 {
8b897cfa
RS
18406 if (INTVAL (XEXP (x, 1)) >= -256
18407 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 18408 *total = rs6000_cost->mulsi_const9;
8b897cfa 18409 else
06a67bdd 18410 *total = rs6000_cost->mulsi_const;
3c50106f 18411 }
066cd967
DE
18412 /* FMA accounted in outer PLUS/MINUS. */
18413 else if ((mode == DFmode || mode == SFmode)
18414 && (outer_code == PLUS || outer_code == MINUS))
18415 *total = 0;
f0517163 18416 else if (mode == DFmode)
06a67bdd 18417 *total = rs6000_cost->dmul;
f0517163 18418 else if (mode == SFmode)
06a67bdd 18419 *total = rs6000_cost->fp;
f0517163 18420 else if (mode == DImode)
06a67bdd 18421 *total = rs6000_cost->muldi;
8b897cfa 18422 else
06a67bdd 18423 *total = rs6000_cost->mulsi;
066cd967 18424 return false;
3c50106f
RH
18425
18426 case DIV:
18427 case MOD:
f0517163
RS
18428 if (FLOAT_MODE_P (mode))
18429 {
06a67bdd
RS
18430 *total = mode == DFmode ? rs6000_cost->ddiv
18431 : rs6000_cost->sdiv;
066cd967 18432 return false;
f0517163 18433 }
5efb1046 18434 /* FALLTHRU */
3c50106f
RH
18435
18436 case UDIV:
18437 case UMOD:
627b6fe2
DJ
18438 if (GET_CODE (XEXP (x, 1)) == CONST_INT
18439 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
18440 {
18441 if (code == DIV || code == MOD)
18442 /* Shift, addze */
18443 *total = COSTS_N_INSNS (2);
18444 else
18445 /* Shift */
18446 *total = COSTS_N_INSNS (1);
18447 }
c4ad648e 18448 else
627b6fe2
DJ
18449 {
18450 if (GET_MODE (XEXP (x, 1)) == DImode)
18451 *total = rs6000_cost->divdi;
18452 else
18453 *total = rs6000_cost->divsi;
18454 }
18455 /* Add in shift and subtract for MOD. */
18456 if (code == MOD || code == UMOD)
18457 *total += COSTS_N_INSNS (2);
066cd967 18458 return false;
3c50106f
RH
18459
18460 case FFS:
18461 *total = COSTS_N_INSNS (4);
066cd967 18462 return false;
3c50106f 18463
06a67bdd 18464 case NOT:
066cd967
DE
18465 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
18466 {
18467 *total = 0;
18468 return false;
18469 }
18470 /* FALLTHRU */
18471
18472 case AND:
18473 case IOR:
18474 case XOR:
d5861a7a
DE
18475 case ZERO_EXTRACT:
18476 *total = COSTS_N_INSNS (1);
18477 return false;
18478
066cd967
DE
18479 case ASHIFT:
18480 case ASHIFTRT:
18481 case LSHIFTRT:
18482 case ROTATE:
18483 case ROTATERT:
d5861a7a 18484 /* Handle mul_highpart. */
066cd967
DE
18485 if (outer_code == TRUNCATE
18486 && GET_CODE (XEXP (x, 0)) == MULT)
18487 {
18488 if (mode == DImode)
18489 *total = rs6000_cost->muldi;
18490 else
18491 *total = rs6000_cost->mulsi;
18492 return true;
18493 }
d5861a7a
DE
18494 else if (outer_code == AND)
18495 *total = 0;
18496 else
18497 *total = COSTS_N_INSNS (1);
18498 return false;
18499
18500 case SIGN_EXTEND:
18501 case ZERO_EXTEND:
18502 if (GET_CODE (XEXP (x, 0)) == MEM)
18503 *total = 0;
18504 else
18505 *total = COSTS_N_INSNS (1);
066cd967 18506 return false;
06a67bdd 18507
066cd967
DE
18508 case COMPARE:
18509 case NEG:
18510 case ABS:
18511 if (!FLOAT_MODE_P (mode))
18512 {
18513 *total = COSTS_N_INSNS (1);
18514 return false;
18515 }
18516 /* FALLTHRU */
18517
18518 case FLOAT:
18519 case UNSIGNED_FLOAT:
18520 case FIX:
18521 case UNSIGNED_FIX:
06a67bdd
RS
18522 case FLOAT_TRUNCATE:
18523 *total = rs6000_cost->fp;
066cd967 18524 return false;
06a67bdd 18525
a2af5043
DJ
18526 case FLOAT_EXTEND:
18527 if (mode == DFmode)
18528 *total = 0;
18529 else
18530 *total = rs6000_cost->fp;
18531 return false;
18532
06a67bdd
RS
18533 case UNSPEC:
18534 switch (XINT (x, 1))
18535 {
18536 case UNSPEC_FRSP:
18537 *total = rs6000_cost->fp;
18538 return true;
18539
18540 default:
18541 break;
18542 }
18543 break;
18544
18545 case CALL:
18546 case IF_THEN_ELSE:
18547 if (optimize_size)
18548 {
18549 *total = COSTS_N_INSNS (1);
18550 return true;
18551 }
066cd967
DE
18552 else if (FLOAT_MODE_P (mode)
18553 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
18554 {
18555 *total = rs6000_cost->fp;
18556 return false;
18557 }
06a67bdd
RS
18558 break;
18559
c0600ecd
DE
18560 case EQ:
18561 case GTU:
18562 case LTU:
22e54023
DE
18563 /* Carry bit requires mode == Pmode.
18564 NEG or PLUS already counted so only add one. */
18565 if (mode == Pmode
18566 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 18567 {
22e54023
DE
18568 *total = COSTS_N_INSNS (1);
18569 return true;
18570 }
18571 if (outer_code == SET)
18572 {
18573 if (XEXP (x, 1) == const0_rtx)
c0600ecd 18574 {
22e54023 18575 *total = COSTS_N_INSNS (2);
c0600ecd 18576 return true;
c0600ecd 18577 }
22e54023
DE
18578 else if (mode == Pmode)
18579 {
18580 *total = COSTS_N_INSNS (3);
18581 return false;
18582 }
18583 }
18584 /* FALLTHRU */
18585
18586 case GT:
18587 case LT:
18588 case UNORDERED:
18589 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
18590 {
18591 *total = COSTS_N_INSNS (2);
18592 return true;
c0600ecd 18593 }
22e54023
DE
18594 /* CC COMPARE. */
18595 if (outer_code == COMPARE)
18596 {
18597 *total = 0;
18598 return true;
18599 }
18600 break;
c0600ecd 18601
3c50106f 18602 default:
06a67bdd 18603 break;
3c50106f 18604 }
06a67bdd
RS
18605
18606 return false;
3c50106f
RH
18607}
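/* Illustrative example (not from the original source): costing
   (mult:SI (reg:SI) (const_int 100)), the MULT case sees a CONST_INT
   multiplier in the range -256..255 and sets *total to
   rs6000_cost->mulsi_const9, returning false so the operands are still
   scanned; the CONST_INT itself, queried with outer_code == MULT, then
   falls under the CONST_INT case above and is costed as free.  */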
18608
34bb030a
DE
18609/* A C expression returning the cost of moving data of mode MODE from a
18610 register of class FROM to a register of class TO. */
18611
18612int
f676971a 18613rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 18614 enum reg_class from, enum reg_class to)
34bb030a
DE
18615{
18616 /* Moves from/to GENERAL_REGS. */
18617 if (reg_classes_intersect_p (to, GENERAL_REGS)
18618 || reg_classes_intersect_p (from, GENERAL_REGS))
18619 {
18620 if (! reg_classes_intersect_p (to, GENERAL_REGS))
18621 from = to;
18622
18623 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
18624 return (rs6000_memory_move_cost (mode, from, 0)
18625 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
18626
c4ad648e
AM
18627 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18628 shift. */
34bb030a
DE
18629 else if (from == CR_REGS)
18630 return 4;
18631
18632 else
c4ad648e 18633 /* A move will cost one instruction per GPR moved. */
c8b622ff 18634 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
18635 }
18636
c4ad648e 18637 /* Moving between two registers whose classes intersect is a direct move: one instruction, or two for TFmode. */
34bb030a
DE
18638 else if (reg_classes_intersect_p (to, from))
18639 return mode == TFmode ? 4 : 2;
18640
c4ad648e 18641 /* Everything else has to go through GENERAL_REGS. */
34bb030a 18642 else
f676971a 18643 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
18644 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
18645}
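/* Worked example (illustrative only): an SImode move between two
   GENERAL_REGS costs 2 * hard_regno_nregs[0][SImode] = 2, i.e. a single
   move instruction, whereas a move between FLOAT_REGS and GENERAL_REGS
   has no direct path and is priced as a store plus a load through
   rs6000_memory_move_cost.  */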
18646
18647/* A C expression returning the cost of moving data of mode MODE between
18648 a register of class CLASS and memory. */
18649
18650int
f676971a 18651rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 18652 int in ATTRIBUTE_UNUSED)
34bb030a
DE
18653{
18654 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 18655 return 4 * hard_regno_nregs[0][mode];
34bb030a 18656 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 18657 return 4 * hard_regno_nregs[32][mode];
34bb030a 18658 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 18659 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
18660 else
18661 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
18662}
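/* Worked example (illustrative only): DFmode fits in one floating-point
   register, so hard_regno_nregs[32][DFmode] is 1 and a load or store via
   FLOAT_REGS is costed at 4; a class with no direct path to memory pays
   4 plus the cost of first moving the value to GENERAL_REGS.  */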
18663
ef765ea9
DE
18664/* Newton-Raphson approximation of single-precision floating point divide n/d.
18665 Assumes no trapping math and finite arguments. */
18666
18667void
18668rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
18669{
18670 rtx x0, e0, e1, y1, u0, v0, one;
18671
18672 x0 = gen_reg_rtx (SFmode);
18673 e0 = gen_reg_rtx (SFmode);
18674 e1 = gen_reg_rtx (SFmode);
18675 y1 = gen_reg_rtx (SFmode);
18676 u0 = gen_reg_rtx (SFmode);
18677 v0 = gen_reg_rtx (SFmode);
18678 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
18679
18680 /* x0 = 1./d estimate */
18681 emit_insn (gen_rtx_SET (VOIDmode, x0,
18682 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
18683 UNSPEC_FRES)));
18684 /* e0 = 1. - d * x0 */
18685 emit_insn (gen_rtx_SET (VOIDmode, e0,
18686 gen_rtx_MINUS (SFmode, one,
18687 gen_rtx_MULT (SFmode, d, x0))));
18688 /* e1 = e0 + e0 * e0 */
18689 emit_insn (gen_rtx_SET (VOIDmode, e1,
18690 gen_rtx_PLUS (SFmode,
18691 gen_rtx_MULT (SFmode, e0, e0), e0)));
18692 /* y1 = x0 + e1 * x0 */
18693 emit_insn (gen_rtx_SET (VOIDmode, y1,
18694 gen_rtx_PLUS (SFmode,
18695 gen_rtx_MULT (SFmode, e1, x0), x0)));
18696 /* u0 = n * y1 */
18697 emit_insn (gen_rtx_SET (VOIDmode, u0,
18698 gen_rtx_MULT (SFmode, n, y1)));
18699 /* v0 = n - d * u0 */
18700 emit_insn (gen_rtx_SET (VOIDmode, v0,
18701 gen_rtx_MINUS (SFmode, n,
18702 gen_rtx_MULT (SFmode, d, u0))));
18703 /* res = u0 + v0 * y1 */
18704 emit_insn (gen_rtx_SET (VOIDmode, res,
18705 gen_rtx_PLUS (SFmode,
18706 gen_rtx_MULT (SFmode, v0, y1), u0)));
18707}
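/* Illustrative sketch only, not part of the target code: the same
   refinement written in plain C.  recip_estimate() is a hypothetical
   stand-in for the low-precision hardware reciprocal estimate that the
   UNSPEC_FRES pattern above provides.  */

float
recip_estimate (float d)
{
  /* Stand-in for the hardware estimate instruction.  */
  return 1.0f / d;
}

float
swdivsf_sketch (float n, float d)
{
  float x0 = recip_estimate (d);	/* x0 ~= 1/d */
  float e0 = 1.0f - d * x0;		/* error of the estimate */
  float e1 = e0 + e0 * e0;		/* error refined to second order */
  float y1 = x0 + e1 * x0;		/* improved 1/d */
  float u0 = n * y1;			/* first quotient approximation */
  float v0 = n - d * u0;		/* remaining residual */
  return u0 + v0 * y1;			/* corrected quotient */
}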
18708
18709/* Newton-Raphson approximation of double-precision floating point divide n/d.
18710 Assumes no trapping math and finite arguments. */
18711
18712void
18713rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
18714{
18715 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
18716
18717 x0 = gen_reg_rtx (DFmode);
18718 e0 = gen_reg_rtx (DFmode);
18719 e1 = gen_reg_rtx (DFmode);
18720 e2 = gen_reg_rtx (DFmode);
18721 y1 = gen_reg_rtx (DFmode);
18722 y2 = gen_reg_rtx (DFmode);
18723 y3 = gen_reg_rtx (DFmode);
18724 u0 = gen_reg_rtx (DFmode);
18725 v0 = gen_reg_rtx (DFmode);
18726 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
18727
18728 /* x0 = 1./d estimate */
18729 emit_insn (gen_rtx_SET (VOIDmode, x0,
18730 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
18731 UNSPEC_FRES)));
18732 /* e0 = 1. - d * x0 */
18733 emit_insn (gen_rtx_SET (VOIDmode, e0,
18734 gen_rtx_MINUS (DFmode, one,
18735 gen_rtx_MULT (DFmode, d, x0))));
18736 /* y1 = x0 + e0 * x0 */
18737 emit_insn (gen_rtx_SET (VOIDmode, y1,
18738 gen_rtx_PLUS (DFmode,
18739 gen_rtx_MULT (DFmode, e0, x0), x0)));
18740 /* e1 = e0 * e0 */
18741 emit_insn (gen_rtx_SET (VOIDmode, e1,
18742 gen_rtx_MULT (DFmode, e0, e0)));
18743 /* y2 = y1 + e1 * y1 */
18744 emit_insn (gen_rtx_SET (VOIDmode, y2,
18745 gen_rtx_PLUS (DFmode,
18746 gen_rtx_MULT (DFmode, e1, y1), y1)));
18747 /* e2 = e1 * e1 */
18748 emit_insn (gen_rtx_SET (VOIDmode, e2,
18749 gen_rtx_MULT (DFmode, e1, e1)));
18750 /* y3 = y2 + e2 * y2 */
18751 emit_insn (gen_rtx_SET (VOIDmode, y3,
18752 gen_rtx_PLUS (DFmode,
18753 gen_rtx_MULT (DFmode, e2, y2), y2)));
18754 /* u0 = n * y3 */
18755 emit_insn (gen_rtx_SET (VOIDmode, u0,
18756 gen_rtx_MULT (DFmode, n, y3)));
18757 /* v0 = n - d * u0 */
18758 emit_insn (gen_rtx_SET (VOIDmode, v0,
18759 gen_rtx_MINUS (DFmode, n,
18760 gen_rtx_MULT (DFmode, d, u0))));
18761 /* res = u0 + v0 * y3 */
18762 emit_insn (gen_rtx_SET (VOIDmode, res,
18763 gen_rtx_PLUS (DFmode,
18764 gen_rtx_MULT (DFmode, v0, y3), u0)));
18765}
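/* Illustrative sketch only, not part of the target code: the
   double-precision variant in plain C.  Each refinement step roughly
   doubles the number of correct bits in the reciprocal, which is why
   this version performs more steps than the single-precision one above.
   recip_estimate_d() is a hypothetical stand-in for the hardware
   estimate.  */

double
recip_estimate_d (double d)
{
  /* Stand-in for the hardware estimate instruction.  */
  return 1.0 / d;
}

double
swdivdf_sketch (double n, double d)
{
  double x0 = recip_estimate_d (d);	/* x0 ~= 1/d */
  double e0 = 1.0 - d * x0;		/* initial error */
  double y1 = x0 + e0 * x0;		/* first refinement of 1/d */
  double e1 = e0 * e0;			/* squared error */
  double y2 = y1 + e1 * y1;		/* second refinement */
  double e2 = e1 * e1;			/* error squared again */
  double y3 = y2 + e2 * y2;		/* third refinement */
  double u0 = n * y3;			/* quotient approximation */
  double v0 = n - d * u0;		/* residual */
  return u0 + v0 * y3;			/* corrected quotient */
}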
18766
ded9bf77
AH
18767/* Return an RTX representing where to find the return value of a
18768 function returning a complex value of mode MODE. */
18769static rtx
18770rs6000_complex_function_value (enum machine_mode mode)
18771{
18772 unsigned int regno;
18773 rtx r1, r2;
18774 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 18775 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 18776
18f63bfa
AH
18777 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
18778 regno = FP_ARG_RETURN;
354ed18f
AH
18779 else
18780 {
18f63bfa 18781 regno = GP_ARG_RETURN;
ded9bf77 18782
18f63bfa
AH
18783 /* 32-bit is OK since it'll go in r3/r4. */
18784 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
18785 return gen_rtx_REG (mode, regno);
18786 }
18787
18f63bfa
AH
18788 if (inner_bytes >= 8)
18789 return gen_rtx_REG (mode, regno);
18790
ded9bf77
AH
18791 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
18792 const0_rtx);
18793 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 18794 GEN_INT (inner_bytes));
ded9bf77
AH
18795 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
18796}
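/* Illustrative example (not from the original source): for a function
   returning __complex__ float with hardware FPRs, the inner mode is
   SFmode (4 bytes), so the PARALLEL built above places the real part in
   FP_ARG_RETURN at offset 0 and the imaginary part in the next FP
   register at offset 4.  */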
18797
a6ebc39a
AH
18798/* Define how to find the value returned by a function.
18799 VALTYPE is the data type of the value (as a tree).
18800 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18801 otherwise, FUNC is 0.
18802
18803 On the SPE, both FPs and vectors are returned in r3.
18804
18805 On RS/6000 an integer value is in r3 and a floating-point value is in
18806 fp1, unless -msoft-float. */
18807
18808rtx
18809rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
18810{
18811 enum machine_mode mode;
2a8fa26c 18812 unsigned int regno;
a6ebc39a 18813
594a51fe
SS
18814 /* Special handling for structs in darwin64. */
18815 if (rs6000_darwin64_abi
18816 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
18817 && TREE_CODE (valtype) == RECORD_TYPE
18818 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
18819 {
18820 CUMULATIVE_ARGS valcum;
18821 rtx valret;
18822
0b5383eb 18823 valcum.words = 0;
594a51fe
SS
18824 valcum.fregno = FP_ARG_MIN_REG;
18825 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
18826 /* Do a trial code generation as if this were going to be passed as
18827 an argument; if any part goes in memory, we return NULL. */
18828 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
18829 if (valret)
18830 return valret;
18831 /* Otherwise fall through to standard ABI rules. */
18832 }
18833
0e67400a
FJ
18834 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
18835 {
18836 /* A long long return value needs to be split between r3 and r4 in the 32-bit ABI with -mpowerpc64. */
18837 return gen_rtx_PARALLEL (DImode,
18838 gen_rtvec (2,
18839 gen_rtx_EXPR_LIST (VOIDmode,
18840 gen_rtx_REG (SImode, GP_ARG_RETURN),
18841 const0_rtx),
18842 gen_rtx_EXPR_LIST (VOIDmode,
18843 gen_rtx_REG (SImode,
18844 GP_ARG_RETURN + 1),
18845 GEN_INT (4))));
18846 }
0f086e42
FJ
18847 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
18848 {
18849 return gen_rtx_PARALLEL (DCmode,
18850 gen_rtvec (4,
18851 gen_rtx_EXPR_LIST (VOIDmode,
18852 gen_rtx_REG (SImode, GP_ARG_RETURN),
18853 const0_rtx),
18854 gen_rtx_EXPR_LIST (VOIDmode,
18855 gen_rtx_REG (SImode,
18856 GP_ARG_RETURN + 1),
18857 GEN_INT (4)),
18858 gen_rtx_EXPR_LIST (VOIDmode,
18859 gen_rtx_REG (SImode,
18860 GP_ARG_RETURN + 2),
18861 GEN_INT (8)),
18862 gen_rtx_EXPR_LIST (VOIDmode,
18863 gen_rtx_REG (SImode,
18864 GP_ARG_RETURN + 3),
18865 GEN_INT (12))));
18866 }
a6ebc39a
AH
18867 if ((INTEGRAL_TYPE_P (valtype)
18868 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
18869 || POINTER_TYPE_P (valtype))
b78d48dd 18870 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a
AH
18871 else
18872 mode = TYPE_MODE (valtype);
18873
4ed78545 18874 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 18875 regno = FP_ARG_RETURN;
ded9bf77 18876 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 18877 && targetm.calls.split_complex_arg)
ded9bf77 18878 return rs6000_complex_function_value (mode);
44688022 18879 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 18880 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 18881 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 18882 regno = ALTIVEC_ARG_RETURN;
18f63bfa
AH
18883 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18884 && (mode == DFmode || mode == DCmode))
18885 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
18886 else
18887 regno = GP_ARG_RETURN;
18888
18889 return gen_rtx_REG (mode, regno);
18890}
18891
ded9bf77
AH
18892/* Define how to find the value returned by a library function
18893 assuming the value has mode MODE. */
18894rtx
18895rs6000_libcall_value (enum machine_mode mode)
18896{
18897 unsigned int regno;
18898
2e6c9641
FJ
18899 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
18900 {
18901 /* A long long return value needs to be split between r3 and r4 in the 32-bit ABI with -mpowerpc64. */
18902 return gen_rtx_PARALLEL (DImode,
18903 gen_rtvec (2,
18904 gen_rtx_EXPR_LIST (VOIDmode,
18905 gen_rtx_REG (SImode, GP_ARG_RETURN),
18906 const0_rtx),
18907 gen_rtx_EXPR_LIST (VOIDmode,
18908 gen_rtx_REG (SImode,
18909 GP_ARG_RETURN + 1),
18910 GEN_INT (4))));
18911 }
18912
ebb109ad 18913 if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
18914 && TARGET_HARD_FLOAT && TARGET_FPRS)
18915 regno = FP_ARG_RETURN;
44688022
AM
18916 else if (ALTIVEC_VECTOR_MODE (mode)
18917 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 18918 regno = ALTIVEC_ARG_RETURN;
42ba5130 18919 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 18920 return rs6000_complex_function_value (mode);
18f63bfa
AH
18921 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
18922 && (mode == DFmode || mode == DCmode))
18923 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
18924 else
18925 regno = GP_ARG_RETURN;
18926
18927 return gen_rtx_REG (mode, regno);
18928}
18929
d1d0c603
JJ
18930/* Define the offset between two registers, one to be eliminated (FROM)
18931 and its replacement (TO), at the start of a routine. */
18932HOST_WIDE_INT
18933rs6000_initial_elimination_offset (int from, int to)
18934{
18935 rs6000_stack_t *info = rs6000_stack_info ();
18936 HOST_WIDE_INT offset;
18937
7d5175e1 18938 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 18939 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
18940 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18941 {
18942 offset = info->push_p ? 0 : -info->total_size;
18943 if (FRAME_GROWS_DOWNWARD)
5b667039 18944 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
18945 }
18946 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
18947 offset = FRAME_GROWS_DOWNWARD
5b667039 18948 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
18949 : 0;
18950 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
18951 offset = info->total_size;
18952 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
18953 offset = info->push_p ? info->total_size : 0;
18954 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
18955 offset = 0;
18956 else
37409796 18957 gcc_unreachable ();
d1d0c603
JJ
18958
18959 return offset;
18960}
18961
58646b77 18962/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 18963
c8e4f0e9 18964static bool
58646b77 18965rs6000_is_opaque_type (tree type)
62e1dfcf 18966{
58646b77 18967 return (type == opaque_V2SI_type_node
2abe3e28 18968 || type == opaque_V2SF_type_node
58646b77
PB
18969 || type == opaque_p_V2SI_type_node
18970 || type == opaque_V4SI_type_node);
62e1dfcf
NC
18971}
18972
96714395 18973static rtx
a2369ed3 18974rs6000_dwarf_register_span (rtx reg)
96714395
AH
18975{
18976 unsigned regno;
18977
4d4cbc0e
AH
18978 if (TARGET_SPE
18979 && (SPE_VECTOR_MODE (GET_MODE (reg))
18980 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
18981 ;
18982 else
96714395
AH
18983 return NULL_RTX;
18984
18985 regno = REGNO (reg);
18986
18987 /* The SPE GPRs are 64 bits wide, but ordinary instructions only see
18988 the low 32 bits, so a 64-bit value is described as a pair of 32-bit
18989 halves; register numbers 1200+N stand for the upper halves. */
18990 return
18991 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
18992 BYTES_BIG_ENDIAN
18993 ? gen_rtvec (2,
18994 gen_rtx_REG (SImode, regno + 1200),
18995 gen_rtx_REG (SImode, regno))
18996 : gen_rtvec (2,
18997 gen_rtx_REG (SImode, regno),
18998 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
18999}
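/* Illustrative example (not from the original source): for a
   TARGET_E500_DOUBLE DFmode value living in r5 on a big-endian target,
   the span above is a PARALLEL of (reg:SI 1205) followed by (reg:SI 5),
   i.e. the upper 32-bit half first, then the architected low half.  */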
19000
93c9d1ba
AM
19001/* Map internal gcc register numbers to DWARF2 register numbers. */
19002
19003unsigned int
19004rs6000_dbx_register_number (unsigned int regno)
19005{
19006 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
19007 return regno;
19008 if (regno == MQ_REGNO)
19009 return 100;
19010 if (regno == LINK_REGISTER_REGNUM)
19011 return 108;
19012 if (regno == COUNT_REGISTER_REGNUM)
19013 return 109;
19014 if (CR_REGNO_P (regno))
19015 return regno - CR0_REGNO + 86;
19016 if (regno == XER_REGNO)
19017 return 101;
19018 if (ALTIVEC_REGNO_P (regno))
19019 return regno - FIRST_ALTIVEC_REGNO + 1124;
19020 if (regno == VRSAVE_REGNO)
19021 return 356;
19022 if (regno == VSCR_REGNO)
19023 return 67;
19024 if (regno == SPE_ACC_REGNO)
19025 return 99;
19026 if (regno == SPEFSCR_REGNO)
19027 return 612;
19028 /* SPE high reg number. We get these values of regno from
19029 rs6000_dwarf_register_span. */
37409796
NS
19030 gcc_assert (regno >= 1200 && regno < 1232);
19031 return regno;
93c9d1ba
AM
19032}
19033
93f90be6 19034/* target hook eh_return_filter_mode */
f676971a 19035static enum machine_mode
93f90be6
FJ
19036rs6000_eh_return_filter_mode (void)
19037{
19038 return TARGET_32BIT ? SImode : word_mode;
19039}
19040
f676971a
EC
19041/* Target hook for vector_mode_supported_p. */
19042static bool
19043rs6000_vector_mode_supported_p (enum machine_mode mode)
19044{
19045
19046 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
19047 return true;
19048
19049 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
19050 return true;
19051
19052 else
19053 return false;
19054}
19055
bb8df8a6
EC
19056/* Target hook for invalid_arg_for_unprototyped_fn. */
19057static const char *
4d3e6fae
FJ
19058invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
19059{
19060 return (!rs6000_darwin64_abi
19061 && typelist == 0
19062 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
19063 && (funcdecl == NULL_TREE
19064 || (TREE_CODE (funcdecl) == FUNCTION_DECL
19065 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
19066 ? N_("AltiVec argument passed to unprototyped function")
19067 : NULL;
19068}
19069
3aebbe5f
JJ
19070/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
19071 setup by calling the hidden function __stack_chk_fail_local instead
19072 of __stack_chk_fail directly. Otherwise it is better to call
19073 __stack_chk_fail directly. */
19074
19075static tree
19076rs6000_stack_protect_fail (void)
19077{
19078 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
19079 ? default_hidden_stack_protect_fail ()
19080 : default_external_stack_protect_fail ();
19081}
19082
17211ab5 19083#include "gt-rs6000.h"