]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/rs6000/rs6000.c
tree-vrp.c (execute_vrp): Do not check whether current_loops == NULL.
[thirdparty/gcc.git] / gcc / config / rs6000 / rs6000.c
CommitLineData
9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
8ef65e3d 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 2, or (at your
12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf
NC
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the
39d14dda
KC
21 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
22 MA 02110-1301, USA. */
9878760c 23
956d6950 24#include "config.h"
c4d38ccb 25#include "system.h"
4977bab6
ZW
26#include "coretypes.h"
27#include "tm.h"
9878760c
RK
28#include "rtl.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "real.h"
32#include "insn-config.h"
33#include "conditions.h"
9878760c
RK
34#include "insn-attr.h"
35#include "flags.h"
36#include "recog.h"
9878760c 37#include "obstack.h"
9b30bae2 38#include "tree.h"
dfafc897 39#include "expr.h"
2fc1c679 40#include "optabs.h"
2a430ec1 41#include "except.h"
a7df97e6 42#include "function.h"
296b8152 43#include "output.h"
d5fa86ba 44#include "basic-block.h"
d0101753 45#include "integrate.h"
296b8152 46#include "toplev.h"
c8023011 47#include "ggc.h"
9ebbca7d
GK
48#include "hashtab.h"
49#include "tm_p.h"
672a6f42
NB
50#include "target.h"
51#include "target-def.h"
3ac88239 52#include "langhooks.h"
24ea750e 53#include "reload.h"
117dca74 54#include "cfglayout.h"
79ae11c4 55#include "sched-int.h"
cd3ce9b4 56#include "tree-gimple.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
129} machine_function;
130
5248c961
RK
131/* Target cpu type */
132
133enum processor_type rs6000_cpu;
8e3f41e7
MM
134struct rs6000_cpu_select rs6000_select[3] =
135{
815cdc52
MM
136 /* switch name, tune arch */
137 { (const char *)0, "--with-cpu=", 1, 1 },
138 { (const char *)0, "-mcpu=", 1, 1 },
139 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 140};
5248c961 141
d296e02e
AP
142static GTY(()) bool rs6000_cell_dont_microcode;
143
ec507f2d
DE
144/* Always emit branch hint bits. */
145static GTY(()) bool rs6000_always_hint;
146
147/* Schedule instructions for group formation. */
148static GTY(()) bool rs6000_sched_groups;
149
44cd321e
PS
150/* Align branch targets. */
151static GTY(()) bool rs6000_align_branch_targets;
152
569fa502
DN
153/* Support for -msched-costly-dep option. */
154const char *rs6000_sched_costly_dep_str;
155enum rs6000_dependence_cost rs6000_sched_costly_dep;
156
cbe26ab8
DN
157/* Support for -minsert-sched-nops option. */
158const char *rs6000_sched_insert_nops_str;
159enum rs6000_nop_insertion rs6000_sched_insert_nops;
160
7ccf35ed 161/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 162static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 163
602ea4d3 164/* Size of long double. */
6fa3f289
ZW
165int rs6000_long_double_type_size;
166
602ea4d3
JJ
167/* IEEE quad extended precision long double. */
168int rs6000_ieeequad;
169
170/* Whether -mabi=altivec has appeared. */
6fa3f289
ZW
171int rs6000_altivec_abi;
172
a3170dc6
AH
173/* Nonzero if we want SPE ABI extensions. */
174int rs6000_spe_abi;
175
5da702b1
AH
176/* Nonzero if floating point operations are done in the GPRs. */
177int rs6000_float_gprs = 0;
178
594a51fe
SS
179/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
180int rs6000_darwin64_abi;
181
a0ab749a 182/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 183static GTY(()) int common_mode_defined;
c81bebd7 184
9878760c
RK
185/* Save information from a "cmpxx" operation until the branch or scc is
186 emitted. */
9878760c
RK
187rtx rs6000_compare_op0, rs6000_compare_op1;
188int rs6000_compare_fp_p;
874a0744 189
874a0744
MM
190/* Label number of label created for -mrelocatable, to call to so we can
191 get the address of the GOT section */
192int rs6000_pic_labelno;
c81bebd7 193
b91da81f 194#ifdef USING_ELFOS_H
c81bebd7 195/* Which abi to adhere to */
9739c90c 196const char *rs6000_abi_name;
d9407988
MM
197
198/* Semantics of the small data area */
199enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
200
201/* Which small data model to use */
815cdc52 202const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
203
204/* Counter for labels which are to be placed in .fixup. */
205int fixuplabelno = 0;
874a0744 206#endif
4697a36c 207
c4501e62
JJ
208/* Bit size of immediate TLS offsets and string from which it is decoded. */
209int rs6000_tls_size = 32;
210const char *rs6000_tls_size_string;
211
b6c9286a
MM
212/* ABI enumeration available for subtarget to use. */
213enum rs6000_abi rs6000_current_abi;
214
85b776df
AM
215/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
216int dot_symbols;
217
38c1f2d7 218/* Debug flags */
815cdc52 219const char *rs6000_debug_name;
38c1f2d7
MM
220int rs6000_debug_stack; /* debug stack applications */
221int rs6000_debug_arg; /* debug argument handling */
222
aabcd309 223/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
224bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
225
58646b77
PB
226/* Built in types. */
227
228tree rs6000_builtin_types[RS6000_BTI_MAX];
229tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 230
57ac7be9
AM
231const char *rs6000_traceback_name;
232static enum {
233 traceback_default = 0,
234 traceback_none,
235 traceback_part,
236 traceback_full
237} rs6000_traceback;
238
38c1f2d7
MM
239/* Flag to say the TOC is initialized */
240int toc_initialized;
9ebbca7d 241char toc_label_name[10];
38c1f2d7 242
44cd321e
PS
243/* Cached value of rs6000_variable_issue. This is cached in
244 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
245static short cached_can_issue_more;
246
d6b5193b
RS
247static GTY(()) section *read_only_data_section;
248static GTY(()) section *private_data_section;
249static GTY(()) section *read_only_private_data_section;
250static GTY(()) section *sdata2_section;
251static GTY(()) section *toc_section;
252
a3c9585f
KH
253/* Control alignment for fields within structures. */
254/* String from -malign-XXXXX. */
025d9908
KH
255int rs6000_alignment_flags;
256
78f5898b
AH
257/* True for any options that were explicitly set. */
258struct {
df01da37 259 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 260 bool alignment; /* True if -malign- was used. */
d3603e8c 261 bool abi; /* True if -mabi=spe/nospe was used. */
78f5898b
AH
262 bool spe; /* True if -mspe= was used. */
263 bool float_gprs; /* True if -mfloat-gprs= was used. */
264 bool isel; /* True if -misel was used. */
265 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 266 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
78f5898b
AH
267} rs6000_explicit_options;
268
a3170dc6
AH
269struct builtin_description
270{
271 /* mask is not const because we're going to alter it below. This
272 nonsense will go away when we rewrite the -march infrastructure
273 to give us more target flag bits. */
274 unsigned int mask;
275 const enum insn_code icode;
276 const char *const name;
277 const enum rs6000_builtins code;
278};
8b897cfa
RS
279\f
280/* Target cpu costs. */
281
282struct processor_costs {
c4ad648e 283 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
284 const int mulsi_const; /* cost of SImode multiplication by constant. */
285 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
286 const int muldi; /* cost of DImode multiplication. */
287 const int divsi; /* cost of SImode division. */
288 const int divdi; /* cost of DImode division. */
289 const int fp; /* cost of simple SFmode and DFmode insns. */
290 const int dmul; /* cost of DFmode multiplication (and fmadd). */
291 const int sdiv; /* cost of SFmode division (fdivs). */
292 const int ddiv; /* cost of DFmode division (fdiv). */
8b897cfa
RS
293};
294
295const struct processor_costs *rs6000_cost;
296
297/* Processor costs (relative to an add) */
298
299/* Instruction size costs on 32bit processors. */
300static const
301struct processor_costs size32_cost = {
06a67bdd
RS
302 COSTS_N_INSNS (1), /* mulsi */
303 COSTS_N_INSNS (1), /* mulsi_const */
304 COSTS_N_INSNS (1), /* mulsi_const9 */
305 COSTS_N_INSNS (1), /* muldi */
306 COSTS_N_INSNS (1), /* divsi */
307 COSTS_N_INSNS (1), /* divdi */
308 COSTS_N_INSNS (1), /* fp */
309 COSTS_N_INSNS (1), /* dmul */
310 COSTS_N_INSNS (1), /* sdiv */
311 COSTS_N_INSNS (1), /* ddiv */
8b897cfa
RS
312};
313
314/* Instruction size costs on 64bit processors. */
315static const
316struct processor_costs size64_cost = {
06a67bdd
RS
317 COSTS_N_INSNS (1), /* mulsi */
318 COSTS_N_INSNS (1), /* mulsi_const */
319 COSTS_N_INSNS (1), /* mulsi_const9 */
320 COSTS_N_INSNS (1), /* muldi */
321 COSTS_N_INSNS (1), /* divsi */
322 COSTS_N_INSNS (1), /* divdi */
323 COSTS_N_INSNS (1), /* fp */
324 COSTS_N_INSNS (1), /* dmul */
325 COSTS_N_INSNS (1), /* sdiv */
326 COSTS_N_INSNS (1), /* ddiv */
8b897cfa
RS
327};
328
329/* Instruction costs on RIOS1 processors. */
330static const
331struct processor_costs rios1_cost = {
06a67bdd
RS
332 COSTS_N_INSNS (5), /* mulsi */
333 COSTS_N_INSNS (4), /* mulsi_const */
334 COSTS_N_INSNS (3), /* mulsi_const9 */
335 COSTS_N_INSNS (5), /* muldi */
336 COSTS_N_INSNS (19), /* divsi */
337 COSTS_N_INSNS (19), /* divdi */
338 COSTS_N_INSNS (2), /* fp */
339 COSTS_N_INSNS (2), /* dmul */
340 COSTS_N_INSNS (19), /* sdiv */
341 COSTS_N_INSNS (19), /* ddiv */
8b897cfa
RS
342};
343
344/* Instruction costs on RIOS2 processors. */
345static const
346struct processor_costs rios2_cost = {
06a67bdd
RS
347 COSTS_N_INSNS (2), /* mulsi */
348 COSTS_N_INSNS (2), /* mulsi_const */
349 COSTS_N_INSNS (2), /* mulsi_const9 */
350 COSTS_N_INSNS (2), /* muldi */
351 COSTS_N_INSNS (13), /* divsi */
352 COSTS_N_INSNS (13), /* divdi */
353 COSTS_N_INSNS (2), /* fp */
354 COSTS_N_INSNS (2), /* dmul */
355 COSTS_N_INSNS (17), /* sdiv */
356 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
357};
358
359/* Instruction costs on RS64A processors. */
360static const
361struct processor_costs rs64a_cost = {
06a67bdd
RS
362 COSTS_N_INSNS (20), /* mulsi */
363 COSTS_N_INSNS (12), /* mulsi_const */
364 COSTS_N_INSNS (8), /* mulsi_const9 */
365 COSTS_N_INSNS (34), /* muldi */
366 COSTS_N_INSNS (65), /* divsi */
367 COSTS_N_INSNS (67), /* divdi */
368 COSTS_N_INSNS (4), /* fp */
369 COSTS_N_INSNS (4), /* dmul */
370 COSTS_N_INSNS (31), /* sdiv */
371 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
372};
373
374/* Instruction costs on MPCCORE processors. */
375static const
376struct processor_costs mpccore_cost = {
06a67bdd
RS
377 COSTS_N_INSNS (2), /* mulsi */
378 COSTS_N_INSNS (2), /* mulsi_const */
379 COSTS_N_INSNS (2), /* mulsi_const9 */
380 COSTS_N_INSNS (2), /* muldi */
381 COSTS_N_INSNS (6), /* divsi */
382 COSTS_N_INSNS (6), /* divdi */
383 COSTS_N_INSNS (4), /* fp */
384 COSTS_N_INSNS (5), /* dmul */
385 COSTS_N_INSNS (10), /* sdiv */
386 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
387};
388
389/* Instruction costs on PPC403 processors. */
390static const
391struct processor_costs ppc403_cost = {
06a67bdd
RS
392 COSTS_N_INSNS (4), /* mulsi */
393 COSTS_N_INSNS (4), /* mulsi_const */
394 COSTS_N_INSNS (4), /* mulsi_const9 */
395 COSTS_N_INSNS (4), /* muldi */
396 COSTS_N_INSNS (33), /* divsi */
397 COSTS_N_INSNS (33), /* divdi */
398 COSTS_N_INSNS (11), /* fp */
399 COSTS_N_INSNS (11), /* dmul */
400 COSTS_N_INSNS (11), /* sdiv */
401 COSTS_N_INSNS (11), /* ddiv */
8b897cfa
RS
402};
403
404/* Instruction costs on PPC405 processors. */
405static const
406struct processor_costs ppc405_cost = {
06a67bdd
RS
407 COSTS_N_INSNS (5), /* mulsi */
408 COSTS_N_INSNS (4), /* mulsi_const */
409 COSTS_N_INSNS (3), /* mulsi_const9 */
410 COSTS_N_INSNS (5), /* muldi */
411 COSTS_N_INSNS (35), /* divsi */
412 COSTS_N_INSNS (35), /* divdi */
413 COSTS_N_INSNS (11), /* fp */
414 COSTS_N_INSNS (11), /* dmul */
415 COSTS_N_INSNS (11), /* sdiv */
416 COSTS_N_INSNS (11), /* ddiv */
8b897cfa
RS
417};
418
419/* Instruction costs on PPC440 processors. */
420static const
421struct processor_costs ppc440_cost = {
06a67bdd
RS
422 COSTS_N_INSNS (3), /* mulsi */
423 COSTS_N_INSNS (2), /* mulsi_const */
424 COSTS_N_INSNS (2), /* mulsi_const9 */
425 COSTS_N_INSNS (3), /* muldi */
426 COSTS_N_INSNS (34), /* divsi */
427 COSTS_N_INSNS (34), /* divdi */
428 COSTS_N_INSNS (5), /* fp */
429 COSTS_N_INSNS (5), /* dmul */
430 COSTS_N_INSNS (19), /* sdiv */
431 COSTS_N_INSNS (33), /* ddiv */
8b897cfa
RS
432};
433
434/* Instruction costs on PPC601 processors. */
435static const
436struct processor_costs ppc601_cost = {
06a67bdd
RS
437 COSTS_N_INSNS (5), /* mulsi */
438 COSTS_N_INSNS (5), /* mulsi_const */
439 COSTS_N_INSNS (5), /* mulsi_const9 */
440 COSTS_N_INSNS (5), /* muldi */
441 COSTS_N_INSNS (36), /* divsi */
442 COSTS_N_INSNS (36), /* divdi */
443 COSTS_N_INSNS (4), /* fp */
444 COSTS_N_INSNS (5), /* dmul */
445 COSTS_N_INSNS (17), /* sdiv */
446 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
447};
448
449/* Instruction costs on PPC603 processors. */
450static const
451struct processor_costs ppc603_cost = {
06a67bdd
RS
452 COSTS_N_INSNS (5), /* mulsi */
453 COSTS_N_INSNS (3), /* mulsi_const */
454 COSTS_N_INSNS (2), /* mulsi_const9 */
455 COSTS_N_INSNS (5), /* muldi */
456 COSTS_N_INSNS (37), /* divsi */
457 COSTS_N_INSNS (37), /* divdi */
458 COSTS_N_INSNS (3), /* fp */
459 COSTS_N_INSNS (4), /* dmul */
460 COSTS_N_INSNS (18), /* sdiv */
461 COSTS_N_INSNS (33), /* ddiv */
8b897cfa
RS
462};
463
464/* Instruction costs on PPC604 processors. */
465static const
466struct processor_costs ppc604_cost = {
06a67bdd
RS
467 COSTS_N_INSNS (4), /* mulsi */
468 COSTS_N_INSNS (4), /* mulsi_const */
469 COSTS_N_INSNS (4), /* mulsi_const9 */
470 COSTS_N_INSNS (4), /* muldi */
471 COSTS_N_INSNS (20), /* divsi */
472 COSTS_N_INSNS (20), /* divdi */
473 COSTS_N_INSNS (3), /* fp */
474 COSTS_N_INSNS (3), /* dmul */
475 COSTS_N_INSNS (18), /* sdiv */
476 COSTS_N_INSNS (32), /* ddiv */
8b897cfa
RS
477};
478
479/* Instruction costs on PPC604e processors. */
480static const
481struct processor_costs ppc604e_cost = {
06a67bdd
RS
482 COSTS_N_INSNS (2), /* mulsi */
483 COSTS_N_INSNS (2), /* mulsi_const */
484 COSTS_N_INSNS (2), /* mulsi_const9 */
485 COSTS_N_INSNS (2), /* muldi */
486 COSTS_N_INSNS (20), /* divsi */
487 COSTS_N_INSNS (20), /* divdi */
488 COSTS_N_INSNS (3), /* fp */
489 COSTS_N_INSNS (3), /* dmul */
490 COSTS_N_INSNS (18), /* sdiv */
491 COSTS_N_INSNS (32), /* ddiv */
8b897cfa
RS
492};
493
f0517163 494/* Instruction costs on PPC620 processors. */
8b897cfa
RS
495static const
496struct processor_costs ppc620_cost = {
06a67bdd
RS
497 COSTS_N_INSNS (5), /* mulsi */
498 COSTS_N_INSNS (4), /* mulsi_const */
499 COSTS_N_INSNS (3), /* mulsi_const9 */
500 COSTS_N_INSNS (7), /* muldi */
501 COSTS_N_INSNS (21), /* divsi */
502 COSTS_N_INSNS (37), /* divdi */
503 COSTS_N_INSNS (3), /* fp */
504 COSTS_N_INSNS (3), /* dmul */
505 COSTS_N_INSNS (18), /* sdiv */
506 COSTS_N_INSNS (32), /* ddiv */
f0517163
RS
507};
508
509/* Instruction costs on PPC630 processors. */
510static const
511struct processor_costs ppc630_cost = {
06a67bdd
RS
512 COSTS_N_INSNS (5), /* mulsi */
513 COSTS_N_INSNS (4), /* mulsi_const */
514 COSTS_N_INSNS (3), /* mulsi_const9 */
515 COSTS_N_INSNS (7), /* muldi */
516 COSTS_N_INSNS (21), /* divsi */
517 COSTS_N_INSNS (37), /* divdi */
518 COSTS_N_INSNS (3), /* fp */
519 COSTS_N_INSNS (3), /* dmul */
520 COSTS_N_INSNS (17), /* sdiv */
521 COSTS_N_INSNS (21), /* ddiv */
8b897cfa
RS
522};
523
d296e02e
AP
524/* Instruction costs on Cell processor. */
525/* COSTS_N_INSNS (1) ~ one add. */
526static const
527struct processor_costs ppccell_cost = {
528 COSTS_N_INSNS (9/2)+2, /* mulsi */
529 COSTS_N_INSNS (6/2), /* mulsi_const */
530 COSTS_N_INSNS (6/2), /* mulsi_const9 */
531 COSTS_N_INSNS (15/2)+2, /* muldi */
532 COSTS_N_INSNS (38/2), /* divsi */
533 COSTS_N_INSNS (70/2), /* divdi */
534 COSTS_N_INSNS (10/2), /* fp */
535 COSTS_N_INSNS (10/2), /* dmul */
536 COSTS_N_INSNS (74/2), /* sdiv */
537 COSTS_N_INSNS (74/2), /* ddiv */
538};
539
8b897cfa
RS
540/* Instruction costs on PPC750 and PPC7400 processors. */
541static const
542struct processor_costs ppc750_cost = {
06a67bdd
RS
543 COSTS_N_INSNS (5), /* mulsi */
544 COSTS_N_INSNS (3), /* mulsi_const */
545 COSTS_N_INSNS (2), /* mulsi_const9 */
546 COSTS_N_INSNS (5), /* muldi */
547 COSTS_N_INSNS (17), /* divsi */
548 COSTS_N_INSNS (17), /* divdi */
549 COSTS_N_INSNS (3), /* fp */
550 COSTS_N_INSNS (3), /* dmul */
551 COSTS_N_INSNS (17), /* sdiv */
552 COSTS_N_INSNS (31), /* ddiv */
8b897cfa
RS
553};
554
555/* Instruction costs on PPC7450 processors. */
556static const
557struct processor_costs ppc7450_cost = {
06a67bdd
RS
558 COSTS_N_INSNS (4), /* mulsi */
559 COSTS_N_INSNS (3), /* mulsi_const */
560 COSTS_N_INSNS (3), /* mulsi_const9 */
561 COSTS_N_INSNS (4), /* muldi */
562 COSTS_N_INSNS (23), /* divsi */
563 COSTS_N_INSNS (23), /* divdi */
564 COSTS_N_INSNS (5), /* fp */
565 COSTS_N_INSNS (5), /* dmul */
566 COSTS_N_INSNS (21), /* sdiv */
567 COSTS_N_INSNS (35), /* ddiv */
8b897cfa 568};
a3170dc6 569
8b897cfa
RS
570/* Instruction costs on PPC8540 processors. */
571static const
572struct processor_costs ppc8540_cost = {
06a67bdd
RS
573 COSTS_N_INSNS (4), /* mulsi */
574 COSTS_N_INSNS (4), /* mulsi_const */
575 COSTS_N_INSNS (4), /* mulsi_const9 */
576 COSTS_N_INSNS (4), /* muldi */
577 COSTS_N_INSNS (19), /* divsi */
578 COSTS_N_INSNS (19), /* divdi */
579 COSTS_N_INSNS (4), /* fp */
580 COSTS_N_INSNS (4), /* dmul */
581 COSTS_N_INSNS (29), /* sdiv */
582 COSTS_N_INSNS (29), /* ddiv */
8b897cfa
RS
583};
584
585/* Instruction costs on POWER4 and POWER5 processors. */
586static const
587struct processor_costs power4_cost = {
06a67bdd
RS
588 COSTS_N_INSNS (3), /* mulsi */
589 COSTS_N_INSNS (2), /* mulsi_const */
590 COSTS_N_INSNS (2), /* mulsi_const9 */
591 COSTS_N_INSNS (4), /* muldi */
592 COSTS_N_INSNS (18), /* divsi */
593 COSTS_N_INSNS (34), /* divdi */
594 COSTS_N_INSNS (3), /* fp */
595 COSTS_N_INSNS (3), /* dmul */
596 COSTS_N_INSNS (17), /* sdiv */
597 COSTS_N_INSNS (17), /* ddiv */
8b897cfa
RS
598};
599
44cd321e
PS
600/* Instruction costs on POWER6 processors. */
601static const
602struct processor_costs power6_cost = {
603 COSTS_N_INSNS (8), /* mulsi */
604 COSTS_N_INSNS (8), /* mulsi_const */
605 COSTS_N_INSNS (8), /* mulsi_const9 */
606 COSTS_N_INSNS (8), /* muldi */
607 COSTS_N_INSNS (22), /* divsi */
608 COSTS_N_INSNS (28), /* divdi */
609 COSTS_N_INSNS (3), /* fp */
610 COSTS_N_INSNS (3), /* dmul */
611 COSTS_N_INSNS (13), /* sdiv */
612 COSTS_N_INSNS (16), /* ddiv */
613};
614
8b897cfa 615\f
a2369ed3 616static bool rs6000_function_ok_for_sibcall (tree, tree);
2ffa9a0c 617static const char *rs6000_invalid_within_doloop (rtx);
a2369ed3
DJ
618static rtx rs6000_generate_compare (enum rtx_code);
619static void rs6000_maybe_dead (rtx);
620static void rs6000_emit_stack_tie (void);
621static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
622static rtx spe_synthesize_frame_save (rtx);
623static bool spe_func_has_64bit_regs_p (void);
b20a9cca 624static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 625 int, HOST_WIDE_INT);
a2369ed3
DJ
626static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
627static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
628static unsigned rs6000_hash_constant (rtx);
629static unsigned toc_hash_function (const void *);
630static int toc_hash_eq (const void *, const void *);
631static int constant_pool_expr_1 (rtx, int *, int *);
632static bool constant_pool_expr_p (rtx);
d04b6e6e 633static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3 634static bool legitimate_indexed_address_p (rtx, int);
a2369ed3
DJ
635static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
636static struct machine_function * rs6000_init_machine_status (void);
637static bool rs6000_assemble_integer (rtx, unsigned int, int);
6d0a8091 638static bool no_global_regs_above (int);
5add3202 639#ifdef HAVE_GAS_HIDDEN
a2369ed3 640static void rs6000_assemble_visibility (tree, int);
5add3202 641#endif
a2369ed3
DJ
642static int rs6000_ra_ever_killed (void);
643static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 644static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
77ccdfed
EC
645static bool rs6000_ms_bitfield_layout_p (tree);
646static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 647static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
f18eca82 648static const char *rs6000_mangle_fundamental_type (tree);
b86fe7b4 649extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3
DJ
650static void rs6000_set_default_type_attributes (tree);
651static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
652static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
653static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
654 tree);
a2369ed3 655static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
c6e8c921 656static bool rs6000_return_in_memory (tree, tree);
a2369ed3 657static void rs6000_file_start (void);
7c262518 658#if TARGET_ELF
9b580a0b 659static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
660static void rs6000_elf_asm_out_constructor (rtx, int);
661static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 662static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 663static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
664static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
665 unsigned HOST_WIDE_INT);
a56d7372 666static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 667 ATTRIBUTE_UNUSED;
7c262518 668#endif
aacd3885 669static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
cbaaba19 670#if TARGET_XCOFF
0d5817b2 671static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 672static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 673static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 674static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 675static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 676static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 677 unsigned HOST_WIDE_INT);
d6b5193b
RS
678static void rs6000_xcoff_unique_section (tree, int);
679static section *rs6000_xcoff_select_rtx_section
680 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
681static const char * rs6000_xcoff_strip_name_encoding (const char *);
682static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
683static void rs6000_xcoff_file_start (void);
684static void rs6000_xcoff_file_end (void);
f1384257 685#endif
a2369ed3
DJ
686static int rs6000_variable_issue (FILE *, int, rtx, int);
687static bool rs6000_rtx_costs (rtx, int, int, int *);
688static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 689static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 690static bool is_microcoded_insn (rtx);
d296e02e 691static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
692static bool is_cracked_insn (rtx);
693static bool is_branch_slot_insn (rtx);
44cd321e 694static bool is_load_insn (rtx);
e3a0e200 695static rtx get_store_dest (rtx pat);
44cd321e
PS
696static bool is_store_insn (rtx);
697static bool set_to_load_agen (rtx,rtx);
982afe02 698static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
699static int rs6000_adjust_priority (rtx, int);
700static int rs6000_issue_rate (void);
b198261f 701static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
702static rtx get_next_active_insn (rtx, rtx);
703static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
704static bool insn_must_be_first_in_group (rtx);
705static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
706static bool is_costly_group (rtx *, rtx);
707static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
708static int redefine_groups (FILE *, int, rtx, rtx);
709static int pad_groups (FILE *, int, rtx, rtx);
710static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
711static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
712static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 713static int rs6000_use_sched_lookahead (void);
d296e02e 714static int rs6000_use_sched_lookahead_guard (rtx);
7ccf35ed 715static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
716static tree rs6000_builtin_mul_widen_even (tree);
717static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 718static tree rs6000_builtin_conversion (enum tree_code, tree);
a2369ed3 719
58646b77 720static void def_builtin (int, const char *, tree, int);
a2369ed3
DJ
721static void rs6000_init_builtins (void);
722static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
723static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
724static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
725static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
726static void altivec_init_builtins (void);
727static void rs6000_common_init_builtins (void);
c15c90bb 728static void rs6000_init_libfuncs (void);
a2369ed3 729
b20a9cca
AM
730static void enable_mask_for_builtins (struct builtin_description *, int,
731 enum rs6000_builtins,
732 enum rs6000_builtins);
7c62e993 733static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
734static void spe_init_builtins (void);
735static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 736static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
737static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
738static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
739static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
740static rs6000_stack_t *rs6000_stack_info (void);
741static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
742
743static rtx altivec_expand_builtin (tree, rtx, bool *);
744static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
745static rtx altivec_expand_st_builtin (tree, rtx, bool *);
746static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
747static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 748static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 749 const char *, tree, rtx);
b4a62fa0 750static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 751static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
752static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
753static rtx altivec_expand_vec_set_builtin (tree);
754static rtx altivec_expand_vec_ext_builtin (tree, rtx);
755static int get_element_number (tree, tree);
78f5898b 756static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 757static void rs6000_parse_tls_size_option (void);
5da702b1 758static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
759static int first_altivec_reg_to_save (void);
760static unsigned int compute_vrsave_mask (void);
9390387d 761static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
762static void is_altivec_return_reg (rtx, void *);
763static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
764int easy_vector_constant (rtx, enum machine_mode);
58646b77 765static bool rs6000_is_opaque_type (tree);
a2369ed3 766static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 767static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 768static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 769static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
770static rtx rs6000_tls_get_addr (void);
771static rtx rs6000_got_sym (void);
9390387d 772static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
773static const char *rs6000_get_some_local_dynamic_name (void);
774static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 775static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 776static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 777 enum machine_mode, tree);
0b5383eb
DJ
778static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
779 HOST_WIDE_INT);
780static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
781 tree, HOST_WIDE_INT);
782static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
783 HOST_WIDE_INT,
784 rtx[], int *);
785static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
786 tree, HOST_WIDE_INT,
787 rtx[], int *);
788static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
ec6376ab 789static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 790static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
791static void setup_incoming_varargs (CUMULATIVE_ARGS *,
792 enum machine_mode, tree,
793 int *, int);
8cd5a4e0
RH
794static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
795 tree, bool);
78a52f11
RH
796static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
797 tree, bool);
4d3e6fae 798static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
efdba735
SH
799#if TARGET_MACHO
800static void macho_branch_islands (void);
efdba735
SH
801static int no_previous_def (tree function_name);
802static tree get_prev_label (tree function_name);
c4e18b1c 803static void rs6000_darwin_file_start (void);
efdba735
SH
804#endif
805
c35d187f 806static tree rs6000_build_builtin_va_list (void);
23a60a04 807static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
fe984136 808static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
00b79d54 809static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 810static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 811static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 812 enum machine_mode);
94ff898d 813static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
21213b4c
DP
814 enum machine_mode);
815static int get_vsel_insn (enum machine_mode);
816static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 817static tree rs6000_stack_protect_fail (void);
21213b4c
DP
818
819const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
820static enum machine_mode rs6000_eh_return_filter_mode (void);
821
17211ab5
GK
822/* Hash table stuff for keeping track of TOC entries. */
823
824struct toc_hash_struct GTY(())
825{
826 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
827 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
828 rtx key;
829 enum machine_mode key_mode;
830 int labelno;
831};
832
833static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
834\f
835/* Default register names. */
836char rs6000_reg_names[][8] =
837{
802a0058
MM
838 "0", "1", "2", "3", "4", "5", "6", "7",
839 "8", "9", "10", "11", "12", "13", "14", "15",
840 "16", "17", "18", "19", "20", "21", "22", "23",
841 "24", "25", "26", "27", "28", "29", "30", "31",
842 "0", "1", "2", "3", "4", "5", "6", "7",
843 "8", "9", "10", "11", "12", "13", "14", "15",
844 "16", "17", "18", "19", "20", "21", "22", "23",
845 "24", "25", "26", "27", "28", "29", "30", "31",
846 "mq", "lr", "ctr","ap",
847 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
848 "xer",
849 /* AltiVec registers. */
0cd5e3a1
AH
850 "0", "1", "2", "3", "4", "5", "6", "7",
851 "8", "9", "10", "11", "12", "13", "14", "15",
852 "16", "17", "18", "19", "20", "21", "22", "23",
853 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
854 "vrsave", "vscr",
855 /* SPE registers. */
7d5175e1
JJ
856 "spe_acc", "spefscr",
857 /* Soft frame pointer. */
858 "sfp"
c81bebd7
MM
859};
860
861#ifdef TARGET_REGNAMES
8b60264b 862static const char alt_reg_names[][8] =
c81bebd7 863{
802a0058
MM
864 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
865 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
866 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
867 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
868 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
869 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
870 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
871 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
872 "mq", "lr", "ctr", "ap",
873 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 874 "xer",
59a4c851 875 /* AltiVec registers. */
0ac081f6 876 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
877 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
878 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
879 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
880 "vrsave", "vscr",
881 /* SPE registers. */
7d5175e1
JJ
882 "spe_acc", "spefscr",
883 /* Soft frame pointer. */
884 "sfp"
c81bebd7
MM
885};
886#endif
9878760c 887\f
daf11973
MM
888#ifndef MASK_STRICT_ALIGN
889#define MASK_STRICT_ALIGN 0
890#endif
ffcfcb5f
AM
891#ifndef TARGET_PROFILE_KERNEL
892#define TARGET_PROFILE_KERNEL 0
893#endif
3961e8fe
RH
894
895/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
896#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
897\f
898/* Initialize the GCC target structure. */
91d231cb
JM
899#undef TARGET_ATTRIBUTE_TABLE
900#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
901#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
902#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 903
301d03af
RS
904#undef TARGET_ASM_ALIGNED_DI_OP
905#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
906
907/* Default unaligned ops are only provided for ELF. Find the ops needed
908 for non-ELF systems. */
909#ifndef OBJECT_FORMAT_ELF
cbaaba19 910#if TARGET_XCOFF
ae6c1efd 911/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
912 64-bit targets. */
913#undef TARGET_ASM_UNALIGNED_HI_OP
914#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
915#undef TARGET_ASM_UNALIGNED_SI_OP
916#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
917#undef TARGET_ASM_UNALIGNED_DI_OP
918#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
919#else
920/* For Darwin. */
921#undef TARGET_ASM_UNALIGNED_HI_OP
922#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
923#undef TARGET_ASM_UNALIGNED_SI_OP
924#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
925#undef TARGET_ASM_UNALIGNED_DI_OP
926#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
927#undef TARGET_ASM_ALIGNED_DI_OP
928#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
929#endif
930#endif
931
932/* This hook deals with fixups for relocatable code and DI-mode objects
933 in 64-bit code. */
934#undef TARGET_ASM_INTEGER
935#define TARGET_ASM_INTEGER rs6000_assemble_integer
936
93638d7a
AM
937#ifdef HAVE_GAS_HIDDEN
938#undef TARGET_ASM_ASSEMBLE_VISIBILITY
939#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
940#endif
941
c4501e62
JJ
942#undef TARGET_HAVE_TLS
943#define TARGET_HAVE_TLS HAVE_AS_TLS
944
945#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 946#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 947
08c148a8
NB
948#undef TARGET_ASM_FUNCTION_PROLOGUE
949#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
950#undef TARGET_ASM_FUNCTION_EPILOGUE
951#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
952
b54cf83a
DE
953#undef TARGET_SCHED_VARIABLE_ISSUE
954#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
955
c237e94a
ZW
956#undef TARGET_SCHED_ISSUE_RATE
957#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
958#undef TARGET_SCHED_ADJUST_COST
959#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
960#undef TARGET_SCHED_ADJUST_PRIORITY
961#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 962#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 963#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
964#undef TARGET_SCHED_INIT
965#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
966#undef TARGET_SCHED_FINISH
967#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
968#undef TARGET_SCHED_REORDER
969#define TARGET_SCHED_REORDER rs6000_sched_reorder
970#undef TARGET_SCHED_REORDER2
971#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 972
be12c2b0
VM
973#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
974#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
975
d296e02e
AP
976#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
977#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
978
7ccf35ed
DN
979#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
980#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
981#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
982#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
983#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
984#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
985#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
986#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
7ccf35ed 987
0ac081f6
AH
988#undef TARGET_INIT_BUILTINS
989#define TARGET_INIT_BUILTINS rs6000_init_builtins
990
991#undef TARGET_EXPAND_BUILTIN
992#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
993
f18eca82
ZL
994#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
995#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
996
c15c90bb
ZW
997#undef TARGET_INIT_LIBFUNCS
998#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
999
f1384257 1000#if TARGET_MACHO
0e5dbd9b 1001#undef TARGET_BINDS_LOCAL_P
31920d83 1002#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1003#endif
0e5dbd9b 1004
77ccdfed
EC
1005#undef TARGET_MS_BITFIELD_LAYOUT_P
1006#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1007
3961e8fe
RH
1008#undef TARGET_ASM_OUTPUT_MI_THUNK
1009#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1010
3961e8fe 1011#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5b71a4e7 1012#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
00b960c7 1013
4977bab6
ZW
1014#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1015#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1016
2e3f0db6
DJ
1017#undef TARGET_INVALID_WITHIN_DOLOOP
1018#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1019
3c50106f
RH
1020#undef TARGET_RTX_COSTS
1021#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67
RH
1022#undef TARGET_ADDRESS_COST
1023#define TARGET_ADDRESS_COST hook_int_rtx_0
3c50106f 1024
c8e4f0e9 1025#undef TARGET_VECTOR_OPAQUE_P
58646b77 1026#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1027
96714395
AH
1028#undef TARGET_DWARF_REGISTER_SPAN
1029#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1030
37ea0b7e
JM
1031#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1032#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1033
c6e8c921
GK
1034/* On rs6000, function arguments are promoted, as are function return
1035 values. */
1036#undef TARGET_PROMOTE_FUNCTION_ARGS
1037#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
1038#undef TARGET_PROMOTE_FUNCTION_RETURN
1039#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
1040
c6e8c921
GK
1041#undef TARGET_RETURN_IN_MEMORY
1042#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1043
1044#undef TARGET_SETUP_INCOMING_VARARGS
1045#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1046
1047/* Always strict argument naming on rs6000. */
1048#undef TARGET_STRICT_ARGUMENT_NAMING
1049#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1050#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1051#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130
RH
1052#undef TARGET_SPLIT_COMPLEX_ARG
1053#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
fe984136
RH
1054#undef TARGET_MUST_PASS_IN_STACK
1055#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1056#undef TARGET_PASS_BY_REFERENCE
1057#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1058#undef TARGET_ARG_PARTIAL_BYTES
1059#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1060
c35d187f
RH
1061#undef TARGET_BUILD_BUILTIN_VA_LIST
1062#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1063
cd3ce9b4
JM
1064#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1065#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1066
93f90be6
FJ
1067#undef TARGET_EH_RETURN_FILTER_MODE
1068#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1069
00b79d54
BE
1070#undef TARGET_SCALAR_MODE_SUPPORTED_P
1071#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1072
f676971a
EC
1073#undef TARGET_VECTOR_MODE_SUPPORTED_P
1074#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1075
4d3e6fae
FJ
1076#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1077#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1078
78f5898b
AH
1079#undef TARGET_HANDLE_OPTION
1080#define TARGET_HANDLE_OPTION rs6000_handle_option
1081
1082#undef TARGET_DEFAULT_TARGET_FLAGS
1083#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1084 (TARGET_DEFAULT)
78f5898b 1085
3aebbe5f
JJ
1086#undef TARGET_STACK_PROTECT_FAIL
1087#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1088
445cf5eb
JM
1089/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1090 The PowerPC architecture requires only weak consistency among
1091 processors--that is, memory accesses between processors need not be
1092 sequentially consistent and memory accesses among processors can occur
1093 in any order. The ability to order memory accesses weakly provides
1094 opportunities for more efficient use of the system bus. Unless a
1095 dependency exists, the 604e allows read operations to precede store
1096 operations. */
1097#undef TARGET_RELAXED_ORDERING
1098#define TARGET_RELAXED_ORDERING true
1099
fdbe66f2
EB
1100#ifdef HAVE_AS_TLS
1101#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1102#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1103#endif
1104
aacd3885
RS
1105/* Use a 32-bit anchor range. This leads to sequences like:
1106
1107 addis tmp,anchor,high
1108 add dest,tmp,low
1109
1110 where tmp itself acts as an anchor, and can be shared between
1111 accesses to the same 64k page. */
1112#undef TARGET_MIN_ANCHOR_OFFSET
1113#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1114#undef TARGET_MAX_ANCHOR_OFFSET
1115#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1116#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1117#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1118
f6897b10 1119struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1120\f
0d1fbc8c
AH
1121
1122/* Value is 1 if hard register REGNO can hold a value of machine-mode
1123 MODE. */
1124static int
1125rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1126{
1127 /* The GPRs can hold any mode, but values bigger than one register
1128 cannot go past R31. */
1129 if (INT_REGNO_P (regno))
1130 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1131
a5a97921 1132 /* The float registers can only hold floating modes and DImode.
7393f7f8 1133 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1134 if (FP_REGNO_P (regno))
1135 return
ebb109ad 1136 (SCALAR_FLOAT_MODE_P (mode)
c092b045 1137 && (mode != TDmode || (regno % 2) == 0)
7393f7f8 1138 && mode != SDmode
0d1fbc8c
AH
1139 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1140 || (GET_MODE_CLASS (mode) == MODE_INT
1141 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1142
1143 /* The CR register can only hold CC modes. */
1144 if (CR_REGNO_P (regno))
1145 return GET_MODE_CLASS (mode) == MODE_CC;
1146
1147 if (XER_REGNO_P (regno))
1148 return mode == PSImode;
1149
1150 /* AltiVec only in AldyVec registers. */
1151 if (ALTIVEC_REGNO_P (regno))
1152 return ALTIVEC_VECTOR_MODE (mode);
1153
1154 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1155 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1156 return 1;
1157
1158 /* We cannot put TImode anywhere except general register and it must be
1159 able to fit within the register set. */
1160
1161 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1162}
1163
1164/* Initialize rs6000_hard_regno_mode_ok_p table. */
1165static void
1166rs6000_init_hard_regno_mode_ok (void)
1167{
1168 int r, m;
1169
1170 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1171 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1172 if (rs6000_hard_regno_mode_ok (r, m))
1173 rs6000_hard_regno_mode_ok_p[m][r] = true;
1174}
1175
e4cad568
GK
1176#if TARGET_MACHO
1177/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1178
1179static void
1180darwin_rs6000_override_options (void)
1181{
1182 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
1183 off. */
1184 rs6000_altivec_abi = 1;
1185 TARGET_ALTIVEC_VRSAVE = 1;
1186 if (DEFAULT_ABI == ABI_DARWIN)
1187 {
1188 if (MACHO_DYNAMIC_NO_PIC_P)
1189 {
1190 if (flag_pic)
1191 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1192 flag_pic = 0;
1193 }
1194 else if (flag_pic == 1)
1195 {
1196 flag_pic = 2;
1197 }
1198 }
1199 if (TARGET_64BIT && ! TARGET_POWERPC64)
1200 {
1201 target_flags |= MASK_POWERPC64;
1202 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1203 }
1204 if (flag_mkernel)
1205 {
1206 rs6000_default_long_calls = 1;
1207 target_flags |= MASK_SOFT_FLOAT;
1208 }
1209
1210 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1211 Altivec. */
1212 if (!flag_mkernel && !flag_apple_kext
1213 && TARGET_64BIT
1214 && ! (target_flags_explicit & MASK_ALTIVEC))
1215 target_flags |= MASK_ALTIVEC;
1216
1217 /* Unless the user (not the configurer) has explicitly overridden
1218 it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
1219 G4 unless targetting the kernel. */
1220 if (!flag_mkernel
1221 && !flag_apple_kext
1222 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1223 && ! (target_flags_explicit & MASK_ALTIVEC)
1224 && ! rs6000_select[1].string)
1225 {
1226 target_flags |= MASK_ALTIVEC;
1227 }
1228}
1229#endif
1230
c1e55850
GK
1231/* If not otherwise specified by a target, make 'long double' equivalent to
1232 'double'. */
1233
1234#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1235#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1236#endif
1237
5248c961
RK
1238/* Override command line options. Mostly we process the processor
1239 type and sometimes adjust other TARGET_ options. */
1240
1241void
d779d0dc 1242rs6000_override_options (const char *default_cpu)
5248c961 1243{
c4d38ccb 1244 size_t i, j;
8e3f41e7 1245 struct rs6000_cpu_select *ptr;
66188a7e 1246 int set_masks;
5248c961 1247
66188a7e 1248 /* Simplifications for entries below. */
85638c0d 1249
66188a7e
GK
1250 enum {
1251 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1252 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1253 };
85638c0d 1254
66188a7e
GK
1255 /* This table occasionally claims that a processor does not support
1256 a particular feature even though it does, but the feature is slower
1257 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1258 complete description of the processor's support.
66188a7e
GK
1259
1260 Please keep this list in order, and don't forget to update the
1261 documentation in invoke.texi when adding a new processor or
1262 flag. */
5248c961
RK
1263 static struct ptt
1264 {
8b60264b
KG
1265 const char *const name; /* Canonical processor name. */
1266 const enum processor_type processor; /* Processor type enum value. */
1267 const int target_enable; /* Target flags to enable. */
8b60264b 1268 } const processor_target_table[]
66188a7e 1269 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1270 {"403", PROCESSOR_PPC403,
66188a7e 1271 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1272 {"405", PROCESSOR_PPC405,
716019c0
JM
1273 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1274 {"405fp", PROCESSOR_PPC405,
1275 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1276 {"440", PROCESSOR_PPC440,
716019c0
JM
1277 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1278 {"440fp", PROCESSOR_PPC440,
1279 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1280 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1281 {"601", PROCESSOR_PPC601,
66188a7e
GK
1282 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1283 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1284 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1285 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1286 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1287 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1288 {"620", PROCESSOR_PPC620,
1289 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1290 {"630", PROCESSOR_PPC630,
1291 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1292 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1293 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1294 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1295 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1296 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1297 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1298 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1299 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1300 /* 8548 has a dummy entry for now. */
a45bce6e 1301 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
66188a7e 1302 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1303 {"970", PROCESSOR_POWER4,
66188a7e 1304 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1305 {"cell", PROCESSOR_CELL,
1306 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1307 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1308 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1309 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1310 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1311 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1312 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1313 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1314 {"power2", PROCESSOR_POWER,
1315 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1316 {"power3", PROCESSOR_PPC630,
1317 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1318 {"power4", PROCESSOR_POWER4,
fc091c8e 1319 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
ec507f2d 1320 {"power5", PROCESSOR_POWER5,
432218ba
DE
1321 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1322 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7
DE
1323 {"power5+", PROCESSOR_POWER5,
1324 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1325 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1326 {"power6", PROCESSOR_POWER6,
e118597e 1327 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1328 | MASK_FPRND | MASK_CMPB | MASK_DFP },
44cd321e
PS
1329 {"power6x", PROCESSOR_POWER6,
1330 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
b639c3c2 1331 | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
66188a7e
GK
1332 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1333 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1334 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1335 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1336 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1337 {"rios2", PROCESSOR_RIOS2,
1338 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1339 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1340 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1341 {"rs64", PROCESSOR_RS64A,
1342 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1343 };
5248c961 1344
ca7558fc 1345 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1346
66188a7e
GK
1347 /* Some OSs don't support saving the high part of 64-bit registers on
1348 context switch. Other OSs don't support saving Altivec registers.
1349 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1350 settings; if the user wants either, the user must explicitly specify
1351 them and we won't interfere with the user's specification. */
1352
1353 enum {
1354 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1355 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1356 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1357 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1358 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1359 };
0d1fbc8c
AH
1360
1361 rs6000_init_hard_regno_mode_ok ();
1362
c4ad648e 1363 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1364#ifdef OS_MISSING_POWERPC64
1365 if (OS_MISSING_POWERPC64)
1366 set_masks &= ~MASK_POWERPC64;
1367#endif
1368#ifdef OS_MISSING_ALTIVEC
1369 if (OS_MISSING_ALTIVEC)
1370 set_masks &= ~MASK_ALTIVEC;
1371#endif
1372
768875a8
AM
1373 /* Don't override by the processor default if given explicitly. */
1374 set_masks &= ~target_flags_explicit;
957211c3 1375
a4f6c312 1376 /* Identify the processor type. */
8e3f41e7 1377 rs6000_select[0].string = default_cpu;
3cb999d8 1378 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1379
b6a1cbae 1380 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1381 {
8e3f41e7
MM
1382 ptr = &rs6000_select[i];
1383 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1384 {
8e3f41e7
MM
1385 for (j = 0; j < ptt_size; j++)
1386 if (! strcmp (ptr->string, processor_target_table[j].name))
1387 {
1388 if (ptr->set_tune_p)
1389 rs6000_cpu = processor_target_table[j].processor;
1390
1391 if (ptr->set_arch_p)
1392 {
66188a7e
GK
1393 target_flags &= ~set_masks;
1394 target_flags |= (processor_target_table[j].target_enable
1395 & set_masks);
8e3f41e7
MM
1396 }
1397 break;
1398 }
1399
4406229e 1400 if (j == ptt_size)
8e3f41e7 1401 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1402 }
1403 }
8a61d227 1404
993f19a8 1405 if (TARGET_E500)
a3170dc6
AH
1406 rs6000_isel = 1;
1407
dff9f1b6
DE
1408 /* If we are optimizing big endian systems for space, use the load/store
1409 multiple and string instructions. */
ef792183 1410 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1411 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1412
a4f6c312
SS
1413 /* Don't allow -mmultiple or -mstring on little endian systems
1414 unless the cpu is a 750, because the hardware doesn't support the
1415 instructions used in little endian mode, and causes an alignment
1416 trap. The 750 does not cause an alignment trap (except when the
1417 target is unaligned). */
bef84347 1418
b21fb038 1419 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1420 {
1421 if (TARGET_MULTIPLE)
1422 {
1423 target_flags &= ~MASK_MULTIPLE;
b21fb038 1424 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1425 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1426 }
1427
1428 if (TARGET_STRING)
1429 {
1430 target_flags &= ~MASK_STRING;
b21fb038 1431 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1432 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1433 }
1434 }
3933e0e1 1435
38c1f2d7
MM
1436 /* Set debug flags */
1437 if (rs6000_debug_name)
1438 {
bfc79d3b 1439 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1440 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1441 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1442 rs6000_debug_stack = 1;
bfc79d3b 1443 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1444 rs6000_debug_arg = 1;
1445 else
c725bd79 1446 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1447 }
1448
57ac7be9
AM
1449 if (rs6000_traceback_name)
1450 {
1451 if (! strncmp (rs6000_traceback_name, "full", 4))
1452 rs6000_traceback = traceback_full;
1453 else if (! strncmp (rs6000_traceback_name, "part", 4))
1454 rs6000_traceback = traceback_part;
1455 else if (! strncmp (rs6000_traceback_name, "no", 2))
1456 rs6000_traceback = traceback_none;
1457 else
9e637a26 1458 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1459 rs6000_traceback_name);
1460 }
1461
78f5898b
AH
1462 if (!rs6000_explicit_options.long_double)
1463 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1464
602ea4d3 1465#ifndef POWERPC_LINUX
d3603e8c 1466 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1467 rs6000_ieeequad = 1;
1468#endif
1469
6d0ef01e
HP
1470 /* Set Altivec ABI as default for powerpc64 linux. */
1471 if (TARGET_ELF && TARGET_64BIT)
1472 {
1473 rs6000_altivec_abi = 1;
78f5898b 1474 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1475 }
1476
594a51fe
SS
1477 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1478 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1479 {
1480 rs6000_darwin64_abi = 1;
9c7956fd 1481#if TARGET_MACHO
6ac49599 1482 darwin_one_byte_bool = 1;
9c7956fd 1483#endif
d9168963
SS
1484 /* Default to natural alignment, for better performance. */
1485 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1486 }
1487
194c524a
DE
1488 /* Place FP constants in the constant pool instead of TOC
1489 if section anchors enabled. */
1490 if (flag_section_anchors)
1491 TARGET_NO_FP_IN_TOC = 1;
1492
c4501e62
JJ
1493 /* Handle -mtls-size option. */
1494 rs6000_parse_tls_size_option ();
1495
a7ae18e2
AH
1496#ifdef SUBTARGET_OVERRIDE_OPTIONS
1497 SUBTARGET_OVERRIDE_OPTIONS;
1498#endif
1499#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1500 SUBSUBTARGET_OVERRIDE_OPTIONS;
1501#endif
4d4cbc0e
AH
1502#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1503 SUB3TARGET_OVERRIDE_OPTIONS;
1504#endif
a7ae18e2 1505
5da702b1
AH
1506 if (TARGET_E500)
1507 {
1508 /* The e500 does not have string instructions, and we set
1509 MASK_STRING above when optimizing for size. */
1510 if ((target_flags & MASK_STRING) != 0)
1511 target_flags = target_flags & ~MASK_STRING;
1512 }
1513 else if (rs6000_select[1].string != NULL)
1514 {
1515 /* For the powerpc-eabispe configuration, we set all these by
1516 default, so let's unset them if we manually set another
1517 CPU that is not the E500. */
78f5898b 1518 if (!rs6000_explicit_options.abi)
5da702b1 1519 rs6000_spe_abi = 0;
78f5898b 1520 if (!rs6000_explicit_options.spe)
5da702b1 1521 rs6000_spe = 0;
78f5898b 1522 if (!rs6000_explicit_options.float_gprs)
5da702b1 1523 rs6000_float_gprs = 0;
78f5898b 1524 if (!rs6000_explicit_options.isel)
5da702b1
AH
1525 rs6000_isel = 0;
1526 }
b5044283 1527
eca0d5e8
JM
1528 /* Detect invalid option combinations with E500. */
1529 CHECK_E500_OPTIONS;
1530
ec507f2d 1531 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1532 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1533 && rs6000_cpu != PROCESSOR_POWER6
1534 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1535 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1536 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1537 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1538 || rs6000_cpu == PROCESSOR_POWER5
1539 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1540
ec507f2d
DE
1541 rs6000_sched_restricted_insns_priority
1542 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1543
569fa502 1544 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1545 rs6000_sched_costly_dep
1546 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1547
569fa502
DN
1548 if (rs6000_sched_costly_dep_str)
1549 {
f676971a 1550 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1551 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1552 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1553 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1554 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1555 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1556 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1557 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1558 else
c4ad648e 1559 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1560 }
1561
1562 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1563 rs6000_sched_insert_nops
1564 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1565
cbe26ab8
DN
1566 if (rs6000_sched_insert_nops_str)
1567 {
1568 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1569 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1570 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1571 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1572 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1573 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1574 else
c4ad648e 1575 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1576 }
1577
c81bebd7 1578#ifdef TARGET_REGNAMES
a4f6c312
SS
1579 /* If the user desires alternate register names, copy in the
1580 alternate names now. */
c81bebd7 1581 if (TARGET_REGNAMES)
4e135bdd 1582 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1583#endif
1584
df01da37 1585 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1586 If -maix-struct-return or -msvr4-struct-return was explicitly
1587 used, don't override with the ABI default. */
df01da37
DE
1588 if (!rs6000_explicit_options.aix_struct_ret)
1589 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1590
602ea4d3 1591 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1592 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1593
f676971a 1594 if (TARGET_TOC)
9ebbca7d 1595 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1596
301d03af
RS
1597 /* We can only guarantee the availability of DI pseudo-ops when
1598 assembling for 64-bit targets. */
ae6c1efd 1599 if (!TARGET_64BIT)
301d03af
RS
1600 {
1601 targetm.asm_out.aligned_op.di = NULL;
1602 targetm.asm_out.unaligned_op.di = NULL;
1603 }
1604
1494c534
DE
1605 /* Set branch target alignment, if not optimizing for size. */
1606 if (!optimize_size)
1607 {
d296e02e
AP
1608 /* Cell wants to be aligned 8byte for dual issue. */
1609 if (rs6000_cpu == PROCESSOR_CELL)
1610 {
1611 if (align_functions <= 0)
1612 align_functions = 8;
1613 if (align_jumps <= 0)
1614 align_jumps = 8;
1615 if (align_loops <= 0)
1616 align_loops = 8;
1617 }
44cd321e 1618 if (rs6000_align_branch_targets)
1494c534
DE
1619 {
1620 if (align_functions <= 0)
1621 align_functions = 16;
1622 if (align_jumps <= 0)
1623 align_jumps = 16;
1624 if (align_loops <= 0)
1625 align_loops = 16;
1626 }
1627 if (align_jumps_max_skip <= 0)
1628 align_jumps_max_skip = 15;
1629 if (align_loops_max_skip <= 0)
1630 align_loops_max_skip = 15;
1631 }
2792d578 1632
71f123ca
FS
1633 /* Arrange to save and restore machine status around nested functions. */
1634 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1635
1636 /* We should always be splitting complex arguments, but we can't break
1637 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1638 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1639 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1640
1641 /* Initialize rs6000_cost with the appropriate target costs. */
1642 if (optimize_size)
1643 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1644 else
1645 switch (rs6000_cpu)
1646 {
1647 case PROCESSOR_RIOS1:
1648 rs6000_cost = &rios1_cost;
1649 break;
1650
1651 case PROCESSOR_RIOS2:
1652 rs6000_cost = &rios2_cost;
1653 break;
1654
1655 case PROCESSOR_RS64A:
1656 rs6000_cost = &rs64a_cost;
1657 break;
1658
1659 case PROCESSOR_MPCCORE:
1660 rs6000_cost = &mpccore_cost;
1661 break;
1662
1663 case PROCESSOR_PPC403:
1664 rs6000_cost = &ppc403_cost;
1665 break;
1666
1667 case PROCESSOR_PPC405:
1668 rs6000_cost = &ppc405_cost;
1669 break;
1670
1671 case PROCESSOR_PPC440:
1672 rs6000_cost = &ppc440_cost;
1673 break;
1674
1675 case PROCESSOR_PPC601:
1676 rs6000_cost = &ppc601_cost;
1677 break;
1678
1679 case PROCESSOR_PPC603:
1680 rs6000_cost = &ppc603_cost;
1681 break;
1682
1683 case PROCESSOR_PPC604:
1684 rs6000_cost = &ppc604_cost;
1685 break;
1686
1687 case PROCESSOR_PPC604e:
1688 rs6000_cost = &ppc604e_cost;
1689 break;
1690
1691 case PROCESSOR_PPC620:
8b897cfa
RS
1692 rs6000_cost = &ppc620_cost;
1693 break;
1694
f0517163
RS
1695 case PROCESSOR_PPC630:
1696 rs6000_cost = &ppc630_cost;
1697 break;
1698
982afe02 1699 case PROCESSOR_CELL:
d296e02e
AP
1700 rs6000_cost = &ppccell_cost;
1701 break;
1702
8b897cfa
RS
1703 case PROCESSOR_PPC750:
1704 case PROCESSOR_PPC7400:
1705 rs6000_cost = &ppc750_cost;
1706 break;
1707
1708 case PROCESSOR_PPC7450:
1709 rs6000_cost = &ppc7450_cost;
1710 break;
1711
1712 case PROCESSOR_PPC8540:
1713 rs6000_cost = &ppc8540_cost;
1714 break;
1715
1716 case PROCESSOR_POWER4:
1717 case PROCESSOR_POWER5:
1718 rs6000_cost = &power4_cost;
1719 break;
1720
44cd321e
PS
1721 case PROCESSOR_POWER6:
1722 rs6000_cost = &power6_cost;
1723 break;
1724
8b897cfa 1725 default:
37409796 1726 gcc_unreachable ();
8b897cfa 1727 }
5248c961 1728}
5accd822 1729
7ccf35ed
DN
1730/* Implement targetm.vectorize.builtin_mask_for_load. */
1731static tree
1732rs6000_builtin_mask_for_load (void)
1733{
1734 if (TARGET_ALTIVEC)
1735 return altivec_builtin_mask_for_load;
1736 else
1737 return 0;
1738}
1739
f57d17f1
TM
1740/* Implement targetm.vectorize.builtin_conversion. */
1741static tree
1742rs6000_builtin_conversion (enum tree_code code, tree type)
1743{
1744 if (!TARGET_ALTIVEC)
1745 return NULL_TREE;
982afe02 1746
f57d17f1
TM
1747 switch (code)
1748 {
1749 case FLOAT_EXPR:
1750 switch (TYPE_MODE (type))
1751 {
1752 case V4SImode:
982afe02 1753 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1754 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1755 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1756 default:
1757 return NULL_TREE;
1758 }
1759 default:
1760 return NULL_TREE;
1761 }
1762}
1763
89d67cca
DN
1764/* Implement targetm.vectorize.builtin_mul_widen_even. */
1765static tree
1766rs6000_builtin_mul_widen_even (tree type)
1767{
1768 if (!TARGET_ALTIVEC)
1769 return NULL_TREE;
1770
1771 switch (TYPE_MODE (type))
1772 {
1773 case V8HImode:
982afe02 1774 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1775 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1776 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1777
1778 case V16QImode:
1779 return TYPE_UNSIGNED (type) ?
1780 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1781 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1782 default:
1783 return NULL_TREE;
1784 }
1785}
1786
1787/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1788static tree
1789rs6000_builtin_mul_widen_odd (tree type)
1790{
1791 if (!TARGET_ALTIVEC)
1792 return NULL_TREE;
1793
1794 switch (TYPE_MODE (type))
1795 {
1796 case V8HImode:
1797 return TYPE_UNSIGNED (type) ?
1798 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1799 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1800
1801 case V16QImode:
1802 return TYPE_UNSIGNED (type) ?
1803 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1804 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1805 default:
1806 return NULL_TREE;
1807 }
1808}
1809
5da702b1
AH
1810/* Handle generic options of the form -mfoo=yes/no.
1811 NAME is the option name.
1812 VALUE is the option value.
1813 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1814 whether the option value is 'yes' or 'no' respectively. */
993f19a8 1815static void
5da702b1 1816rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1817{
5da702b1 1818 if (value == 0)
993f19a8 1819 return;
5da702b1
AH
1820 else if (!strcmp (value, "yes"))
1821 *flag = 1;
1822 else if (!strcmp (value, "no"))
1823 *flag = 0;
08b57fb3 1824 else
5da702b1 1825 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1826}
1827
c4501e62
JJ
1828/* Validate and record the size specified with the -mtls-size option. */
1829
1830static void
863d938c 1831rs6000_parse_tls_size_option (void)
c4501e62
JJ
1832{
1833 if (rs6000_tls_size_string == 0)
1834 return;
1835 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1836 rs6000_tls_size = 16;
1837 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1838 rs6000_tls_size = 32;
1839 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1840 rs6000_tls_size = 64;
1841 else
9e637a26 1842 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1843}
1844
5accd822 1845void
a2369ed3 1846optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1847{
2e3f0db6
DJ
1848 if (DEFAULT_ABI == ABI_DARWIN)
1849 /* The Darwin libraries never set errno, so we might as well
1850 avoid calling them when that's the only reason we would. */
1851 flag_errno_math = 0;
59d6560b
DE
1852
1853 /* Double growth factor to counter reduced min jump length. */
1854 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1855
1856 /* Enable section anchors by default.
1857 Skip section anchors for Objective C and Objective C++
1858 until front-ends fixed. */
23f99493 1859 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 1860 flag_section_anchors = 1;
5accd822 1861}
78f5898b
AH
1862
1863/* Implement TARGET_HANDLE_OPTION. */
1864
1865static bool
1866rs6000_handle_option (size_t code, const char *arg, int value)
1867{
1868 switch (code)
1869 {
1870 case OPT_mno_power:
1871 target_flags &= ~(MASK_POWER | MASK_POWER2
1872 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
1873 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1874 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
1875 break;
1876 case OPT_mno_powerpc:
1877 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1878 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
1879 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1880 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
1881 break;
1882 case OPT_mfull_toc:
d2894ab5
DE
1883 target_flags &= ~MASK_MINIMAL_TOC;
1884 TARGET_NO_FP_IN_TOC = 0;
1885 TARGET_NO_SUM_IN_TOC = 0;
1886 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1887#ifdef TARGET_USES_SYSV4_OPT
1888 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1889 just the same as -mminimal-toc. */
1890 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1891 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1892#endif
1893 break;
1894
1895#ifdef TARGET_USES_SYSV4_OPT
1896 case OPT_mtoc:
1897 /* Make -mtoc behave like -mminimal-toc. */
1898 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1899 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1900 break;
1901#endif
1902
1903#ifdef TARGET_USES_AIX64_OPT
1904 case OPT_maix64:
1905#else
1906 case OPT_m64:
1907#endif
2c9c9afd
AM
1908 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1909 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1910 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
1911 break;
1912
1913#ifdef TARGET_USES_AIX64_OPT
1914 case OPT_maix32:
1915#else
1916 case OPT_m32:
1917#endif
1918 target_flags &= ~MASK_POWERPC64;
c2dba4ab 1919 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
1920 break;
1921
1922 case OPT_minsert_sched_nops_:
1923 rs6000_sched_insert_nops_str = arg;
1924 break;
1925
1926 case OPT_mminimal_toc:
1927 if (value == 1)
1928 {
d2894ab5
DE
1929 TARGET_NO_FP_IN_TOC = 0;
1930 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
1931 }
1932 break;
1933
1934 case OPT_mpower:
1935 if (value == 1)
c2dba4ab
AH
1936 {
1937 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1938 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1939 }
78f5898b
AH
1940 break;
1941
1942 case OPT_mpower2:
1943 if (value == 1)
c2dba4ab
AH
1944 {
1945 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1946 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1947 }
78f5898b
AH
1948 break;
1949
1950 case OPT_mpowerpc_gpopt:
1951 case OPT_mpowerpc_gfxopt:
1952 if (value == 1)
c2dba4ab
AH
1953 {
1954 target_flags |= MASK_POWERPC;
1955 target_flags_explicit |= MASK_POWERPC;
1956 }
78f5898b
AH
1957 break;
1958
df01da37
DE
1959 case OPT_maix_struct_return:
1960 case OPT_msvr4_struct_return:
1961 rs6000_explicit_options.aix_struct_ret = true;
1962 break;
1963
78f5898b
AH
1964 case OPT_mvrsave_:
1965 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1966 break;
78f5898b
AH
1967
1968 case OPT_misel_:
1969 rs6000_explicit_options.isel = true;
1970 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1971 break;
1972
1973 case OPT_mspe_:
1974 rs6000_explicit_options.spe = true;
1975 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
1976 break;
1977
1978 case OPT_mdebug_:
1979 rs6000_debug_name = arg;
1980 break;
1981
1982#ifdef TARGET_USES_SYSV4_OPT
1983 case OPT_mcall_:
1984 rs6000_abi_name = arg;
1985 break;
1986
1987 case OPT_msdata_:
1988 rs6000_sdata_name = arg;
1989 break;
1990
1991 case OPT_mtls_size_:
1992 rs6000_tls_size_string = arg;
1993 break;
1994
1995 case OPT_mrelocatable:
1996 if (value == 1)
c2dba4ab 1997 {
e0bf274f
AM
1998 target_flags |= MASK_MINIMAL_TOC;
1999 target_flags_explicit |= MASK_MINIMAL_TOC;
2000 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2001 }
78f5898b
AH
2002 break;
2003
2004 case OPT_mrelocatable_lib:
2005 if (value == 1)
c2dba4ab 2006 {
e0bf274f
AM
2007 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2008 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2009 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2010 }
78f5898b 2011 else
c2dba4ab
AH
2012 {
2013 target_flags &= ~MASK_RELOCATABLE;
2014 target_flags_explicit |= MASK_RELOCATABLE;
2015 }
78f5898b
AH
2016 break;
2017#endif
2018
2019 case OPT_mabi_:
78f5898b
AH
2020 if (!strcmp (arg, "altivec"))
2021 {
d3603e8c 2022 rs6000_explicit_options.abi = true;
78f5898b
AH
2023 rs6000_altivec_abi = 1;
2024 rs6000_spe_abi = 0;
2025 }
2026 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2027 {
2028 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2029 the default for rs6000_spe_abi to be chosen later. */
2030 rs6000_altivec_abi = 0;
2031 }
78f5898b
AH
2032 else if (! strcmp (arg, "spe"))
2033 {
d3603e8c 2034 rs6000_explicit_options.abi = true;
78f5898b
AH
2035 rs6000_spe_abi = 1;
2036 rs6000_altivec_abi = 0;
2037 if (!TARGET_SPE_ABI)
2038 error ("not configured for ABI: '%s'", arg);
2039 }
2040 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2041 {
2042 rs6000_explicit_options.abi = true;
2043 rs6000_spe_abi = 0;
2044 }
78f5898b
AH
2045
2046 /* These are here for testing during development only, do not
2047 document in the manual please. */
2048 else if (! strcmp (arg, "d64"))
2049 {
2050 rs6000_darwin64_abi = 1;
2051 warning (0, "Using darwin64 ABI");
2052 }
2053 else if (! strcmp (arg, "d32"))
2054 {
2055 rs6000_darwin64_abi = 0;
2056 warning (0, "Using old darwin ABI");
2057 }
2058
602ea4d3
JJ
2059 else if (! strcmp (arg, "ibmlongdouble"))
2060 {
d3603e8c 2061 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2062 rs6000_ieeequad = 0;
2063 warning (0, "Using IBM extended precision long double");
2064 }
2065 else if (! strcmp (arg, "ieeelongdouble"))
2066 {
d3603e8c 2067 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2068 rs6000_ieeequad = 1;
2069 warning (0, "Using IEEE extended precision long double");
2070 }
2071
78f5898b
AH
2072 else
2073 {
2074 error ("unknown ABI specified: '%s'", arg);
2075 return false;
2076 }
2077 break;
2078
2079 case OPT_mcpu_:
2080 rs6000_select[1].string = arg;
2081 break;
2082
2083 case OPT_mtune_:
2084 rs6000_select[2].string = arg;
2085 break;
2086
2087 case OPT_mtraceback_:
2088 rs6000_traceback_name = arg;
2089 break;
2090
2091 case OPT_mfloat_gprs_:
2092 rs6000_explicit_options.float_gprs = true;
2093 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2094 rs6000_float_gprs = 1;
2095 else if (! strcmp (arg, "double"))
2096 rs6000_float_gprs = 2;
2097 else if (! strcmp (arg, "no"))
2098 rs6000_float_gprs = 0;
2099 else
2100 {
2101 error ("invalid option for -mfloat-gprs: '%s'", arg);
2102 return false;
2103 }
2104 break;
2105
2106 case OPT_mlong_double_:
2107 rs6000_explicit_options.long_double = true;
2108 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2109 if (value != 64 && value != 128)
2110 {
2111 error ("Unknown switch -mlong-double-%s", arg);
2112 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2113 return false;
2114 }
2115 else
2116 rs6000_long_double_type_size = value;
2117 break;
2118
2119 case OPT_msched_costly_dep_:
2120 rs6000_sched_costly_dep_str = arg;
2121 break;
2122
2123 case OPT_malign_:
2124 rs6000_explicit_options.alignment = true;
2125 if (! strcmp (arg, "power"))
2126 {
2127 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2128 some C library functions, so warn about it. The flag may be
2129 useful for performance studies from time to time though, so
2130 don't disable it entirely. */
2131 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2132 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2133 " it is incompatible with the installed C and C++ libraries");
2134 rs6000_alignment_flags = MASK_ALIGN_POWER;
2135 }
2136 else if (! strcmp (arg, "natural"))
2137 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2138 else
2139 {
2140 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2141 return false;
2142 }
2143 break;
2144 }
2145 return true;
2146}
3cfa4909
MM
2147\f
2148/* Do anything needed at the start of the asm file. */
2149
1bc7c5b6 2150static void
863d938c 2151rs6000_file_start (void)
3cfa4909 2152{
c4d38ccb 2153 size_t i;
3cfa4909 2154 char buffer[80];
d330fd93 2155 const char *start = buffer;
3cfa4909 2156 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2157 const char *default_cpu = TARGET_CPU_DEFAULT;
2158 FILE *file = asm_out_file;
2159
2160 default_file_start ();
2161
2162#ifdef TARGET_BI_ARCH
2163 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2164 default_cpu = 0;
2165#endif
3cfa4909
MM
2166
2167 if (flag_verbose_asm)
2168 {
2169 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2170 rs6000_select[0].string = default_cpu;
2171
b6a1cbae 2172 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2173 {
2174 ptr = &rs6000_select[i];
2175 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2176 {
2177 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2178 start = "";
2179 }
2180 }
2181
9c6b4ed9 2182 if (PPC405_ERRATUM77)
b0bfee6e 2183 {
9c6b4ed9 2184 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2185 start = "";
2186 }
b0bfee6e 2187
b91da81f 2188#ifdef USING_ELFOS_H
3cfa4909
MM
2189 switch (rs6000_sdata)
2190 {
2191 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2192 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2193 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2194 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2195 }
2196
2197 if (rs6000_sdata && g_switch_value)
2198 {
307b599c
MK
2199 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2200 g_switch_value);
3cfa4909
MM
2201 start = "";
2202 }
2203#endif
2204
2205 if (*start == '\0')
949ea356 2206 putc ('\n', file);
3cfa4909 2207 }
b723e82f
JJ
2208
2209 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2210 {
d6b5193b
RS
2211 switch_to_section (toc_section);
2212 switch_to_section (text_section);
b723e82f 2213 }
3cfa4909 2214}
c4e18b1c 2215
5248c961 2216\f
a0ab749a 2217/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2218
2219int
863d938c 2220direct_return (void)
9878760c 2221{
4697a36c
MM
2222 if (reload_completed)
2223 {
2224 rs6000_stack_t *info = rs6000_stack_info ();
2225
2226 if (info->first_gp_reg_save == 32
2227 && info->first_fp_reg_save == 64
00b960c7 2228 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2229 && ! info->lr_save_p
2230 && ! info->cr_save_p
00b960c7 2231 && info->vrsave_mask == 0
c81fc13e 2232 && ! info->push_p)
4697a36c
MM
2233 return 1;
2234 }
2235
2236 return 0;
9878760c
RK
2237}
2238
4e74d8ec
MM
2239/* Return the number of instructions it takes to form a constant in an
2240 integer register. */
2241
48d72335 2242int
a2369ed3 2243num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2244{
2245 /* signed constant loadable with {cal|addi} */
547b216d 2246 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2247 return 1;
2248
4e74d8ec 2249 /* constant loadable with {cau|addis} */
547b216d
DE
2250 else if ((value & 0xffff) == 0
2251 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2252 return 1;
2253
5f59ecb7 2254#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2255 else if (TARGET_POWERPC64)
4e74d8ec 2256 {
a65c591c
DE
2257 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2258 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2259
a65c591c 2260 if (high == 0 || high == -1)
4e74d8ec
MM
2261 return 2;
2262
a65c591c 2263 high >>= 1;
4e74d8ec 2264
a65c591c 2265 if (low == 0)
4e74d8ec 2266 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2267 else
2268 return (num_insns_constant_wide (high)
e396202a 2269 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2270 }
2271#endif
2272
2273 else
2274 return 2;
2275}
2276
2277int
a2369ed3 2278num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2279{
37409796 2280 HOST_WIDE_INT low, high;
bb8df8a6 2281
37409796 2282 switch (GET_CODE (op))
0d30d435 2283 {
37409796 2284 case CONST_INT:
0d30d435 2285#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2286 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2287 && mask64_operand (op, mode))
c4ad648e 2288 return 2;
0d30d435
DE
2289 else
2290#endif
2291 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2292
37409796
NS
2293 case CONST_DOUBLE:
2294 if (mode == SFmode)
2295 {
2296 long l;
2297 REAL_VALUE_TYPE rv;
bb8df8a6 2298
37409796
NS
2299 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2300 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2301 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2302 }
a260abc9 2303
37409796
NS
2304 if (mode == VOIDmode || mode == DImode)
2305 {
2306 high = CONST_DOUBLE_HIGH (op);
2307 low = CONST_DOUBLE_LOW (op);
2308 }
2309 else
2310 {
2311 long l[2];
2312 REAL_VALUE_TYPE rv;
bb8df8a6 2313
37409796 2314 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2315 if (DECIMAL_FLOAT_MODE_P (mode))
2316 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2317 else
2318 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2319 high = l[WORDS_BIG_ENDIAN == 0];
2320 low = l[WORDS_BIG_ENDIAN != 0];
2321 }
47ad8c61 2322
37409796
NS
2323 if (TARGET_32BIT)
2324 return (num_insns_constant_wide (low)
2325 + num_insns_constant_wide (high));
2326 else
2327 {
2328 if ((high == 0 && low >= 0)
2329 || (high == -1 && low < 0))
2330 return num_insns_constant_wide (low);
bb8df8a6 2331
1990cd79 2332 else if (mask64_operand (op, mode))
37409796 2333 return 2;
bb8df8a6 2334
37409796
NS
2335 else if (low == 0)
2336 return num_insns_constant_wide (high) + 1;
bb8df8a6 2337
37409796
NS
2338 else
2339 return (num_insns_constant_wide (high)
2340 + num_insns_constant_wide (low) + 1);
2341 }
bb8df8a6 2342
37409796
NS
2343 default:
2344 gcc_unreachable ();
4e74d8ec 2345 }
4e74d8ec
MM
2346}
2347
0972012c
RS
2348/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2349 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2350 corresponding element of the vector, but for V4SFmode and V2SFmode,
2351 the corresponding "float" is interpreted as an SImode integer. */
2352
2353static HOST_WIDE_INT
2354const_vector_elt_as_int (rtx op, unsigned int elt)
2355{
2356 rtx tmp = CONST_VECTOR_ELT (op, elt);
2357 if (GET_MODE (op) == V4SFmode
2358 || GET_MODE (op) == V2SFmode)
2359 tmp = gen_lowpart (SImode, tmp);
2360 return INTVAL (tmp);
2361}
452a7d36 2362
77ccdfed 2363/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2364 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2365 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2366 all items are set to the same value and contain COPIES replicas of the
2367 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2368 operand and the others are set to the value of the operand's msb. */
2369
2370static bool
2371vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2372{
66180ff3
PB
2373 enum machine_mode mode = GET_MODE (op);
2374 enum machine_mode inner = GET_MODE_INNER (mode);
2375
2376 unsigned i;
2377 unsigned nunits = GET_MODE_NUNITS (mode);
2378 unsigned bitsize = GET_MODE_BITSIZE (inner);
2379 unsigned mask = GET_MODE_MASK (inner);
2380
0972012c 2381 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2382 HOST_WIDE_INT splat_val = val;
2383 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2384
2385 /* Construct the value to be splatted, if possible. If not, return 0. */
2386 for (i = 2; i <= copies; i *= 2)
452a7d36 2387 {
66180ff3
PB
2388 HOST_WIDE_INT small_val;
2389 bitsize /= 2;
2390 small_val = splat_val >> bitsize;
2391 mask >>= bitsize;
2392 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2393 return false;
2394 splat_val = small_val;
2395 }
c4ad648e 2396
66180ff3
PB
2397 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2398 if (EASY_VECTOR_15 (splat_val))
2399 ;
2400
2401 /* Also check if we can splat, and then add the result to itself. Do so if
2402 the value is positive, of if the splat instruction is using OP's mode;
2403 for splat_val < 0, the splat and the add should use the same mode. */
2404 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2405 && (splat_val >= 0 || (step == 1 && copies == 1)))
2406 ;
2407
2408 else
2409 return false;
2410
2411 /* Check if VAL is present in every STEP-th element, and the
2412 other elements are filled with its most significant bit. */
2413 for (i = 0; i < nunits - 1; ++i)
2414 {
2415 HOST_WIDE_INT desired_val;
2416 if (((i + 1) & (step - 1)) == 0)
2417 desired_val = val;
2418 else
2419 desired_val = msb_val;
2420
0972012c 2421 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2422 return false;
452a7d36 2423 }
66180ff3
PB
2424
2425 return true;
452a7d36
HP
2426}
2427
69ef87e2 2428
77ccdfed 2429/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2430 with a vspltisb, vspltish or vspltisw. */
2431
2432bool
2433easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2434{
66180ff3 2435 unsigned step, copies;
d744e06e 2436
66180ff3
PB
2437 if (mode == VOIDmode)
2438 mode = GET_MODE (op);
2439 else if (mode != GET_MODE (op))
2440 return false;
d744e06e 2441
66180ff3
PB
2442 /* Start with a vspltisw. */
2443 step = GET_MODE_NUNITS (mode) / 4;
2444 copies = 1;
2445
2446 if (vspltis_constant (op, step, copies))
2447 return true;
2448
2449 /* Then try with a vspltish. */
2450 if (step == 1)
2451 copies <<= 1;
2452 else
2453 step >>= 1;
2454
2455 if (vspltis_constant (op, step, copies))
2456 return true;
2457
2458 /* And finally a vspltisb. */
2459 if (step == 1)
2460 copies <<= 1;
2461 else
2462 step >>= 1;
2463
2464 if (vspltis_constant (op, step, copies))
2465 return true;
2466
2467 return false;
d744e06e
AH
2468}
2469
66180ff3
PB
2470/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2471 result is OP. Abort if it is not possible. */
d744e06e 2472
f676971a 2473rtx
66180ff3 2474gen_easy_altivec_constant (rtx op)
452a7d36 2475{
66180ff3
PB
2476 enum machine_mode mode = GET_MODE (op);
2477 int nunits = GET_MODE_NUNITS (mode);
2478 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2479 unsigned step = nunits / 4;
2480 unsigned copies = 1;
2481
2482 /* Start with a vspltisw. */
2483 if (vspltis_constant (op, step, copies))
2484 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2485
2486 /* Then try with a vspltish. */
2487 if (step == 1)
2488 copies <<= 1;
2489 else
2490 step >>= 1;
2491
2492 if (vspltis_constant (op, step, copies))
2493 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2494
2495 /* And finally a vspltisb. */
2496 if (step == 1)
2497 copies <<= 1;
2498 else
2499 step >>= 1;
2500
2501 if (vspltis_constant (op, step, copies))
2502 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2503
2504 gcc_unreachable ();
d744e06e
AH
2505}
2506
2507const char *
a2369ed3 2508output_vec_const_move (rtx *operands)
d744e06e
AH
2509{
2510 int cst, cst2;
2511 enum machine_mode mode;
2512 rtx dest, vec;
2513
2514 dest = operands[0];
2515 vec = operands[1];
d744e06e 2516 mode = GET_MODE (dest);
69ef87e2 2517
d744e06e
AH
2518 if (TARGET_ALTIVEC)
2519 {
66180ff3 2520 rtx splat_vec;
d744e06e
AH
2521 if (zero_constant (vec, mode))
2522 return "vxor %0,%0,%0";
37409796 2523
66180ff3
PB
2524 splat_vec = gen_easy_altivec_constant (vec);
2525 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2526 operands[1] = XEXP (splat_vec, 0);
2527 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2528 return "#";
bb8df8a6 2529
66180ff3 2530 switch (GET_MODE (splat_vec))
98ef3137 2531 {
37409796 2532 case V4SImode:
66180ff3 2533 return "vspltisw %0,%1";
c4ad648e 2534
37409796 2535 case V8HImode:
66180ff3 2536 return "vspltish %0,%1";
c4ad648e 2537
37409796 2538 case V16QImode:
66180ff3 2539 return "vspltisb %0,%1";
bb8df8a6 2540
37409796
NS
2541 default:
2542 gcc_unreachable ();
98ef3137 2543 }
69ef87e2
AH
2544 }
2545
37409796 2546 gcc_assert (TARGET_SPE);
bb8df8a6 2547
37409796
NS
2548 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2549 pattern of V1DI, V4HI, and V2SF.
2550
2551 FIXME: We should probably return # and add post reload
2552 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2553 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2554 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2555 operands[1] = CONST_VECTOR_ELT (vec, 0);
2556 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2557 if (cst == cst2)
2558 return "li %0,%1\n\tevmergelo %0,%0,%0";
2559 else
2560 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2561}
2562
7a4eca66
DE
2563/* Initialize vector TARGET to VALS. */
2564
2565void
2566rs6000_expand_vector_init (rtx target, rtx vals)
2567{
2568 enum machine_mode mode = GET_MODE (target);
2569 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2570 int n_elts = GET_MODE_NUNITS (mode);
2571 int n_var = 0, one_var = -1;
2572 bool all_same = true, all_const_zero = true;
2573 rtx x, mem;
2574 int i;
2575
2576 for (i = 0; i < n_elts; ++i)
2577 {
2578 x = XVECEXP (vals, 0, i);
2579 if (!CONSTANT_P (x))
2580 ++n_var, one_var = i;
2581 else if (x != CONST0_RTX (inner_mode))
2582 all_const_zero = false;
2583
2584 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2585 all_same = false;
2586 }
2587
2588 if (n_var == 0)
2589 {
2590 if (mode != V4SFmode && all_const_zero)
2591 {
2592 /* Zero register. */
2593 emit_insn (gen_rtx_SET (VOIDmode, target,
2594 gen_rtx_XOR (mode, target, target)));
2595 return;
2596 }
66180ff3 2597 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2598 {
2599 /* Splat immediate. */
66180ff3 2600 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2601 return;
2602 }
2603 else if (all_same)
2604 ; /* Splat vector element. */
2605 else
2606 {
2607 /* Load from constant pool. */
2608 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2609 return;
2610 }
2611 }
2612
2613 /* Store value to stack temp. Load vector element. Splat. */
2614 if (all_same)
2615 {
2616 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2617 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2618 XVECEXP (vals, 0, 0));
2619 x = gen_rtx_UNSPEC (VOIDmode,
2620 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2621 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2622 gen_rtvec (2,
2623 gen_rtx_SET (VOIDmode,
2624 target, mem),
2625 x)));
2626 x = gen_rtx_VEC_SELECT (inner_mode, target,
2627 gen_rtx_PARALLEL (VOIDmode,
2628 gen_rtvec (1, const0_rtx)));
2629 emit_insn (gen_rtx_SET (VOIDmode, target,
2630 gen_rtx_VEC_DUPLICATE (mode, x)));
2631 return;
2632 }
2633
2634 /* One field is non-constant. Load constant then overwrite
2635 varying field. */
2636 if (n_var == 1)
2637 {
2638 rtx copy = copy_rtx (vals);
2639
57b51d4d 2640 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2641 varying element. */
2642 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2643 rs6000_expand_vector_init (target, copy);
2644
2645 /* Insert variable. */
2646 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2647 return;
2648 }
2649
2650 /* Construct the vector in memory one field at a time
2651 and load the whole vector. */
2652 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2653 for (i = 0; i < n_elts; i++)
2654 emit_move_insn (adjust_address_nv (mem, inner_mode,
2655 i * GET_MODE_SIZE (inner_mode)),
2656 XVECEXP (vals, 0, i));
2657 emit_move_insn (target, mem);
2658}
2659
2660/* Set field ELT of TARGET to VAL. */
2661
2662void
2663rs6000_expand_vector_set (rtx target, rtx val, int elt)
2664{
2665 enum machine_mode mode = GET_MODE (target);
2666 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2667 rtx reg = gen_reg_rtx (mode);
2668 rtx mask, mem, x;
2669 int width = GET_MODE_SIZE (inner_mode);
2670 int i;
2671
2672 /* Load single variable value. */
2673 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2674 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2675 x = gen_rtx_UNSPEC (VOIDmode,
2676 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2677 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2678 gen_rtvec (2,
2679 gen_rtx_SET (VOIDmode,
2680 reg, mem),
2681 x)));
2682
2683 /* Linear sequence. */
2684 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2685 for (i = 0; i < 16; ++i)
2686 XVECEXP (mask, 0, i) = GEN_INT (i);
2687
2688 /* Set permute mask to insert element into target. */
2689 for (i = 0; i < width; ++i)
2690 XVECEXP (mask, 0, elt*width + i)
2691 = GEN_INT (i + 0x10);
2692 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2693 x = gen_rtx_UNSPEC (mode,
2694 gen_rtvec (3, target, reg,
2695 force_reg (V16QImode, x)),
2696 UNSPEC_VPERM);
2697 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2698}
2699
2700/* Extract field ELT from VEC into TARGET. */
2701
2702void
2703rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2704{
2705 enum machine_mode mode = GET_MODE (vec);
2706 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2707 rtx mem, x;
2708
2709 /* Allocate mode-sized buffer. */
2710 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2711
2712 /* Add offset to field within buffer matching vector element. */
2713 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2714
2715 /* Store single field into mode-sized buffer. */
2716 x = gen_rtx_UNSPEC (VOIDmode,
2717 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2718 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2719 gen_rtvec (2,
2720 gen_rtx_SET (VOIDmode,
2721 mem, vec),
2722 x)));
2723 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2724}
2725
0ba1b2ff
AM
2726/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2727 implement ANDing by the mask IN. */
2728void
a2369ed3 2729build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2730{
2731#if HOST_BITS_PER_WIDE_INT >= 64
2732 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2733 int shift;
2734
37409796 2735 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2736
2737 c = INTVAL (in);
2738 if (c & 1)
2739 {
2740 /* Assume c initially something like 0x00fff000000fffff. The idea
2741 is to rotate the word so that the middle ^^^^^^ group of zeros
2742 is at the MS end and can be cleared with an rldicl mask. We then
2743 rotate back and clear off the MS ^^ group of zeros with a
2744 second rldicl. */
2745 c = ~c; /* c == 0xff000ffffff00000 */
2746 lsb = c & -c; /* lsb == 0x0000000000100000 */
2747 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2748 c = ~c; /* c == 0x00fff000000fffff */
2749 c &= -lsb; /* c == 0x00fff00000000000 */
2750 lsb = c & -c; /* lsb == 0x0000100000000000 */
2751 c = ~c; /* c == 0xff000fffffffffff */
2752 c &= -lsb; /* c == 0xff00000000000000 */
2753 shift = 0;
2754 while ((lsb >>= 1) != 0)
2755 shift++; /* shift == 44 on exit from loop */
2756 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2757 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2758 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2759 }
2760 else
0ba1b2ff
AM
2761 {
2762 /* Assume c initially something like 0xff000f0000000000. The idea
2763 is to rotate the word so that the ^^^ middle group of zeros
2764 is at the LS end and can be cleared with an rldicr mask. We then
2765 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2766 a second rldicr. */
2767 lsb = c & -c; /* lsb == 0x0000010000000000 */
2768 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2769 c = ~c; /* c == 0x00fff0ffffffffff */
2770 c &= -lsb; /* c == 0x00fff00000000000 */
2771 lsb = c & -c; /* lsb == 0x0000100000000000 */
2772 c = ~c; /* c == 0xff000fffffffffff */
2773 c &= -lsb; /* c == 0xff00000000000000 */
2774 shift = 0;
2775 while ((lsb >>= 1) != 0)
2776 shift++; /* shift == 44 on exit from loop */
2777 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2778 m1 >>= shift; /* m1 == 0x0000000000000fff */
2779 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2780 }
2781
2782 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2783 masks will be all 1's. We are guaranteed more than one transition. */
2784 out[0] = GEN_INT (64 - shift);
2785 out[1] = GEN_INT (m1);
2786 out[2] = GEN_INT (shift);
2787 out[3] = GEN_INT (m2);
2788#else
045572c7
GK
2789 (void)in;
2790 (void)out;
37409796 2791 gcc_unreachable ();
0ba1b2ff 2792#endif
a260abc9
DE
2793}
2794
54b695e7 2795/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2796
2797bool
54b695e7
AH
2798invalid_e500_subreg (rtx op, enum machine_mode mode)
2799{
61c76239
JM
2800 if (TARGET_E500_DOUBLE)
2801 {
17caeff2
JM
2802 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
2803 subreg:TI and reg:TF. */
61c76239 2804 if (GET_CODE (op) == SUBREG
17caeff2 2805 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 2806 && REG_P (SUBREG_REG (op))
17caeff2
JM
2807 && (GET_MODE (SUBREG_REG (op)) == DFmode
2808 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
2809 return true;
2810
17caeff2
JM
2811 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
2812 reg:TI. */
61c76239 2813 if (GET_CODE (op) == SUBREG
17caeff2 2814 && (mode == DFmode || mode == TFmode)
61c76239 2815 && REG_P (SUBREG_REG (op))
17caeff2
JM
2816 && (GET_MODE (SUBREG_REG (op)) == DImode
2817 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
2818 return true;
2819 }
54b695e7 2820
61c76239
JM
2821 if (TARGET_SPE
2822 && GET_CODE (op) == SUBREG
2823 && mode == SImode
54b695e7 2824 && REG_P (SUBREG_REG (op))
14502dad 2825 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
2826 return true;
2827
2828 return false;
2829}
2830
58182de3 2831/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
2832 field is an FP double while the FP fields remain word aligned. */
2833
19d66194 2834unsigned int
fa5b0972
AM
2835rs6000_special_round_type_align (tree type, unsigned int computed,
2836 unsigned int specified)
95727fb8 2837{
fa5b0972 2838 unsigned int align = MAX (computed, specified);
95727fb8 2839 tree field = TYPE_FIELDS (type);
95727fb8 2840
bb8df8a6 2841 /* Skip all non field decls */
85962ac8 2842 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2843 field = TREE_CHAIN (field);
2844
fa5b0972
AM
2845 if (field != NULL && field != type)
2846 {
2847 type = TREE_TYPE (field);
2848 while (TREE_CODE (type) == ARRAY_TYPE)
2849 type = TREE_TYPE (type);
2850
2851 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2852 align = MAX (align, 64);
2853 }
95727fb8 2854
fa5b0972 2855 return align;
95727fb8
AP
2856}
2857
58182de3
GK
2858/* Darwin increases record alignment to the natural alignment of
2859 the first field. */
2860
2861unsigned int
2862darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
2863 unsigned int specified)
2864{
2865 unsigned int align = MAX (computed, specified);
2866
2867 if (TYPE_PACKED (type))
2868 return align;
2869
2870 /* Find the first field, looking down into aggregates. */
2871 do {
2872 tree field = TYPE_FIELDS (type);
2873 /* Skip all non field decls */
2874 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2875 field = TREE_CHAIN (field);
2876 if (! field)
2877 break;
2878 type = TREE_TYPE (field);
2879 while (TREE_CODE (type) == ARRAY_TYPE)
2880 type = TREE_TYPE (type);
2881 } while (AGGREGATE_TYPE_P (type));
2882
2883 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
2884 align = MAX (align, TYPE_ALIGN (type));
2885
2886 return align;
2887}
2888
a4f6c312 2889/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
2890
2891int
f676971a 2892small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 2893 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 2894{
38c1f2d7 2895#if TARGET_ELF
5f59ecb7 2896 rtx sym_ref;
7509c759 2897
d9407988 2898 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 2899 return 0;
a54d04b7 2900
f607bc57 2901 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
2902 return 0;
2903
88228c4b
MM
2904 if (GET_CODE (op) == SYMBOL_REF)
2905 sym_ref = op;
2906
2907 else if (GET_CODE (op) != CONST
2908 || GET_CODE (XEXP (op, 0)) != PLUS
2909 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2910 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
2911 return 0;
2912
88228c4b 2913 else
dbf55e53
MM
2914 {
2915 rtx sum = XEXP (op, 0);
2916 HOST_WIDE_INT summand;
2917
2918 /* We have to be careful here, because it is the referenced address
c4ad648e 2919 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 2920 summand = INTVAL (XEXP (sum, 1));
307b599c 2921 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 2922 return 0;
dbf55e53
MM
2923
2924 sym_ref = XEXP (sum, 0);
2925 }
88228c4b 2926
20bfcd69 2927 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
2928#else
2929 return 0;
2930#endif
7509c759 2931}
46c07df8 2932
3a1f863f 2933/* Return true if either operand is a general purpose register. */
46c07df8 2934
3a1f863f
DE
2935bool
2936gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 2937{
3a1f863f
DE
2938 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2939 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
2940}
2941
9ebbca7d 2942\f
4d588c14
RH
2943/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2944
f676971a
EC
2945static int
2946constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 2947{
9390387d 2948 switch (GET_CODE (op))
9ebbca7d
GK
2949 {
2950 case SYMBOL_REF:
c4501e62
JJ
2951 if (RS6000_SYMBOL_REF_TLS_P (op))
2952 return 0;
2953 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
2954 {
2955 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2956 {
2957 *have_sym = 1;
2958 return 1;
2959 }
2960 else
2961 return 0;
2962 }
2963 else if (! strcmp (XSTR (op, 0), toc_label_name))
2964 {
2965 *have_toc = 1;
2966 return 1;
2967 }
2968 else
2969 return 0;
9ebbca7d
GK
2970 case PLUS:
2971 case MINUS:
c1f11548
DE
2972 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2973 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 2974 case CONST:
a4f6c312 2975 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 2976 case CONST_INT:
a4f6c312 2977 return 1;
9ebbca7d 2978 default:
a4f6c312 2979 return 0;
9ebbca7d
GK
2980 }
2981}
2982
4d588c14 2983static bool
a2369ed3 2984constant_pool_expr_p (rtx op)
9ebbca7d
GK
2985{
2986 int have_sym = 0;
2987 int have_toc = 0;
2988 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2989}
2990
48d72335 2991bool
a2369ed3 2992toc_relative_expr_p (rtx op)
9ebbca7d 2993{
4d588c14
RH
2994 int have_sym = 0;
2995 int have_toc = 0;
2996 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2997}
2998
4d588c14 2999bool
a2369ed3 3000legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3001{
3002 return (TARGET_TOC
3003 && GET_CODE (x) == PLUS
3004 && GET_CODE (XEXP (x, 0)) == REG
3005 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3006 && constant_pool_expr_p (XEXP (x, 1)));
3007}
3008
d04b6e6e
EB
3009static bool
3010legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3011{
3012 return (DEFAULT_ABI == ABI_V4
3013 && !flag_pic && !TARGET_TOC
3014 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3015 && small_data_operand (x, mode));
3016}
3017
60cdabab
DE
3018/* SPE offset addressing is limited to 5-bits worth of double words. */
3019#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3020
76d2b81d
DJ
3021bool
3022rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3023{
3024 unsigned HOST_WIDE_INT offset, extra;
3025
3026 if (GET_CODE (x) != PLUS)
3027 return false;
3028 if (GET_CODE (XEXP (x, 0)) != REG)
3029 return false;
3030 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3031 return false;
60cdabab
DE
3032 if (legitimate_constant_pool_address_p (x))
3033 return true;
4d588c14
RH
3034 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3035 return false;
3036
3037 offset = INTVAL (XEXP (x, 1));
3038 extra = 0;
3039 switch (mode)
3040 {
3041 case V16QImode:
3042 case V8HImode:
3043 case V4SFmode:
3044 case V4SImode:
7a4eca66
DE
3045 /* AltiVec vector modes. Only reg+reg addressing is valid and
3046 constant offset zero should not occur due to canonicalization.
3047 Allow any offset when not strict before reload. */
3048 return !strict;
4d588c14
RH
3049
3050 case V4HImode:
3051 case V2SImode:
3052 case V1DImode:
3053 case V2SFmode:
3054 /* SPE vector modes. */
3055 return SPE_CONST_OFFSET_OK (offset);
3056
3057 case DFmode:
7393f7f8 3058 case DDmode:
4d4cbc0e
AH
3059 if (TARGET_E500_DOUBLE)
3060 return SPE_CONST_OFFSET_OK (offset);
3061
4d588c14 3062 case DImode:
54b695e7
AH
3063 /* On e500v2, we may have:
3064
3065 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3066
3067 Which gets addressed with evldd instructions. */
3068 if (TARGET_E500_DOUBLE)
3069 return SPE_CONST_OFFSET_OK (offset);
3070
7393f7f8 3071 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3072 extra = 4;
3073 else if (offset & 3)
3074 return false;
3075 break;
3076
3077 case TFmode:
17caeff2
JM
3078 if (TARGET_E500_DOUBLE)
3079 return (SPE_CONST_OFFSET_OK (offset)
3080 && SPE_CONST_OFFSET_OK (offset + 8));
3081
4d588c14 3082 case TImode:
7393f7f8
BE
3083 case TDmode:
3084 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3085 extra = 12;
3086 else if (offset & 3)
3087 return false;
3088 else
3089 extra = 8;
3090 break;
3091
3092 default:
3093 break;
3094 }
3095
b1917422
AM
3096 offset += 0x8000;
3097 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3098}
3099
3100static bool
a2369ed3 3101legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3102{
3103 rtx op0, op1;
3104
3105 if (GET_CODE (x) != PLUS)
3106 return false;
850e8d3d 3107
4d588c14
RH
3108 op0 = XEXP (x, 0);
3109 op1 = XEXP (x, 1);
3110
bf00cc0f 3111 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3112 replaced with proper base and index regs. */
3113 if (!strict
3114 && reload_in_progress
3115 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3116 && REG_P (op1))
3117 return true;
3118
3119 return (REG_P (op0) && REG_P (op1)
3120 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3121 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3122 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3123 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3124}
3125
48d72335 3126inline bool
a2369ed3 3127legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3128{
3129 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3130}
3131
48d72335 3132bool
4c81e946
FJ
3133macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3134{
c4ad648e 3135 if (!TARGET_MACHO || !flag_pic
9390387d 3136 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3137 return false;
3138 x = XEXP (x, 0);
4c81e946
FJ
3139
3140 if (GET_CODE (x) != LO_SUM)
3141 return false;
3142 if (GET_CODE (XEXP (x, 0)) != REG)
3143 return false;
3144 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3145 return false;
3146 x = XEXP (x, 1);
3147
3148 return CONSTANT_P (x);
3149}
3150
4d588c14 3151static bool
a2369ed3 3152legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3153{
3154 if (GET_CODE (x) != LO_SUM)
3155 return false;
3156 if (GET_CODE (XEXP (x, 0)) != REG)
3157 return false;
3158 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3159 return false;
54b695e7 3160 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3161 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3162 || mode == DImode))
f82f556d 3163 return false;
4d588c14
RH
3164 x = XEXP (x, 1);
3165
8622e235 3166 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3167 {
a29077da 3168 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3169 return false;
3170 if (TARGET_TOC)
3171 return false;
3172 if (GET_MODE_NUNITS (mode) != 1)
3173 return false;
5e5f01b9 3174 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3175 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3176 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3177 return false;
3178
3179 return CONSTANT_P (x);
3180 }
3181
3182 return false;
3183}
3184
3185
9ebbca7d
GK
3186/* Try machine-dependent ways of modifying an illegitimate address
3187 to be legitimate. If we find one, return the new, valid address.
3188 This is used from only one place: `memory_address' in explow.c.
3189
a4f6c312
SS
3190 OLDX is the address as it was before break_out_memory_refs was
3191 called. In some cases it is useful to look at this to decide what
3192 needs to be done.
9ebbca7d 3193
a4f6c312 3194 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3195
a4f6c312
SS
3196 It is always safe for this function to do nothing. It exists to
3197 recognize opportunities to optimize the output.
9ebbca7d
GK
3198
3199 On RS/6000, first check for the sum of a register with a constant
3200 integer that is out of range. If so, generate code to add the
3201 constant with the low-order 16 bits masked to the register and force
3202 this result into another register (this can be done with `cau').
3203 Then generate an address of REG+(CONST&0xffff), allowing for the
3204 possibility of bit 16 being a one.
3205
3206 Then check for the sum of a register and something not constant, try to
3207 load the other things into a register and return the sum. */
4d588c14 3208
9ebbca7d 3209rtx
a2369ed3
DJ
3210rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3211 enum machine_mode mode)
0ac081f6 3212{
c4501e62
JJ
3213 if (GET_CODE (x) == SYMBOL_REF)
3214 {
3215 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3216 if (model != 0)
3217 return rs6000_legitimize_tls_address (x, model);
3218 }
3219
f676971a 3220 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3221 && GET_CODE (XEXP (x, 0)) == REG
3222 && GET_CODE (XEXP (x, 1)) == CONST_INT
3223 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 3224 {
9ebbca7d
GK
3225 HOST_WIDE_INT high_int, low_int;
3226 rtx sum;
a65c591c
DE
3227 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3228 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3229 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3230 GEN_INT (high_int)), 0);
3231 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3232 }
f676971a 3233 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3234 && GET_CODE (XEXP (x, 0)) == REG
3235 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3236 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3237 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3238 || TARGET_POWERPC64
7393f7f8
BE
3239 || (((mode != DImode && mode != DFmode && mode != DDmode)
3240 || TARGET_E500_DOUBLE)
3241 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3242 && (TARGET_POWERPC64 || mode != DImode)
3243 && mode != TImode)
3244 {
3245 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3246 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3247 }
0ac081f6
AH
3248 else if (ALTIVEC_VECTOR_MODE (mode))
3249 {
3250 rtx reg;
3251
3252 /* Make sure both operands are registers. */
3253 if (GET_CODE (x) == PLUS)
9f85ed45 3254 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3255 force_reg (Pmode, XEXP (x, 1)));
3256
3257 reg = force_reg (Pmode, x);
3258 return reg;
3259 }
4d4cbc0e 3260 else if (SPE_VECTOR_MODE (mode)
17caeff2 3261 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3262 || mode == DDmode || mode == TDmode
54b695e7 3263 || mode == DImode)))
a3170dc6 3264 {
54b695e7
AH
3265 if (mode == DImode)
3266 return NULL_RTX;
a3170dc6
AH
3267 /* We accept [reg + reg] and [reg + OFFSET]. */
3268
3269 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3270 {
3271 rtx op1 = XEXP (x, 0);
3272 rtx op2 = XEXP (x, 1);
a3170dc6 3273
c4ad648e 3274 op1 = force_reg (Pmode, op1);
a3170dc6 3275
c4ad648e
AM
3276 if (GET_CODE (op2) != REG
3277 && (GET_CODE (op2) != CONST_INT
3278 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3279 op2 = force_reg (Pmode, op2);
a3170dc6 3280
c4ad648e
AM
3281 return gen_rtx_PLUS (Pmode, op1, op2);
3282 }
a3170dc6
AH
3283
3284 return force_reg (Pmode, x);
3285 }
f1384257
AM
3286 else if (TARGET_ELF
3287 && TARGET_32BIT
3288 && TARGET_NO_TOC
3289 && ! flag_pic
9ebbca7d 3290 && GET_CODE (x) != CONST_INT
f676971a 3291 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3292 && CONSTANT_P (x)
6ac7bf2c
GK
3293 && GET_MODE_NUNITS (mode) == 1
3294 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3295 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3296 {
3297 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3298 emit_insn (gen_elf_high (reg, x));
3299 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3300 }
ee890fe2
SS
3301 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3302 && ! flag_pic
ab82a49f
AP
3303#if TARGET_MACHO
3304 && ! MACHO_DYNAMIC_NO_PIC_P
3305#endif
ee890fe2 3306 && GET_CODE (x) != CONST_INT
f676971a 3307 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3308 && CONSTANT_P (x)
f82f556d 3309 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3310 && mode != DImode
ee890fe2
SS
3311 && mode != TImode)
3312 {
3313 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3314 emit_insn (gen_macho_high (reg, x));
3315 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3316 }
f676971a 3317 else if (TARGET_TOC
4d588c14 3318 && constant_pool_expr_p (x)
a9098fd0 3319 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3320 {
3321 return create_TOC_reference (x);
3322 }
3323 else
3324 return NULL_RTX;
3325}
258bfae2 3326
fdbe66f2 3327/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3328 We need to emit DTP-relative relocations. */
3329
fdbe66f2 3330static void
c973d557
JJ
3331rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3332{
3333 switch (size)
3334 {
3335 case 4:
3336 fputs ("\t.long\t", file);
3337 break;
3338 case 8:
3339 fputs (DOUBLE_INT_ASM_OP, file);
3340 break;
3341 default:
37409796 3342 gcc_unreachable ();
c973d557
JJ
3343 }
3344 output_addr_const (file, x);
3345 fputs ("@dtprel+0x8000", file);
3346}
3347
c4501e62
JJ
3348/* Construct the SYMBOL_REF for the tls_get_addr function. */
3349
3350static GTY(()) rtx rs6000_tls_symbol;
3351static rtx
863d938c 3352rs6000_tls_get_addr (void)
c4501e62
JJ
3353{
3354 if (!rs6000_tls_symbol)
3355 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3356
3357 return rs6000_tls_symbol;
3358}
3359
3360/* Construct the SYMBOL_REF for TLS GOT references. */
3361
3362static GTY(()) rtx rs6000_got_symbol;
3363static rtx
863d938c 3364rs6000_got_sym (void)
c4501e62
JJ
3365{
3366 if (!rs6000_got_symbol)
3367 {
3368 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3369 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3370 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3371 }
c4501e62
JJ
3372
3373 return rs6000_got_symbol;
3374}
3375
3376/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3377 this (thread-local) address. */
3378
3379static rtx
a2369ed3 3380rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3381{
3382 rtx dest, insn;
3383
3384 dest = gen_reg_rtx (Pmode);
3385 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3386 {
3387 rtx tlsreg;
3388
3389 if (TARGET_64BIT)
3390 {
3391 tlsreg = gen_rtx_REG (Pmode, 13);
3392 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3393 }
3394 else
3395 {
3396 tlsreg = gen_rtx_REG (Pmode, 2);
3397 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3398 }
3399 emit_insn (insn);
3400 }
3401 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3402 {
3403 rtx tlsreg, tmp;
3404
3405 tmp = gen_reg_rtx (Pmode);
3406 if (TARGET_64BIT)
3407 {
3408 tlsreg = gen_rtx_REG (Pmode, 13);
3409 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3410 }
3411 else
3412 {
3413 tlsreg = gen_rtx_REG (Pmode, 2);
3414 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3415 }
3416 emit_insn (insn);
3417 if (TARGET_64BIT)
3418 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3419 else
3420 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3421 emit_insn (insn);
3422 }
3423 else
3424 {
3425 rtx r3, got, tga, tmp1, tmp2, eqv;
3426
4fed8f8f
AM
3427 /* We currently use relocations like @got@tlsgd for tls, which
3428 means the linker will handle allocation of tls entries, placing
3429 them in the .got section. So use a pointer to the .got section,
3430 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3431 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3432 if (TARGET_64BIT)
972f427b 3433 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3434 else
3435 {
3436 if (flag_pic == 1)
3437 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3438 else
3439 {
3440 rtx gsym = rs6000_got_sym ();
3441 got = gen_reg_rtx (Pmode);
3442 if (flag_pic == 0)
3443 rs6000_emit_move (got, gsym, Pmode);
3444 else
3445 {
e65a3857 3446 rtx tmp3, mem;
c4501e62
JJ
3447 rtx first, last;
3448
c4501e62
JJ
3449 tmp1 = gen_reg_rtx (Pmode);
3450 tmp2 = gen_reg_rtx (Pmode);
3451 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3452 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3453
e65a3857
DE
3454 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3455 emit_move_insn (tmp1,
3456 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
c4501e62
JJ
3457 emit_move_insn (tmp2, mem);
3458 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3459 last = emit_move_insn (got, tmp3);
bd94cb6e 3460 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
3461 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3462 REG_NOTES (first));
3463 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3464 REG_NOTES (last));
3465 }
3466 }
3467 }
3468
3469 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3470 {
3471 r3 = gen_rtx_REG (Pmode, 3);
3472 if (TARGET_64BIT)
3473 insn = gen_tls_gd_64 (r3, got, addr);
3474 else
3475 insn = gen_tls_gd_32 (r3, got, addr);
3476 start_sequence ();
3477 emit_insn (insn);
3478 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3479 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3480 insn = emit_call_insn (insn);
3481 CONST_OR_PURE_CALL_P (insn) = 1;
3482 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3483 insn = get_insns ();
3484 end_sequence ();
3485 emit_libcall_block (insn, dest, r3, addr);
3486 }
3487 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3488 {
3489 r3 = gen_rtx_REG (Pmode, 3);
3490 if (TARGET_64BIT)
3491 insn = gen_tls_ld_64 (r3, got);
3492 else
3493 insn = gen_tls_ld_32 (r3, got);
3494 start_sequence ();
3495 emit_insn (insn);
3496 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3497 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3498 insn = emit_call_insn (insn);
3499 CONST_OR_PURE_CALL_P (insn) = 1;
3500 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3501 insn = get_insns ();
3502 end_sequence ();
3503 tmp1 = gen_reg_rtx (Pmode);
3504 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3505 UNSPEC_TLSLD);
3506 emit_libcall_block (insn, tmp1, r3, eqv);
3507 if (rs6000_tls_size == 16)
3508 {
3509 if (TARGET_64BIT)
3510 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3511 else
3512 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3513 }
3514 else if (rs6000_tls_size == 32)
3515 {
3516 tmp2 = gen_reg_rtx (Pmode);
3517 if (TARGET_64BIT)
3518 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3519 else
3520 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3521 emit_insn (insn);
3522 if (TARGET_64BIT)
3523 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3524 else
3525 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3526 }
3527 else
3528 {
3529 tmp2 = gen_reg_rtx (Pmode);
3530 if (TARGET_64BIT)
3531 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3532 else
3533 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3534 emit_insn (insn);
3535 insn = gen_rtx_SET (Pmode, dest,
3536 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3537 }
3538 emit_insn (insn);
3539 }
3540 else
3541 {
a7b376ee 3542 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3543 tmp2 = gen_reg_rtx (Pmode);
3544 if (TARGET_64BIT)
3545 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3546 else
3547 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3548 emit_insn (insn);
3549 if (TARGET_64BIT)
3550 insn = gen_tls_tls_64 (dest, tmp2, addr);
3551 else
3552 insn = gen_tls_tls_32 (dest, tmp2, addr);
3553 emit_insn (insn);
3554 }
3555 }
3556
3557 return dest;
3558}
3559
c4501e62
JJ
3560/* Return 1 if X contains a thread-local symbol. */
3561
3562bool
a2369ed3 3563rs6000_tls_referenced_p (rtx x)
c4501e62 3564{
cd413cab
AP
3565 if (! TARGET_HAVE_TLS)
3566 return false;
3567
c4501e62
JJ
3568 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3569}
3570
3571/* Return 1 if *X is a thread-local symbol. This is the same as
3572 rs6000_tls_symbol_ref except for the type of the unused argument. */
3573
9390387d 3574static int
a2369ed3 3575rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3576{
3577 return RS6000_SYMBOL_REF_TLS_P (*x);
3578}
3579
24ea750e
DJ
3580/* The convention appears to be to define this wherever it is used.
3581 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3582 is now used here. */
3583#ifndef REG_MODE_OK_FOR_BASE_P
3584#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3585#endif
3586
3587/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3588 replace the input X, or the original X if no replacement is called for.
3589 The output parameter *WIN is 1 if the calling macro should goto WIN,
3590 0 if it should not.
3591
3592 For RS/6000, we wish to handle large displacements off a base
3593 register by splitting the addend across an addiu/addis and the mem insn.
3594 This cuts number of extra insns needed from 3 to 1.
3595
3596 On Darwin, we use this to generate code for floating point constants.
3597 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3598 The Darwin code is inside #if TARGET_MACHO because only then is
3599 machopic_function_base_name() defined. */
3600rtx
f676971a 3601rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3602 int opnum, int type,
3603 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3604{
f676971a 3605 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3606 if (GET_CODE (x) == PLUS
3607 && GET_CODE (XEXP (x, 0)) == PLUS
3608 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3609 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3610 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3611 {
3612 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3613 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3614 opnum, (enum reload_type)type);
24ea750e
DJ
3615 *win = 1;
3616 return x;
3617 }
3deb2758 3618
24ea750e
DJ
3619#if TARGET_MACHO
3620 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3621 && GET_CODE (x) == LO_SUM
3622 && GET_CODE (XEXP (x, 0)) == PLUS
3623 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3624 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3625 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3626 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3627 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3628 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3629 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3630 {
3631 /* Result of previous invocation of this function on Darwin
6f317ef3 3632 floating point constant. */
24ea750e 3633 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3634 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3635 opnum, (enum reload_type)type);
24ea750e
DJ
3636 *win = 1;
3637 return x;
3638 }
3639#endif
4937d02d
DE
3640
3641 /* Force ld/std non-word aligned offset into base register by wrapping
3642 in offset 0. */
3643 if (GET_CODE (x) == PLUS
3644 && GET_CODE (XEXP (x, 0)) == REG
3645 && REGNO (XEXP (x, 0)) < 32
3646 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3647 && GET_CODE (XEXP (x, 1)) == CONST_INT
3648 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3649 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3650 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3651 && TARGET_POWERPC64)
3652 {
3653 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3654 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3655 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3656 opnum, (enum reload_type) type);
3657 *win = 1;
3658 return x;
3659 }
3660
24ea750e
DJ
3661 if (GET_CODE (x) == PLUS
3662 && GET_CODE (XEXP (x, 0)) == REG
3663 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3664 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3665 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3666 && !SPE_VECTOR_MODE (mode)
17caeff2 3667 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3668 || mode == DImode))
78c875e8 3669 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3670 {
3671 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3672 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3673 HOST_WIDE_INT high
c4ad648e 3674 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3675
3676 /* Check for 32-bit overflow. */
3677 if (high + low != val)
c4ad648e 3678 {
24ea750e
DJ
3679 *win = 0;
3680 return x;
3681 }
3682
3683 /* Reload the high part into a base reg; leave the low part
c4ad648e 3684 in the mem directly. */
24ea750e
DJ
3685
3686 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3687 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3688 GEN_INT (high)),
3689 GEN_INT (low));
24ea750e
DJ
3690
3691 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3692 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3693 opnum, (enum reload_type)type);
24ea750e
DJ
3694 *win = 1;
3695 return x;
3696 }
4937d02d 3697
24ea750e 3698 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3699 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3700 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3701#if TARGET_MACHO
3702 && DEFAULT_ABI == ABI_DARWIN
a29077da 3703 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3704#else
3705 && DEFAULT_ABI == ABI_V4
3706 && !flag_pic
3707#endif
7393f7f8 3708 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
3709 The same goes for DImode without 64-bit gprs and DFmode
3710 without fprs. */
0d8c1c97 3711 && mode != TFmode
7393f7f8 3712 && mode != TDmode
7b5d92b2
AM
3713 && (mode != DImode || TARGET_POWERPC64)
3714 && (mode != DFmode || TARGET_POWERPC64
3715 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3716 {
8308679f 3717#if TARGET_MACHO
a29077da
GK
3718 if (flag_pic)
3719 {
3720 rtx offset = gen_rtx_CONST (Pmode,
3721 gen_rtx_MINUS (Pmode, x,
11abc112 3722 machopic_function_base_sym ()));
a29077da
GK
3723 x = gen_rtx_LO_SUM (GET_MODE (x),
3724 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3725 gen_rtx_HIGH (Pmode, offset)), offset);
3726 }
3727 else
8308679f 3728#endif
a29077da 3729 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3730 gen_rtx_HIGH (Pmode, x), x);
a29077da 3731
24ea750e 3732 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3733 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3734 opnum, (enum reload_type)type);
24ea750e
DJ
3735 *win = 1;
3736 return x;
3737 }
4937d02d 3738
dec1f3aa
DE
3739 /* Reload an offset address wrapped by an AND that represents the
3740 masking of the lower bits. Strip the outer AND and let reload
3741 convert the offset address into an indirect address. */
3742 if (TARGET_ALTIVEC
3743 && ALTIVEC_VECTOR_MODE (mode)
3744 && GET_CODE (x) == AND
3745 && GET_CODE (XEXP (x, 0)) == PLUS
3746 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3747 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3748 && GET_CODE (XEXP (x, 1)) == CONST_INT
3749 && INTVAL (XEXP (x, 1)) == -16)
3750 {
3751 x = XEXP (x, 0);
3752 *win = 1;
3753 return x;
3754 }
3755
24ea750e 3756 if (TARGET_TOC
4d588c14 3757 && constant_pool_expr_p (x)
c1f11548 3758 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3759 {
194c524a 3760 x = create_TOC_reference (x);
24ea750e
DJ
3761 *win = 1;
3762 return x;
3763 }
3764 *win = 0;
3765 return x;
f676971a 3766}
24ea750e 3767
258bfae2
FS
3768/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3769 that is a valid memory address for an instruction.
3770 The MODE argument is the machine mode for the MEM expression
3771 that wants to use this address.
3772
3773 On the RS/6000, there are four valid address: a SYMBOL_REF that
3774 refers to a constant pool entry of an address (or the sum of it
3775 plus a constant), a short (16-bit signed) constant plus a register,
3776 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3777 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
3778 we must ensure that both words are addressable or PowerPC64 with offset
3779 word aligned.
3780
3781 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
3782 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
3783 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
3784 during assembly output. */
3785int
a2369ed3 3786rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3787{
850e8d3d
DN
3788 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3789 if (TARGET_ALTIVEC
3790 && ALTIVEC_VECTOR_MODE (mode)
3791 && GET_CODE (x) == AND
3792 && GET_CODE (XEXP (x, 1)) == CONST_INT
3793 && INTVAL (XEXP (x, 1)) == -16)
3794 x = XEXP (x, 0);
3795
c4501e62
JJ
3796 if (RS6000_SYMBOL_REF_TLS_P (x))
3797 return 0;
4d588c14 3798 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3799 return 1;
3800 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3801 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3802 && !SPE_VECTOR_MODE (mode)
429ec7dc 3803 && mode != TFmode
7393f7f8 3804 && mode != TDmode
54b695e7 3805 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3806 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3807 || mode == DImode))
258bfae2 3808 && TARGET_UPDATE
4d588c14 3809 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3810 return 1;
d04b6e6e 3811 if (legitimate_small_data_p (mode, x))
258bfae2 3812 return 1;
4d588c14 3813 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3814 return 1;
3815 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3816 if (! reg_ok_strict
3817 && GET_CODE (x) == PLUS
3818 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3819 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3820 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3821 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3822 return 1;
76d2b81d 3823 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3824 return 1;
3825 if (mode != TImode
76d2b81d 3826 && mode != TFmode
7393f7f8 3827 && mode != TDmode
a3170dc6
AH
3828 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3829 || TARGET_POWERPC64
4d4cbc0e 3830 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3831 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3832 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3833 return 1;
4d588c14 3834 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3835 return 1;
3836 return 0;
3837}
4d588c14
RH
3838
3839/* Go to LABEL if ADDR (a legitimate address expression)
3840 has an effect that depends on the machine mode it is used for.
3841
3842 On the RS/6000 this is true of all integral offsets (since AltiVec
3843 modes don't allow them) or is a pre-increment or decrement.
3844
3845 ??? Except that due to conceptual problems in offsettable_address_p
3846 we can't really report the problems of integral offsets. So leave
f676971a 3847 this assuming that the adjustable offset must be valid for the
4d588c14
RH
3848 sub-words of a TFmode operand, which is what we had before. */
3849
3850bool
a2369ed3 3851rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
3852{
3853 switch (GET_CODE (addr))
3854 {
3855 case PLUS:
3856 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3857 {
3858 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3859 return val + 12 + 0x8000 >= 0x10000;
3860 }
3861 break;
3862
3863 case LO_SUM:
3864 return true;
3865
b9a76028 3866 /* Auto-increment cases are now treated generically in recog.c. */
4d588c14
RH
3867
3868 default:
3869 break;
3870 }
3871
3872 return false;
3873}
d8ecbcdb 3874
d04b6e6e
EB
3875/* More elaborate version of recog's offsettable_memref_p predicate
3876 that works around the ??? note of rs6000_mode_dependent_address.
3877 In particular it accepts
3878
3879 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3880
3881 in 32-bit mode, that the recog predicate rejects. */
3882
3883bool
3884rs6000_offsettable_memref_p (rtx op)
3885{
3886 if (!MEM_P (op))
3887 return false;
3888
3889 /* First mimic offsettable_memref_p. */
3890 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3891 return true;
3892
3893 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3894 the latter predicate knows nothing about the mode of the memory
3895 reference and, therefore, assumes that it is the largest supported
3896 mode (TFmode). As a consequence, legitimate offsettable memory
3897 references are rejected. rs6000_legitimate_offset_address_p contains
3898 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
3899 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3900}
3901
d8ecbcdb
AH
3902/* Return number of consecutive hard regs needed starting at reg REGNO
3903 to hold something of mode MODE.
3904 This is ordinarily the length in words of a value of mode MODE
3905 but can be less for certain modes in special long registers.
3906
3907 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3908 scalar instructions. The upper 32 bits are only available to the
3909 SIMD instructions.
3910
3911 POWER and PowerPC GPRs hold 32 bits worth;
3912 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3913
3914int
3915rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3916{
3917 if (FP_REGNO_P (regno))
3918 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3919
3920 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3921 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3922
3923 if (ALTIVEC_REGNO_P (regno))
3924 return
3925 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3926
8521c414
JM
3927 /* The value returned for SCmode in the E500 double case is 2 for
3928 ABI compatibility; storing an SCmode value in a single register
3929 would require function_arg and rs6000_spe_function_arg to handle
3930 SCmode so as to pass the value correctly in a pair of
3931 registers. */
3932 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
3933 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3934
d8ecbcdb
AH
3935 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3936}
2aa4498c
AH
3937
3938/* Change register usage conditional on target flags. */
3939void
3940rs6000_conditional_register_usage (void)
3941{
3942 int i;
3943
3944 /* Set MQ register fixed (already call_used) if not POWER
3945 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3946 be allocated. */
3947 if (! TARGET_POWER)
3948 fixed_regs[64] = 1;
3949
7c9ac5c0 3950 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
3951 if (TARGET_64BIT)
3952 fixed_regs[13] = call_used_regs[13]
3953 = call_really_used_regs[13] = 1;
3954
3955 /* Conditionally disable FPRs. */
3956 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3957 for (i = 32; i < 64; i++)
3958 fixed_regs[i] = call_used_regs[i]
c4ad648e 3959 = call_really_used_regs[i] = 1;
2aa4498c 3960
7c9ac5c0
PH
3961 /* The TOC register is not killed across calls in a way that is
3962 visible to the compiler. */
3963 if (DEFAULT_ABI == ABI_AIX)
3964 call_really_used_regs[2] = 0;
3965
2aa4498c
AH
3966 if (DEFAULT_ABI == ABI_V4
3967 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3968 && flag_pic == 2)
3969 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3970
3971 if (DEFAULT_ABI == ABI_V4
3972 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3973 && flag_pic == 1)
3974 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3975 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3976 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3977
3978 if (DEFAULT_ABI == ABI_DARWIN
3979 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 3980 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
3981 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3982 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3983
b4db40bf
JJ
3984 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3985 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3986 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3987
2aa4498c
AH
3988 if (TARGET_ALTIVEC)
3989 global_regs[VSCR_REGNO] = 1;
3990
3991 if (TARGET_SPE)
3992 {
3993 global_regs[SPEFSCR_REGNO] = 1;
3994 fixed_regs[FIXED_SCRATCH]
c4ad648e 3995 = call_used_regs[FIXED_SCRATCH]
2aa4498c
AH
3996 = call_really_used_regs[FIXED_SCRATCH] = 1;
3997 }
3998
3999 if (! TARGET_ALTIVEC)
4000 {
4001 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4002 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4003 call_really_used_regs[VRSAVE_REGNO] = 1;
4004 }
4005
4006 if (TARGET_ALTIVEC_ABI)
4007 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4008 call_used_regs[i] = call_really_used_regs[i] = 1;
4009}
fb4d4348 4010\f
a4f6c312
SS
4011/* Try to output insns to set TARGET equal to the constant C if it can
4012 be done in less than N insns. Do all computations in MODE.
4013 Returns the place where the output has been placed if it can be
4014 done and the insns have been emitted. If it would take more than N
4015 insns, zero is returned and no insns and emitted. */
2bfcf297
DB
4016
4017rtx
f676971a 4018rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4019 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4020{
af8cb5c5 4021 rtx result, insn, set;
2bfcf297
DB
4022 HOST_WIDE_INT c0, c1;
4023
37409796 4024 switch (mode)
2bfcf297 4025 {
37409796
NS
4026 case QImode:
4027 case HImode:
2bfcf297 4028 if (dest == NULL)
c4ad648e 4029 dest = gen_reg_rtx (mode);
2bfcf297
DB
4030 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4031 return dest;
bb8df8a6 4032
37409796 4033 case SImode:
af8cb5c5 4034 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
bb8df8a6 4035
d448860e 4036 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4037 GEN_INT (INTVAL (source)
4038 & (~ (HOST_WIDE_INT) 0xffff))));
4039 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4040 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4041 GEN_INT (INTVAL (source) & 0xffff))));
4042 result = dest;
37409796
NS
4043 break;
4044
4045 case DImode:
4046 switch (GET_CODE (source))
af8cb5c5 4047 {
37409796 4048 case CONST_INT:
af8cb5c5
DE
4049 c0 = INTVAL (source);
4050 c1 = -(c0 < 0);
37409796 4051 break;
bb8df8a6 4052
37409796 4053 case CONST_DOUBLE:
2bfcf297 4054#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4055 c0 = CONST_DOUBLE_LOW (source);
4056 c1 = -(c0 < 0);
2bfcf297 4057#else
af8cb5c5
DE
4058 c0 = CONST_DOUBLE_LOW (source);
4059 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4060#endif
37409796
NS
4061 break;
4062
4063 default:
4064 gcc_unreachable ();
af8cb5c5 4065 }
af8cb5c5
DE
4066
4067 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4068 break;
4069
4070 default:
4071 gcc_unreachable ();
2bfcf297 4072 }
2bfcf297 4073
af8cb5c5
DE
4074 insn = get_last_insn ();
4075 set = single_set (insn);
4076 if (! CONSTANT_P (SET_SRC (set)))
4077 set_unique_reg_note (insn, REG_EQUAL, source);
4078
4079 return result;
2bfcf297
DB
4080}
4081
4082/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4083 fall back to a straight forward decomposition. We do this to avoid
4084 exponential run times encountered when looking for longer sequences
4085 with rs6000_emit_set_const. */
4086static rtx
a2369ed3 4087rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4088{
4089 if (!TARGET_POWERPC64)
4090 {
4091 rtx operand1, operand2;
4092
4093 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4094 DImode);
d448860e 4095 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4096 DImode);
4097 emit_move_insn (operand1, GEN_INT (c1));
4098 emit_move_insn (operand2, GEN_INT (c2));
4099 }
4100 else
4101 {
bc06712d 4102 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4103
bc06712d 4104 ud1 = c1 & 0xffff;
f921c9c9 4105 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4106#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4107 c2 = c1 >> 32;
2bfcf297 4108#endif
bc06712d 4109 ud3 = c2 & 0xffff;
f921c9c9 4110 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4111
f676971a 4112 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4113 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4114 {
bc06712d 4115 if (ud1 & 0x8000)
b78d48dd 4116 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4117 else
4118 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4119 }
2bfcf297 4120
f676971a 4121 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4122 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4123 {
bc06712d 4124 if (ud2 & 0x8000)
f676971a 4125 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4126 - 0x80000000));
252b88f7 4127 else
bc06712d
TR
4128 emit_move_insn (dest, GEN_INT (ud2 << 16));
4129 if (ud1 != 0)
d448860e
JH
4130 emit_move_insn (copy_rtx (dest),
4131 gen_rtx_IOR (DImode, copy_rtx (dest),
4132 GEN_INT (ud1)));
252b88f7 4133 }
f676971a 4134 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4135 || (ud4 == 0 && ! (ud3 & 0x8000)))
4136 {
4137 if (ud3 & 0x8000)
f676971a 4138 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4139 - 0x80000000));
4140 else
4141 emit_move_insn (dest, GEN_INT (ud3 << 16));
4142
4143 if (ud2 != 0)
d448860e
JH
4144 emit_move_insn (copy_rtx (dest),
4145 gen_rtx_IOR (DImode, copy_rtx (dest),
4146 GEN_INT (ud2)));
4147 emit_move_insn (copy_rtx (dest),
4148 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4149 GEN_INT (16)));
bc06712d 4150 if (ud1 != 0)
d448860e
JH
4151 emit_move_insn (copy_rtx (dest),
4152 gen_rtx_IOR (DImode, copy_rtx (dest),
4153 GEN_INT (ud1)));
bc06712d 4154 }
f676971a 4155 else
bc06712d
TR
4156 {
4157 if (ud4 & 0x8000)
f676971a 4158 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4159 - 0x80000000));
4160 else
4161 emit_move_insn (dest, GEN_INT (ud4 << 16));
4162
4163 if (ud3 != 0)
d448860e
JH
4164 emit_move_insn (copy_rtx (dest),
4165 gen_rtx_IOR (DImode, copy_rtx (dest),
4166 GEN_INT (ud3)));
2bfcf297 4167
d448860e
JH
4168 emit_move_insn (copy_rtx (dest),
4169 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4170 GEN_INT (32)));
bc06712d 4171 if (ud2 != 0)
d448860e
JH
4172 emit_move_insn (copy_rtx (dest),
4173 gen_rtx_IOR (DImode, copy_rtx (dest),
4174 GEN_INT (ud2 << 16)));
bc06712d 4175 if (ud1 != 0)
d448860e
JH
4176 emit_move_insn (copy_rtx (dest),
4177 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4178 }
4179 }
2bfcf297
DB
4180 return dest;
4181}
4182
76d2b81d 4183/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4184 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4185
4186static void
4187rs6000_eliminate_indexed_memrefs (rtx operands[2])
4188{
4189 if (GET_CODE (operands[0]) == MEM
4190 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4191 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4192 && ! reload_in_progress)
4193 operands[0]
4194 = replace_equiv_address (operands[0],
4195 copy_addr_to_reg (XEXP (operands[0], 0)));
4196
4197 if (GET_CODE (operands[1]) == MEM
4198 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4199 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4200 && ! reload_in_progress)
4201 operands[1]
4202 = replace_equiv_address (operands[1],
4203 copy_addr_to_reg (XEXP (operands[1], 0)));
4204}
4205
fb4d4348
GK
4206/* Emit a move from SOURCE to DEST in mode MODE. */
4207void
a2369ed3 4208rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4209{
4210 rtx operands[2];
4211 operands[0] = dest;
4212 operands[1] = source;
f676971a 4213
fb4d4348
GK
4214 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4215 if (GET_CODE (operands[1]) == CONST_DOUBLE
4216 && ! FLOAT_MODE_P (mode)
4217 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4218 {
4219 /* FIXME. This should never happen. */
4220 /* Since it seems that it does, do the safe thing and convert
4221 to a CONST_INT. */
2496c7bd 4222 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4223 }
37409796
NS
4224 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4225 || FLOAT_MODE_P (mode)
4226 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4227 || CONST_DOUBLE_LOW (operands[1]) < 0)
4228 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4229 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4230
c9e8cb32
DD
4231 /* Check if GCC is setting up a block move that will end up using FP
4232 registers as temporaries. We must make sure this is acceptable. */
4233 if (GET_CODE (operands[0]) == MEM
4234 && GET_CODE (operands[1]) == MEM
4235 && mode == DImode
41543739
GK
4236 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4237 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4238 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4239 ? 32 : MEM_ALIGN (operands[0])))
4240 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4241 ? 32
41543739
GK
4242 : MEM_ALIGN (operands[1]))))
4243 && ! MEM_VOLATILE_P (operands [0])
4244 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4245 {
41543739
GK
4246 emit_move_insn (adjust_address (operands[0], SImode, 0),
4247 adjust_address (operands[1], SImode, 0));
d448860e
JH
4248 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4249 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4250 return;
4251 }
630d42a0 4252
55aa0757 4253 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
c9dbf840 4254 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4255 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4256
a3170dc6
AH
4257 if (mode == SFmode && ! TARGET_POWERPC
4258 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4259 && GET_CODE (operands[0]) == MEM)
fb4d4348 4260 {
ffc14f31
GK
4261 int regnum;
4262
4263 if (reload_in_progress || reload_completed)
4264 regnum = true_regnum (operands[1]);
4265 else if (GET_CODE (operands[1]) == REG)
4266 regnum = REGNO (operands[1]);
4267 else
4268 regnum = -1;
f676971a 4269
fb4d4348
GK
4270 /* If operands[1] is a register, on POWER it may have
4271 double-precision data in it, so truncate it to single
4272 precision. */
4273 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4274 {
4275 rtx newreg;
d448860e
JH
4276 newreg = (no_new_pseudos ? copy_rtx (operands[1])
4277 : gen_reg_rtx (mode));
fb4d4348
GK
4278 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4279 operands[1] = newreg;
4280 }
4281 }
4282
c4501e62
JJ
4283 /* Recognize the case where operand[1] is a reference to thread-local
4284 data and load its address to a register. */
84f52ebd 4285 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4286 {
84f52ebd
RH
4287 enum tls_model model;
4288 rtx tmp = operands[1];
4289 rtx addend = NULL;
4290
4291 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4292 {
4293 addend = XEXP (XEXP (tmp, 0), 1);
4294 tmp = XEXP (XEXP (tmp, 0), 0);
4295 }
4296
4297 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4298 model = SYMBOL_REF_TLS_MODEL (tmp);
4299 gcc_assert (model != 0);
4300
4301 tmp = rs6000_legitimize_tls_address (tmp, model);
4302 if (addend)
4303 {
4304 tmp = gen_rtx_PLUS (mode, tmp, addend);
4305 tmp = force_operand (tmp, operands[0]);
4306 }
4307 operands[1] = tmp;
c4501e62
JJ
4308 }
4309
8f4e6caf
RH
4310 /* Handle the case where reload calls us with an invalid address. */
4311 if (reload_in_progress && mode == Pmode
69ef87e2 4312 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4313 || ! nonimmediate_operand (operands[0], mode)))
4314 goto emit_set;
4315
a9baceb1
GK
4316 /* 128-bit constant floating-point values on Darwin should really be
4317 loaded as two parts. */
8521c414 4318 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4319 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4320 {
4321 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4322 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4323 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4324 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4325 simplify_gen_subreg (imode, operands[1], mode, 0),
4326 imode);
4327 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4328 GET_MODE_SIZE (imode)),
4329 simplify_gen_subreg (imode, operands[1], mode,
4330 GET_MODE_SIZE (imode)),
4331 imode);
a9baceb1
GK
4332 return;
4333 }
4334
fb4d4348
GK
4335 /* FIXME: In the long term, this switch statement should go away
4336 and be replaced by a sequence of tests based on things like
4337 mode == Pmode. */
4338 switch (mode)
4339 {
4340 case HImode:
4341 case QImode:
4342 if (CONSTANT_P (operands[1])
4343 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4344 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4345 break;
4346
06f4e019 4347 case TFmode:
7393f7f8 4348 case TDmode:
76d2b81d
DJ
4349 rs6000_eliminate_indexed_memrefs (operands);
4350 /* fall through */
4351
fb4d4348 4352 case DFmode:
7393f7f8 4353 case DDmode:
fb4d4348 4354 case SFmode:
f676971a 4355 if (CONSTANT_P (operands[1])
fb4d4348 4356 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4357 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4358 break;
f676971a 4359
0ac081f6
AH
4360 case V16QImode:
4361 case V8HImode:
4362 case V4SFmode:
4363 case V4SImode:
a3170dc6
AH
4364 case V4HImode:
4365 case V2SFmode:
4366 case V2SImode:
00a892b8 4367 case V1DImode:
69ef87e2 4368 if (CONSTANT_P (operands[1])
d744e06e 4369 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4370 operands[1] = force_const_mem (mode, operands[1]);
4371 break;
f676971a 4372
fb4d4348 4373 case SImode:
a9098fd0 4374 case DImode:
fb4d4348
GK
4375 /* Use default pattern for address of ELF small data */
4376 if (TARGET_ELF
a9098fd0 4377 && mode == Pmode
f607bc57 4378 && DEFAULT_ABI == ABI_V4
f676971a 4379 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4380 || GET_CODE (operands[1]) == CONST)
4381 && small_data_operand (operands[1], mode))
fb4d4348
GK
4382 {
4383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4384 return;
4385 }
4386
f607bc57 4387 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4388 && mode == Pmode && mode == SImode
4389 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4390 {
4391 emit_insn (gen_movsi_got (operands[0], operands[1]));
4392 return;
4393 }
4394
ee890fe2 4395 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4396 && TARGET_NO_TOC
4397 && ! flag_pic
a9098fd0 4398 && mode == Pmode
fb4d4348
GK
4399 && CONSTANT_P (operands[1])
4400 && GET_CODE (operands[1]) != HIGH
4401 && GET_CODE (operands[1]) != CONST_INT)
4402 {
a9098fd0 4403 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
fb4d4348
GK
4404
4405 /* If this is a function address on -mcall-aixdesc,
4406 convert it to the address of the descriptor. */
4407 if (DEFAULT_ABI == ABI_AIX
4408 && GET_CODE (operands[1]) == SYMBOL_REF
4409 && XSTR (operands[1], 0)[0] == '.')
4410 {
4411 const char *name = XSTR (operands[1], 0);
4412 rtx new_ref;
4413 while (*name == '.')
4414 name++;
4415 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4416 CONSTANT_POOL_ADDRESS_P (new_ref)
4417 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4418 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4419 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4420 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4421 operands[1] = new_ref;
4422 }
7509c759 4423
ee890fe2
SS
4424 if (DEFAULT_ABI == ABI_DARWIN)
4425 {
ab82a49f
AP
4426#if TARGET_MACHO
4427 if (MACHO_DYNAMIC_NO_PIC_P)
4428 {
4429 /* Take care of any required data indirection. */
4430 operands[1] = rs6000_machopic_legitimize_pic_address (
4431 operands[1], mode, operands[0]);
4432 if (operands[0] != operands[1])
4433 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4434 operands[0], operands[1]));
ab82a49f
AP
4435 return;
4436 }
4437#endif
b8a55285
AP
4438 emit_insn (gen_macho_high (target, operands[1]));
4439 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4440 return;
4441 }
4442
fb4d4348
GK
4443 emit_insn (gen_elf_high (target, operands[1]));
4444 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4445 return;
4446 }
4447
a9098fd0
GK
4448 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4449 and we have put it in the TOC, we just need to make a TOC-relative
4450 reference to it. */
4451 if (TARGET_TOC
4452 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4453 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4454 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4455 get_pool_mode (operands[1])))
fb4d4348 4456 {
a9098fd0 4457 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4458 }
a9098fd0
GK
4459 else if (mode == Pmode
4460 && CONSTANT_P (operands[1])
38886f37
AO
4461 && ((GET_CODE (operands[1]) != CONST_INT
4462 && ! easy_fp_constant (operands[1], mode))
4463 || (GET_CODE (operands[1]) == CONST_INT
4464 && num_insns_constant (operands[1], mode) > 2)
4465 || (GET_CODE (operands[0]) == REG
4466 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4467 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4468 && ! legitimate_constant_pool_address_p (operands[1])
4469 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4470 {
4471 /* Emit a USE operation so that the constant isn't deleted if
4472 expensive optimizations are turned on because nobody
4473 references it. This should only be done for operands that
4474 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4475 This should not be done for operands that contain LABEL_REFs.
4476 For now, we just handle the obvious case. */
4477 if (GET_CODE (operands[1]) != LABEL_REF)
4478 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4479
c859cda6 4480#if TARGET_MACHO
ee890fe2 4481 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4482 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4483 {
ee890fe2
SS
4484 operands[1] =
4485 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4486 operands[0]);
4487 if (operands[0] != operands[1])
4488 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4489 return;
4490 }
c859cda6 4491#endif
ee890fe2 4492
fb4d4348
GK
4493 /* If we are to limit the number of things we put in the TOC and
4494 this is a symbol plus a constant we can add in one insn,
4495 just put the symbol in the TOC and add the constant. Don't do
4496 this if reload is in progress. */
4497 if (GET_CODE (operands[1]) == CONST
4498 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4499 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4500 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4501 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4502 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4503 && ! side_effects_p (operands[0]))
4504 {
a4f6c312
SS
4505 rtx sym =
4506 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4507 rtx other = XEXP (XEXP (operands[1], 0), 1);
4508
a9098fd0
GK
4509 sym = force_reg (mode, sym);
4510 if (mode == SImode)
4511 emit_insn (gen_addsi3 (operands[0], sym, other));
4512 else
4513 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4514 return;
4515 }
4516
a9098fd0 4517 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4518
f676971a 4519 if (TARGET_TOC
4d588c14 4520 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4521 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4522 get_pool_constant (XEXP (operands[1], 0)),
4523 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4524 {
ba4828e0 4525 operands[1]
542a8afa 4526 = gen_const_mem (mode,
c4ad648e 4527 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4528 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4529 }
fb4d4348
GK
4530 }
4531 break;
a9098fd0 4532
fb4d4348 4533 case TImode:
76d2b81d
DJ
4534 rs6000_eliminate_indexed_memrefs (operands);
4535
27dc0551
DE
4536 if (TARGET_POWER)
4537 {
4538 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4539 gen_rtvec (2,
4540 gen_rtx_SET (VOIDmode,
4541 operands[0], operands[1]),
4542 gen_rtx_CLOBBER (VOIDmode,
4543 gen_rtx_SCRATCH (SImode)))));
4544 return;
4545 }
fb4d4348
GK
4546 break;
4547
4548 default:
37409796 4549 gcc_unreachable ();
fb4d4348
GK
4550 }
4551
a9098fd0
GK
4552 /* Above, we may have called force_const_mem which may have returned
4553 an invalid address. If we can, fix this up; otherwise, reload will
4554 have to deal with it. */
8f4e6caf
RH
4555 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4556 operands[1] = validize_mem (operands[1]);
a9098fd0 4557
8f4e6caf 4558 emit_set:
fb4d4348
GK
4559 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4560}
4697a36c 4561\f
2858f73a
GK
4562/* Nonzero if we can use a floating-point register to pass this arg. */
4563#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4564 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4565 && (MODE) != SDmode \
2858f73a
GK
4566 && (CUM)->fregno <= FP_ARG_MAX_REG \
4567 && TARGET_HARD_FLOAT && TARGET_FPRS)
4568
4569/* Nonzero if we can use an AltiVec register to pass this arg. */
4570#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4571 (ALTIVEC_VECTOR_MODE (MODE) \
4572 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4573 && TARGET_ALTIVEC_ABI \
83953138 4574 && (NAMED))
2858f73a 4575
c6e8c921
GK
4576/* Return a nonzero value to say to return the function value in
4577 memory, just as large structures are always returned. TYPE will be
4578 the data type of the value, and FNTYPE will be the type of the
4579 function doing the returning, or @code{NULL} for libcalls.
4580
4581 The AIX ABI for the RS/6000 specifies that all structures are
4582 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4583 specifies that structures <= 8 bytes are returned in r3/r4, but a
4584 draft put them in memory, and GCC used to implement the draft
df01da37 4585 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4586 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4587 compatibility can change DRAFT_V4_STRUCT_RET to override the
4588 default, and -m switches get the final word. See
4589 rs6000_override_options for more details.
4590
4591 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4592 long double support is enabled. These values are returned in memory.
4593
4594 int_size_in_bytes returns -1 for variable size objects, which go in
4595 memory always. The cast to unsigned makes -1 > 8. */
4596
4597static bool
4598rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4599{
594a51fe
SS
4600 /* In the darwin64 abi, try to use registers for larger structs
4601 if possible. */
0b5383eb 4602 if (rs6000_darwin64_abi
594a51fe 4603 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4604 && int_size_in_bytes (type) > 0)
4605 {
4606 CUMULATIVE_ARGS valcum;
4607 rtx valret;
4608
4609 valcum.words = 0;
4610 valcum.fregno = FP_ARG_MIN_REG;
4611 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4612 /* Do a trial code generation as if this were going to be passed
4613 as an argument; if any part goes in memory, we return NULL. */
4614 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4615 if (valret)
4616 return false;
4617 /* Otherwise fall through to more conventional ABI rules. */
4618 }
594a51fe 4619
c6e8c921 4620 if (AGGREGATE_TYPE_P (type)
df01da37 4621 && (aix_struct_return
c6e8c921
GK
4622 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4623 return true;
b693336b 4624
bada2eb8
DE
4625 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4626 modes only exist for GCC vector types if -maltivec. */
4627 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4628 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4629 return false;
4630
b693336b
PB
4631 /* Return synthetic vectors in memory. */
4632 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4633 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4634 {
4635 static bool warned_for_return_big_vectors = false;
4636 if (!warned_for_return_big_vectors)
4637 {
d4ee4d25 4638 warning (0, "GCC vector returned by reference: "
b693336b
PB
4639 "non-standard ABI extension with no compatibility guarantee");
4640 warned_for_return_big_vectors = true;
4641 }
4642 return true;
4643 }
4644
602ea4d3 4645 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4646 return true;
ad630bef 4647
c6e8c921
GK
4648 return false;
4649}
4650
4697a36c
MM
4651/* Initialize a variable CUM of type CUMULATIVE_ARGS
4652 for a call to a function whose data type is FNTYPE.
4653 For a library call, FNTYPE is 0.
4654
4655 For incoming args we set the number of arguments in the prototype large
1c20ae99 4656 so we never return a PARALLEL. */
4697a36c
MM
4657
4658void
f676971a 4659init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4660 rtx libname ATTRIBUTE_UNUSED, int incoming,
4661 int libcall, int n_named_args)
4697a36c
MM
4662{
4663 static CUMULATIVE_ARGS zero_cumulative;
4664
4665 *cum = zero_cumulative;
4666 cum->words = 0;
4667 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4668 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4669 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4670 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4671 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4672 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4673 cum->stdarg = fntype
4674 && (TYPE_ARG_TYPES (fntype) != 0
4675 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4676 != void_type_node));
4697a36c 4677
0f6937fe
AM
4678 cum->nargs_prototype = 0;
4679 if (incoming || cum->prototype)
4680 cum->nargs_prototype = n_named_args;
4697a36c 4681
a5c76ee6 4682 /* Check for a longcall attribute. */
3eb4e360
AM
4683 if ((!fntype && rs6000_default_long_calls)
4684 || (fntype
4685 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4686 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4687 cum->call_cookie |= CALL_LONG;
6a4cee5f 4688
4697a36c
MM
4689 if (TARGET_DEBUG_ARG)
4690 {
4691 fprintf (stderr, "\ninit_cumulative_args:");
4692 if (fntype)
4693 {
4694 tree ret_type = TREE_TYPE (fntype);
4695 fprintf (stderr, " ret code = %s,",
4696 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4697 }
4698
6a4cee5f
MM
4699 if (cum->call_cookie & CALL_LONG)
4700 fprintf (stderr, " longcall,");
4701
4697a36c
MM
4702 fprintf (stderr, " proto = %d, nargs = %d\n",
4703 cum->prototype, cum->nargs_prototype);
4704 }
f676971a 4705
c4ad648e
AM
4706 if (fntype
4707 && !TARGET_ALTIVEC
4708 && TARGET_ALTIVEC_ABI
4709 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4710 {
c85ce869 4711 error ("cannot return value in vector register because"
c4ad648e 4712 " altivec instructions are disabled, use -maltivec"
c85ce869 4713 " to enable them");
c4ad648e 4714 }
4697a36c
MM
4715}
4716\f
fe984136
RH
4717/* Return true if TYPE must be passed on the stack and not in registers. */
4718
4719static bool
4720rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4721{
4722 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4723 return must_pass_in_stack_var_size (mode, type);
4724 else
4725 return must_pass_in_stack_var_size_or_pad (mode, type);
4726}
4727
c229cba9
DE
4728/* If defined, a C expression which determines whether, and in which
4729 direction, to pad out an argument with extra space. The value
4730 should be of type `enum direction': either `upward' to pad above
4731 the argument, `downward' to pad below, or `none' to inhibit
4732 padding.
4733
4734 For the AIX ABI structs are always stored left shifted in their
4735 argument slot. */
4736
9ebbca7d 4737enum direction
a2369ed3 4738function_arg_padding (enum machine_mode mode, tree type)
c229cba9 4739{
6e985040
AM
4740#ifndef AGGREGATE_PADDING_FIXED
4741#define AGGREGATE_PADDING_FIXED 0
4742#endif
4743#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4744#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4745#endif
4746
4747 if (!AGGREGATE_PADDING_FIXED)
4748 {
4749 /* GCC used to pass structures of the same size as integer types as
4750 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4751 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4752 passed padded downward, except that -mstrict-align further
4753 muddied the water in that multi-component structures of 2 and 4
4754 bytes in size were passed padded upward.
4755
4756 The following arranges for best compatibility with previous
4757 versions of gcc, but removes the -mstrict-align dependency. */
4758 if (BYTES_BIG_ENDIAN)
4759 {
4760 HOST_WIDE_INT size = 0;
4761
4762 if (mode == BLKmode)
4763 {
4764 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4765 size = int_size_in_bytes (type);
4766 }
4767 else
4768 size = GET_MODE_SIZE (mode);
4769
4770 if (size == 1 || size == 2 || size == 4)
4771 return downward;
4772 }
4773 return upward;
4774 }
4775
4776 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4777 {
4778 if (type != 0 && AGGREGATE_TYPE_P (type))
4779 return upward;
4780 }
c229cba9 4781
d3704c46
KH
4782 /* Fall back to the default. */
4783 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4784}
4785
b6c9286a 4786/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4787 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4788 PARM_BOUNDARY is used for all arguments.
f676971a 4789
84e9ad15
AM
4790 V.4 wants long longs and doubles to be double word aligned. Just
4791 testing the mode size is a boneheaded way to do this as it means
4792 that other types such as complex int are also double word aligned.
4793 However, we're stuck with this because changing the ABI might break
4794 existing library interfaces.
4795
b693336b
PB
4796 Doubleword align SPE vectors.
4797 Quadword align Altivec vectors.
4798 Quadword align large synthetic vector types. */
b6c9286a
MM
4799
4800int
b693336b 4801function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4802{
84e9ad15
AM
4803 if (DEFAULT_ABI == ABI_V4
4804 && (GET_MODE_SIZE (mode) == 8
4805 || (TARGET_HARD_FLOAT
4806 && TARGET_FPRS
7393f7f8 4807 && (mode == TFmode || mode == TDmode))))
4ed78545 4808 return 64;
ad630bef
DE
4809 else if (SPE_VECTOR_MODE (mode)
4810 || (type && TREE_CODE (type) == VECTOR_TYPE
4811 && int_size_in_bytes (type) >= 8
4812 && int_size_in_bytes (type) < 16))
e1f83b4d 4813 return 64;
ad630bef
DE
4814 else if (ALTIVEC_VECTOR_MODE (mode)
4815 || (type && TREE_CODE (type) == VECTOR_TYPE
4816 && int_size_in_bytes (type) >= 16))
0ac081f6 4817 return 128;
0b5383eb
DJ
4818 else if (rs6000_darwin64_abi && mode == BLKmode
4819 && type && TYPE_ALIGN (type) > 64)
4820 return 128;
9ebbca7d 4821 else
b6c9286a 4822 return PARM_BOUNDARY;
b6c9286a 4823}
c53bdcf5 4824
294bd182
AM
4825/* For a function parm of MODE and TYPE, return the starting word in
4826 the parameter area. NWORDS of the parameter area are already used. */
4827
4828static unsigned int
4829rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4830{
4831 unsigned int align;
4832 unsigned int parm_offset;
4833
4834 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4835 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4836 return nwords + (-(parm_offset + nwords) & align);
4837}
4838
c53bdcf5
AM
4839/* Compute the size (in words) of a function argument. */
4840
4841static unsigned long
4842rs6000_arg_size (enum machine_mode mode, tree type)
4843{
4844 unsigned long size;
4845
4846 if (mode != BLKmode)
4847 size = GET_MODE_SIZE (mode);
4848 else
4849 size = int_size_in_bytes (type);
4850
4851 if (TARGET_32BIT)
4852 return (size + 3) >> 2;
4853 else
4854 return (size + 7) >> 3;
4855}
b6c9286a 4856\f
0b5383eb 4857/* Use this to flush pending int fields. */
594a51fe
SS
4858
4859static void
0b5383eb
DJ
4860rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
4861 HOST_WIDE_INT bitpos)
594a51fe 4862{
0b5383eb
DJ
4863 unsigned int startbit, endbit;
4864 int intregs, intoffset;
4865 enum machine_mode mode;
594a51fe 4866
0b5383eb
DJ
4867 if (cum->intoffset == -1)
4868 return;
594a51fe 4869
0b5383eb
DJ
4870 intoffset = cum->intoffset;
4871 cum->intoffset = -1;
4872
4873 if (intoffset % BITS_PER_WORD != 0)
4874 {
4875 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
4876 MODE_INT, 0);
4877 if (mode == BLKmode)
594a51fe 4878 {
0b5383eb
DJ
4879 /* We couldn't find an appropriate mode, which happens,
4880 e.g., in packed structs when there are 3 bytes to load.
4881 Back intoffset back to the beginning of the word in this
4882 case. */
4883 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 4884 }
594a51fe 4885 }
0b5383eb
DJ
4886
4887 startbit = intoffset & -BITS_PER_WORD;
4888 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
4889 intregs = (endbit - startbit) / BITS_PER_WORD;
4890 cum->words += intregs;
4891}
4892
4893/* The darwin64 ABI calls for us to recurse down through structs,
4894 looking for elements passed in registers. Unfortunately, we have
4895 to track int register count here also because of misalignments
4896 in powerpc alignment mode. */
4897
4898static void
4899rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
4900 tree type,
4901 HOST_WIDE_INT startbitpos)
4902{
4903 tree f;
4904
4905 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4906 if (TREE_CODE (f) == FIELD_DECL)
4907 {
4908 HOST_WIDE_INT bitpos = startbitpos;
4909 tree ftype = TREE_TYPE (f);
70fb00df
AP
4910 enum machine_mode mode;
4911 if (ftype == error_mark_node)
4912 continue;
4913 mode = TYPE_MODE (ftype);
0b5383eb
DJ
4914
4915 if (DECL_SIZE (f) != 0
4916 && host_integerp (bit_position (f), 1))
4917 bitpos += int_bit_position (f);
4918
4919 /* ??? FIXME: else assume zero offset. */
4920
4921 if (TREE_CODE (ftype) == RECORD_TYPE)
4922 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
4923 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
4924 {
4925 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4926 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4927 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
4928 }
4929 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
4930 {
4931 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
4932 cum->vregno++;
4933 cum->words += 2;
4934 }
4935 else if (cum->intoffset == -1)
4936 cum->intoffset = bitpos;
4937 }
594a51fe
SS
4938}
4939
4697a36c
MM
4940/* Update the data in CUM to advance over an argument
4941 of mode MODE and data type TYPE.
b2d04ecf
AM
4942 (TYPE is null for libcalls where that information may not be available.)
4943
4944 Note that for args passed by reference, function_arg will be called
4945 with MODE and TYPE set to that of the pointer to the arg, not the arg
4946 itself. */
4697a36c
MM
4947
4948void
f676971a 4949function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 4950 tree type, int named, int depth)
4697a36c 4951{
0b5383eb
DJ
4952 int size;
4953
594a51fe
SS
4954 /* Only tick off an argument if we're not recursing. */
4955 if (depth == 0)
4956 cum->nargs_prototype--;
4697a36c 4957
ad630bef
DE
4958 if (TARGET_ALTIVEC_ABI
4959 && (ALTIVEC_VECTOR_MODE (mode)
4960 || (type && TREE_CODE (type) == VECTOR_TYPE
4961 && int_size_in_bytes (type) == 16)))
0ac081f6 4962 {
4ed78545
AM
4963 bool stack = false;
4964
2858f73a 4965 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 4966 {
6d0ef01e
HP
4967 cum->vregno++;
4968 if (!TARGET_ALTIVEC)
c85ce869 4969 error ("cannot pass argument in vector register because"
6d0ef01e 4970 " altivec instructions are disabled, use -maltivec"
c85ce869 4971 " to enable them");
4ed78545
AM
4972
4973 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 4974 even if it is going to be passed in a vector register.
4ed78545
AM
4975 Darwin does the same for variable-argument functions. */
4976 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4977 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4978 stack = true;
6d0ef01e 4979 }
4ed78545
AM
4980 else
4981 stack = true;
4982
4983 if (stack)
c4ad648e 4984 {
a594a19c 4985 int align;
f676971a 4986
2858f73a
GK
4987 /* Vector parameters must be 16-byte aligned. This places
4988 them at 2 mod 4 in terms of words in 32-bit mode, since
4989 the parameter save area starts at offset 24 from the
4990 stack. In 64-bit mode, they just have to start on an
4991 even word, since the parameter save area is 16-byte
4992 aligned. Space for GPRs is reserved even if the argument
4993 will be passed in memory. */
4994 if (TARGET_32BIT)
4ed78545 4995 align = (2 - cum->words) & 3;
2858f73a
GK
4996 else
4997 align = cum->words & 1;
c53bdcf5 4998 cum->words += align + rs6000_arg_size (mode, type);
f676971a 4999
a594a19c
GK
5000 if (TARGET_DEBUG_ARG)
5001 {
f676971a 5002 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5003 cum->words, align);
5004 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5005 cum->nargs_prototype, cum->prototype,
2858f73a 5006 GET_MODE_NAME (mode));
a594a19c
GK
5007 }
5008 }
0ac081f6 5009 }
a4b0320c 5010 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5011 && !cum->stdarg
5012 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5013 cum->sysv_gregno++;
594a51fe
SS
5014
5015 else if (rs6000_darwin64_abi
5016 && mode == BLKmode
0b5383eb
DJ
5017 && TREE_CODE (type) == RECORD_TYPE
5018 && (size = int_size_in_bytes (type)) > 0)
5019 {
5020 /* Variable sized types have size == -1 and are
5021 treated as if consisting entirely of ints.
5022 Pad to 16 byte boundary if needed. */
5023 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5024 && (cum->words % 2) != 0)
5025 cum->words++;
5026 /* For varargs, we can just go up by the size of the struct. */
5027 if (!named)
5028 cum->words += (size + 7) / 8;
5029 else
5030 {
5031 /* It is tempting to say int register count just goes up by
5032 sizeof(type)/8, but this is wrong in a case such as
5033 { int; double; int; } [powerpc alignment]. We have to
5034 grovel through the fields for these too. */
5035 cum->intoffset = 0;
5036 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5037 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5038 size * BITS_PER_UNIT);
5039 }
5040 }
f607bc57 5041 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5042 {
a3170dc6 5043 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5044 && (mode == SFmode || mode == DFmode
7393f7f8 5045 || mode == DDmode || mode == TDmode
602ea4d3 5046 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5047 {
2d83f070
JJ
5048 /* _Decimal128 must use an even/odd register pair. This assumes
5049 that the register number is odd when fregno is odd. */
5050 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5051 cum->fregno++;
5052
5053 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5054 <= FP_ARG_V4_MAX_REG)
602ea4d3 5055 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5056 else
5057 {
602ea4d3 5058 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5059 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5060 cum->words += cum->words & 1;
c53bdcf5 5061 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5062 }
4697a36c 5063 }
4cc833b7
RH
5064 else
5065 {
b2d04ecf 5066 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5067 int gregno = cum->sysv_gregno;
5068
4ed78545
AM
5069 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5070 (r7,r8) or (r9,r10). As does any other 2 word item such
5071 as complex int due to a historical mistake. */
5072 if (n_words == 2)
5073 gregno += (1 - gregno) & 1;
4cc833b7 5074
4ed78545 5075 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5076 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5077 {
4ed78545
AM
5078 /* Long long and SPE vectors are aligned on the stack.
5079 So are other 2 word items such as complex int due to
5080 a historical mistake. */
4cc833b7
RH
5081 if (n_words == 2)
5082 cum->words += cum->words & 1;
5083 cum->words += n_words;
5084 }
4697a36c 5085
4cc833b7
RH
5086 /* Note: continuing to accumulate gregno past when we've started
5087 spilling to the stack indicates the fact that we've started
5088 spilling to the stack to expand_builtin_saveregs. */
5089 cum->sysv_gregno = gregno + n_words;
5090 }
4697a36c 5091
4cc833b7
RH
5092 if (TARGET_DEBUG_ARG)
5093 {
5094 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5095 cum->words, cum->fregno);
5096 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5097 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5098 fprintf (stderr, "mode = %4s, named = %d\n",
5099 GET_MODE_NAME (mode), named);
5100 }
4697a36c
MM
5101 }
5102 else
4cc833b7 5103 {
b2d04ecf 5104 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5105 int start_words = cum->words;
5106 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5107
294bd182 5108 cum->words = align_words + n_words;
4697a36c 5109
ebb109ad 5110 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5111 && mode != SDmode
a3170dc6 5112 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5113 {
5114 /* _Decimal128 must be passed in an even/odd float register pair.
5115 This assumes that the register number is odd when fregno is
5116 odd. */
5117 if (mode == TDmode && (cum->fregno % 2) == 1)
5118 cum->fregno++;
5119 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5120 }
4cc833b7
RH
5121
5122 if (TARGET_DEBUG_ARG)
5123 {
5124 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5125 cum->words, cum->fregno);
5126 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5127 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5128 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5129 named, align_words - start_words, depth);
4cc833b7
RH
5130 }
5131 }
4697a36c 5132}
a6c9bed4 5133
f82f556d
AH
5134static rtx
5135spe_build_register_parallel (enum machine_mode mode, int gregno)
5136{
17caeff2 5137 rtx r1, r3, r5, r7;
f82f556d 5138
37409796 5139 switch (mode)
f82f556d 5140 {
37409796 5141 case DFmode:
54b695e7
AH
5142 r1 = gen_rtx_REG (DImode, gregno);
5143 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5144 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5145
5146 case DCmode:
17caeff2 5147 case TFmode:
54b695e7
AH
5148 r1 = gen_rtx_REG (DImode, gregno);
5149 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5150 r3 = gen_rtx_REG (DImode, gregno + 2);
5151 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5152 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5153
17caeff2
JM
5154 case TCmode:
5155 r1 = gen_rtx_REG (DImode, gregno);
5156 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5157 r3 = gen_rtx_REG (DImode, gregno + 2);
5158 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5159 r5 = gen_rtx_REG (DImode, gregno + 4);
5160 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5161 r7 = gen_rtx_REG (DImode, gregno + 6);
5162 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5163 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5164
37409796
NS
5165 default:
5166 gcc_unreachable ();
f82f556d 5167 }
f82f556d 5168}
b78d48dd 5169
f82f556d 5170/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5171static rtx
f676971a 5172rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5173 tree type)
a6c9bed4 5174{
f82f556d
AH
5175 int gregno = cum->sysv_gregno;
5176
5177 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5178 are passed and returned in a pair of GPRs for ABI compatibility. */
17caeff2
JM
5179 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5180 || mode == TFmode || mode == TCmode))
f82f556d 5181 {
b5870bee
AH
5182 int n_words = rs6000_arg_size (mode, type);
5183
f82f556d 5184 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
5185 if (mode == DFmode)
5186 gregno += (1 - gregno) & 1;
f82f556d 5187
b5870bee
AH
5188 /* Multi-reg args are not split between registers and stack. */
5189 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5190 return NULL_RTX;
5191
5192 return spe_build_register_parallel (mode, gregno);
5193 }
a6c9bed4
AH
5194 if (cum->stdarg)
5195 {
c53bdcf5 5196 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5197
5198 /* SPE vectors are put in odd registers. */
5199 if (n_words == 2 && (gregno & 1) == 0)
5200 gregno += 1;
5201
5202 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5203 {
5204 rtx r1, r2;
5205 enum machine_mode m = SImode;
5206
5207 r1 = gen_rtx_REG (m, gregno);
5208 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5209 r2 = gen_rtx_REG (m, gregno + 1);
5210 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5211 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5212 }
5213 else
b78d48dd 5214 return NULL_RTX;
a6c9bed4
AH
5215 }
5216 else
5217 {
f82f556d
AH
5218 if (gregno <= GP_ARG_MAX_REG)
5219 return gen_rtx_REG (mode, gregno);
a6c9bed4 5220 else
b78d48dd 5221 return NULL_RTX;
a6c9bed4
AH
5222 }
5223}
5224
0b5383eb
DJ
5225/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5226 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5227
0b5383eb 5228static void
bb8df8a6 5229rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5230 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5231{
0b5383eb
DJ
5232 enum machine_mode mode;
5233 unsigned int regno;
5234 unsigned int startbit, endbit;
5235 int this_regno, intregs, intoffset;
5236 rtx reg;
594a51fe 5237
0b5383eb
DJ
5238 if (cum->intoffset == -1)
5239 return;
5240
5241 intoffset = cum->intoffset;
5242 cum->intoffset = -1;
5243
5244 /* If this is the trailing part of a word, try to only load that
5245 much into the register. Otherwise load the whole register. Note
5246 that in the latter case we may pick up unwanted bits. It's not a
5247 problem at the moment but may wish to revisit. */
5248
5249 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5250 {
0b5383eb
DJ
5251 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5252 MODE_INT, 0);
5253 if (mode == BLKmode)
5254 {
5255 /* We couldn't find an appropriate mode, which happens,
5256 e.g., in packed structs when there are 3 bytes to load.
5257 Back intoffset back to the beginning of the word in this
5258 case. */
5259 intoffset = intoffset & -BITS_PER_WORD;
5260 mode = word_mode;
5261 }
5262 }
5263 else
5264 mode = word_mode;
5265
5266 startbit = intoffset & -BITS_PER_WORD;
5267 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5268 intregs = (endbit - startbit) / BITS_PER_WORD;
5269 this_regno = cum->words + intoffset / BITS_PER_WORD;
5270
5271 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5272 cum->use_stack = 1;
bb8df8a6 5273
0b5383eb
DJ
5274 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5275 if (intregs <= 0)
5276 return;
5277
5278 intoffset /= BITS_PER_UNIT;
5279 do
5280 {
5281 regno = GP_ARG_MIN_REG + this_regno;
5282 reg = gen_rtx_REG (mode, regno);
5283 rvec[(*k)++] =
5284 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5285
5286 this_regno += 1;
5287 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5288 mode = word_mode;
5289 intregs -= 1;
5290 }
5291 while (intregs > 0);
5292}
5293
5294/* Recursive workhorse for the following. */
5295
5296static void
bb8df8a6 5297rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
0b5383eb
DJ
5298 HOST_WIDE_INT startbitpos, rtx rvec[],
5299 int *k)
5300{
5301 tree f;
5302
5303 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5304 if (TREE_CODE (f) == FIELD_DECL)
5305 {
5306 HOST_WIDE_INT bitpos = startbitpos;
5307 tree ftype = TREE_TYPE (f);
70fb00df
AP
5308 enum machine_mode mode;
5309 if (ftype == error_mark_node)
5310 continue;
5311 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5312
5313 if (DECL_SIZE (f) != 0
5314 && host_integerp (bit_position (f), 1))
5315 bitpos += int_bit_position (f);
5316
5317 /* ??? FIXME: else assume zero offset. */
5318
5319 if (TREE_CODE (ftype) == RECORD_TYPE)
5320 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5321 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5322 {
0b5383eb
DJ
5323#if 0
5324 switch (mode)
594a51fe 5325 {
0b5383eb
DJ
5326 case SCmode: mode = SFmode; break;
5327 case DCmode: mode = DFmode; break;
5328 case TCmode: mode = TFmode; break;
5329 default: break;
594a51fe 5330 }
0b5383eb
DJ
5331#endif
5332 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5333 rvec[(*k)++]
bb8df8a6 5334 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5335 gen_rtx_REG (mode, cum->fregno++),
5336 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5337 if (mode == TFmode || mode == TDmode)
0b5383eb 5338 cum->fregno++;
594a51fe 5339 }
0b5383eb
DJ
5340 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5341 {
5342 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5343 rvec[(*k)++]
bb8df8a6
EC
5344 = gen_rtx_EXPR_LIST (VOIDmode,
5345 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5346 GEN_INT (bitpos / BITS_PER_UNIT));
5347 }
5348 else if (cum->intoffset == -1)
5349 cum->intoffset = bitpos;
5350 }
5351}
594a51fe 5352
0b5383eb
DJ
5353/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5354 the register(s) to be used for each field and subfield of a struct
5355 being passed by value, along with the offset of where the
5356 register's value may be found in the block. FP fields go in FP
5357 register, vector fields go in vector registers, and everything
bb8df8a6 5358 else goes in int registers, packed as in memory.
8ff40a74 5359
0b5383eb
DJ
5360 This code is also used for function return values. RETVAL indicates
5361 whether this is the case.
8ff40a74 5362
a4d05547 5363 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5364 calling convention. */
594a51fe 5365
0b5383eb
DJ
5366static rtx
5367rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
5368 int named, bool retval)
5369{
5370 rtx rvec[FIRST_PSEUDO_REGISTER];
5371 int k = 1, kbase = 1;
5372 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5373 /* This is a copy; modifications are not visible to our caller. */
5374 CUMULATIVE_ARGS copy_cum = *orig_cum;
5375 CUMULATIVE_ARGS *cum = &copy_cum;
5376
5377 /* Pad to 16 byte boundary if needed. */
5378 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5379 && (cum->words % 2) != 0)
5380 cum->words++;
5381
5382 cum->intoffset = 0;
5383 cum->use_stack = 0;
5384 cum->named = named;
5385
5386 /* Put entries into rvec[] for individual FP and vector fields, and
5387 for the chunks of memory that go in int regs. Note we start at
5388 element 1; 0 is reserved for an indication of using memory, and
5389 may or may not be filled in below. */
5390 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5391 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5392
5393 /* If any part of the struct went on the stack put all of it there.
5394 This hack is because the generic code for
5395 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5396 parts of the struct are not at the beginning. */
5397 if (cum->use_stack)
5398 {
5399 if (retval)
5400 return NULL_RTX; /* doesn't go in registers at all */
5401 kbase = 0;
5402 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5403 }
5404 if (k > 1 || cum->use_stack)
5405 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5406 else
5407 return NULL_RTX;
5408}
5409
b78d48dd
FJ
5410/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5411
5412static rtx
ec6376ab 5413rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5414{
ec6376ab
AM
5415 int n_units;
5416 int i, k;
5417 rtx rvec[GP_ARG_NUM_REG + 1];
5418
5419 if (align_words >= GP_ARG_NUM_REG)
5420 return NULL_RTX;
5421
5422 n_units = rs6000_arg_size (mode, type);
5423
5424 /* Optimize the simple case where the arg fits in one gpr, except in
5425 the case of BLKmode due to assign_parms assuming that registers are
5426 BITS_PER_WORD wide. */
5427 if (n_units == 0
5428 || (n_units == 1 && mode != BLKmode))
5429 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5430
5431 k = 0;
5432 if (align_words + n_units > GP_ARG_NUM_REG)
5433 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5434 using a magic NULL_RTX component.
79773478
AM
5435 This is not strictly correct. Only some of the arg belongs in
5436 memory, not all of it. However, the normal scheme using
5437 function_arg_partial_nregs can result in unusual subregs, eg.
5438 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5439 store the whole arg to memory is often more efficient than code
5440 to store pieces, and we know that space is available in the right
5441 place for the whole arg. */
ec6376ab
AM
5442 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5443
5444 i = 0;
5445 do
36a454e1 5446 {
ec6376ab
AM
5447 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5448 rtx off = GEN_INT (i++ * 4);
5449 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5450 }
ec6376ab
AM
5451 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5452
5453 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5454}
5455
4697a36c
MM
5456/* Determine where to put an argument to a function.
5457 Value is zero to push the argument on the stack,
5458 or a hard register in which to store the argument.
5459
5460 MODE is the argument's machine mode.
5461 TYPE is the data type of the argument (as a tree).
5462 This is null for libcalls where that information may
5463 not be available.
5464 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5465 the preceding args and about the function being called. It is
5466 not modified in this routine.
4697a36c
MM
5467 NAMED is nonzero if this argument is a named parameter
5468 (otherwise it is an extra parameter matching an ellipsis).
5469
5470 On RS/6000 the first eight words of non-FP are normally in registers
5471 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5472 Under V.4, the first 8 FP args are in registers.
5473
5474 If this is floating-point and no prototype is specified, we use
5475 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5476 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5477 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5478 doesn't support PARALLEL anyway.
5479
5480 Note that for args passed by reference, function_arg will be called
5481 with MODE and TYPE set to that of the pointer to the arg, not the arg
5482 itself. */
4697a36c 5483
9390387d 5484rtx
f676971a 5485function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5486 tree type, int named)
4697a36c 5487{
4cc833b7 5488 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5489
a4f6c312
SS
5490 /* Return a marker to indicate whether CR1 needs to set or clear the
5491 bit that V.4 uses to say fp args were passed in registers.
5492 Assume that we don't need the marker for software floating point,
5493 or compiler generated library calls. */
4697a36c
MM
5494 if (mode == VOIDmode)
5495 {
f607bc57 5496 if (abi == ABI_V4
b9599e46 5497 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5498 && (cum->stdarg
5499 || (cum->nargs_prototype < 0
5500 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5501 {
a3170dc6
AH
5502 /* For the SPE, we need to crxor CR6 always. */
5503 if (TARGET_SPE_ABI)
5504 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5505 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5506 return GEN_INT (cum->call_cookie
5507 | ((cum->fregno == FP_ARG_MIN_REG)
5508 ? CALL_V4_SET_FP_ARGS
5509 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5510 }
4697a36c 5511
7509c759 5512 return GEN_INT (cum->call_cookie);
4697a36c
MM
5513 }
5514
0b5383eb
DJ
5515 if (rs6000_darwin64_abi && mode == BLKmode
5516 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5517 {
0b5383eb 5518 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5519 if (rslt != NULL_RTX)
5520 return rslt;
5521 /* Else fall through to usual handling. */
5522 }
5523
2858f73a 5524 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5525 if (TARGET_64BIT && ! cum->prototype)
5526 {
c4ad648e
AM
5527 /* Vector parameters get passed in vector register
5528 and also in GPRs or memory, in absence of prototype. */
5529 int align_words;
5530 rtx slot;
5531 align_words = (cum->words + 1) & ~1;
5532
5533 if (align_words >= GP_ARG_NUM_REG)
5534 {
5535 slot = NULL_RTX;
5536 }
5537 else
5538 {
5539 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5540 }
5541 return gen_rtx_PARALLEL (mode,
5542 gen_rtvec (2,
5543 gen_rtx_EXPR_LIST (VOIDmode,
5544 slot, const0_rtx),
5545 gen_rtx_EXPR_LIST (VOIDmode,
5546 gen_rtx_REG (mode, cum->vregno),
5547 const0_rtx)));
c72d6c26
HP
5548 }
5549 else
5550 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5551 else if (TARGET_ALTIVEC_ABI
5552 && (ALTIVEC_VECTOR_MODE (mode)
5553 || (type && TREE_CODE (type) == VECTOR_TYPE
5554 && int_size_in_bytes (type) == 16)))
0ac081f6 5555 {
2858f73a 5556 if (named || abi == ABI_V4)
a594a19c 5557 return NULL_RTX;
0ac081f6 5558 else
a594a19c
GK
5559 {
5560 /* Vector parameters to varargs functions under AIX or Darwin
5561 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5562 int align, align_words, n_words;
5563 enum machine_mode part_mode;
a594a19c
GK
5564
5565 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5566 2 mod 4 in terms of words in 32-bit mode, since the parameter
5567 save area starts at offset 24 from the stack. In 64-bit mode,
5568 they just have to start on an even word, since the parameter
5569 save area is 16-byte aligned. */
5570 if (TARGET_32BIT)
4ed78545 5571 align = (2 - cum->words) & 3;
2858f73a
GK
5572 else
5573 align = cum->words & 1;
a594a19c
GK
5574 align_words = cum->words + align;
5575
5576 /* Out of registers? Memory, then. */
5577 if (align_words >= GP_ARG_NUM_REG)
5578 return NULL_RTX;
ec6376ab
AM
5579
5580 if (TARGET_32BIT && TARGET_POWERPC64)
5581 return rs6000_mixed_function_arg (mode, type, align_words);
5582
2858f73a
GK
5583 /* The vector value goes in GPRs. Only the part of the
5584 value in GPRs is reported here. */
ec6376ab
AM
5585 part_mode = mode;
5586 n_words = rs6000_arg_size (mode, type);
5587 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5588 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
5589 is either wholly in GPRs or half in GPRs and half not. */
5590 part_mode = DImode;
ec6376ab
AM
5591
5592 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5593 }
0ac081f6 5594 }
f82f556d
AH
5595 else if (TARGET_SPE_ABI && TARGET_SPE
5596 && (SPE_VECTOR_MODE (mode)
18f63bfa 5597 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5598 || mode == DDmode
17caeff2
JM
5599 || mode == DCmode
5600 || mode == TFmode
7393f7f8 5601 || mode == TDmode
17caeff2 5602 || mode == TCmode))))
a6c9bed4 5603 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5604
f607bc57 5605 else if (abi == ABI_V4)
4697a36c 5606 {
a3170dc6 5607 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5608 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5609 || (mode == TFmode && !TARGET_IEEEQUAD)
5610 || mode == DDmode || mode == TDmode))
4cc833b7 5611 {
2d83f070
JJ
5612 /* _Decimal128 must use an even/odd register pair. This assumes
5613 that the register number is odd when fregno is odd. */
5614 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5615 cum->fregno++;
5616
5617 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5618 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5619 return gen_rtx_REG (mode, cum->fregno);
5620 else
b78d48dd 5621 return NULL_RTX;
4cc833b7
RH
5622 }
5623 else
5624 {
b2d04ecf 5625 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5626 int gregno = cum->sysv_gregno;
5627
4ed78545
AM
5628 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5629 (r7,r8) or (r9,r10). As does any other 2 word item such
5630 as complex int due to a historical mistake. */
5631 if (n_words == 2)
5632 gregno += (1 - gregno) & 1;
4cc833b7 5633
4ed78545 5634 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5635 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5636 return NULL_RTX;
ec6376ab
AM
5637
5638 if (TARGET_32BIT && TARGET_POWERPC64)
5639 return rs6000_mixed_function_arg (mode, type,
5640 gregno - GP_ARG_MIN_REG);
5641 return gen_rtx_REG (mode, gregno);
4cc833b7 5642 }
4697a36c 5643 }
4cc833b7
RH
5644 else
5645 {
294bd182 5646 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5647
2d83f070
JJ
5648 /* _Decimal128 must be passed in an even/odd float register pair.
5649 This assumes that the register number is odd when fregno is odd. */
5650 if (mode == TDmode && (cum->fregno % 2) == 1)
5651 cum->fregno++;
5652
2858f73a 5653 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5654 {
ec6376ab
AM
5655 rtx rvec[GP_ARG_NUM_REG + 1];
5656 rtx r;
5657 int k;
c53bdcf5
AM
5658 bool needs_psave;
5659 enum machine_mode fmode = mode;
c53bdcf5
AM
5660 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5661
5662 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5663 {
c53bdcf5
AM
5664 /* Currently, we only ever need one reg here because complex
5665 doubles are split. */
7393f7f8
BE
5666 gcc_assert (cum->fregno == FP_ARG_MAX_REG
5667 && (fmode == TFmode || fmode == TDmode));
ec6376ab 5668
7393f7f8
BE
5669 /* Long double or _Decimal128 split over regs and memory. */
5670 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 5671 }
c53bdcf5
AM
5672
5673 /* Do we also need to pass this arg in the parameter save
5674 area? */
5675 needs_psave = (type
5676 && (cum->nargs_prototype <= 0
5677 || (DEFAULT_ABI == ABI_AIX
de17c25f 5678 && TARGET_XL_COMPAT
c53bdcf5
AM
5679 && align_words >= GP_ARG_NUM_REG)));
5680
5681 if (!needs_psave && mode == fmode)
ec6376ab 5682 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5683
ec6376ab 5684 k = 0;
c53bdcf5
AM
5685 if (needs_psave)
5686 {
ec6376ab 5687 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5688 This piece must come first, before the fprs. */
c53bdcf5
AM
5689 if (align_words < GP_ARG_NUM_REG)
5690 {
5691 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5692
5693 if (align_words + n_words > GP_ARG_NUM_REG
5694 || (TARGET_32BIT && TARGET_POWERPC64))
5695 {
5696 /* If this is partially on the stack, then we only
5697 include the portion actually in registers here. */
5698 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5699 rtx off;
79773478
AM
5700 int i = 0;
5701 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
5702 /* Not all of the arg fits in gprs. Say that it
5703 goes in memory too, using a magic NULL_RTX
5704 component. Also see comment in
5705 rs6000_mixed_function_arg for why the normal
5706 function_arg_partial_nregs scheme doesn't work
5707 in this case. */
5708 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5709 const0_rtx);
ec6376ab
AM
5710 do
5711 {
5712 r = gen_rtx_REG (rmode,
5713 GP_ARG_MIN_REG + align_words);
2e6c9641 5714 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5715 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5716 }
5717 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5718 }
5719 else
5720 {
5721 /* The whole arg fits in gprs. */
5722 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5723 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5724 }
c53bdcf5 5725 }
ec6376ab
AM
5726 else
5727 /* It's entirely in memory. */
5728 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5729 }
5730
ec6376ab
AM
5731 /* Describe where this piece goes in the fprs. */
5732 r = gen_rtx_REG (fmode, cum->fregno);
5733 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5734
5735 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5736 }
5737 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5738 {
ec6376ab
AM
5739 if (TARGET_32BIT && TARGET_POWERPC64)
5740 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5741
4eeca74f
AM
5742 if (mode == BLKmode)
5743 mode = Pmode;
5744
b2d04ecf
AM
5745 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5746 }
4cc833b7
RH
5747 else
5748 return NULL_RTX;
4697a36c 5749 }
4697a36c
MM
5750}
5751\f
ec6376ab 5752/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5753 the number of bytes passed in registers. For args passed entirely in
5754 registers or entirely in memory, zero. When an arg is described by a
5755 PARALLEL, perhaps using more than one register type, this function
5756 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5757
78a52f11
RH
5758static int
5759rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5760 tree type, bool named)
4697a36c 5761{
c53bdcf5 5762 int ret = 0;
ec6376ab 5763 int align_words;
c53bdcf5 5764
f607bc57 5765 if (DEFAULT_ABI == ABI_V4)
4697a36c 5766 return 0;
4697a36c 5767
c53bdcf5
AM
5768 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5769 && cum->nargs_prototype >= 0)
5770 return 0;
5771
0b5383eb
DJ
5772 /* In this complicated case we just disable the partial_nregs code. */
5773 if (rs6000_darwin64_abi && mode == BLKmode
5774 && TREE_CODE (type) == RECORD_TYPE
5775 && int_size_in_bytes (type) > 0)
5776 return 0;
5777
294bd182 5778 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 5779
79773478
AM
5780 if (USE_FP_FOR_ARG_P (cum, mode, type))
5781 {
fb63c729
AM
5782 /* If we are passing this arg in the fixed parameter save area
5783 (gprs or memory) as well as fprs, then this function should
79773478
AM
5784 return the number of partial bytes passed in the parameter
5785 save area rather than partial bytes passed in fprs. */
5786 if (type
5787 && (cum->nargs_prototype <= 0
5788 || (DEFAULT_ABI == ABI_AIX
5789 && TARGET_XL_COMPAT
5790 && align_words >= GP_ARG_NUM_REG)))
5791 return 0;
5792 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5793 > FP_ARG_MAX_REG + 1)
ac7e839c 5794 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5795 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5796 return 0;
5797 }
5798
ec6376ab
AM
5799 if (align_words < GP_ARG_NUM_REG
5800 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5801 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5802
c53bdcf5 5803 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5804 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5805
c53bdcf5 5806 return ret;
4697a36c
MM
5807}
5808\f
5809/* A C expression that indicates when an argument must be passed by
5810 reference. If nonzero for an argument, a copy of that argument is
5811 made in memory and a pointer to the argument is passed instead of
5812 the argument itself. The pointer is passed in whatever way is
5813 appropriate for passing a pointer to that type.
5814
b2d04ecf
AM
5815 Under V.4, aggregates and long double are passed by reference.
5816
5817 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5818 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5819
5820 As an extension to all ABIs, variable sized types are passed by
5821 reference. */
4697a36c 5822
8cd5a4e0 5823static bool
f676971a 5824rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
bada2eb8
DE
5825 enum machine_mode mode, tree type,
5826 bool named ATTRIBUTE_UNUSED)
4697a36c 5827{
602ea4d3 5828 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
5829 {
5830 if (TARGET_DEBUG_ARG)
bada2eb8
DE
5831 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5832 return 1;
5833 }
5834
5835 if (!type)
5836 return 0;
4697a36c 5837
bada2eb8
DE
5838 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5839 {
5840 if (TARGET_DEBUG_ARG)
5841 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5842 return 1;
5843 }
5844
5845 if (int_size_in_bytes (type) < 0)
5846 {
5847 if (TARGET_DEBUG_ARG)
5848 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5849 return 1;
5850 }
5851
5852 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5853 modes only exist for GCC vector types if -maltivec. */
5854 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5855 {
5856 if (TARGET_DEBUG_ARG)
5857 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
5858 return 1;
5859 }
b693336b
PB
5860
5861 /* Pass synthetic vectors in memory. */
bada2eb8 5862 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5863 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5864 {
5865 static bool warned_for_pass_big_vectors = false;
5866 if (TARGET_DEBUG_ARG)
5867 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
5868 if (!warned_for_pass_big_vectors)
5869 {
d4ee4d25 5870 warning (0, "GCC vector passed by reference: "
b693336b
PB
5871 "non-standard ABI extension with no compatibility guarantee");
5872 warned_for_pass_big_vectors = true;
5873 }
5874 return 1;
5875 }
5876
b2d04ecf 5877 return 0;
4697a36c 5878}
5985c7a6
FJ
5879
5880static void
2d9db8eb 5881rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
5882{
5883 int i;
5884 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5885
5886 if (nregs == 0)
5887 return;
5888
c4ad648e 5889 for (i = 0; i < nregs; i++)
5985c7a6 5890 {
9390387d 5891 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 5892 if (reload_completed)
c4ad648e
AM
5893 {
5894 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5895 tem = NULL_RTX;
5896 else
5897 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 5898 i * GET_MODE_SIZE (reg_mode));
c4ad648e 5899 }
5985c7a6
FJ
5900 else
5901 tem = replace_equiv_address (tem, XEXP (tem, 0));
5902
37409796 5903 gcc_assert (tem);
5985c7a6
FJ
5904
5905 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5906 }
5907}
4697a36c
MM
5908\f
5909/* Perform any needed actions needed for a function that is receiving a
f676971a 5910 variable number of arguments.
4697a36c
MM
5911
5912 CUM is as above.
5913
5914 MODE and TYPE are the mode and type of the current parameter.
5915
5916 PRETEND_SIZE is a variable that should be set to the amount of stack
5917 that must be pushed by the prolog to pretend that our caller pushed
5918 it.
5919
5920 Normally, this macro will push all remaining incoming registers on the
5921 stack and set PRETEND_SIZE to the length of the registers pushed. */
5922
c6e8c921 5923static void
f676971a 5924setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
5925 tree type, int *pretend_size ATTRIBUTE_UNUSED,
5926 int no_rtl)
4697a36c 5927{
4cc833b7
RH
5928 CUMULATIVE_ARGS next_cum;
5929 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 5930 rtx save_area = NULL_RTX, mem;
dfafc897 5931 int first_reg_offset, set;
4697a36c 5932
f31bf321 5933 /* Skip the last named argument. */
d34c5b80 5934 next_cum = *cum;
594a51fe 5935 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 5936
f607bc57 5937 if (DEFAULT_ABI == ABI_V4)
d34c5b80 5938 {
5b667039
JJ
5939 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5940
60e2d0ca 5941 if (! no_rtl)
5b667039
JJ
5942 {
5943 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
5944 HOST_WIDE_INT offset = 0;
5945
5946 /* Try to optimize the size of the varargs save area.
5947 The ABI requires that ap.reg_save_area is doubleword
5948 aligned, but we don't need to allocate space for all
5949 the bytes, only those to which we actually will save
5950 anything. */
5951 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
5952 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
5953 if (TARGET_HARD_FLOAT && TARGET_FPRS
5954 && next_cum.fregno <= FP_ARG_V4_MAX_REG
5955 && cfun->va_list_fpr_size)
5956 {
5957 if (gpr_reg_num)
5958 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
5959 * UNITS_PER_FP_WORD;
5960 if (cfun->va_list_fpr_size
5961 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5962 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
5963 else
5964 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
5965 * UNITS_PER_FP_WORD;
5966 }
5967 if (gpr_reg_num)
5968 {
5969 offset = -((first_reg_offset * reg_size) & ~7);
5970 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
5971 {
5972 gpr_reg_num = cfun->va_list_gpr_size;
5973 if (reg_size == 4 && (first_reg_offset & 1))
5974 gpr_reg_num++;
5975 }
5976 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
5977 }
5978 else if (fpr_size)
5979 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
5980 * UNITS_PER_FP_WORD
5981 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 5982
5b667039
JJ
5983 if (gpr_size + fpr_size)
5984 {
5985 rtx reg_save_area
5986 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
5987 gcc_assert (GET_CODE (reg_save_area) == MEM);
5988 reg_save_area = XEXP (reg_save_area, 0);
5989 if (GET_CODE (reg_save_area) == PLUS)
5990 {
5991 gcc_assert (XEXP (reg_save_area, 0)
5992 == virtual_stack_vars_rtx);
5993 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
5994 offset += INTVAL (XEXP (reg_save_area, 1));
5995 }
5996 else
5997 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
5998 }
5999
6000 cfun->machine->varargs_save_offset = offset;
6001 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6002 }
4697a36c 6003 }
60e2d0ca 6004 else
4697a36c 6005 {
d34c5b80 6006 first_reg_offset = next_cum.words;
4cc833b7 6007 save_area = virtual_incoming_args_rtx;
4697a36c 6008
fe984136 6009 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6010 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6011 }
4697a36c 6012
dfafc897 6013 set = get_varargs_alias_set ();
9d30f3c1
JJ
6014 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6015 && cfun->va_list_gpr_size)
4cc833b7 6016 {
9d30f3c1
JJ
6017 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6018
6019 if (va_list_gpr_counter_field)
6020 {
6021 /* V4 va_list_gpr_size counts number of registers needed. */
6022 if (nregs > cfun->va_list_gpr_size)
6023 nregs = cfun->va_list_gpr_size;
6024 }
6025 else
6026 {
6027 /* char * va_list instead counts number of bytes needed. */
6028 if (nregs > cfun->va_list_gpr_size / reg_size)
6029 nregs = cfun->va_list_gpr_size / reg_size;
6030 }
6031
dfafc897 6032 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6033 plus_constant (save_area,
13e2e16e
DE
6034 first_reg_offset * reg_size));
6035 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6036 set_mem_alias_set (mem, set);
8ac61af7 6037 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6038
f676971a 6039 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6040 nregs);
4697a36c
MM
6041 }
6042
4697a36c 6043 /* Save FP registers if needed. */
f607bc57 6044 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6045 && TARGET_HARD_FLOAT && TARGET_FPRS
6046 && ! no_rtl
9d30f3c1
JJ
6047 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6048 && cfun->va_list_fpr_size)
4697a36c 6049 {
9d30f3c1 6050 int fregno = next_cum.fregno, nregs;
9ebbca7d 6051 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6052 rtx lab = gen_label_rtx ();
5b667039
JJ
6053 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6054 * UNITS_PER_FP_WORD);
4697a36c 6055
c4ad648e
AM
6056 emit_jump_insn
6057 (gen_rtx_SET (VOIDmode,
6058 pc_rtx,
6059 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6060 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6061 const0_rtx),
39403d82 6062 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6063 pc_rtx)));
6064
9d30f3c1
JJ
6065 for (nregs = 0;
6066 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6067 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6068 {
5496b36f 6069 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6070 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6071 set_mem_alias_set (mem, set);
94ff898d 6072 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6073 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6074 }
4cc833b7
RH
6075
6076 emit_label (lab);
4697a36c 6077 }
4697a36c 6078}
4697a36c 6079
dfafc897 6080/* Create the va_list data type. */
2c4974b7 6081
c35d187f
RH
6082static tree
6083rs6000_build_builtin_va_list (void)
dfafc897 6084{
64c2816f 6085 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6086
9ebbca7d
GK
6087 /* For AIX, prefer 'char *' because that's what the system
6088 header files like. */
f607bc57 6089 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6090 return build_pointer_type (char_type_node);
dfafc897 6091
f1e639b1 6092 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6093 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6094
f676971a 6095 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6096 unsigned_char_type_node);
f676971a 6097 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6098 unsigned_char_type_node);
64c2816f
DT
6099 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6100 every user file. */
6101 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6102 short_unsigned_type_node);
dfafc897
FS
6103 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6104 ptr_type_node);
6105 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6106 ptr_type_node);
6107
9d30f3c1
JJ
6108 va_list_gpr_counter_field = f_gpr;
6109 va_list_fpr_counter_field = f_fpr;
6110
dfafc897
FS
6111 DECL_FIELD_CONTEXT (f_gpr) = record;
6112 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6113 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6114 DECL_FIELD_CONTEXT (f_ovf) = record;
6115 DECL_FIELD_CONTEXT (f_sav) = record;
6116
bab45a51
FS
6117 TREE_CHAIN (record) = type_decl;
6118 TYPE_NAME (record) = type_decl;
dfafc897
FS
6119 TYPE_FIELDS (record) = f_gpr;
6120 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6121 TREE_CHAIN (f_fpr) = f_res;
6122 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6123 TREE_CHAIN (f_ovf) = f_sav;
6124
6125 layout_type (record);
6126
6127 /* The correct type is an array type of one element. */
6128 return build_array_type (record, build_index_type (size_zero_node));
6129}
6130
6131/* Implement va_start. */
6132
6133void
a2369ed3 6134rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6135{
dfafc897 6136 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6137 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6138 tree gpr, fpr, ovf, sav, t;
2c4974b7 6139
dfafc897 6140 /* Only SVR4 needs something special. */
f607bc57 6141 if (DEFAULT_ABI != ABI_V4)
dfafc897 6142 {
e5faf155 6143 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6144 return;
6145 }
6146
973a648b 6147 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6148 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6149 f_res = TREE_CHAIN (f_fpr);
6150 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6151 f_sav = TREE_CHAIN (f_ovf);
6152
872a65b5 6153 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6154 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6155 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6156 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6157 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6158
6159 /* Count number of gp and fp argument registers used. */
4cc833b7 6160 words = current_function_args_info.words;
987732e0
DE
6161 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6162 GP_ARG_NUM_REG);
6163 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6164 FP_ARG_NUM_REG);
dfafc897
FS
6165
6166 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6167 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6168 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6169 words, n_gpr, n_fpr);
dfafc897 6170
9d30f3c1
JJ
6171 if (cfun->va_list_gpr_size)
6172 {
07beea0d 6173 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6174 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6175 TREE_SIDE_EFFECTS (t) = 1;
6176 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6177 }
58c8adc1 6178
9d30f3c1
JJ
6179 if (cfun->va_list_fpr_size)
6180 {
07beea0d 6181 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6182 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6183 TREE_SIDE_EFFECTS (t) = 1;
6184 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6185 }
dfafc897
FS
6186
6187 /* Find the overflow area. */
6188 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6189 if (words != 0)
47a25a46
RG
6190 t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
6191 build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
07beea0d 6192 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6193 TREE_SIDE_EFFECTS (t) = 1;
6194 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6195
9d30f3c1
JJ
6196 /* If there were no va_arg invocations, don't set up the register
6197 save area. */
6198 if (!cfun->va_list_gpr_size
6199 && !cfun->va_list_fpr_size
6200 && n_gpr < GP_ARG_NUM_REG
6201 && n_fpr < FP_ARG_V4_MAX_REG)
6202 return;
6203
dfafc897
FS
6204 /* Find the register save area. */
6205 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6206 if (cfun->machine->varargs_save_offset)
47a25a46
RG
6207 t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
6208 build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
07beea0d 6209 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6210 TREE_SIDE_EFFECTS (t) = 1;
6211 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6212}
6213
6214/* Implement va_arg. */
6215
23a60a04
JM
6216tree
6217rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6218{
cd3ce9b4
JM
6219 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6220 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6221 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6222 tree lab_false, lab_over, addr;
6223 int align;
6224 tree ptrtype = build_pointer_type (type);
7393f7f8 6225 int regalign = 0;
cd3ce9b4 6226
08b0dc1b
RH
6227 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6228 {
6229 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6230 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6231 }
6232
cd3ce9b4
JM
6233 if (DEFAULT_ABI != ABI_V4)
6234 {
08b0dc1b 6235 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6236 {
6237 tree elem_type = TREE_TYPE (type);
6238 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6239 int elem_size = GET_MODE_SIZE (elem_mode);
6240
6241 if (elem_size < UNITS_PER_WORD)
6242 {
23a60a04 6243 tree real_part, imag_part;
cd3ce9b4
JM
6244 tree post = NULL_TREE;
6245
23a60a04
JM
6246 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6247 &post);
6248 /* Copy the value into a temporary, lest the formal temporary
6249 be reused out from under us. */
6250 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6251 append_to_statement_list (post, pre_p);
6252
23a60a04
JM
6253 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6254 post_p);
cd3ce9b4 6255
47a25a46 6256 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6257 }
6258 }
6259
23a60a04 6260 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6261 }
6262
6263 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6264 f_fpr = TREE_CHAIN (f_gpr);
6265 f_res = TREE_CHAIN (f_fpr);
6266 f_ovf = TREE_CHAIN (f_res);
6267 f_sav = TREE_CHAIN (f_ovf);
6268
872a65b5 6269 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6270 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6271 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6272 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6273 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6274
6275 size = int_size_in_bytes (type);
6276 rsize = (size + 3) / 4;
6277 align = 1;
6278
08b0dc1b 6279 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6280 && (TYPE_MODE (type) == SFmode
6281 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6282 || TYPE_MODE (type) == TFmode
6283 || TYPE_MODE (type) == DDmode
6284 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6285 {
6286 /* FP args go in FP registers, if present. */
cd3ce9b4 6287 reg = fpr;
602ea4d3 6288 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6289 sav_ofs = 8*4;
6290 sav_scale = 8;
602ea4d3 6291 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6292 align = 8;
6293 }
6294 else
6295 {
6296 /* Otherwise into GP registers. */
cd3ce9b4
JM
6297 reg = gpr;
6298 n_reg = rsize;
6299 sav_ofs = 0;
6300 sav_scale = 4;
6301 if (n_reg == 2)
6302 align = 8;
6303 }
6304
6305 /* Pull the value out of the saved registers.... */
6306
6307 lab_over = NULL;
6308 addr = create_tmp_var (ptr_type_node, "addr");
6309 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6310
6311 /* AltiVec vectors never go in registers when -mabi=altivec. */
6312 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6313 align = 16;
6314 else
6315 {
6316 lab_false = create_artificial_label ();
6317 lab_over = create_artificial_label ();
6318
6319 /* Long long and SPE vectors are aligned in the registers.
6320 As are any other 2 gpr item such as complex int due to a
6321 historical mistake. */
6322 u = reg;
602ea4d3 6323 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6324 {
7393f7f8 6325 regalign = 1;
cd3ce9b4 6326 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
95674810 6327 size_int (n_reg - 1));
cd3ce9b4
JM
6328 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6329 }
7393f7f8
BE
6330 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6331 reg number is 0 for f1, so we want to make it odd. */
6332 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6333 {
6334 regalign = 1;
6335 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6336 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6337 }
cd3ce9b4 6338
95674810 6339 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6340 t = build2 (GE_EXPR, boolean_type_node, u, t);
6341 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6342 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6343 gimplify_and_add (t, pre_p);
6344
6345 t = sav;
6346 if (sav_ofs)
95674810 6347 t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6348
95674810 6349 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
cd3ce9b4 6350 u = build1 (CONVERT_EXPR, integer_type_node, u);
95674810 6351 u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
cd3ce9b4
JM
6352 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
6353
07beea0d 6354 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6355 gimplify_and_add (t, pre_p);
6356
6357 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6358 gimplify_and_add (t, pre_p);
6359
6360 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6361 append_to_statement_list (t, pre_p);
6362
7393f7f8 6363 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6364 {
6365 /* Ensure that we don't find any more args in regs.
7393f7f8 6366 Alignment has taken care of for special cases. */
07beea0d 6367 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6368 gimplify_and_add (t, pre_p);
6369 }
6370 }
6371
6372 /* ... otherwise out of the overflow area. */
6373
6374 /* Care for on-stack alignment if needed. */
6375 t = ovf;
6376 if (align != 1)
6377 {
95674810 6378 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
4a90aeeb 6379 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
7d60be94 6380 build_int_cst (NULL_TREE, -align));
cd3ce9b4
JM
6381 }
6382 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6383
07beea0d 6384 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6385 gimplify_and_add (u, pre_p);
6386
95674810 6387 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6388 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6389 gimplify_and_add (t, pre_p);
6390
6391 if (lab_over)
6392 {
6393 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6394 append_to_statement_list (t, pre_p);
6395 }
6396
0cfbc62b
JM
6397 if (STRICT_ALIGNMENT
6398 && (TYPE_ALIGN (type)
6399 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6400 {
6401 /* The value (of type complex double, for example) may not be
6402 aligned in memory in the saved registers, so copy via a
6403 temporary. (This is the same code as used for SPARC.) */
6404 tree tmp = create_tmp_var (type, "va_arg_tmp");
6405 tree dest_addr = build_fold_addr_expr (tmp);
6406
5039610b
SL
6407 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6408 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6409
6410 gimplify_and_add (copy, pre_p);
6411 addr = dest_addr;
6412 }
6413
08b0dc1b 6414 addr = fold_convert (ptrtype, addr);
872a65b5 6415 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6416}
6417
0ac081f6
AH
6418/* Builtins. */
6419
58646b77
PB
6420static void
6421def_builtin (int mask, const char *name, tree type, int code)
6422{
6423 if (mask & target_flags)
6424 {
6425 if (rs6000_builtin_decls[code])
6426 abort ();
6427
6428 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6429 add_builtin_function (name, type, code, BUILT_IN_MD,
6430 NULL, NULL_TREE);
58646b77
PB
6431 }
6432}
0ac081f6 6433
24408032
AH
6434/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6435
2212663f 6436static const struct builtin_description bdesc_3arg[] =
24408032
AH
6437{
6438 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6439 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6440 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6441 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6442 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6443 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6444 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6445 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6446 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6447 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6448 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6449 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6450 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6451 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6452 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6453 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6454 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6455 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6456 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6457 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6458 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6459 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6460 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6461
6462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 6477};
2212663f 6478
95385cbb
AH
6479/* DST operations: void foo (void *, const int, const char). */
6480
6481static const struct builtin_description bdesc_dst[] =
6482{
6483 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6484 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6485 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6486 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6487
6488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6492};
6493
2212663f 6494/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6495
a3170dc6 6496static struct builtin_description bdesc_2arg[] =
0ac081f6 6497{
f18c054f
DB
6498 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6499 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6500 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6501 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6502 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6503 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6504 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6505 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6506 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6507 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6508 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6509 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6510 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6511 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6512 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6513 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6514 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6515 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6516 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6517 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6518 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6519 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6520 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6521 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6522 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6523 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6524 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6525 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6526 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6527 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6528 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6529 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6530 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6531 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6532 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6533 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6534 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6535 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6536 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6537 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6538 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6539 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6540 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6541 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6542 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6543 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6544 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6545 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6546 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6547 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6548 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6549 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6550 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6551 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6552 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6553 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6554 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6555 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6556 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6557 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6558 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6559 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6560 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6561 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6562 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6563 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6564 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6565 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6566 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6567 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6568 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6569 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6570 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6571 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6572 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6573 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6574 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6575 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6576 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6577 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6578 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6579 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6580 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6581 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6582 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6583 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6584 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6585 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6586 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6587 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6588 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6589 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6590 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6591 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6592 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6593 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6594 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6595 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6596 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6597 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6598 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6599 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6600 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6601 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6602 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6603 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6604 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6605 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6606 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6607 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6608 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6609
58646b77
PB
6610 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6611 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6612 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6613 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6614 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6615 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6616 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6617 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6618 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6619 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6620 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6621 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6622 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6623 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6624 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6625 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6626 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6627 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6628 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6629 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6630 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6631 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6632 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6633 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6634 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6635 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6636 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6637 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6638 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6639 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6640 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6641 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6642 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6643 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6644 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6645 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6646 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6647 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6648 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6649 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6650 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6651 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6652 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6653 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6654 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6655 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6656 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6657 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6658 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6659 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6660 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6661 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6662 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6663 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6664 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6665 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6666 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6667 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6668 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6669 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6670 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6671 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6672 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6673 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6674 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6675 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6676 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6677 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6678 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6679 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6680 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6681 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6682 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6683 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6684 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6685 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6686 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6687 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6688 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6689 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6690 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6691 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6692 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6693 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6694 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6695 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6696 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6697 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6698 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6699 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6700 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6701 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6702 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6703 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6704 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6705 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6706 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6707 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6708 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6709 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6710 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6711 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6712 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6713 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6714 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6715 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6716 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6717 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6718 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6719 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6720 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6721 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6722 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6723 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6724 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6725 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6726 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6727 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6728 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6729 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6730 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6731 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6732 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6733 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6734 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6735 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6736 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6737
a3170dc6
AH
6738 /* Place holder, leave as first spe builtin. */
6739 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6740 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6741 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6742 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6743 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6744 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6745 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6746 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6747 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6748 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6749 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6750 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6751 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6752 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6753 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6754 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6755 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6756 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6757 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6758 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6759 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6760 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6761 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6762 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6763 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6764 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6765 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6766 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6767 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6768 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6769 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6770 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6771 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6772 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6773 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6774 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6775 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6776 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6777 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6778 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6779 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6780 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6781 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6782 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6783 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6784 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6785 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6786 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6787 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6788 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6789 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6790 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6791 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6792 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6793 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6794 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6795 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6796 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6797 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6798 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6799 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6800 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6801 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6802 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6803 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6804 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6805 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6806 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6807 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6808 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6809 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6810 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6811 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6812 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
6813 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6814 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
6815 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6816 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6817 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6818 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6819 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6820 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6821 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6822 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6823 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6824 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6825 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6826 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6827 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6828 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6829 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6830 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6831 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6832 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6833 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6834 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6835 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6836 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6837 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6838 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6839 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6840 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6841 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6842 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6843 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6844 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6845 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6846 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6847 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6848
6849 /* SPE binary operations expecting a 5-bit unsigned literal. */
6850 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6851
6852 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6853 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6854 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6855 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6856 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
6857 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
6858 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
6859 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
6860 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
6861 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
6862 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
6863 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
6864 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
6865 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
6866 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
6867 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
6868 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
6869 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
6870 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
6871 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
6872 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
6873 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
6874 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
6875 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
6876 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
6877 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
6878
6879 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 6880 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
6881};
6882
6883/* AltiVec predicates. */
6884
6885struct builtin_description_predicates
6886{
6887 const unsigned int mask;
6888 const enum insn_code icode;
6889 const char *opcode;
6890 const char *const name;
6891 const enum rs6000_builtins code;
6892};
6893
6894static const struct builtin_description_predicates bdesc_altivec_preds[] =
6895{
6896 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
6897 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
6898 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
6899 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
6900 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
6901 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
6902 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
6903 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
6904 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
6905 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
6906 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
6907 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
6908 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
6909
6910 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
6911 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
6912 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 6913};
24408032 6914
a3170dc6
AH
6915/* SPE predicates. */
6916static struct builtin_description bdesc_spe_predicates[] =
6917{
6918 /* Place-holder. Leave as first. */
6919 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
6920 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
6921 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
6922 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
6923 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
6924 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
6925 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
6926 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
6927 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
6928 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
6929 /* Place-holder. Leave as last. */
6930 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
6931};
6932
6933/* SPE evsel predicates. */
6934static struct builtin_description bdesc_spe_evsel[] =
6935{
6936 /* Place-holder. Leave as first. */
6937 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6938 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6939 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6940 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6941 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6942 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6943 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6944 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6945 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6946 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6947 /* Place-holder. Leave as last. */
6948 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6949};
6950
b6d08ca1 6951/* ABS* operations. */
100c4561
AH
6952
6953static const struct builtin_description bdesc_abs[] =
6954{
6955 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6956 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6957 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6958 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6959 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6960 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6961 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6962};
6963
617e0e1d
DB
6964/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6965 foo (VECa). */
24408032 6966
a3170dc6 6967static struct builtin_description bdesc_1arg[] =
2212663f 6968{
617e0e1d
DB
6969 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6970 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6971 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6972 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6973 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6974 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6975 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6976 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
6977 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6978 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6979 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
6980 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6981 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6982 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6983 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6984 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6985 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 6986
58646b77
PB
6987 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
6988 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
6989 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
6990 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
6991 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
6992 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
6993 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
6994 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
6995 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
6996 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
6997 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
6998 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
6999 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7000 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7001 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7002 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7003 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7004 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7005 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7006
a3170dc6
AH
7007 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7008 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7009 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7010 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7011 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7012 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7013 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7014 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7015 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7016 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7017 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7018 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7019 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7020 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7021 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7022 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7023 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7024 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7025 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7026 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7027 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7028 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7029 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7030 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7031 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7032 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7033 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7034 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7035 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7036 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7037
7038 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 7039 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
2212663f
DB
7040};
7041
7042static rtx
5039610b 7043rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7044{
7045 rtx pat;
5039610b 7046 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7047 rtx op0 = expand_normal (arg0);
2212663f
DB
7048 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7049 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7050
0559cc77
DE
7051 if (icode == CODE_FOR_nothing)
7052 /* Builtin not supported on this processor. */
7053 return 0;
7054
20e26713
AH
7055 /* If we got invalid arguments bail out before generating bad rtl. */
7056 if (arg0 == error_mark_node)
9a171fcd 7057 return const0_rtx;
20e26713 7058
0559cc77
DE
7059 if (icode == CODE_FOR_altivec_vspltisb
7060 || icode == CODE_FOR_altivec_vspltish
7061 || icode == CODE_FOR_altivec_vspltisw
7062 || icode == CODE_FOR_spe_evsplatfi
7063 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7064 {
7065 /* Only allow 5-bit *signed* literals. */
b44140e7 7066 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7067 || INTVAL (op0) > 15
7068 || INTVAL (op0) < -16)
b44140e7
AH
7069 {
7070 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7071 return const0_rtx;
b44140e7 7072 }
b44140e7
AH
7073 }
7074
c62f2db5 7075 if (target == 0
2212663f
DB
7076 || GET_MODE (target) != tmode
7077 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7078 target = gen_reg_rtx (tmode);
7079
7080 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7081 op0 = copy_to_mode_reg (mode0, op0);
7082
7083 pat = GEN_FCN (icode) (target, op0);
7084 if (! pat)
7085 return 0;
7086 emit_insn (pat);
0ac081f6 7087
2212663f
DB
7088 return target;
7089}
ae4b4a02 7090
100c4561 7091static rtx
5039610b 7092altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7093{
7094 rtx pat, scratch1, scratch2;
5039610b 7095 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7096 rtx op0 = expand_normal (arg0);
100c4561
AH
7097 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7098 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7099
7100 /* If we have invalid arguments, bail out before generating bad rtl. */
7101 if (arg0 == error_mark_node)
9a171fcd 7102 return const0_rtx;
100c4561
AH
7103
7104 if (target == 0
7105 || GET_MODE (target) != tmode
7106 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7107 target = gen_reg_rtx (tmode);
7108
7109 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7110 op0 = copy_to_mode_reg (mode0, op0);
7111
7112 scratch1 = gen_reg_rtx (mode0);
7113 scratch2 = gen_reg_rtx (mode0);
7114
7115 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7116 if (! pat)
7117 return 0;
7118 emit_insn (pat);
7119
7120 return target;
7121}
7122
0ac081f6 7123static rtx
5039610b 7124rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7125{
7126 rtx pat;
5039610b
SL
7127 tree arg0 = CALL_EXPR_ARG (exp, 0);
7128 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7129 rtx op0 = expand_normal (arg0);
7130 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7131 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7132 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7133 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7134
0559cc77
DE
7135 if (icode == CODE_FOR_nothing)
7136 /* Builtin not supported on this processor. */
7137 return 0;
7138
20e26713
AH
7139 /* If we got invalid arguments bail out before generating bad rtl. */
7140 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7141 return const0_rtx;
20e26713 7142
0559cc77
DE
7143 if (icode == CODE_FOR_altivec_vcfux
7144 || icode == CODE_FOR_altivec_vcfsx
7145 || icode == CODE_FOR_altivec_vctsxs
7146 || icode == CODE_FOR_altivec_vctuxs
7147 || icode == CODE_FOR_altivec_vspltb
7148 || icode == CODE_FOR_altivec_vsplth
7149 || icode == CODE_FOR_altivec_vspltw
7150 || icode == CODE_FOR_spe_evaddiw
7151 || icode == CODE_FOR_spe_evldd
7152 || icode == CODE_FOR_spe_evldh
7153 || icode == CODE_FOR_spe_evldw
7154 || icode == CODE_FOR_spe_evlhhesplat
7155 || icode == CODE_FOR_spe_evlhhossplat
7156 || icode == CODE_FOR_spe_evlhhousplat
7157 || icode == CODE_FOR_spe_evlwhe
7158 || icode == CODE_FOR_spe_evlwhos
7159 || icode == CODE_FOR_spe_evlwhou
7160 || icode == CODE_FOR_spe_evlwhsplat
7161 || icode == CODE_FOR_spe_evlwwsplat
7162 || icode == CODE_FOR_spe_evrlwi
7163 || icode == CODE_FOR_spe_evslwi
7164 || icode == CODE_FOR_spe_evsrwis
f5119d10 7165 || icode == CODE_FOR_spe_evsubifw
0559cc77 7166 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7167 {
7168 /* Only allow 5-bit unsigned literals. */
8bb418a3 7169 STRIP_NOPS (arg1);
b44140e7
AH
7170 if (TREE_CODE (arg1) != INTEGER_CST
7171 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7172 {
7173 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7174 return const0_rtx;
b44140e7 7175 }
b44140e7
AH
7176 }
7177
c62f2db5 7178 if (target == 0
0ac081f6
AH
7179 || GET_MODE (target) != tmode
7180 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7181 target = gen_reg_rtx (tmode);
7182
7183 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7184 op0 = copy_to_mode_reg (mode0, op0);
7185 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7186 op1 = copy_to_mode_reg (mode1, op1);
7187
7188 pat = GEN_FCN (icode) (target, op0, op1);
7189 if (! pat)
7190 return 0;
7191 emit_insn (pat);
7192
7193 return target;
7194}
6525c0e7 7195
ae4b4a02 7196static rtx
f676971a 7197altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7198 tree exp, rtx target)
ae4b4a02
AH
7199{
7200 rtx pat, scratch;
5039610b
SL
7201 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7202 tree arg0 = CALL_EXPR_ARG (exp, 1);
7203 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7204 rtx op0 = expand_normal (arg0);
7205 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7206 enum machine_mode tmode = SImode;
7207 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7208 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7209 int cr6_form_int;
7210
7211 if (TREE_CODE (cr6_form) != INTEGER_CST)
7212 {
7213 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7214 return const0_rtx;
ae4b4a02
AH
7215 }
7216 else
7217 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7218
37409796 7219 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7220
7221 /* If we have invalid arguments, bail out before generating bad rtl. */
7222 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7223 return const0_rtx;
ae4b4a02
AH
7224
7225 if (target == 0
7226 || GET_MODE (target) != tmode
7227 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7228 target = gen_reg_rtx (tmode);
7229
7230 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7231 op0 = copy_to_mode_reg (mode0, op0);
7232 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7233 op1 = copy_to_mode_reg (mode1, op1);
7234
7235 scratch = gen_reg_rtx (mode0);
7236
7237 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7238 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7239 if (! pat)
7240 return 0;
7241 emit_insn (pat);
7242
7243 /* The vec_any* and vec_all* predicates use the same opcodes for two
7244 different operations, but the bits in CR6 will be different
7245 depending on what information we want. So we have to play tricks
7246 with CR6 to get the right bits out.
7247
7248 If you think this is disgusting, look at the specs for the
7249 AltiVec predicates. */
7250
c4ad648e
AM
7251 switch (cr6_form_int)
7252 {
7253 case 0:
7254 emit_insn (gen_cr6_test_for_zero (target));
7255 break;
7256 case 1:
7257 emit_insn (gen_cr6_test_for_zero_reverse (target));
7258 break;
7259 case 2:
7260 emit_insn (gen_cr6_test_for_lt (target));
7261 break;
7262 case 3:
7263 emit_insn (gen_cr6_test_for_lt_reverse (target));
7264 break;
7265 default:
7266 error ("argument 1 of __builtin_altivec_predicate is out of range");
7267 break;
7268 }
ae4b4a02
AH
7269
7270 return target;
7271}
7272
b4a62fa0 7273static rtx
5039610b 7274altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7275{
7276 rtx pat, addr;
5039610b
SL
7277 tree arg0 = CALL_EXPR_ARG (exp, 0);
7278 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7279 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7280 enum machine_mode mode0 = Pmode;
7281 enum machine_mode mode1 = Pmode;
84217346
MD
7282 rtx op0 = expand_normal (arg0);
7283 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7284
7285 if (icode == CODE_FOR_nothing)
7286 /* Builtin not supported on this processor. */
7287 return 0;
7288
7289 /* If we got invalid arguments bail out before generating bad rtl. */
7290 if (arg0 == error_mark_node || arg1 == error_mark_node)
7291 return const0_rtx;
7292
7293 if (target == 0
7294 || GET_MODE (target) != tmode
7295 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7296 target = gen_reg_rtx (tmode);
7297
f676971a 7298 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7299
7300 if (op0 == const0_rtx)
7301 {
7302 addr = gen_rtx_MEM (tmode, op1);
7303 }
7304 else
7305 {
7306 op0 = copy_to_mode_reg (mode0, op0);
7307 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7308 }
7309
7310 pat = GEN_FCN (icode) (target, addr);
7311
7312 if (! pat)
7313 return 0;
7314 emit_insn (pat);
7315
7316 return target;
7317}
7318
61bea3b0 7319static rtx
5039610b 7320spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7321{
5039610b
SL
7322 tree arg0 = CALL_EXPR_ARG (exp, 0);
7323 tree arg1 = CALL_EXPR_ARG (exp, 1);
7324 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7325 rtx op0 = expand_normal (arg0);
7326 rtx op1 = expand_normal (arg1);
7327 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7328 rtx pat;
7329 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7330 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7331 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7332
7333 /* Invalid arguments. Bail before doing anything stoopid! */
7334 if (arg0 == error_mark_node
7335 || arg1 == error_mark_node
7336 || arg2 == error_mark_node)
7337 return const0_rtx;
7338
7339 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7340 op0 = copy_to_mode_reg (mode2, op0);
7341 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7342 op1 = copy_to_mode_reg (mode0, op1);
7343 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7344 op2 = copy_to_mode_reg (mode1, op2);
7345
7346 pat = GEN_FCN (icode) (op1, op2, op0);
7347 if (pat)
7348 emit_insn (pat);
7349 return NULL_RTX;
7350}
7351
6525c0e7 7352static rtx
5039610b 7353altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7354{
5039610b
SL
7355 tree arg0 = CALL_EXPR_ARG (exp, 0);
7356 tree arg1 = CALL_EXPR_ARG (exp, 1);
7357 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7358 rtx op0 = expand_normal (arg0);
7359 rtx op1 = expand_normal (arg1);
7360 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7361 rtx pat, addr;
7362 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7363 enum machine_mode mode1 = Pmode;
7364 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7365
7366 /* Invalid arguments. Bail before doing anything stoopid! */
7367 if (arg0 == error_mark_node
7368 || arg1 == error_mark_node
7369 || arg2 == error_mark_node)
9a171fcd 7370 return const0_rtx;
6525c0e7 7371
b4a62fa0
SB
7372 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7373 op0 = copy_to_mode_reg (tmode, op0);
7374
f676971a 7375 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7376
7377 if (op1 == const0_rtx)
7378 {
7379 addr = gen_rtx_MEM (tmode, op2);
7380 }
7381 else
7382 {
7383 op1 = copy_to_mode_reg (mode1, op1);
7384 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7385 }
6525c0e7 7386
b4a62fa0 7387 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7388 if (pat)
7389 emit_insn (pat);
7390 return NULL_RTX;
7391}
7392
2212663f 7393static rtx
5039610b 7394rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7395{
7396 rtx pat;
5039610b
SL
7397 tree arg0 = CALL_EXPR_ARG (exp, 0);
7398 tree arg1 = CALL_EXPR_ARG (exp, 1);
7399 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7400 rtx op0 = expand_normal (arg0);
7401 rtx op1 = expand_normal (arg1);
7402 rtx op2 = expand_normal (arg2);
2212663f
DB
7403 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7404 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7405 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7406 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7407
774b5662
DE
7408 if (icode == CODE_FOR_nothing)
7409 /* Builtin not supported on this processor. */
7410 return 0;
7411
20e26713
AH
7412 /* If we got invalid arguments bail out before generating bad rtl. */
7413 if (arg0 == error_mark_node
7414 || arg1 == error_mark_node
7415 || arg2 == error_mark_node)
9a171fcd 7416 return const0_rtx;
20e26713 7417
aba5fb01
NS
7418 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7419 || icode == CODE_FOR_altivec_vsldoi_v4si
7420 || icode == CODE_FOR_altivec_vsldoi_v8hi
7421 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7422 {
7423 /* Only allow 4-bit unsigned literals. */
8bb418a3 7424 STRIP_NOPS (arg2);
b44140e7
AH
7425 if (TREE_CODE (arg2) != INTEGER_CST
7426 || TREE_INT_CST_LOW (arg2) & ~0xf)
7427 {
7428 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7429 return const0_rtx;
b44140e7 7430 }
b44140e7
AH
7431 }
7432
c62f2db5 7433 if (target == 0
2212663f
DB
7434 || GET_MODE (target) != tmode
7435 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7436 target = gen_reg_rtx (tmode);
7437
7438 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7439 op0 = copy_to_mode_reg (mode0, op0);
7440 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7441 op1 = copy_to_mode_reg (mode1, op1);
7442 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7443 op2 = copy_to_mode_reg (mode2, op2);
7444
7445 pat = GEN_FCN (icode) (target, op0, op1, op2);
7446 if (! pat)
7447 return 0;
7448 emit_insn (pat);
7449
7450 return target;
7451}
92898235 7452
3a9b8c7e 7453/* Expand the lvx builtins. */
0ac081f6 7454static rtx
a2369ed3 7455altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7456{
5039610b 7457 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7458 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7459 tree arg0;
7460 enum machine_mode tmode, mode0;
7c3abc73 7461 rtx pat, op0;
3a9b8c7e 7462 enum insn_code icode;
92898235 7463
0ac081f6
AH
7464 switch (fcode)
7465 {
f18c054f 7466 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7467 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7468 break;
f18c054f 7469 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7470 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7471 break;
7472 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7473 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7474 break;
7475 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7476 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7477 break;
7478 default:
7479 *expandedp = false;
7480 return NULL_RTX;
7481 }
0ac081f6 7482
3a9b8c7e 7483 *expandedp = true;
f18c054f 7484
5039610b 7485 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7486 op0 = expand_normal (arg0);
3a9b8c7e
AH
7487 tmode = insn_data[icode].operand[0].mode;
7488 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7489
3a9b8c7e
AH
7490 if (target == 0
7491 || GET_MODE (target) != tmode
7492 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7493 target = gen_reg_rtx (tmode);
24408032 7494
3a9b8c7e
AH
7495 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7496 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7497
3a9b8c7e
AH
7498 pat = GEN_FCN (icode) (target, op0);
7499 if (! pat)
7500 return 0;
7501 emit_insn (pat);
7502 return target;
7503}
f18c054f 7504
3a9b8c7e
AH
7505/* Expand the stvx builtins. */
7506static rtx
f676971a 7507altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7508 bool *expandedp)
3a9b8c7e 7509{
5039610b 7510 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7511 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7512 tree arg0, arg1;
7513 enum machine_mode mode0, mode1;
7c3abc73 7514 rtx pat, op0, op1;
3a9b8c7e 7515 enum insn_code icode;
f18c054f 7516
3a9b8c7e
AH
7517 switch (fcode)
7518 {
7519 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7520 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7521 break;
7522 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7523 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7524 break;
7525 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7526 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7527 break;
7528 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7529 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7530 break;
7531 default:
7532 *expandedp = false;
7533 return NULL_RTX;
7534 }
24408032 7535
5039610b
SL
7536 arg0 = CALL_EXPR_ARG (exp, 0);
7537 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7538 op0 = expand_normal (arg0);
7539 op1 = expand_normal (arg1);
3a9b8c7e
AH
7540 mode0 = insn_data[icode].operand[0].mode;
7541 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7542
3a9b8c7e
AH
7543 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7544 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7545 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7546 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7547
3a9b8c7e
AH
7548 pat = GEN_FCN (icode) (op0, op1);
7549 if (pat)
7550 emit_insn (pat);
f18c054f 7551
3a9b8c7e
AH
7552 *expandedp = true;
7553 return NULL_RTX;
7554}
f18c054f 7555
3a9b8c7e
AH
7556/* Expand the dst builtins. */
7557static rtx
f676971a 7558altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7559 bool *expandedp)
3a9b8c7e 7560{
5039610b 7561 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7562 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7563 tree arg0, arg1, arg2;
7564 enum machine_mode mode0, mode1, mode2;
7c3abc73 7565 rtx pat, op0, op1, op2;
3a9b8c7e 7566 struct builtin_description *d;
a3170dc6 7567 size_t i;
f18c054f 7568
3a9b8c7e 7569 *expandedp = false;
f18c054f 7570
3a9b8c7e
AH
7571 /* Handle DST variants. */
7572 d = (struct builtin_description *) bdesc_dst;
7573 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7574 if (d->code == fcode)
7575 {
5039610b
SL
7576 arg0 = CALL_EXPR_ARG (exp, 0);
7577 arg1 = CALL_EXPR_ARG (exp, 1);
7578 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7579 op0 = expand_normal (arg0);
7580 op1 = expand_normal (arg1);
7581 op2 = expand_normal (arg2);
3a9b8c7e
AH
7582 mode0 = insn_data[d->icode].operand[0].mode;
7583 mode1 = insn_data[d->icode].operand[1].mode;
7584 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7585
3a9b8c7e
AH
7586 /* Invalid arguments, bail out before generating bad rtl. */
7587 if (arg0 == error_mark_node
7588 || arg1 == error_mark_node
7589 || arg2 == error_mark_node)
7590 return const0_rtx;
f18c054f 7591
86e7df90 7592 *expandedp = true;
8bb418a3 7593 STRIP_NOPS (arg2);
3a9b8c7e
AH
7594 if (TREE_CODE (arg2) != INTEGER_CST
7595 || TREE_INT_CST_LOW (arg2) & ~0x3)
7596 {
9e637a26 7597 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7598 return const0_rtx;
7599 }
f18c054f 7600
3a9b8c7e 7601 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7602 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7603 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7604 op1 = copy_to_mode_reg (mode1, op1);
24408032 7605
3a9b8c7e
AH
7606 pat = GEN_FCN (d->icode) (op0, op1, op2);
7607 if (pat != 0)
7608 emit_insn (pat);
f18c054f 7609
3a9b8c7e
AH
7610 return NULL_RTX;
7611 }
f18c054f 7612
3a9b8c7e
AH
7613 return NULL_RTX;
7614}
24408032 7615
7a4eca66
DE
7616/* Expand vec_init builtin. */
7617static rtx
5039610b 7618altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
7619{
7620 enum machine_mode tmode = TYPE_MODE (type);
7621 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7622 int i, n_elt = GET_MODE_NUNITS (tmode);
7623 rtvec v = rtvec_alloc (n_elt);
7624
7625 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 7626 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 7627
5039610b 7628 for (i = 0; i < n_elt; ++i)
7a4eca66 7629 {
5039610b 7630 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
7631 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7632 }
7633
7a4eca66
DE
7634 if (!target || !register_operand (target, tmode))
7635 target = gen_reg_rtx (tmode);
7636
7637 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7638 return target;
7639}
7640
7641/* Return the integer constant in ARG. Constrain it to be in the range
7642 of the subparts of VEC_TYPE; issue an error if not. */
7643
7644static int
7645get_element_number (tree vec_type, tree arg)
7646{
7647 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7648
7649 if (!host_integerp (arg, 1)
7650 || (elt = tree_low_cst (arg, 1), elt > max))
7651 {
7652 error ("selector must be an integer constant in the range 0..%wi", max);
7653 return 0;
7654 }
7655
7656 return elt;
7657}
7658
7659/* Expand vec_set builtin. */
7660static rtx
5039610b 7661altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
7662{
7663 enum machine_mode tmode, mode1;
7664 tree arg0, arg1, arg2;
7665 int elt;
7666 rtx op0, op1;
7667
5039610b
SL
7668 arg0 = CALL_EXPR_ARG (exp, 0);
7669 arg1 = CALL_EXPR_ARG (exp, 1);
7670 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
7671
7672 tmode = TYPE_MODE (TREE_TYPE (arg0));
7673 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7674 gcc_assert (VECTOR_MODE_P (tmode));
7675
7676 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7677 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7678 elt = get_element_number (TREE_TYPE (arg0), arg2);
7679
7680 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7681 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7682
7683 op0 = force_reg (tmode, op0);
7684 op1 = force_reg (mode1, op1);
7685
7686 rs6000_expand_vector_set (op0, op1, elt);
7687
7688 return op0;
7689}
7690
7691/* Expand vec_ext builtin. */
7692static rtx
5039610b 7693altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
7694{
7695 enum machine_mode tmode, mode0;
7696 tree arg0, arg1;
7697 int elt;
7698 rtx op0;
7699
5039610b
SL
7700 arg0 = CALL_EXPR_ARG (exp, 0);
7701 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 7702
84217346 7703 op0 = expand_normal (arg0);
7a4eca66
DE
7704 elt = get_element_number (TREE_TYPE (arg0), arg1);
7705
7706 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7707 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7708 gcc_assert (VECTOR_MODE_P (mode0));
7709
7710 op0 = force_reg (mode0, op0);
7711
7712 if (optimize || !target || !register_operand (target, tmode))
7713 target = gen_reg_rtx (tmode);
7714
7715 rs6000_expand_vector_extract (target, op0, elt);
7716
7717 return target;
7718}
7719
3a9b8c7e
AH
7720/* Expand the builtin in EXP and store the result in TARGET. Store
7721 true in *EXPANDEDP if we found a builtin to expand. */
7722static rtx
a2369ed3 7723altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e
AH
7724{
7725 struct builtin_description *d;
7726 struct builtin_description_predicates *dp;
7727 size_t i;
7728 enum insn_code icode;
5039610b 7729 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
7730 tree arg0;
7731 rtx op0, pat;
7732 enum machine_mode tmode, mode0;
3a9b8c7e 7733 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7734
58646b77
PB
7735 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7736 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7737 {
7738 *expandedp = true;
ea40ba9c 7739 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7740 return const0_rtx;
7741 }
7742
3a9b8c7e
AH
7743 target = altivec_expand_ld_builtin (exp, target, expandedp);
7744 if (*expandedp)
7745 return target;
0ac081f6 7746
3a9b8c7e
AH
7747 target = altivec_expand_st_builtin (exp, target, expandedp);
7748 if (*expandedp)
7749 return target;
7750
7751 target = altivec_expand_dst_builtin (exp, target, expandedp);
7752 if (*expandedp)
7753 return target;
7754
7755 *expandedp = true;
95385cbb 7756
3a9b8c7e
AH
7757 switch (fcode)
7758 {
6525c0e7 7759 case ALTIVEC_BUILTIN_STVX:
5039610b 7760 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 7761 case ALTIVEC_BUILTIN_STVEBX:
5039610b 7762 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 7763 case ALTIVEC_BUILTIN_STVEHX:
5039610b 7764 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 7765 case ALTIVEC_BUILTIN_STVEWX:
5039610b 7766 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 7767 case ALTIVEC_BUILTIN_STVXL:
5039610b 7768 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 7769
95385cbb
AH
7770 case ALTIVEC_BUILTIN_MFVSCR:
7771 icode = CODE_FOR_altivec_mfvscr;
7772 tmode = insn_data[icode].operand[0].mode;
7773
7774 if (target == 0
7775 || GET_MODE (target) != tmode
7776 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7777 target = gen_reg_rtx (tmode);
f676971a 7778
95385cbb 7779 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7780 if (! pat)
7781 return 0;
7782 emit_insn (pat);
95385cbb
AH
7783 return target;
7784
7785 case ALTIVEC_BUILTIN_MTVSCR:
7786 icode = CODE_FOR_altivec_mtvscr;
5039610b 7787 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7788 op0 = expand_normal (arg0);
95385cbb
AH
7789 mode0 = insn_data[icode].operand[0].mode;
7790
7791 /* If we got invalid arguments bail out before generating bad rtl. */
7792 if (arg0 == error_mark_node)
9a171fcd 7793 return const0_rtx;
95385cbb
AH
7794
7795 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7796 op0 = copy_to_mode_reg (mode0, op0);
7797
7798 pat = GEN_FCN (icode) (op0);
7799 if (pat)
7800 emit_insn (pat);
7801 return NULL_RTX;
3a9b8c7e 7802
95385cbb
AH
7803 case ALTIVEC_BUILTIN_DSSALL:
7804 emit_insn (gen_altivec_dssall ());
7805 return NULL_RTX;
7806
7807 case ALTIVEC_BUILTIN_DSS:
7808 icode = CODE_FOR_altivec_dss;
5039610b 7809 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 7810 STRIP_NOPS (arg0);
84217346 7811 op0 = expand_normal (arg0);
95385cbb
AH
7812 mode0 = insn_data[icode].operand[0].mode;
7813
7814 /* If we got invalid arguments bail out before generating bad rtl. */
7815 if (arg0 == error_mark_node)
9a171fcd 7816 return const0_rtx;
95385cbb 7817
b44140e7
AH
7818 if (TREE_CODE (arg0) != INTEGER_CST
7819 || TREE_INT_CST_LOW (arg0) & ~0x3)
7820 {
7821 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7822 return const0_rtx;
b44140e7
AH
7823 }
7824
95385cbb
AH
7825 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7826 op0 = copy_to_mode_reg (mode0, op0);
7827
7828 emit_insn (gen_altivec_dss (op0));
0ac081f6 7829 return NULL_RTX;
7a4eca66
DE
7830
7831 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7832 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7833 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7834 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 7835 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
7836
7837 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7838 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7839 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7840 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 7841 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
7842
7843 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7844 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7845 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7846 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 7847 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
7848
7849 default:
7850 break;
7851 /* Fall through. */
0ac081f6 7852 }
24408032 7853
100c4561
AH
7854 /* Expand abs* operations. */
7855 d = (struct builtin_description *) bdesc_abs;
ca7558fc 7856 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 7857 if (d->code == fcode)
5039610b 7858 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 7859
ae4b4a02
AH
7860 /* Expand the AltiVec predicates. */
7861 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
ca7558fc 7862 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 7863 if (dp->code == fcode)
c4ad648e 7864 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 7865 exp, target);
ae4b4a02 7866
6525c0e7
AH
7867 /* LV* are funky. We initialized them differently. */
7868 switch (fcode)
7869 {
7870 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 7871 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 7872 exp, target);
6525c0e7 7873 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 7874 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 7875 exp, target);
6525c0e7 7876 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 7877 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 7878 exp, target);
6525c0e7 7879 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 7880 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 7881 exp, target);
6525c0e7 7882 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 7883 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 7884 exp, target);
6525c0e7 7885 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 7886 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 7887 exp, target);
6525c0e7 7888 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 7889 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 7890 exp, target);
6525c0e7
AH
7891 default:
7892 break;
7893 /* Fall through. */
7894 }
95385cbb 7895
92898235 7896 *expandedp = false;
0ac081f6
AH
7897 return NULL_RTX;
7898}
7899
a3170dc6
AH
7900/* Binops that need to be initialized manually, but can be expanded
7901 automagically by rs6000_expand_binop_builtin. */
7902static struct builtin_description bdesc_2arg_spe[] =
7903{
7904 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
7905 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
7906 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
7907 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
7908 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
7909 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
7910 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
7911 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
7912 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
7913 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
7914 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
7915 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
7916 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
7917 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
7918 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
7919 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
7920 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
7921 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
7922 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
7923 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
7924 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
7925 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
7926};
7927
7928/* Expand the builtin in EXP and store the result in TARGET. Store
7929 true in *EXPANDEDP if we found a builtin to expand.
7930
7931 This expands the SPE builtins that are not simple unary and binary
7932 operations. */
7933static rtx
a2369ed3 7934spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 7935{
5039610b 7936 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
7937 tree arg1, arg0;
7938 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7939 enum insn_code icode;
7940 enum machine_mode tmode, mode0;
7941 rtx pat, op0;
7942 struct builtin_description *d;
7943 size_t i;
7944
7945 *expandedp = true;
7946
7947 /* Syntax check for a 5-bit unsigned immediate. */
7948 switch (fcode)
7949 {
7950 case SPE_BUILTIN_EVSTDD:
7951 case SPE_BUILTIN_EVSTDH:
7952 case SPE_BUILTIN_EVSTDW:
7953 case SPE_BUILTIN_EVSTWHE:
7954 case SPE_BUILTIN_EVSTWHO:
7955 case SPE_BUILTIN_EVSTWWE:
7956 case SPE_BUILTIN_EVSTWWO:
5039610b 7957 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
7958 if (TREE_CODE (arg1) != INTEGER_CST
7959 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7960 {
7961 error ("argument 2 must be a 5-bit unsigned literal");
7962 return const0_rtx;
7963 }
7964 break;
7965 default:
7966 break;
7967 }
7968
00332c9f
AH
7969 /* The evsplat*i instructions are not quite generic. */
7970 switch (fcode)
7971 {
7972 case SPE_BUILTIN_EVSPLATFI:
7973 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 7974 exp, target);
00332c9f
AH
7975 case SPE_BUILTIN_EVSPLATI:
7976 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 7977 exp, target);
00332c9f
AH
7978 default:
7979 break;
7980 }
7981
a3170dc6
AH
7982 d = (struct builtin_description *) bdesc_2arg_spe;
7983 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
7984 if (d->code == fcode)
5039610b 7985 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
7986
7987 d = (struct builtin_description *) bdesc_spe_predicates;
7988 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
7989 if (d->code == fcode)
5039610b 7990 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
7991
7992 d = (struct builtin_description *) bdesc_spe_evsel;
7993 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
7994 if (d->code == fcode)
5039610b 7995 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
7996
7997 switch (fcode)
7998 {
7999 case SPE_BUILTIN_EVSTDDX:
5039610b 8000 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8001 case SPE_BUILTIN_EVSTDHX:
5039610b 8002 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8003 case SPE_BUILTIN_EVSTDWX:
5039610b 8004 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8005 case SPE_BUILTIN_EVSTWHEX:
5039610b 8006 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8007 case SPE_BUILTIN_EVSTWHOX:
5039610b 8008 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8009 case SPE_BUILTIN_EVSTWWEX:
5039610b 8010 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8011 case SPE_BUILTIN_EVSTWWOX:
5039610b 8012 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8013 case SPE_BUILTIN_EVSTDD:
5039610b 8014 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8015 case SPE_BUILTIN_EVSTDH:
5039610b 8016 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8017 case SPE_BUILTIN_EVSTDW:
5039610b 8018 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8019 case SPE_BUILTIN_EVSTWHE:
5039610b 8020 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8021 case SPE_BUILTIN_EVSTWHO:
5039610b 8022 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8023 case SPE_BUILTIN_EVSTWWE:
5039610b 8024 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8025 case SPE_BUILTIN_EVSTWWO:
5039610b 8026 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8027 case SPE_BUILTIN_MFSPEFSCR:
8028 icode = CODE_FOR_spe_mfspefscr;
8029 tmode = insn_data[icode].operand[0].mode;
8030
8031 if (target == 0
8032 || GET_MODE (target) != tmode
8033 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8034 target = gen_reg_rtx (tmode);
f676971a 8035
a3170dc6
AH
8036 pat = GEN_FCN (icode) (target);
8037 if (! pat)
8038 return 0;
8039 emit_insn (pat);
8040 return target;
8041 case SPE_BUILTIN_MTSPEFSCR:
8042 icode = CODE_FOR_spe_mtspefscr;
5039610b 8043 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8044 op0 = expand_normal (arg0);
a3170dc6
AH
8045 mode0 = insn_data[icode].operand[0].mode;
8046
8047 if (arg0 == error_mark_node)
8048 return const0_rtx;
8049
8050 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8051 op0 = copy_to_mode_reg (mode0, op0);
8052
8053 pat = GEN_FCN (icode) (op0);
8054 if (pat)
8055 emit_insn (pat);
8056 return NULL_RTX;
8057 default:
8058 break;
8059 }
8060
8061 *expandedp = false;
8062 return NULL_RTX;
8063}
8064
8065static rtx
5039610b 8066spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8067{
8068 rtx pat, scratch, tmp;
5039610b
SL
8069 tree form = CALL_EXPR_ARG (exp, 0);
8070 tree arg0 = CALL_EXPR_ARG (exp, 1);
8071 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8072 rtx op0 = expand_normal (arg0);
8073 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8074 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8075 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8076 int form_int;
8077 enum rtx_code code;
8078
8079 if (TREE_CODE (form) != INTEGER_CST)
8080 {
8081 error ("argument 1 of __builtin_spe_predicate must be a constant");
8082 return const0_rtx;
8083 }
8084 else
8085 form_int = TREE_INT_CST_LOW (form);
8086
37409796 8087 gcc_assert (mode0 == mode1);
a3170dc6
AH
8088
8089 if (arg0 == error_mark_node || arg1 == error_mark_node)
8090 return const0_rtx;
8091
8092 if (target == 0
8093 || GET_MODE (target) != SImode
8094 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8095 target = gen_reg_rtx (SImode);
8096
8097 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8098 op0 = copy_to_mode_reg (mode0, op0);
8099 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8100 op1 = copy_to_mode_reg (mode1, op1);
8101
8102 scratch = gen_reg_rtx (CCmode);
8103
8104 pat = GEN_FCN (icode) (scratch, op0, op1);
8105 if (! pat)
8106 return const0_rtx;
8107 emit_insn (pat);
8108
8109 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8110 _lower_. We use one compare, but look in different bits of the
8111 CR for each variant.
8112
8113 There are 2 elements in each SPE simd type (upper/lower). The CR
8114 bits are set as follows:
8115
8116 BIT0 | BIT 1 | BIT 2 | BIT 3
8117 U | L | (U | L) | (U & L)
8118
8119 So, for an "all" relationship, BIT 3 would be set.
8120 For an "any" relationship, BIT 2 would be set. Etc.
8121
8122 Following traditional nomenclature, these bits map to:
8123
8124 BIT0 | BIT 1 | BIT 2 | BIT 3
8125 LT | GT | EQ | OV
8126
8127 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
8128 */
8129
8130 switch (form_int)
8131 {
8132 /* All variant. OV bit. */
8133 case 0:
8134 /* We need to get to the OV bit, which is the ORDERED bit. We
8135 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8136 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8137 So let's just use another pattern. */
8138 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8139 return target;
8140 /* Any variant. EQ bit. */
8141 case 1:
8142 code = EQ;
8143 break;
8144 /* Upper variant. LT bit. */
8145 case 2:
8146 code = LT;
8147 break;
8148 /* Lower variant. GT bit. */
8149 case 3:
8150 code = GT;
8151 break;
8152 default:
8153 error ("argument 1 of __builtin_spe_predicate is out of range");
8154 return const0_rtx;
8155 }
8156
8157 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8158 emit_move_insn (target, tmp);
8159
8160 return target;
8161}
8162
8163/* The evsel builtins look like this:
8164
8165 e = __builtin_spe_evsel_OP (a, b, c, d);
8166
8167 and work like this:
8168
8169 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8170 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8171*/
8172
8173static rtx
5039610b 8174spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8175{
8176 rtx pat, scratch;
5039610b
SL
8177 tree arg0 = CALL_EXPR_ARG (exp, 0);
8178 tree arg1 = CALL_EXPR_ARG (exp, 1);
8179 tree arg2 = CALL_EXPR_ARG (exp, 2);
8180 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8181 rtx op0 = expand_normal (arg0);
8182 rtx op1 = expand_normal (arg1);
8183 rtx op2 = expand_normal (arg2);
8184 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8185 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8186 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8187
37409796 8188 gcc_assert (mode0 == mode1);
a3170dc6
AH
8189
8190 if (arg0 == error_mark_node || arg1 == error_mark_node
8191 || arg2 == error_mark_node || arg3 == error_mark_node)
8192 return const0_rtx;
8193
8194 if (target == 0
8195 || GET_MODE (target) != mode0
8196 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8197 target = gen_reg_rtx (mode0);
8198
8199 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8200 op0 = copy_to_mode_reg (mode0, op0);
8201 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8202 op1 = copy_to_mode_reg (mode0, op1);
8203 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8204 op2 = copy_to_mode_reg (mode0, op2);
8205 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8206 op3 = copy_to_mode_reg (mode0, op3);
8207
8208 /* Generate the compare. */
8209 scratch = gen_reg_rtx (CCmode);
8210 pat = GEN_FCN (icode) (scratch, op0, op1);
8211 if (! pat)
8212 return const0_rtx;
8213 emit_insn (pat);
8214
8215 if (mode0 == V2SImode)
8216 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8217 else
8218 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8219
8220 return target;
8221}
8222
0ac081f6
AH
8223/* Expand an expression EXP that calls a built-in function,
8224 with result going to TARGET if that's convenient
8225 (and in mode MODE if that's convenient).
8226 SUBTARGET may be used as the target for computing one of EXP's operands.
8227 IGNORE is nonzero if the value is to be ignored. */
8228
8229static rtx
a2369ed3 8230rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8231 enum machine_mode mode ATTRIBUTE_UNUSED,
8232 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8233{
5039610b 8234 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235
AH
8235 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8236 struct builtin_description *d;
8237 size_t i;
8238 rtx ret;
8239 bool success;
f676971a 8240
7ccf35ed
DN
8241 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8242 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8243 {
8244 int icode = (int) CODE_FOR_altivec_lvsr;
8245 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8246 enum machine_mode mode = insn_data[icode].operand[1].mode;
8247 tree arg;
8248 rtx op, addr, pat;
8249
37409796 8250 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8251
5039610b 8252 arg = CALL_EXPR_ARG (exp, 0);
37409796 8253 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8254 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8255 addr = memory_address (mode, op);
8256 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8257 op = addr;
8258 else
8259 {
8260 /* For the load case need to negate the address. */
8261 op = gen_reg_rtx (GET_MODE (addr));
8262 emit_insn (gen_rtx_SET (VOIDmode, op,
8263 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8264 }
7ccf35ed
DN
8265 op = gen_rtx_MEM (mode, op);
8266
8267 if (target == 0
8268 || GET_MODE (target) != tmode
8269 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8270 target = gen_reg_rtx (tmode);
8271
8272 /*pat = gen_altivec_lvsr (target, op);*/
8273 pat = GEN_FCN (icode) (target, op);
8274 if (!pat)
8275 return 0;
8276 emit_insn (pat);
8277
8278 return target;
8279 }
5039610b
SL
8280
8281 /* FIXME: There's got to be a nicer way to handle this case than
8282 constructing a new CALL_EXPR. */
f57d17f1
TM
8283 if (fcode == ALTIVEC_BUILTIN_VCFUX
8284 || fcode == ALTIVEC_BUILTIN_VCFSX)
8285 {
5039610b
SL
8286 if (call_expr_nargs (exp) == 1)
8287 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8288 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8289 }
7ccf35ed 8290
0ac081f6 8291 if (TARGET_ALTIVEC)
92898235
AH
8292 {
8293 ret = altivec_expand_builtin (exp, target, &success);
8294
a3170dc6
AH
8295 if (success)
8296 return ret;
8297 }
8298 if (TARGET_SPE)
8299 {
8300 ret = spe_expand_builtin (exp, target, &success);
8301
92898235
AH
8302 if (success)
8303 return ret;
8304 }
8305
37409796 8306 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 8307
37409796
NS
8308 /* Handle simple unary operations. */
8309 d = (struct builtin_description *) bdesc_1arg;
8310 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8311 if (d->code == fcode)
5039610b 8312 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8313
37409796
NS
8314 /* Handle simple binary operations. */
8315 d = (struct builtin_description *) bdesc_2arg;
8316 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8317 if (d->code == fcode)
5039610b 8318 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8319
37409796
NS
8320 /* Handle simple ternary operations. */
8321 d = (struct builtin_description *) bdesc_3arg;
8322 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8323 if (d->code == fcode)
5039610b 8324 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8325
37409796 8326 gcc_unreachable ();
0ac081f6
AH
8327}
8328
7c62e993
PB
8329static tree
8330build_opaque_vector_type (tree node, int nunits)
8331{
8332 node = copy_node (node);
8333 TYPE_MAIN_VARIANT (node) = node;
8334 return build_vector_type (node, nunits);
8335}
8336
0ac081f6 8337static void
863d938c 8338rs6000_init_builtins (void)
0ac081f6 8339{
4a5eab38
PB
8340 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8341 V2SF_type_node = build_vector_type (float_type_node, 2);
8342 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8343 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8344 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8345 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8346 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8347
8348 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8349 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8350 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8351
7c62e993
PB
8352 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8353 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8354 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8355 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8356
8bb418a3
ZL
8357 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8358 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8359 'vector unsigned short'. */
8360
8dd16ecc
NS
8361 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8362 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8363 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8364 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8365
58646b77
PB
8366 long_integer_type_internal_node = long_integer_type_node;
8367 long_unsigned_type_internal_node = long_unsigned_type_node;
8368 intQI_type_internal_node = intQI_type_node;
8369 uintQI_type_internal_node = unsigned_intQI_type_node;
8370 intHI_type_internal_node = intHI_type_node;
8371 uintHI_type_internal_node = unsigned_intHI_type_node;
8372 intSI_type_internal_node = intSI_type_node;
8373 uintSI_type_internal_node = unsigned_intSI_type_node;
8374 float_type_internal_node = float_type_node;
8375 void_type_internal_node = void_type_node;
8376
8bb418a3
ZL
8377 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8378 get_identifier ("__bool char"),
8379 bool_char_type_node));
8380 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8381 get_identifier ("__bool short"),
8382 bool_short_type_node));
8383 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8384 get_identifier ("__bool int"),
8385 bool_int_type_node));
8386 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8387 get_identifier ("__pixel"),
8388 pixel_type_node));
8389
4a5eab38
PB
8390 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8391 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8392 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8393 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8394
8395 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8396 get_identifier ("__vector unsigned char"),
8397 unsigned_V16QI_type_node));
8398 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8399 get_identifier ("__vector signed char"),
8400 V16QI_type_node));
8401 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8402 get_identifier ("__vector __bool char"),
8403 bool_V16QI_type_node));
8404
8405 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8406 get_identifier ("__vector unsigned short"),
8407 unsigned_V8HI_type_node));
8408 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8409 get_identifier ("__vector signed short"),
8410 V8HI_type_node));
8411 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8412 get_identifier ("__vector __bool short"),
8413 bool_V8HI_type_node));
8414
8415 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8416 get_identifier ("__vector unsigned int"),
8417 unsigned_V4SI_type_node));
8418 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8419 get_identifier ("__vector signed int"),
8420 V4SI_type_node));
8421 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8422 get_identifier ("__vector __bool int"),
8423 bool_V4SI_type_node));
8424
8425 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8426 get_identifier ("__vector float"),
8427 V4SF_type_node));
8428 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8429 get_identifier ("__vector __pixel"),
8430 pixel_V8HI_type_node));
8431
a3170dc6 8432 if (TARGET_SPE)
3fdaa45a 8433 spe_init_builtins ();
0ac081f6
AH
8434 if (TARGET_ALTIVEC)
8435 altivec_init_builtins ();
0559cc77
DE
8436 if (TARGET_ALTIVEC || TARGET_SPE)
8437 rs6000_common_init_builtins ();
69ca3549
DE
8438
8439#if TARGET_XCOFF
8440 /* AIX libm provides clog as __clog. */
8441 if (built_in_decls [BUILT_IN_CLOG])
8442 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8443#endif
0ac081f6
AH
8444}
8445
a3170dc6
AH
8446/* Search through a set of builtins and enable the mask bits.
8447 DESC is an array of builtins.
b6d08ca1 8448 SIZE is the total number of builtins.
a3170dc6
AH
8449 START is the builtin enum at which to start.
8450 END is the builtin enum at which to end. */
0ac081f6 8451static void
a2369ed3 8452enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8453 enum rs6000_builtins start,
a2369ed3 8454 enum rs6000_builtins end)
a3170dc6
AH
8455{
8456 int i;
8457
8458 for (i = 0; i < size; ++i)
8459 if (desc[i].code == start)
8460 break;
8461
8462 if (i == size)
8463 return;
8464
8465 for (; i < size; ++i)
8466 {
8467 /* Flip all the bits on. */
8468 desc[i].mask = target_flags;
8469 if (desc[i].code == end)
8470 break;
8471 }
8472}
8473
8474static void
863d938c 8475spe_init_builtins (void)
0ac081f6 8476{
a3170dc6
AH
8477 tree endlink = void_list_node;
8478 tree puint_type_node = build_pointer_type (unsigned_type_node);
8479 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 8480 struct builtin_description *d;
0ac081f6
AH
8481 size_t i;
8482
a3170dc6
AH
8483 tree v2si_ftype_4_v2si
8484 = build_function_type
3fdaa45a
AH
8485 (opaque_V2SI_type_node,
8486 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8487 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8488 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8489 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8490 endlink)))));
8491
8492 tree v2sf_ftype_4_v2sf
8493 = build_function_type
3fdaa45a
AH
8494 (opaque_V2SF_type_node,
8495 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8496 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8497 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8498 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8499 endlink)))));
8500
8501 tree int_ftype_int_v2si_v2si
8502 = build_function_type
8503 (integer_type_node,
8504 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8505 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8506 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8507 endlink))));
8508
8509 tree int_ftype_int_v2sf_v2sf
8510 = build_function_type
8511 (integer_type_node,
8512 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8513 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8514 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8515 endlink))));
8516
8517 tree void_ftype_v2si_puint_int
8518 = build_function_type (void_type_node,
3fdaa45a 8519 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8520 tree_cons (NULL_TREE, puint_type_node,
8521 tree_cons (NULL_TREE,
8522 integer_type_node,
8523 endlink))));
8524
8525 tree void_ftype_v2si_puint_char
8526 = build_function_type (void_type_node,
3fdaa45a 8527 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8528 tree_cons (NULL_TREE, puint_type_node,
8529 tree_cons (NULL_TREE,
8530 char_type_node,
8531 endlink))));
8532
8533 tree void_ftype_v2si_pv2si_int
8534 = build_function_type (void_type_node,
3fdaa45a 8535 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8536 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8537 tree_cons (NULL_TREE,
8538 integer_type_node,
8539 endlink))));
8540
8541 tree void_ftype_v2si_pv2si_char
8542 = build_function_type (void_type_node,
3fdaa45a 8543 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8544 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8545 tree_cons (NULL_TREE,
8546 char_type_node,
8547 endlink))));
8548
8549 tree void_ftype_int
8550 = build_function_type (void_type_node,
8551 tree_cons (NULL_TREE, integer_type_node, endlink));
8552
8553 tree int_ftype_void
36e8d515 8554 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8555
8556 tree v2si_ftype_pv2si_int
3fdaa45a 8557 = build_function_type (opaque_V2SI_type_node,
6035d635 8558 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8559 tree_cons (NULL_TREE, integer_type_node,
8560 endlink)));
8561
8562 tree v2si_ftype_puint_int
3fdaa45a 8563 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8564 tree_cons (NULL_TREE, puint_type_node,
8565 tree_cons (NULL_TREE, integer_type_node,
8566 endlink)));
8567
8568 tree v2si_ftype_pushort_int
3fdaa45a 8569 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8570 tree_cons (NULL_TREE, pushort_type_node,
8571 tree_cons (NULL_TREE, integer_type_node,
8572 endlink)));
8573
00332c9f
AH
8574 tree v2si_ftype_signed_char
8575 = build_function_type (opaque_V2SI_type_node,
8576 tree_cons (NULL_TREE, signed_char_type_node,
8577 endlink));
8578
a3170dc6
AH
8579 /* The initialization of the simple binary and unary builtins is
8580 done in rs6000_common_init_builtins, but we have to enable the
8581 mask bits here manually because we have run out of `target_flags'
8582 bits. We really need to redesign this mask business. */
8583
8584 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8585 ARRAY_SIZE (bdesc_2arg),
8586 SPE_BUILTIN_EVADDW,
8587 SPE_BUILTIN_EVXOR);
8588 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8589 ARRAY_SIZE (bdesc_1arg),
8590 SPE_BUILTIN_EVABS,
8591 SPE_BUILTIN_EVSUBFUSIAAW);
8592 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8593 ARRAY_SIZE (bdesc_spe_predicates),
8594 SPE_BUILTIN_EVCMPEQ,
8595 SPE_BUILTIN_EVFSTSTLT);
8596 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8597 ARRAY_SIZE (bdesc_spe_evsel),
8598 SPE_BUILTIN_EVSEL_CMPGTS,
8599 SPE_BUILTIN_EVSEL_FSTSTEQ);
8600
36252949
AH
8601 (*lang_hooks.decls.pushdecl)
8602 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8603 opaque_V2SI_type_node));
8604
a3170dc6 8605 /* Initialize irregular SPE builtins. */
f676971a 8606
a3170dc6
AH
8607 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8608 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8609 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8610 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8611 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8612 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8613 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8614 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8615 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8616 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8617 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8618 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8619 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8620 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8621 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8622 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8623 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8624 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8625
8626 /* Loads. */
8627 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8628 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8629 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8630 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8631 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8632 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8633 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8634 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8635 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8636 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8637 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8638 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8639 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8640 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8641 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8642 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8643 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8644 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8645 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8646 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8647 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8648 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8649
8650 /* Predicates. */
8651 d = (struct builtin_description *) bdesc_spe_predicates;
8652 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8653 {
8654 tree type;
8655
8656 switch (insn_data[d->icode].operand[1].mode)
8657 {
8658 case V2SImode:
8659 type = int_ftype_int_v2si_v2si;
8660 break;
8661 case V2SFmode:
8662 type = int_ftype_int_v2sf_v2sf;
8663 break;
8664 default:
37409796 8665 gcc_unreachable ();
a3170dc6
AH
8666 }
8667
8668 def_builtin (d->mask, d->name, type, d->code);
8669 }
8670
8671 /* Evsel predicates. */
8672 d = (struct builtin_description *) bdesc_spe_evsel;
8673 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8674 {
8675 tree type;
8676
8677 switch (insn_data[d->icode].operand[1].mode)
8678 {
8679 case V2SImode:
8680 type = v2si_ftype_4_v2si;
8681 break;
8682 case V2SFmode:
8683 type = v2sf_ftype_4_v2sf;
8684 break;
8685 default:
37409796 8686 gcc_unreachable ();
a3170dc6
AH
8687 }
8688
8689 def_builtin (d->mask, d->name, type, d->code);
8690 }
8691}
8692
8693static void
863d938c 8694altivec_init_builtins (void)
a3170dc6
AH
8695{
8696 struct builtin_description *d;
8697 struct builtin_description_predicates *dp;
8698 size_t i;
7a4eca66
DE
8699 tree ftype;
8700
a3170dc6
AH
8701 tree pfloat_type_node = build_pointer_type (float_type_node);
8702 tree pint_type_node = build_pointer_type (integer_type_node);
8703 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8704 tree pchar_type_node = build_pointer_type (char_type_node);
8705
8706 tree pvoid_type_node = build_pointer_type (void_type_node);
8707
0dbc3651
ZW
8708 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8709 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8710 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8711 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8712
8713 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8714
58646b77
PB
8715 tree int_ftype_opaque
8716 = build_function_type_list (integer_type_node,
8717 opaque_V4SI_type_node, NULL_TREE);
8718
8719 tree opaque_ftype_opaque_int
8720 = build_function_type_list (opaque_V4SI_type_node,
8721 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8722 tree opaque_ftype_opaque_opaque_int
8723 = build_function_type_list (opaque_V4SI_type_node,
8724 opaque_V4SI_type_node, opaque_V4SI_type_node,
8725 integer_type_node, NULL_TREE);
8726 tree int_ftype_int_opaque_opaque
8727 = build_function_type_list (integer_type_node,
8728 integer_type_node, opaque_V4SI_type_node,
8729 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8730 tree int_ftype_int_v4si_v4si
8731 = build_function_type_list (integer_type_node,
8732 integer_type_node, V4SI_type_node,
8733 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8734 tree v4sf_ftype_pcfloat
8735 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8736 tree void_ftype_pfloat_v4sf
b4de2f7d 8737 = build_function_type_list (void_type_node,
a3170dc6 8738 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8739 tree v4si_ftype_pcint
8740 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8741 tree void_ftype_pint_v4si
b4de2f7d
AH
8742 = build_function_type_list (void_type_node,
8743 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8744 tree v8hi_ftype_pcshort
8745 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8746 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8747 = build_function_type_list (void_type_node,
8748 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8749 tree v16qi_ftype_pcchar
8750 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8751 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8752 = build_function_type_list (void_type_node,
8753 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8754 tree void_ftype_v4si
b4de2f7d 8755 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8756 tree v8hi_ftype_void
8757 = build_function_type (V8HI_type_node, void_list_node);
8758 tree void_ftype_void
8759 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8760 tree void_ftype_int
8761 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8762
58646b77
PB
8763 tree opaque_ftype_long_pcvoid
8764 = build_function_type_list (opaque_V4SI_type_node,
8765 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8766 tree v16qi_ftype_long_pcvoid
a3170dc6 8767 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8768 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8769 tree v8hi_ftype_long_pcvoid
a3170dc6 8770 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8771 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8772 tree v4si_ftype_long_pcvoid
a3170dc6 8773 = build_function_type_list (V4SI_type_node,
b4a62fa0 8774 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8775
58646b77
PB
8776 tree void_ftype_opaque_long_pvoid
8777 = build_function_type_list (void_type_node,
8778 opaque_V4SI_type_node, long_integer_type_node,
8779 pvoid_type_node, NULL_TREE);
b4a62fa0 8780 tree void_ftype_v4si_long_pvoid
b4de2f7d 8781 = build_function_type_list (void_type_node,
b4a62fa0 8782 V4SI_type_node, long_integer_type_node,
b4de2f7d 8783 pvoid_type_node, NULL_TREE);
b4a62fa0 8784 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8785 = build_function_type_list (void_type_node,
b4a62fa0 8786 V16QI_type_node, long_integer_type_node,
b4de2f7d 8787 pvoid_type_node, NULL_TREE);
b4a62fa0 8788 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8789 = build_function_type_list (void_type_node,
b4a62fa0 8790 V8HI_type_node, long_integer_type_node,
b4de2f7d 8791 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8792 tree int_ftype_int_v8hi_v8hi
8793 = build_function_type_list (integer_type_node,
8794 integer_type_node, V8HI_type_node,
8795 V8HI_type_node, NULL_TREE);
8796 tree int_ftype_int_v16qi_v16qi
8797 = build_function_type_list (integer_type_node,
8798 integer_type_node, V16QI_type_node,
8799 V16QI_type_node, NULL_TREE);
8800 tree int_ftype_int_v4sf_v4sf
8801 = build_function_type_list (integer_type_node,
8802 integer_type_node, V4SF_type_node,
8803 V4SF_type_node, NULL_TREE);
8804 tree v4si_ftype_v4si
8805 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8806 tree v8hi_ftype_v8hi
8807 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8808 tree v16qi_ftype_v16qi
8809 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8810 tree v4sf_ftype_v4sf
8811 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8812 tree void_ftype_pcvoid_int_int
a3170dc6 8813 = build_function_type_list (void_type_node,
0dbc3651 8814 pcvoid_type_node, integer_type_node,
8bb418a3 8815 integer_type_node, NULL_TREE);
8bb418a3 8816
0dbc3651
ZW
8817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8818 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8820 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8822 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8823 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8824 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8826 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8827 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8828 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8829 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8830 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8831 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8832 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
8833 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8834 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8835 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 8836 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
8837 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8838 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8839 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8840 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8841 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8842 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8843 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8844 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8845 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8846 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8847 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8848 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
8849 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8850 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8851 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8852 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8853 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8854 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8855 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8856 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
8857 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
8858 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
8859 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
8860 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
8861 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
8862 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
8863
8864 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
8865
8866 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
8867 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
8868 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
8869 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
8870 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
8871 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
8872 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
8873 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
8874 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
8875 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 8876
a3170dc6
AH
8877 /* Add the DST variants. */
8878 d = (struct builtin_description *) bdesc_dst;
8879 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 8880 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
8881
8882 /* Initialize the predicates. */
8883 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
8884 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8885 {
8886 enum machine_mode mode1;
8887 tree type;
58646b77
PB
8888 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8889 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 8890
58646b77
PB
8891 if (is_overloaded)
8892 mode1 = VOIDmode;
8893 else
8894 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
8895
8896 switch (mode1)
8897 {
58646b77
PB
8898 case VOIDmode:
8899 type = int_ftype_int_opaque_opaque;
8900 break;
a3170dc6
AH
8901 case V4SImode:
8902 type = int_ftype_int_v4si_v4si;
8903 break;
8904 case V8HImode:
8905 type = int_ftype_int_v8hi_v8hi;
8906 break;
8907 case V16QImode:
8908 type = int_ftype_int_v16qi_v16qi;
8909 break;
8910 case V4SFmode:
8911 type = int_ftype_int_v4sf_v4sf;
8912 break;
8913 default:
37409796 8914 gcc_unreachable ();
a3170dc6 8915 }
f676971a 8916
a3170dc6
AH
8917 def_builtin (dp->mask, dp->name, type, dp->code);
8918 }
8919
8920 /* Initialize the abs* operators. */
8921 d = (struct builtin_description *) bdesc_abs;
8922 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8923 {
8924 enum machine_mode mode0;
8925 tree type;
8926
8927 mode0 = insn_data[d->icode].operand[0].mode;
8928
8929 switch (mode0)
8930 {
8931 case V4SImode:
8932 type = v4si_ftype_v4si;
8933 break;
8934 case V8HImode:
8935 type = v8hi_ftype_v8hi;
8936 break;
8937 case V16QImode:
8938 type = v16qi_ftype_v16qi;
8939 break;
8940 case V4SFmode:
8941 type = v4sf_ftype_v4sf;
8942 break;
8943 default:
37409796 8944 gcc_unreachable ();
a3170dc6 8945 }
f676971a 8946
a3170dc6
AH
8947 def_builtin (d->mask, d->name, type, d->code);
8948 }
7ccf35ed 8949
13c62176
DN
8950 if (TARGET_ALTIVEC)
8951 {
8952 tree decl;
8953
8954 /* Initialize target builtin that implements
8955 targetm.vectorize.builtin_mask_for_load. */
8956
c79efc4d
RÁE
8957 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
8958 v16qi_ftype_long_pcvoid,
8959 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
8960 BUILT_IN_MD, NULL, NULL_TREE);
8961 TREE_READONLY (decl) = 1;
13c62176
DN
8962 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8963 altivec_builtin_mask_for_load = decl;
13c62176 8964 }
7a4eca66
DE
8965
8966 /* Access to the vec_init patterns. */
8967 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
8968 integer_type_node, integer_type_node,
8969 integer_type_node, NULL_TREE);
8970 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
8971 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
8972
8973 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
8974 short_integer_type_node,
8975 short_integer_type_node,
8976 short_integer_type_node,
8977 short_integer_type_node,
8978 short_integer_type_node,
8979 short_integer_type_node,
8980 short_integer_type_node, NULL_TREE);
8981 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
8982 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
8983
8984 ftype = build_function_type_list (V16QI_type_node, char_type_node,
8985 char_type_node, char_type_node,
8986 char_type_node, char_type_node,
8987 char_type_node, char_type_node,
8988 char_type_node, char_type_node,
8989 char_type_node, char_type_node,
8990 char_type_node, char_type_node,
8991 char_type_node, char_type_node,
8992 char_type_node, NULL_TREE);
8993 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
8994 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
8995
8996 ftype = build_function_type_list (V4SF_type_node, float_type_node,
8997 float_type_node, float_type_node,
8998 float_type_node, NULL_TREE);
8999 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9000 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9001
9002 /* Access to the vec_set patterns. */
9003 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9004 intSI_type_node,
9005 integer_type_node, NULL_TREE);
9006 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9007 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9008
9009 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9010 intHI_type_node,
9011 integer_type_node, NULL_TREE);
9012 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9013 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9014
9015 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9016 intQI_type_node,
9017 integer_type_node, NULL_TREE);
9018 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9019 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9020
9021 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9022 float_type_node,
9023 integer_type_node, NULL_TREE);
9024 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9025 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9026
9027 /* Access to the vec_extract patterns. */
9028 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9029 integer_type_node, NULL_TREE);
9030 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9031 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9032
9033 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9034 integer_type_node, NULL_TREE);
9035 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9036 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9037
9038 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9039 integer_type_node, NULL_TREE);
9040 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9041 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9042
9043 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9044 integer_type_node, NULL_TREE);
9045 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9046 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9047}
9048
9049static void
863d938c 9050rs6000_common_init_builtins (void)
a3170dc6
AH
9051{
9052 struct builtin_description *d;
9053 size_t i;
9054
9055 tree v4sf_ftype_v4sf_v4sf_v16qi
9056 = build_function_type_list (V4SF_type_node,
9057 V4SF_type_node, V4SF_type_node,
9058 V16QI_type_node, NULL_TREE);
9059 tree v4si_ftype_v4si_v4si_v16qi
9060 = build_function_type_list (V4SI_type_node,
9061 V4SI_type_node, V4SI_type_node,
9062 V16QI_type_node, NULL_TREE);
9063 tree v8hi_ftype_v8hi_v8hi_v16qi
9064 = build_function_type_list (V8HI_type_node,
9065 V8HI_type_node, V8HI_type_node,
9066 V16QI_type_node, NULL_TREE);
9067 tree v16qi_ftype_v16qi_v16qi_v16qi
9068 = build_function_type_list (V16QI_type_node,
9069 V16QI_type_node, V16QI_type_node,
9070 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9071 tree v4si_ftype_int
9072 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9073 tree v8hi_ftype_int
9074 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9075 tree v16qi_ftype_int
9076 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9077 tree v8hi_ftype_v16qi
9078 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9079 tree v4sf_ftype_v4sf
9080 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9081
9082 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9083 = build_function_type_list (opaque_V2SI_type_node,
9084 opaque_V2SI_type_node,
9085 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9086
9087 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
9088 = build_function_type_list (opaque_V2SF_type_node,
9089 opaque_V2SF_type_node,
9090 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9091
9092 tree v2si_ftype_int_int
2abe3e28 9093 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9094 integer_type_node, integer_type_node,
9095 NULL_TREE);
9096
58646b77
PB
9097 tree opaque_ftype_opaque
9098 = build_function_type_list (opaque_V4SI_type_node,
9099 opaque_V4SI_type_node, NULL_TREE);
9100
a3170dc6 9101 tree v2si_ftype_v2si
2abe3e28
AH
9102 = build_function_type_list (opaque_V2SI_type_node,
9103 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9104
9105 tree v2sf_ftype_v2sf
2abe3e28
AH
9106 = build_function_type_list (opaque_V2SF_type_node,
9107 opaque_V2SF_type_node, NULL_TREE);
f676971a 9108
a3170dc6 9109 tree v2sf_ftype_v2si
2abe3e28
AH
9110 = build_function_type_list (opaque_V2SF_type_node,
9111 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9112
9113 tree v2si_ftype_v2sf
2abe3e28
AH
9114 = build_function_type_list (opaque_V2SI_type_node,
9115 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9116
9117 tree v2si_ftype_v2si_char
2abe3e28
AH
9118 = build_function_type_list (opaque_V2SI_type_node,
9119 opaque_V2SI_type_node,
9120 char_type_node, NULL_TREE);
a3170dc6
AH
9121
9122 tree v2si_ftype_int_char
2abe3e28 9123 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9124 integer_type_node, char_type_node, NULL_TREE);
9125
9126 tree v2si_ftype_char
2abe3e28
AH
9127 = build_function_type_list (opaque_V2SI_type_node,
9128 char_type_node, NULL_TREE);
a3170dc6
AH
9129
9130 tree int_ftype_int_int
9131 = build_function_type_list (integer_type_node,
9132 integer_type_node, integer_type_node,
9133 NULL_TREE);
95385cbb 9134
58646b77
PB
9135 tree opaque_ftype_opaque_opaque
9136 = build_function_type_list (opaque_V4SI_type_node,
9137 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9138 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9139 = build_function_type_list (V4SI_type_node,
9140 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9141 tree v4sf_ftype_v4si_int
b4de2f7d 9142 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9143 V4SI_type_node, integer_type_node, NULL_TREE);
9144 tree v4si_ftype_v4sf_int
b4de2f7d 9145 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9146 V4SF_type_node, integer_type_node, NULL_TREE);
9147 tree v4si_ftype_v4si_int
b4de2f7d 9148 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9149 V4SI_type_node, integer_type_node, NULL_TREE);
9150 tree v8hi_ftype_v8hi_int
b4de2f7d 9151 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9152 V8HI_type_node, integer_type_node, NULL_TREE);
9153 tree v16qi_ftype_v16qi_int
b4de2f7d 9154 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9155 V16QI_type_node, integer_type_node, NULL_TREE);
9156 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9157 = build_function_type_list (V16QI_type_node,
9158 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9159 integer_type_node, NULL_TREE);
9160 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9161 = build_function_type_list (V8HI_type_node,
9162 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9163 integer_type_node, NULL_TREE);
9164 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9165 = build_function_type_list (V4SI_type_node,
9166 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9167 integer_type_node, NULL_TREE);
9168 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9169 = build_function_type_list (V4SF_type_node,
9170 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9171 integer_type_node, NULL_TREE);
0ac081f6 9172 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9173 = build_function_type_list (V4SF_type_node,
9174 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9175 tree opaque_ftype_opaque_opaque_opaque
9176 = build_function_type_list (opaque_V4SI_type_node,
9177 opaque_V4SI_type_node, opaque_V4SI_type_node,
9178 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9179 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9180 = build_function_type_list (V4SF_type_node,
9181 V4SF_type_node, V4SF_type_node,
9182 V4SI_type_node, NULL_TREE);
2212663f 9183 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9184 = build_function_type_list (V4SF_type_node,
9185 V4SF_type_node, V4SF_type_node,
9186 V4SF_type_node, NULL_TREE);
f676971a 9187 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9188 = build_function_type_list (V4SI_type_node,
9189 V4SI_type_node, V4SI_type_node,
9190 V4SI_type_node, NULL_TREE);
0ac081f6 9191 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9192 = build_function_type_list (V8HI_type_node,
9193 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9194 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9195 = build_function_type_list (V8HI_type_node,
9196 V8HI_type_node, V8HI_type_node,
9197 V8HI_type_node, NULL_TREE);
c4ad648e 9198 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9199 = build_function_type_list (V4SI_type_node,
9200 V8HI_type_node, V8HI_type_node,
9201 V4SI_type_node, NULL_TREE);
c4ad648e 9202 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9203 = build_function_type_list (V4SI_type_node,
9204 V16QI_type_node, V16QI_type_node,
9205 V4SI_type_node, NULL_TREE);
0ac081f6 9206 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9207 = build_function_type_list (V16QI_type_node,
9208 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9209 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9210 = build_function_type_list (V4SI_type_node,
9211 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9212 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9213 = build_function_type_list (V8HI_type_node,
9214 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9215 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9216 = build_function_type_list (V4SI_type_node,
9217 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9218 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9219 = build_function_type_list (V8HI_type_node,
9220 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9221 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9222 = build_function_type_list (V16QI_type_node,
9223 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9224 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9225 = build_function_type_list (V4SI_type_node,
9226 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9227 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9228 = build_function_type_list (V4SI_type_node,
9229 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9230 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9231 = build_function_type_list (V4SI_type_node,
9232 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9233 tree v4si_ftype_v8hi
9234 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9235 tree int_ftype_v4si_v4si
9236 = build_function_type_list (integer_type_node,
9237 V4SI_type_node, V4SI_type_node, NULL_TREE);
9238 tree int_ftype_v4sf_v4sf
9239 = build_function_type_list (integer_type_node,
9240 V4SF_type_node, V4SF_type_node, NULL_TREE);
9241 tree int_ftype_v16qi_v16qi
9242 = build_function_type_list (integer_type_node,
9243 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9244 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9245 = build_function_type_list (integer_type_node,
9246 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9247
6f317ef3 9248 /* Add the simple ternary operators. */
2212663f 9249 d = (struct builtin_description *) bdesc_3arg;
ca7558fc 9250 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9251 {
2212663f
DB
9252 enum machine_mode mode0, mode1, mode2, mode3;
9253 tree type;
58646b77
PB
9254 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9255 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9256
58646b77
PB
9257 if (is_overloaded)
9258 {
9259 mode0 = VOIDmode;
9260 mode1 = VOIDmode;
9261 mode2 = VOIDmode;
9262 mode3 = VOIDmode;
9263 }
9264 else
9265 {
9266 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9267 continue;
f676971a 9268
58646b77
PB
9269 mode0 = insn_data[d->icode].operand[0].mode;
9270 mode1 = insn_data[d->icode].operand[1].mode;
9271 mode2 = insn_data[d->icode].operand[2].mode;
9272 mode3 = insn_data[d->icode].operand[3].mode;
9273 }
bb8df8a6 9274
2212663f
DB
9275 /* When all four are of the same mode. */
9276 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9277 {
9278 switch (mode0)
9279 {
58646b77
PB
9280 case VOIDmode:
9281 type = opaque_ftype_opaque_opaque_opaque;
9282 break;
617e0e1d
DB
9283 case V4SImode:
9284 type = v4si_ftype_v4si_v4si_v4si;
9285 break;
2212663f
DB
9286 case V4SFmode:
9287 type = v4sf_ftype_v4sf_v4sf_v4sf;
9288 break;
9289 case V8HImode:
9290 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9291 break;
2212663f
DB
9292 case V16QImode:
9293 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9294 break;
2212663f 9295 default:
37409796 9296 gcc_unreachable ();
2212663f
DB
9297 }
9298 }
9299 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 9300 {
2212663f
DB
9301 switch (mode0)
9302 {
9303 case V4SImode:
9304 type = v4si_ftype_v4si_v4si_v16qi;
9305 break;
9306 case V4SFmode:
9307 type = v4sf_ftype_v4sf_v4sf_v16qi;
9308 break;
9309 case V8HImode:
9310 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 9311 break;
2212663f
DB
9312 case V16QImode:
9313 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9314 break;
2212663f 9315 default:
37409796 9316 gcc_unreachable ();
2212663f
DB
9317 }
9318 }
f676971a 9319 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 9320 && mode3 == V4SImode)
24408032 9321 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 9322 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 9323 && mode3 == V4SImode)
24408032 9324 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 9325 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 9326 && mode3 == V4SImode)
24408032
AH
9327 type = v4sf_ftype_v4sf_v4sf_v4si;
9328
a7b376ee 9329 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
9330 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9331 && mode3 == QImode)
b9e4e5d1 9332 type = v16qi_ftype_v16qi_v16qi_int;
24408032 9333
a7b376ee 9334 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
9335 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9336 && mode3 == QImode)
b9e4e5d1 9337 type = v8hi_ftype_v8hi_v8hi_int;
24408032 9338
a7b376ee 9339 /* vint, vint, vint, 4-bit literal. */
24408032
AH
9340 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9341 && mode3 == QImode)
b9e4e5d1 9342 type = v4si_ftype_v4si_v4si_int;
24408032 9343
a7b376ee 9344 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
9345 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9346 && mode3 == QImode)
b9e4e5d1 9347 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9348
2212663f 9349 else
37409796 9350 gcc_unreachable ();
2212663f
DB
9351
9352 def_builtin (d->mask, d->name, type, d->code);
9353 }
9354
0ac081f6 9355 /* Add the simple binary operators. */
00b960c7 9356 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9357 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
9358 {
9359 enum machine_mode mode0, mode1, mode2;
9360 tree type;
58646b77
PB
9361 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9362 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9363
58646b77
PB
9364 if (is_overloaded)
9365 {
9366 mode0 = VOIDmode;
9367 mode1 = VOIDmode;
9368 mode2 = VOIDmode;
9369 }
9370 else
bb8df8a6 9371 {
58646b77
PB
9372 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9373 continue;
f676971a 9374
58646b77
PB
9375 mode0 = insn_data[d->icode].operand[0].mode;
9376 mode1 = insn_data[d->icode].operand[1].mode;
9377 mode2 = insn_data[d->icode].operand[2].mode;
9378 }
0ac081f6
AH
9379
9380 /* When all three operands are of the same mode. */
9381 if (mode0 == mode1 && mode1 == mode2)
9382 {
9383 switch (mode0)
9384 {
58646b77
PB
9385 case VOIDmode:
9386 type = opaque_ftype_opaque_opaque;
9387 break;
0ac081f6
AH
9388 case V4SFmode:
9389 type = v4sf_ftype_v4sf_v4sf;
9390 break;
9391 case V4SImode:
9392 type = v4si_ftype_v4si_v4si;
9393 break;
9394 case V16QImode:
9395 type = v16qi_ftype_v16qi_v16qi;
9396 break;
9397 case V8HImode:
9398 type = v8hi_ftype_v8hi_v8hi;
9399 break;
a3170dc6
AH
9400 case V2SImode:
9401 type = v2si_ftype_v2si_v2si;
9402 break;
9403 case V2SFmode:
9404 type = v2sf_ftype_v2sf_v2sf;
9405 break;
9406 case SImode:
9407 type = int_ftype_int_int;
9408 break;
0ac081f6 9409 default:
37409796 9410 gcc_unreachable ();
0ac081f6
AH
9411 }
9412 }
9413
9414 /* A few other combos we really don't want to do manually. */
9415
9416 /* vint, vfloat, vfloat. */
9417 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9418 type = v4si_ftype_v4sf_v4sf;
9419
9420 /* vshort, vchar, vchar. */
9421 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9422 type = v8hi_ftype_v16qi_v16qi;
9423
9424 /* vint, vshort, vshort. */
9425 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9426 type = v4si_ftype_v8hi_v8hi;
9427
9428 /* vshort, vint, vint. */
9429 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9430 type = v8hi_ftype_v4si_v4si;
9431
9432 /* vchar, vshort, vshort. */
9433 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9434 type = v16qi_ftype_v8hi_v8hi;
9435
9436 /* vint, vchar, vint. */
9437 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9438 type = v4si_ftype_v16qi_v4si;
9439
fa066a23
AH
9440 /* vint, vchar, vchar. */
9441 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9442 type = v4si_ftype_v16qi_v16qi;
9443
0ac081f6
AH
9444 /* vint, vshort, vint. */
9445 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9446 type = v4si_ftype_v8hi_v4si;
f676971a 9447
a7b376ee 9448 /* vint, vint, 5-bit literal. */
2212663f 9449 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9450 type = v4si_ftype_v4si_int;
f676971a 9451
a7b376ee 9452 /* vshort, vshort, 5-bit literal. */
2212663f 9453 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 9454 type = v8hi_ftype_v8hi_int;
f676971a 9455
a7b376ee 9456 /* vchar, vchar, 5-bit literal. */
2212663f 9457 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 9458 type = v16qi_ftype_v16qi_int;
0ac081f6 9459
a7b376ee 9460 /* vfloat, vint, 5-bit literal. */
617e0e1d 9461 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9462 type = v4sf_ftype_v4si_int;
f676971a 9463
a7b376ee 9464 /* vint, vfloat, 5-bit literal. */
617e0e1d 9465 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 9466 type = v4si_ftype_v4sf_int;
617e0e1d 9467
a3170dc6
AH
9468 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9469 type = v2si_ftype_int_int;
9470
9471 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9472 type = v2si_ftype_v2si_char;
9473
9474 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9475 type = v2si_ftype_int_char;
9476
37409796 9477 else
0ac081f6 9478 {
37409796
NS
9479 /* int, x, x. */
9480 gcc_assert (mode0 == SImode);
0ac081f6
AH
9481 switch (mode1)
9482 {
9483 case V4SImode:
9484 type = int_ftype_v4si_v4si;
9485 break;
9486 case V4SFmode:
9487 type = int_ftype_v4sf_v4sf;
9488 break;
9489 case V16QImode:
9490 type = int_ftype_v16qi_v16qi;
9491 break;
9492 case V8HImode:
9493 type = int_ftype_v8hi_v8hi;
9494 break;
9495 default:
37409796 9496 gcc_unreachable ();
0ac081f6
AH
9497 }
9498 }
9499
2212663f
DB
9500 def_builtin (d->mask, d->name, type, d->code);
9501 }
24408032 9502
2212663f
DB
9503 /* Add the simple unary operators. */
9504 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 9505 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
9506 {
9507 enum machine_mode mode0, mode1;
9508 tree type;
58646b77
PB
9509 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9510 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9511
9512 if (is_overloaded)
9513 {
9514 mode0 = VOIDmode;
9515 mode1 = VOIDmode;
9516 }
9517 else
9518 {
9519 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9520 continue;
bb8df8a6 9521
58646b77
PB
9522 mode0 = insn_data[d->icode].operand[0].mode;
9523 mode1 = insn_data[d->icode].operand[1].mode;
9524 }
2212663f
DB
9525
9526 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 9527 type = v4si_ftype_int;
2212663f 9528 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 9529 type = v8hi_ftype_int;
2212663f 9530 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 9531 type = v16qi_ftype_int;
58646b77
PB
9532 else if (mode0 == VOIDmode && mode1 == VOIDmode)
9533 type = opaque_ftype_opaque;
617e0e1d
DB
9534 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9535 type = v4sf_ftype_v4sf;
20e26713
AH
9536 else if (mode0 == V8HImode && mode1 == V16QImode)
9537 type = v8hi_ftype_v16qi;
9538 else if (mode0 == V4SImode && mode1 == V8HImode)
9539 type = v4si_ftype_v8hi;
a3170dc6
AH
9540 else if (mode0 == V2SImode && mode1 == V2SImode)
9541 type = v2si_ftype_v2si;
9542 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9543 type = v2sf_ftype_v2sf;
9544 else if (mode0 == V2SFmode && mode1 == V2SImode)
9545 type = v2sf_ftype_v2si;
9546 else if (mode0 == V2SImode && mode1 == V2SFmode)
9547 type = v2si_ftype_v2sf;
9548 else if (mode0 == V2SImode && mode1 == QImode)
9549 type = v2si_ftype_char;
2212663f 9550 else
37409796 9551 gcc_unreachable ();
2212663f 9552
0ac081f6
AH
9553 def_builtin (d->mask, d->name, type, d->code);
9554 }
9555}
9556
c15c90bb
ZW
9557static void
9558rs6000_init_libfuncs (void)
9559{
602ea4d3
JJ
9560 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9561 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 9562 {
602ea4d3
JJ
9563 /* AIX library routines for float->int conversion. */
9564 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9565 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9566 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9567 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9568 }
c15c90bb 9569
602ea4d3 9570 if (!TARGET_IEEEQUAD)
98c41d98 9571 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
9572 if (!TARGET_XL_COMPAT)
9573 {
9574 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9575 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9576 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9577 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 9578
17caeff2 9579 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
9580 {
9581 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
9582 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
9583 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
9584 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
9585 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
9586 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
9587 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
9588
9589 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
9590 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
9591 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
9592 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
9593 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
9594 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
9595 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
9596 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
9597 }
b26941b4
JM
9598
9599 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
9600 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
9601 }
9602 else
9603 {
9604 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9605 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9606 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9607 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9608 }
c9034561 9609 else
c15c90bb 9610 {
c9034561 9611 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
9612
9613 set_optab_libfunc (add_optab, TFmode, "_q_add");
9614 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9615 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9616 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9617 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9618 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9619 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9620
c9034561
ZW
9621 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9622 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9623 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9624 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9625 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9626 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9627
85363ca0
ZW
9628 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9629 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9630 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9631 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9632 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9633 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9634 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 9635 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
9636 }
9637}
fba73eb1
DE
9638
9639\f
9640/* Expand a block clear operation, and return 1 if successful. Return 0
9641 if we should let the compiler generate normal code.
9642
9643 operands[0] is the destination
9644 operands[1] is the length
57e84f18 9645 operands[3] is the alignment */
fba73eb1
DE
9646
9647int
9648expand_block_clear (rtx operands[])
9649{
9650 rtx orig_dest = operands[0];
9651 rtx bytes_rtx = operands[1];
57e84f18 9652 rtx align_rtx = operands[3];
5514620a
GK
9653 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9654 HOST_WIDE_INT align;
9655 HOST_WIDE_INT bytes;
fba73eb1
DE
9656 int offset;
9657 int clear_bytes;
5514620a 9658 int clear_step;
fba73eb1
DE
9659
9660 /* If this is not a fixed size move, just call memcpy */
9661 if (! constp)
9662 return 0;
9663
37409796
NS
9664 /* This must be a fixed size alignment */
9665 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9666 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9667
9668 /* Anything to clear? */
9669 bytes = INTVAL (bytes_rtx);
9670 if (bytes <= 0)
9671 return 1;
9672
5514620a
GK
9673 /* Use the builtin memset after a point, to avoid huge code bloat.
9674 When optimize_size, avoid any significant code bloat; calling
9675 memset is about 4 instructions, so allow for one instruction to
9676 load zero and three to do clearing. */
9677 if (TARGET_ALTIVEC && align >= 128)
9678 clear_step = 16;
9679 else if (TARGET_POWERPC64 && align >= 32)
9680 clear_step = 8;
9681 else
9682 clear_step = 4;
fba73eb1 9683
5514620a
GK
9684 if (optimize_size && bytes > 3 * clear_step)
9685 return 0;
9686 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9687 return 0;
9688
9689 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9690 {
fba73eb1
DE
9691 enum machine_mode mode = BLKmode;
9692 rtx dest;
f676971a 9693
5514620a
GK
9694 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9695 {
9696 clear_bytes = 16;
9697 mode = V4SImode;
9698 }
9699 else if (bytes >= 8 && TARGET_POWERPC64
9700 /* 64-bit loads and stores require word-aligned
9701 displacements. */
9702 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9703 {
9704 clear_bytes = 8;
9705 mode = DImode;
fba73eb1 9706 }
5514620a 9707 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
9708 { /* move 4 bytes */
9709 clear_bytes = 4;
9710 mode = SImode;
fba73eb1 9711 }
ec53fc93 9712 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
9713 { /* move 2 bytes */
9714 clear_bytes = 2;
9715 mode = HImode;
fba73eb1
DE
9716 }
9717 else /* move 1 byte at a time */
9718 {
9719 clear_bytes = 1;
9720 mode = QImode;
fba73eb1 9721 }
f676971a 9722
fba73eb1 9723 dest = adjust_address (orig_dest, mode, offset);
f676971a 9724
5514620a 9725 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9726 }
9727
9728 return 1;
9729}
9730
35aff10b 9731\f
7e69e155
MM
9732/* Expand a block move operation, and return 1 if successful. Return 0
9733 if we should let the compiler generate normal code.
9734
9735 operands[0] is the destination
9736 operands[1] is the source
9737 operands[2] is the length
9738 operands[3] is the alignment */
9739
3933e0e1
MM
9740#define MAX_MOVE_REG 4
9741
7e69e155 9742int
a2369ed3 9743expand_block_move (rtx operands[])
7e69e155 9744{
b6c9286a
MM
9745 rtx orig_dest = operands[0];
9746 rtx orig_src = operands[1];
7e69e155 9747 rtx bytes_rtx = operands[2];
7e69e155 9748 rtx align_rtx = operands[3];
3933e0e1 9749 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9750 int align;
3933e0e1
MM
9751 int bytes;
9752 int offset;
7e69e155 9753 int move_bytes;
cabfd258
GK
9754 rtx stores[MAX_MOVE_REG];
9755 int num_reg = 0;
7e69e155 9756
3933e0e1 9757 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9758 if (! constp)
3933e0e1
MM
9759 return 0;
9760
37409796
NS
9761 /* This must be a fixed size alignment */
9762 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9763 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9764
7e69e155 9765 /* Anything to move? */
3933e0e1
MM
9766 bytes = INTVAL (bytes_rtx);
9767 if (bytes <= 0)
7e69e155
MM
9768 return 1;
9769
ea9982a8 9770 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9771 reg_parm_stack_space. */
ea9982a8 9772 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9773 return 0;
9774
cabfd258 9775 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9776 {
cabfd258 9777 union {
70128ad9 9778 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9779 rtx (*mov) (rtx, rtx);
cabfd258
GK
9780 } gen_func;
9781 enum machine_mode mode = BLKmode;
9782 rtx src, dest;
f676971a 9783
5514620a
GK
9784 /* Altivec first, since it will be faster than a string move
9785 when it applies, and usually not significantly larger. */
9786 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9787 {
9788 move_bytes = 16;
9789 mode = V4SImode;
9790 gen_func.mov = gen_movv4si;
9791 }
9792 else if (TARGET_STRING
cabfd258
GK
9793 && bytes > 24 /* move up to 32 bytes at a time */
9794 && ! fixed_regs[5]
9795 && ! fixed_regs[6]
9796 && ! fixed_regs[7]
9797 && ! fixed_regs[8]
9798 && ! fixed_regs[9]
9799 && ! fixed_regs[10]
9800 && ! fixed_regs[11]
9801 && ! fixed_regs[12])
7e69e155 9802 {
cabfd258 9803 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9804 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9805 }
9806 else if (TARGET_STRING
9807 && bytes > 16 /* move up to 24 bytes at a time */
9808 && ! fixed_regs[5]
9809 && ! fixed_regs[6]
9810 && ! fixed_regs[7]
9811 && ! fixed_regs[8]
9812 && ! fixed_regs[9]
9813 && ! fixed_regs[10])
9814 {
9815 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 9816 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
9817 }
9818 else if (TARGET_STRING
9819 && bytes > 8 /* move up to 16 bytes at a time */
9820 && ! fixed_regs[5]
9821 && ! fixed_regs[6]
9822 && ! fixed_regs[7]
9823 && ! fixed_regs[8])
9824 {
9825 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 9826 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
9827 }
9828 else if (bytes >= 8 && TARGET_POWERPC64
9829 /* 64-bit loads and stores require word-aligned
9830 displacements. */
fba73eb1 9831 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
9832 {
9833 move_bytes = 8;
9834 mode = DImode;
9835 gen_func.mov = gen_movdi;
9836 }
9837 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9838 { /* move up to 8 bytes at a time */
9839 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 9840 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 9841 }
cd7d9ca4 9842 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
9843 { /* move 4 bytes */
9844 move_bytes = 4;
9845 mode = SImode;
9846 gen_func.mov = gen_movsi;
9847 }
ec53fc93 9848 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
9849 { /* move 2 bytes */
9850 move_bytes = 2;
9851 mode = HImode;
9852 gen_func.mov = gen_movhi;
9853 }
9854 else if (TARGET_STRING && bytes > 1)
9855 { /* move up to 4 bytes at a time */
9856 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 9857 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
9858 }
9859 else /* move 1 byte at a time */
9860 {
9861 move_bytes = 1;
9862 mode = QImode;
9863 gen_func.mov = gen_movqi;
9864 }
f676971a 9865
cabfd258
GK
9866 src = adjust_address (orig_src, mode, offset);
9867 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
9868
9869 if (mode != BLKmode)
cabfd258
GK
9870 {
9871 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 9872
cabfd258
GK
9873 emit_insn ((*gen_func.mov) (tmp_reg, src));
9874 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 9875 }
3933e0e1 9876
cabfd258
GK
9877 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
9878 {
9879 int i;
9880 for (i = 0; i < num_reg; i++)
9881 emit_insn (stores[i]);
9882 num_reg = 0;
9883 }
35aff10b 9884
cabfd258 9885 if (mode == BLKmode)
7e69e155 9886 {
70128ad9 9887 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
9888 patterns require zero offset. */
9889 if (!REG_P (XEXP (src, 0)))
b6c9286a 9890 {
cabfd258
GK
9891 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
9892 src = replace_equiv_address (src, src_reg);
b6c9286a 9893 }
cabfd258 9894 set_mem_size (src, GEN_INT (move_bytes));
f676971a 9895
cabfd258 9896 if (!REG_P (XEXP (dest, 0)))
3933e0e1 9897 {
cabfd258
GK
9898 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
9899 dest = replace_equiv_address (dest, dest_reg);
7e69e155 9900 }
cabfd258 9901 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 9902
70128ad9 9903 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
9904 GEN_INT (move_bytes & 31),
9905 align_rtx));
7e69e155 9906 }
7e69e155
MM
9907 }
9908
9909 return 1;
9910}
9911
d62294f5 9912\f
9caa3eb2
DE
9913/* Return a string to perform a load_multiple operation.
9914 operands[0] is the vector.
9915 operands[1] is the source address.
9916 operands[2] is the first destination register. */
9917
9918const char *
a2369ed3 9919rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
9920{
9921 /* We have to handle the case where the pseudo used to contain the address
9922 is assigned to one of the output registers. */
9923 int i, j;
9924 int words = XVECLEN (operands[0], 0);
9925 rtx xop[10];
9926
9927 if (XVECLEN (operands[0], 0) == 1)
9928 return "{l|lwz} %2,0(%1)";
9929
9930 for (i = 0; i < words; i++)
9931 if (refers_to_regno_p (REGNO (operands[2]) + i,
9932 REGNO (operands[2]) + i + 1, operands[1], 0))
9933 {
9934 if (i == words-1)
9935 {
9936 xop[0] = GEN_INT (4 * (words-1));
9937 xop[1] = operands[1];
9938 xop[2] = operands[2];
9939 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
9940 return "";
9941 }
9942 else if (i == 0)
9943 {
9944 xop[0] = GEN_INT (4 * (words-1));
9945 xop[1] = operands[1];
9946 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
9947 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
9948 return "";
9949 }
9950 else
9951 {
9952 for (j = 0; j < words; j++)
9953 if (j != i)
9954 {
9955 xop[0] = GEN_INT (j * 4);
9956 xop[1] = operands[1];
9957 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
9958 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
9959 }
9960 xop[0] = GEN_INT (i * 4);
9961 xop[1] = operands[1];
9962 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
9963 return "";
9964 }
9965 }
9966
9967 return "{lsi|lswi} %2,%1,%N0";
9968}
9969
9878760c 9970\f
a4f6c312
SS
9971/* A validation routine: say whether CODE, a condition code, and MODE
9972 match. The other alternatives either don't make sense or should
9973 never be generated. */
39a10a29 9974
48d72335 9975void
a2369ed3 9976validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 9977{
37409796
NS
9978 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
9979 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
9980 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
9981
9982 /* These don't make sense. */
37409796
NS
9983 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
9984 || mode != CCUNSmode);
39a10a29 9985
37409796
NS
9986 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
9987 || mode == CCUNSmode);
39a10a29 9988
37409796
NS
9989 gcc_assert (mode == CCFPmode
9990 || (code != ORDERED && code != UNORDERED
9991 && code != UNEQ && code != LTGT
9992 && code != UNGT && code != UNLT
9993 && code != UNGE && code != UNLE));
f676971a
EC
9994
9995 /* These should never be generated except for
bc9ec0e0 9996 flag_finite_math_only. */
37409796
NS
9997 gcc_assert (mode != CCFPmode
9998 || flag_finite_math_only
9999 || (code != LE && code != GE
10000 && code != UNEQ && code != LTGT
10001 && code != UNGT && code != UNLT));
39a10a29
GK
10002
10003 /* These are invalid; the information is not there. */
37409796 10004 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10005}
10006
9878760c
RK
10007\f
10008/* Return 1 if ANDOP is a mask that has no bits on that are not in the
10009 mask required to convert the result of a rotate insn into a shift
b1765bde 10010 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
10011
10012int
a2369ed3 10013includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10014{
e2c953b6
DE
10015 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10016
10017 shift_mask <<= INTVAL (shiftop);
9878760c 10018
b1765bde 10019 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10020}
10021
10022/* Similar, but for right shift. */
10023
10024int
a2369ed3 10025includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10026{
a7653a2c 10027 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10028
10029 shift_mask >>= INTVAL (shiftop);
10030
b1765bde 10031 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10032}
10033
c5059423
AM
10034/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10035 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10036 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
10037
10038int
a2369ed3 10039includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10040{
c5059423
AM
10041 if (GET_CODE (andop) == CONST_INT)
10042 {
02071907 10043 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10044
c5059423 10045 c = INTVAL (andop);
02071907 10046 if (c == 0 || c == ~0)
c5059423 10047 return 0;
e2c953b6 10048
02071907 10049 shift_mask = ~0;
c5059423
AM
10050 shift_mask <<= INTVAL (shiftop);
10051
b6d08ca1 10052 /* Find the least significant one bit. */
c5059423
AM
10053 lsb = c & -c;
10054
10055 /* It must coincide with the LSB of the shift mask. */
10056 if (-lsb != shift_mask)
10057 return 0;
e2c953b6 10058
c5059423
AM
10059 /* Invert to look for the next transition (if any). */
10060 c = ~c;
10061
10062 /* Remove the low group of ones (originally low group of zeros). */
10063 c &= -lsb;
10064
10065 /* Again find the lsb, and check we have all 1's above. */
10066 lsb = c & -c;
10067 return c == -lsb;
10068 }
10069 else if (GET_CODE (andop) == CONST_DOUBLE
10070 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10071 {
02071907
AM
10072 HOST_WIDE_INT low, high, lsb;
10073 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10074
10075 low = CONST_DOUBLE_LOW (andop);
10076 if (HOST_BITS_PER_WIDE_INT < 64)
10077 high = CONST_DOUBLE_HIGH (andop);
10078
10079 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10080 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10081 return 0;
10082
10083 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10084 {
02071907 10085 shift_mask_high = ~0;
c5059423
AM
10086 if (INTVAL (shiftop) > 32)
10087 shift_mask_high <<= INTVAL (shiftop) - 32;
10088
10089 lsb = high & -high;
10090
10091 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10092 return 0;
10093
10094 high = ~high;
10095 high &= -lsb;
10096
10097 lsb = high & -high;
10098 return high == -lsb;
10099 }
10100
02071907 10101 shift_mask_low = ~0;
c5059423
AM
10102 shift_mask_low <<= INTVAL (shiftop);
10103
10104 lsb = low & -low;
10105
10106 if (-lsb != shift_mask_low)
10107 return 0;
10108
10109 if (HOST_BITS_PER_WIDE_INT < 64)
10110 high = ~high;
10111 low = ~low;
10112 low &= -lsb;
10113
10114 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10115 {
10116 lsb = high & -high;
10117 return high == -lsb;
10118 }
10119
10120 lsb = low & -low;
10121 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10122 }
10123 else
10124 return 0;
10125}
e2c953b6 10126
c5059423
AM
10127/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10128 to perform a left shift. It must have SHIFTOP or more least
c1207243 10129 significant 0's, with the remainder of the word 1's. */
e2c953b6 10130
c5059423 10131int
a2369ed3 10132includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10133{
e2c953b6 10134 if (GET_CODE (andop) == CONST_INT)
c5059423 10135 {
02071907 10136 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10137
02071907 10138 shift_mask = ~0;
c5059423
AM
10139 shift_mask <<= INTVAL (shiftop);
10140 c = INTVAL (andop);
10141
c1207243 10142 /* Find the least significant one bit. */
c5059423
AM
10143 lsb = c & -c;
10144
10145 /* It must be covered by the shift mask.
a4f6c312 10146 This test also rejects c == 0. */
c5059423
AM
10147 if ((lsb & shift_mask) == 0)
10148 return 0;
10149
10150 /* Check we have all 1's above the transition, and reject all 1's. */
10151 return c == -lsb && lsb != 1;
10152 }
10153 else if (GET_CODE (andop) == CONST_DOUBLE
10154 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10155 {
02071907 10156 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10157
10158 low = CONST_DOUBLE_LOW (andop);
10159
10160 if (HOST_BITS_PER_WIDE_INT < 64)
10161 {
02071907 10162 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10163
10164 high = CONST_DOUBLE_HIGH (andop);
10165
10166 if (low == 0)
10167 {
02071907 10168 shift_mask_high = ~0;
c5059423
AM
10169 if (INTVAL (shiftop) > 32)
10170 shift_mask_high <<= INTVAL (shiftop) - 32;
10171
10172 lsb = high & -high;
10173
10174 if ((lsb & shift_mask_high) == 0)
10175 return 0;
10176
10177 return high == -lsb;
10178 }
10179 if (high != ~0)
10180 return 0;
10181 }
10182
02071907 10183 shift_mask_low = ~0;
c5059423
AM
10184 shift_mask_low <<= INTVAL (shiftop);
10185
10186 lsb = low & -low;
10187
10188 if ((lsb & shift_mask_low) == 0)
10189 return 0;
10190
10191 return low == -lsb && lsb != 1;
10192 }
e2c953b6 10193 else
c5059423 10194 return 0;
9878760c 10195}
35068b43 10196
11ac38b2
DE
10197/* Return 1 if operands will generate a valid arguments to rlwimi
10198instruction for insert with right shift in 64-bit mode. The mask may
10199not start on the first bit or stop on the last bit because wrap-around
10200effects of instruction do not correspond to semantics of RTL insn. */
10201
10202int
10203insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10204{
429ec7dc
DE
10205 if (INTVAL (startop) > 32
10206 && INTVAL (startop) < 64
10207 && INTVAL (sizeop) > 1
10208 && INTVAL (sizeop) + INTVAL (startop) < 64
10209 && INTVAL (shiftop) > 0
10210 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10211 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10212 return 1;
10213
10214 return 0;
10215}
10216
35068b43 10217/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
90f81f99 10218 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10219
10220int
a2369ed3 10221registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10222{
10223 /* We might have been passed a SUBREG. */
f676971a 10224 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10225 return 0;
f676971a 10226
90f81f99
AP
10227 /* We might have been passed non floating point registers. */
10228 if (!FP_REGNO_P (REGNO (reg1))
10229 || !FP_REGNO_P (REGNO (reg2)))
10230 return 0;
35068b43
RK
10231
10232 return (REGNO (reg1) == REGNO (reg2) - 1);
10233}
10234
a4f6c312
SS
10235/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10236 addr1 and addr2 must be in consecutive memory locations
10237 (addr2 == addr1 + 8). */
35068b43
RK
10238
10239int
90f81f99 10240mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10241{
90f81f99 10242 rtx addr1, addr2;
bb8df8a6
EC
10243 unsigned int reg1, reg2;
10244 int offset1, offset2;
35068b43 10245
90f81f99
AP
10246 /* The mems cannot be volatile. */
10247 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10248 return 0;
f676971a 10249
90f81f99
AP
10250 addr1 = XEXP (mem1, 0);
10251 addr2 = XEXP (mem2, 0);
10252
35068b43
RK
10253 /* Extract an offset (if used) from the first addr. */
10254 if (GET_CODE (addr1) == PLUS)
10255 {
10256 /* If not a REG, return zero. */
10257 if (GET_CODE (XEXP (addr1, 0)) != REG)
10258 return 0;
10259 else
10260 {
c4ad648e 10261 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10262 /* The offset must be constant! */
10263 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10264 return 0;
10265 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10266 }
10267 }
10268 else if (GET_CODE (addr1) != REG)
10269 return 0;
10270 else
10271 {
10272 reg1 = REGNO (addr1);
10273 /* This was a simple (mem (reg)) expression. Offset is 0. */
10274 offset1 = 0;
10275 }
10276
bb8df8a6
EC
10277 /* And now for the second addr. */
10278 if (GET_CODE (addr2) == PLUS)
10279 {
10280 /* If not a REG, return zero. */
10281 if (GET_CODE (XEXP (addr2, 0)) != REG)
10282 return 0;
10283 else
10284 {
10285 reg2 = REGNO (XEXP (addr2, 0));
10286 /* The offset must be constant. */
10287 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
10288 return 0;
10289 offset2 = INTVAL (XEXP (addr2, 1));
10290 }
10291 }
10292 else if (GET_CODE (addr2) != REG)
35068b43 10293 return 0;
bb8df8a6
EC
10294 else
10295 {
10296 reg2 = REGNO (addr2);
10297 /* This was a simple (mem (reg)) expression. Offset is 0. */
10298 offset2 = 0;
10299 }
35068b43 10300
bb8df8a6
EC
10301 /* Both of these must have the same base register. */
10302 if (reg1 != reg2)
35068b43
RK
10303 return 0;
10304
10305 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 10306 if (offset2 != offset1 + 8)
35068b43
RK
10307 return 0;
10308
10309 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
10310 instructions. */
10311 return 1;
10312}
9878760c
RK
10313\f
10314/* Return the register class of a scratch register needed to copy IN into
10315 or out of a register in CLASS in MODE. If it can be done directly,
10316 NO_REGS is returned. */
10317
10318enum reg_class
3c4774e0
R
10319rs6000_secondary_reload_class (enum reg_class class,
10320 enum machine_mode mode ATTRIBUTE_UNUSED,
10321 rtx in)
9878760c 10322{
5accd822 10323 int regno;
9878760c 10324
ab82a49f
AP
10325 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10326#if TARGET_MACHO
c4ad648e 10327 && MACHOPIC_INDIRECT
ab82a49f 10328#endif
c4ad648e 10329 ))
46fad5b7
DJ
10330 {
10331 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
10332 other than BASE_REGS for TARGET_ELF. So indicate that a
10333 register from BASE_REGS is needed as an intermediate
10334 register.
f676971a 10335
46fad5b7
DJ
10336 On Darwin, pic addresses require a load from memory, which
10337 needs a base register. */
10338 if (class != BASE_REGS
c4ad648e
AM
10339 && (GET_CODE (in) == SYMBOL_REF
10340 || GET_CODE (in) == HIGH
10341 || GET_CODE (in) == LABEL_REF
10342 || GET_CODE (in) == CONST))
10343 return BASE_REGS;
46fad5b7 10344 }
e7b7998a 10345
5accd822
DE
10346 if (GET_CODE (in) == REG)
10347 {
10348 regno = REGNO (in);
10349 if (regno >= FIRST_PSEUDO_REGISTER)
10350 {
10351 regno = true_regnum (in);
10352 if (regno >= FIRST_PSEUDO_REGISTER)
10353 regno = -1;
10354 }
10355 }
10356 else if (GET_CODE (in) == SUBREG)
10357 {
10358 regno = true_regnum (in);
10359 if (regno >= FIRST_PSEUDO_REGISTER)
10360 regno = -1;
10361 }
10362 else
10363 regno = -1;
10364
9878760c
RK
10365 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10366 into anything. */
10367 if (class == GENERAL_REGS || class == BASE_REGS
10368 || (regno >= 0 && INT_REGNO_P (regno)))
10369 return NO_REGS;
10370
10371 /* Constants, memory, and FP registers can go into FP registers. */
10372 if ((regno == -1 || FP_REGNO_P (regno))
10373 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10374 return NO_REGS;
10375
0ac081f6
AH
10376 /* Memory, and AltiVec registers can go into AltiVec registers. */
10377 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10378 && class == ALTIVEC_REGS)
10379 return NO_REGS;
10380
9878760c
RK
10381 /* We can copy among the CR registers. */
10382 if ((class == CR_REGS || class == CR0_REGS)
10383 && regno >= 0 && CR_REGNO_P (regno))
10384 return NO_REGS;
10385
10386 /* Otherwise, we need GENERAL_REGS. */
10387 return GENERAL_REGS;
10388}
10389\f
10390/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 10391 know this is a valid comparison.
9878760c
RK
10392
10393 SCC_P is 1 if this is for an scc. That means that %D will have been
10394 used instead of %C, so the bits will be in different places.
10395
b4ac57ab 10396 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
10397
10398int
a2369ed3 10399ccr_bit (rtx op, int scc_p)
9878760c
RK
10400{
10401 enum rtx_code code = GET_CODE (op);
10402 enum machine_mode cc_mode;
10403 int cc_regnum;
10404 int base_bit;
9ebbca7d 10405 rtx reg;
9878760c 10406
ec8e098d 10407 if (!COMPARISON_P (op))
9878760c
RK
10408 return -1;
10409
9ebbca7d
GK
10410 reg = XEXP (op, 0);
10411
37409796 10412 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
10413
10414 cc_mode = GET_MODE (reg);
10415 cc_regnum = REGNO (reg);
10416 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 10417
39a10a29 10418 validate_condition_mode (code, cc_mode);
c5defebb 10419
b7053a3f
GK
10420 /* When generating a sCOND operation, only positive conditions are
10421 allowed. */
37409796
NS
10422 gcc_assert (!scc_p
10423 || code == EQ || code == GT || code == LT || code == UNORDERED
10424 || code == GTU || code == LTU);
f676971a 10425
9878760c
RK
10426 switch (code)
10427 {
10428 case NE:
10429 return scc_p ? base_bit + 3 : base_bit + 2;
10430 case EQ:
10431 return base_bit + 2;
1c882ea4 10432 case GT: case GTU: case UNLE:
9878760c 10433 return base_bit + 1;
1c882ea4 10434 case LT: case LTU: case UNGE:
9878760c 10435 return base_bit;
1c882ea4
GK
10436 case ORDERED: case UNORDERED:
10437 return base_bit + 3;
9878760c
RK
10438
10439 case GE: case GEU:
39a10a29 10440 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
10441 unordered position. So test that bit. For integer, this is ! LT
10442 unless this is an scc insn. */
39a10a29 10443 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
10444
10445 case LE: case LEU:
39a10a29 10446 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 10447
9878760c 10448 default:
37409796 10449 gcc_unreachable ();
9878760c
RK
10450 }
10451}
1ff7789b 10452\f
8d30c4ee 10453/* Return the GOT register. */
1ff7789b 10454
9390387d 10455rtx
a2369ed3 10456rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 10457{
a4f6c312
SS
10458 /* The second flow pass currently (June 1999) can't update
10459 regs_ever_live without disturbing other parts of the compiler, so
10460 update it here to make the prolog/epilogue code happy. */
1db02437
FS
10461 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
10462 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
1ff7789b 10463
8d30c4ee 10464 current_function_uses_pic_offset_table = 1;
3cb999d8 10465
1ff7789b
MM
10466 return pic_offset_table_rtx;
10467}
a7df97e6 10468\f
e2500fed
GK
10469/* Function to init struct machine_function.
10470 This will be called, via a pointer variable,
10471 from push_function_context. */
a7df97e6 10472
e2500fed 10473static struct machine_function *
863d938c 10474rs6000_init_machine_status (void)
a7df97e6 10475{
e2500fed 10476 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 10477}
9878760c 10478\f
0ba1b2ff
AM
10479/* These macros test for integers and extract the low-order bits. */
10480#define INT_P(X) \
10481((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
10482 && GET_MODE (X) == VOIDmode)
10483
10484#define INT_LOWPART(X) \
10485 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10486
10487int
a2369ed3 10488extract_MB (rtx op)
0ba1b2ff
AM
10489{
10490 int i;
10491 unsigned long val = INT_LOWPART (op);
10492
10493 /* If the high bit is zero, the value is the first 1 bit we find
10494 from the left. */
10495 if ((val & 0x80000000) == 0)
10496 {
37409796 10497 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10498
10499 i = 1;
10500 while (((val <<= 1) & 0x80000000) == 0)
10501 ++i;
10502 return i;
10503 }
10504
10505 /* If the high bit is set and the low bit is not, or the mask is all
10506 1's, the value is zero. */
10507 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10508 return 0;
10509
10510 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10511 from the right. */
10512 i = 31;
10513 while (((val >>= 1) & 1) != 0)
10514 --i;
10515
10516 return i;
10517}
10518
10519int
a2369ed3 10520extract_ME (rtx op)
0ba1b2ff
AM
10521{
10522 int i;
10523 unsigned long val = INT_LOWPART (op);
10524
10525 /* If the low bit is zero, the value is the first 1 bit we find from
10526 the right. */
10527 if ((val & 1) == 0)
10528 {
37409796 10529 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10530
10531 i = 30;
10532 while (((val >>= 1) & 1) == 0)
10533 --i;
10534
10535 return i;
10536 }
10537
10538 /* If the low bit is set and the high bit is not, or the mask is all
10539 1's, the value is 31. */
10540 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10541 return 31;
10542
10543 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10544 from the left. */
10545 i = 0;
10546 while (((val <<= 1) & 0x80000000) != 0)
10547 ++i;
10548
10549 return i;
10550}
10551
c4501e62
JJ
10552/* Locate some local-dynamic symbol still in use by this function
10553 so that we can print its name in some tls_ld pattern. */
10554
10555static const char *
863d938c 10556rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
10557{
10558 rtx insn;
10559
10560 if (cfun->machine->some_ld_name)
10561 return cfun->machine->some_ld_name;
10562
10563 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10564 if (INSN_P (insn)
10565 && for_each_rtx (&PATTERN (insn),
10566 rs6000_get_some_local_dynamic_name_1, 0))
10567 return cfun->machine->some_ld_name;
10568
37409796 10569 gcc_unreachable ();
c4501e62
JJ
10570}
10571
10572/* Helper function for rs6000_get_some_local_dynamic_name. */
10573
10574static int
a2369ed3 10575rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
10576{
10577 rtx x = *px;
10578
10579 if (GET_CODE (x) == SYMBOL_REF)
10580 {
10581 const char *str = XSTR (x, 0);
10582 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10583 {
10584 cfun->machine->some_ld_name = str;
10585 return 1;
10586 }
10587 }
10588
10589 return 0;
10590}
10591
85b776df
AM
10592/* Write out a function code label. */
10593
10594void
10595rs6000_output_function_entry (FILE *file, const char *fname)
10596{
10597 if (fname[0] != '.')
10598 {
10599 switch (DEFAULT_ABI)
10600 {
10601 default:
37409796 10602 gcc_unreachable ();
85b776df
AM
10603
10604 case ABI_AIX:
10605 if (DOT_SYMBOLS)
10606 putc ('.', file);
10607 else
10608 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10609 break;
10610
10611 case ABI_V4:
10612 case ABI_DARWIN:
10613 break;
10614 }
10615 }
10616 if (TARGET_AIX)
10617 RS6000_OUTPUT_BASENAME (file, fname);
10618 else
10619 assemble_name (file, fname);
10620}
10621
9878760c
RK
10622/* Print an operand. Recognize special options, documented below. */
10623
38c1f2d7 10624#if TARGET_ELF
d9407988 10625#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10626#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10627#else
10628#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10629#define SMALL_DATA_REG 0
ba5e43aa
MM
10630#endif
10631
9878760c 10632void
a2369ed3 10633print_operand (FILE *file, rtx x, int code)
9878760c
RK
10634{
10635 int i;
a260abc9 10636 HOST_WIDE_INT val;
0ba1b2ff 10637 unsigned HOST_WIDE_INT uval;
9878760c
RK
10638
10639 switch (code)
10640 {
a8b3aeda 10641 case '.':
a85d226b
RK
10642 /* Write out an instruction after the call which may be replaced
10643 with glue code by the loader. This depends on the AIX version. */
10644 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10645 return;
10646
81eace42
GK
10647 /* %a is output_address. */
10648
9854d9ed
RK
10649 case 'A':
10650 /* If X is a constant integer whose low-order 5 bits are zero,
10651 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10652 in the AIX assembler where "sri" with a zero shift count
20e26713 10653 writes a trash instruction. */
9854d9ed 10654 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10655 putc ('l', file);
9854d9ed 10656 else
76229ac8 10657 putc ('r', file);
9854d9ed
RK
10658 return;
10659
10660 case 'b':
e2c953b6
DE
10661 /* If constant, low-order 16 bits of constant, unsigned.
10662 Otherwise, write normally. */
10663 if (INT_P (x))
10664 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10665 else
10666 print_operand (file, x, 0);
cad12a8d
RK
10667 return;
10668
a260abc9
DE
10669 case 'B':
10670 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10671 for 64-bit mask direction. */
9390387d 10672 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10673 return;
a260abc9 10674
81eace42
GK
10675 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10676 output_operand. */
10677
423c1189
AH
10678 case 'c':
10679 /* X is a CR register. Print the number of the GT bit of the CR. */
10680 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10681 output_operand_lossage ("invalid %%E value");
10682 else
10683 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10684 return;
10685
10686 case 'D':
cef6b86c 10687 /* Like 'J' but get to the GT bit only. */
37409796 10688 gcc_assert (GET_CODE (x) == REG);
423c1189 10689
cef6b86c
EB
10690 /* Bit 1 is GT bit. */
10691 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 10692
cef6b86c
EB
10693 /* Add one for shift count in rlinm for scc. */
10694 fprintf (file, "%d", i + 1);
423c1189
AH
10695 return;
10696
9854d9ed 10697 case 'E':
39a10a29 10698 /* X is a CR register. Print the number of the EQ bit of the CR */
9854d9ed
RK
10699 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10700 output_operand_lossage ("invalid %%E value");
78fbdbf7 10701 else
39a10a29 10702 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10703 return;
9854d9ed
RK
10704
10705 case 'f':
10706 /* X is a CR register. Print the shift count needed to move it
10707 to the high-order four bits. */
10708 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10709 output_operand_lossage ("invalid %%f value");
10710 else
9ebbca7d 10711 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10712 return;
10713
10714 case 'F':
10715 /* Similar, but print the count for the rotate in the opposite
10716 direction. */
10717 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10718 output_operand_lossage ("invalid %%F value");
10719 else
9ebbca7d 10720 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10721 return;
10722
10723 case 'G':
10724 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10725 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10726 if (GET_CODE (x) != CONST_INT)
10727 output_operand_lossage ("invalid %%G value");
10728 else if (INTVAL (x) >= 0)
76229ac8 10729 putc ('z', file);
9854d9ed 10730 else
76229ac8 10731 putc ('m', file);
9854d9ed 10732 return;
e2c953b6 10733
9878760c 10734 case 'h':
a4f6c312
SS
10735 /* If constant, output low-order five bits. Otherwise, write
10736 normally. */
9878760c 10737 if (INT_P (x))
5f59ecb7 10738 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10739 else
10740 print_operand (file, x, 0);
10741 return;
10742
64305719 10743 case 'H':
a4f6c312
SS
10744 /* If constant, output low-order six bits. Otherwise, write
10745 normally. */
64305719 10746 if (INT_P (x))
5f59ecb7 10747 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10748 else
10749 print_operand (file, x, 0);
10750 return;
10751
9854d9ed
RK
10752 case 'I':
10753 /* Print `i' if this is a constant, else nothing. */
9878760c 10754 if (INT_P (x))
76229ac8 10755 putc ('i', file);
9878760c
RK
10756 return;
10757
9854d9ed
RK
10758 case 'j':
10759 /* Write the bit number in CCR for jump. */
10760 i = ccr_bit (x, 0);
10761 if (i == -1)
10762 output_operand_lossage ("invalid %%j code");
9878760c 10763 else
9854d9ed 10764 fprintf (file, "%d", i);
9878760c
RK
10765 return;
10766
9854d9ed
RK
10767 case 'J':
10768 /* Similar, but add one for shift count in rlinm for scc and pass
10769 scc flag to `ccr_bit'. */
10770 i = ccr_bit (x, 1);
10771 if (i == -1)
10772 output_operand_lossage ("invalid %%J code");
10773 else
a0466a68
RK
10774 /* If we want bit 31, write a shift count of zero, not 32. */
10775 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10776 return;
10777
9854d9ed
RK
10778 case 'k':
10779 /* X must be a constant. Write the 1's complement of the
10780 constant. */
9878760c 10781 if (! INT_P (x))
9854d9ed 10782 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10783 else
10784 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10785 return;
10786
81eace42 10787 case 'K':
9ebbca7d
GK
10788 /* X must be a symbolic constant on ELF. Write an
10789 expression suitable for an 'addi' that adds in the low 16
10790 bits of the MEM. */
10791 if (GET_CODE (x) != CONST)
10792 {
10793 print_operand_address (file, x);
10794 fputs ("@l", file);
10795 }
10796 else
10797 {
10798 if (GET_CODE (XEXP (x, 0)) != PLUS
10799 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10800 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10801 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10802 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10803 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10804 fputs ("@l", file);
ed8d2920
MM
10805 /* For GNU as, there must be a non-alphanumeric character
10806 between 'l' and the number. The '-' is added by
10807 print_operand() already. */
10808 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10809 fputs ("+", file);
9ebbca7d
GK
10810 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10811 }
81eace42
GK
10812 return;
10813
10814 /* %l is output_asm_label. */
9ebbca7d 10815
9854d9ed
RK
10816 case 'L':
10817 /* Write second word of DImode or DFmode reference. Works on register
10818 or non-indexed memory only. */
10819 if (GET_CODE (x) == REG)
fb5c67a7 10820 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
10821 else if (GET_CODE (x) == MEM)
10822 {
10823 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 10824 we have already done it, we can just use an offset of word. */
9854d9ed
RK
10825 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10826 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
10827 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10828 UNITS_PER_WORD));
9854d9ed 10829 else
d7624dc0
RK
10830 output_address (XEXP (adjust_address_nv (x, SImode,
10831 UNITS_PER_WORD),
10832 0));
ed8908e7 10833
ba5e43aa 10834 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10835 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10836 reg_names[SMALL_DATA_REG]);
9854d9ed 10837 }
9878760c 10838 return;
f676971a 10839
9878760c
RK
10840 case 'm':
10841 /* MB value for a mask operand. */
b1765bde 10842 if (! mask_operand (x, SImode))
9878760c
RK
10843 output_operand_lossage ("invalid %%m value");
10844
0ba1b2ff 10845 fprintf (file, "%d", extract_MB (x));
9878760c
RK
10846 return;
10847
10848 case 'M':
10849 /* ME value for a mask operand. */
b1765bde 10850 if (! mask_operand (x, SImode))
a260abc9 10851 output_operand_lossage ("invalid %%M value");
9878760c 10852
0ba1b2ff 10853 fprintf (file, "%d", extract_ME (x));
9878760c
RK
10854 return;
10855
81eace42
GK
10856 /* %n outputs the negative of its operand. */
10857
9878760c
RK
10858 case 'N':
10859 /* Write the number of elements in the vector times 4. */
10860 if (GET_CODE (x) != PARALLEL)
10861 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
10862 else
10863 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
10864 return;
10865
10866 case 'O':
10867 /* Similar, but subtract 1 first. */
10868 if (GET_CODE (x) != PARALLEL)
1427100a 10869 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
10870 else
10871 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
10872 return;
10873
9854d9ed
RK
10874 case 'p':
10875 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10876 if (! INT_P (x)
2bfcf297 10877 || INT_LOWPART (x) < 0
9854d9ed
RK
10878 || (i = exact_log2 (INT_LOWPART (x))) < 0)
10879 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
10880 else
10881 fprintf (file, "%d", i);
9854d9ed
RK
10882 return;
10883
9878760c
RK
10884 case 'P':
10885 /* The operand must be an indirect memory reference. The result
8bb418a3 10886 is the register name. */
9878760c
RK
10887 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
10888 || REGNO (XEXP (x, 0)) >= 32)
10889 output_operand_lossage ("invalid %%P value");
e2c953b6 10890 else
fb5c67a7 10891 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
10892 return;
10893
dfbdccdb
GK
10894 case 'q':
10895 /* This outputs the logical code corresponding to a boolean
10896 expression. The expression may have one or both operands
39a10a29 10897 negated (if one, only the first one). For condition register
c4ad648e
AM
10898 logical operations, it will also treat the negated
10899 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 10900 {
63bc1d05 10901 const char *const *t = 0;
dfbdccdb
GK
10902 const char *s;
10903 enum rtx_code code = GET_CODE (x);
10904 static const char * const tbl[3][3] = {
10905 { "and", "andc", "nor" },
10906 { "or", "orc", "nand" },
10907 { "xor", "eqv", "xor" } };
10908
10909 if (code == AND)
10910 t = tbl[0];
10911 else if (code == IOR)
10912 t = tbl[1];
10913 else if (code == XOR)
10914 t = tbl[2];
10915 else
10916 output_operand_lossage ("invalid %%q value");
10917
10918 if (GET_CODE (XEXP (x, 0)) != NOT)
10919 s = t[0];
10920 else
10921 {
10922 if (GET_CODE (XEXP (x, 1)) == NOT)
10923 s = t[2];
10924 else
10925 s = t[1];
10926 }
f676971a 10927
dfbdccdb
GK
10928 fputs (s, file);
10929 }
10930 return;
10931
2c4a9cff
DE
10932 case 'Q':
10933 if (TARGET_MFCRF)
3b6ce0af 10934 fputc (',', file);
5efb1046 10935 /* FALLTHRU */
2c4a9cff
DE
10936 else
10937 return;
10938
9854d9ed
RK
10939 case 'R':
10940 /* X is a CR register. Print the mask for `mtcrf'. */
10941 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10942 output_operand_lossage ("invalid %%R value");
10943 else
9ebbca7d 10944 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 10945 return;
9854d9ed
RK
10946
10947 case 's':
10948 /* Low 5 bits of 32 - value */
10949 if (! INT_P (x))
10950 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
10951 else
10952 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 10953 return;
9854d9ed 10954
a260abc9 10955 case 'S':
0ba1b2ff 10956 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
10957 CONST_INT 32-bit mask is considered sign-extended so any
10958 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 10959 if (! mask64_operand (x, DImode))
a260abc9
DE
10960 output_operand_lossage ("invalid %%S value");
10961
0ba1b2ff 10962 uval = INT_LOWPART (x);
a260abc9 10963
0ba1b2ff 10964 if (uval & 1) /* Clear Left */
a260abc9 10965 {
f099d360
GK
10966#if HOST_BITS_PER_WIDE_INT > 64
10967 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10968#endif
0ba1b2ff 10969 i = 64;
a260abc9 10970 }
0ba1b2ff 10971 else /* Clear Right */
a260abc9 10972 {
0ba1b2ff 10973 uval = ~uval;
f099d360
GK
10974#if HOST_BITS_PER_WIDE_INT > 64
10975 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10976#endif
0ba1b2ff 10977 i = 63;
a260abc9 10978 }
0ba1b2ff
AM
10979 while (uval != 0)
10980 --i, uval >>= 1;
37409796 10981 gcc_assert (i >= 0);
0ba1b2ff
AM
10982 fprintf (file, "%d", i);
10983 return;
a260abc9 10984
a3170dc6
AH
10985 case 't':
10986 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 10987 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
10988
10989 /* Bit 3 is OV bit. */
10990 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10991
10992 /* If we want bit 31, write a shift count of zero, not 32. */
10993 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10994 return;
10995
cccf3bdc
DE
10996 case 'T':
10997 /* Print the symbolic name of a branch target register. */
10998 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10999 && REGNO (x) != COUNT_REGISTER_REGNUM))
11000 output_operand_lossage ("invalid %%T value");
e2c953b6 11001 else if (REGNO (x) == LINK_REGISTER_REGNUM)
cccf3bdc
DE
11002 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11003 else
11004 fputs ("ctr", file);
11005 return;
11006
9854d9ed 11007 case 'u':
802a0058 11008 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11009 if (! INT_P (x))
11010 output_operand_lossage ("invalid %%u value");
e2c953b6 11011 else
f676971a 11012 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11013 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11014 return;
11015
802a0058
MM
11016 case 'v':
11017 /* High-order 16 bits of constant for use in signed operand. */
11018 if (! INT_P (x))
11019 output_operand_lossage ("invalid %%v value");
e2c953b6 11020 else
134c32f6
DE
11021 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11022 (INT_LOWPART (x) >> 16) & 0xffff);
11023 return;
802a0058 11024
9854d9ed
RK
11025 case 'U':
11026 /* Print `u' if this has an auto-increment or auto-decrement. */
11027 if (GET_CODE (x) == MEM
11028 && (GET_CODE (XEXP (x, 0)) == PRE_INC
11029 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
76229ac8 11030 putc ('u', file);
9854d9ed 11031 return;
9878760c 11032
e0cd0770
JC
11033 case 'V':
11034 /* Print the trap code for this operand. */
11035 switch (GET_CODE (x))
11036 {
11037 case EQ:
11038 fputs ("eq", file); /* 4 */
11039 break;
11040 case NE:
11041 fputs ("ne", file); /* 24 */
11042 break;
11043 case LT:
11044 fputs ("lt", file); /* 16 */
11045 break;
11046 case LE:
11047 fputs ("le", file); /* 20 */
11048 break;
11049 case GT:
11050 fputs ("gt", file); /* 8 */
11051 break;
11052 case GE:
11053 fputs ("ge", file); /* 12 */
11054 break;
11055 case LTU:
11056 fputs ("llt", file); /* 2 */
11057 break;
11058 case LEU:
11059 fputs ("lle", file); /* 6 */
11060 break;
11061 case GTU:
11062 fputs ("lgt", file); /* 1 */
11063 break;
11064 case GEU:
11065 fputs ("lge", file); /* 5 */
11066 break;
11067 default:
37409796 11068 gcc_unreachable ();
e0cd0770
JC
11069 }
11070 break;
11071
9854d9ed
RK
11072 case 'w':
11073 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11074 normally. */
11075 if (INT_P (x))
f676971a 11076 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11077 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11078 else
11079 print_operand (file, x, 0);
9878760c
RK
11080 return;
11081
9854d9ed 11082 case 'W':
e2c953b6 11083 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11084 val = (GET_CODE (x) == CONST_INT
11085 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11086
11087 if (val < 0)
11088 i = -1;
9854d9ed 11089 else
e2c953b6
DE
11090 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11091 if ((val <<= 1) < 0)
11092 break;
11093
11094#if HOST_BITS_PER_WIDE_INT == 32
11095 if (GET_CODE (x) == CONST_INT && i >= 0)
11096 i += 32; /* zero-extend high-part was all 0's */
11097 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11098 {
11099 val = CONST_DOUBLE_LOW (x);
11100
37409796
NS
11101 gcc_assert (val);
11102 if (val < 0)
e2c953b6
DE
11103 --i;
11104 else
11105 for ( ; i < 64; i++)
11106 if ((val <<= 1) < 0)
11107 break;
11108 }
11109#endif
11110
11111 fprintf (file, "%d", i + 1);
9854d9ed 11112 return;
9878760c 11113
9854d9ed
RK
11114 case 'X':
11115 if (GET_CODE (x) == MEM
4d588c14 11116 && legitimate_indexed_address_p (XEXP (x, 0), 0))
76229ac8 11117 putc ('x', file);
9854d9ed 11118 return;
9878760c 11119
9854d9ed
RK
11120 case 'Y':
11121 /* Like 'L', for third word of TImode */
11122 if (GET_CODE (x) == REG)
fb5c67a7 11123 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11124 else if (GET_CODE (x) == MEM)
9878760c 11125 {
9854d9ed
RK
11126 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11127 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11128 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11129 else
d7624dc0 11130 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11131 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11132 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11133 reg_names[SMALL_DATA_REG]);
9878760c
RK
11134 }
11135 return;
f676971a 11136
9878760c 11137 case 'z':
b4ac57ab
RS
11138 /* X is a SYMBOL_REF. Write out the name preceded by a
11139 period and without any trailing data in brackets. Used for function
4d30c363
MM
11140 names. If we are configured for System V (or the embedded ABI) on
11141 the PowerPC, do not emit the period, since those systems do not use
11142 TOCs and the like. */
37409796 11143 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11144
c4ad648e
AM
11145 /* Mark the decl as referenced so that cgraph will output the
11146 function. */
9bf6462a 11147 if (SYMBOL_REF_DECL (x))
c4ad648e 11148 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11149
85b776df 11150 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11151 if (TARGET_MACHO)
11152 {
11153 const char *name = XSTR (x, 0);
a031e781 11154#if TARGET_MACHO
3b48085e 11155 if (MACHOPIC_INDIRECT
11abc112
MM
11156 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11157 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11158#endif
11159 assemble_name (file, name);
11160 }
85b776df 11161 else if (!DOT_SYMBOLS)
9739c90c 11162 assemble_name (file, XSTR (x, 0));
85b776df
AM
11163 else
11164 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11165 return;
11166
9854d9ed
RK
11167 case 'Z':
11168 /* Like 'L', for last word of TImode. */
11169 if (GET_CODE (x) == REG)
fb5c67a7 11170 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11171 else if (GET_CODE (x) == MEM)
11172 {
11173 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11174 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11175 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11176 else
d7624dc0 11177 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11178 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11179 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11180 reg_names[SMALL_DATA_REG]);
9854d9ed 11181 }
5c23c401 11182 return;
0ac081f6 11183
a3170dc6 11184 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11185 case 'y':
11186 {
11187 rtx tmp;
11188
37409796 11189 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11190
11191 tmp = XEXP (x, 0);
11192
90d3ff1c 11193 /* Ugly hack because %y is overloaded. */
8ef65e3d 11194 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11195 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11196 || GET_MODE (x) == TFmode
11197 || GET_MODE (x) == TImode))
a3170dc6
AH
11198 {
11199 /* Handle [reg]. */
11200 if (GET_CODE (tmp) == REG)
11201 {
11202 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11203 break;
11204 }
11205 /* Handle [reg+UIMM]. */
11206 else if (GET_CODE (tmp) == PLUS &&
11207 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11208 {
11209 int x;
11210
37409796 11211 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11212
11213 x = INTVAL (XEXP (tmp, 1));
11214 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11215 break;
11216 }
11217
11218 /* Fall through. Must be [reg+reg]. */
11219 }
850e8d3d
DN
11220 if (TARGET_ALTIVEC
11221 && GET_CODE (tmp) == AND
11222 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11223 && INTVAL (XEXP (tmp, 1)) == -16)
11224 tmp = XEXP (tmp, 0);
0ac081f6 11225 if (GET_CODE (tmp) == REG)
c62f2db5 11226 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11227 else
0ac081f6 11228 {
37409796 11229 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11230 && REG_P (XEXP (tmp, 0))
11231 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11232
0ac081f6
AH
11233 if (REGNO (XEXP (tmp, 0)) == 0)
11234 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11235 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11236 else
11237 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11238 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11239 }
0ac081f6
AH
11240 break;
11241 }
f676971a 11242
9878760c
RK
11243 case 0:
11244 if (GET_CODE (x) == REG)
11245 fprintf (file, "%s", reg_names[REGNO (x)]);
11246 else if (GET_CODE (x) == MEM)
11247 {
11248 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11249 know the width from the mode. */
11250 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11251 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11252 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11253 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11254 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11255 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11256 else
a54d04b7 11257 output_address (XEXP (x, 0));
9878760c
RK
11258 }
11259 else
a54d04b7 11260 output_addr_const (file, x);
a85d226b 11261 return;
9878760c 11262
c4501e62
JJ
11263 case '&':
11264 assemble_name (file, rs6000_get_some_local_dynamic_name ());
11265 return;
11266
9878760c
RK
11267 default:
11268 output_operand_lossage ("invalid %%xn code");
11269 }
11270}
11271\f
11272/* Print the address of an operand. */
11273
11274void
a2369ed3 11275print_operand_address (FILE *file, rtx x)
9878760c
RK
11276{
11277 if (GET_CODE (x) == REG)
4697a36c 11278 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
11279 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
11280 || GET_CODE (x) == LABEL_REF)
9878760c
RK
11281 {
11282 output_addr_const (file, x);
ba5e43aa 11283 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11284 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11285 reg_names[SMALL_DATA_REG]);
37409796
NS
11286 else
11287 gcc_assert (!TARGET_TOC);
9878760c
RK
11288 }
11289 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
11290 {
9024f4b8 11291 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 11292 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
11293 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
11294 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 11295 else
4697a36c
MM
11296 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
11297 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
11298 }
11299 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
11300 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
11301 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
11302#if TARGET_ELF
11303 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11304 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
11305 {
11306 output_addr_const (file, XEXP (x, 1));
11307 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11308 }
c859cda6
DJ
11309#endif
11310#if TARGET_MACHO
11311 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11312 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
11313 {
11314 fprintf (file, "lo16(");
11315 output_addr_const (file, XEXP (x, 1));
11316 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11317 }
3cb999d8 11318#endif
4d588c14 11319 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 11320 {
2bfcf297 11321 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 11322 {
2bfcf297
DB
11323 rtx contains_minus = XEXP (x, 1);
11324 rtx minus, symref;
11325 const char *name;
f676971a 11326
9ebbca7d 11327 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 11328 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
11329 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11330 contains_minus = XEXP (contains_minus, 0);
11331
2bfcf297
DB
11332 minus = XEXP (contains_minus, 0);
11333 symref = XEXP (minus, 0);
11334 XEXP (contains_minus, 0) = symref;
11335 if (TARGET_ELF)
11336 {
11337 char *newname;
11338
11339 name = XSTR (symref, 0);
11340 newname = alloca (strlen (name) + sizeof ("@toc"));
11341 strcpy (newname, name);
11342 strcat (newname, "@toc");
11343 XSTR (symref, 0) = newname;
11344 }
11345 output_addr_const (file, XEXP (x, 1));
11346 if (TARGET_ELF)
11347 XSTR (symref, 0) = name;
9ebbca7d
GK
11348 XEXP (contains_minus, 0) = minus;
11349 }
11350 else
11351 output_addr_const (file, XEXP (x, 1));
11352
11353 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11354 }
9878760c 11355 else
37409796 11356 gcc_unreachable ();
9878760c
RK
11357}
11358\f
88cad84b 11359/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
11360 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11361 is defined. It also needs to handle DI-mode objects on 64-bit
11362 targets. */
11363
11364static bool
a2369ed3 11365rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 11366{
f4f4921e 11367#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11368 /* Special handling for SI values. */
84dcde01 11369 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11370 {
301d03af 11371 static int recurse = 0;
f676971a 11372
301d03af
RS
11373 /* For -mrelocatable, we mark all addresses that need to be fixed up
11374 in the .fixup section. */
11375 if (TARGET_RELOCATABLE
d6b5193b
RS
11376 && in_section != toc_section
11377 && in_section != text_section
4325ca90 11378 && !unlikely_text_section_p (in_section)
301d03af
RS
11379 && !recurse
11380 && GET_CODE (x) != CONST_INT
11381 && GET_CODE (x) != CONST_DOUBLE
11382 && CONSTANT_P (x))
11383 {
11384 char buf[256];
11385
11386 recurse = 1;
11387 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11388 fixuplabelno++;
11389 ASM_OUTPUT_LABEL (asm_out_file, buf);
11390 fprintf (asm_out_file, "\t.long\t(");
11391 output_addr_const (asm_out_file, x);
11392 fprintf (asm_out_file, ")@fixup\n");
11393 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11394 ASM_OUTPUT_ALIGN (asm_out_file, 2);
11395 fprintf (asm_out_file, "\t.long\t");
11396 assemble_name (asm_out_file, buf);
11397 fprintf (asm_out_file, "\n\t.previous\n");
11398 recurse = 0;
11399 return true;
11400 }
11401 /* Remove initial .'s to turn a -mcall-aixdesc function
11402 address into the address of the descriptor, not the function
11403 itself. */
11404 else if (GET_CODE (x) == SYMBOL_REF
11405 && XSTR (x, 0)[0] == '.'
11406 && DEFAULT_ABI == ABI_AIX)
11407 {
11408 const char *name = XSTR (x, 0);
11409 while (*name == '.')
11410 name++;
11411
11412 fprintf (asm_out_file, "\t.long\t%s\n", name);
11413 return true;
11414 }
11415 }
f4f4921e 11416#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
11417 return default_assemble_integer (x, size, aligned_p);
11418}
93638d7a
AM
11419
11420#ifdef HAVE_GAS_HIDDEN
11421/* Emit an assembler directive to set symbol visibility for DECL to
11422 VISIBILITY_TYPE. */
11423
5add3202 11424static void
a2369ed3 11425rs6000_assemble_visibility (tree decl, int vis)
93638d7a 11426{
93638d7a
AM
11427 /* Functions need to have their entry point symbol visibility set as
11428 well as their descriptor symbol visibility. */
85b776df
AM
11429 if (DEFAULT_ABI == ABI_AIX
11430 && DOT_SYMBOLS
11431 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 11432 {
25fdb4dc 11433 static const char * const visibility_types[] = {
c4ad648e 11434 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
11435 };
11436
11437 const char *name, *type;
93638d7a
AM
11438
11439 name = ((* targetm.strip_name_encoding)
11440 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 11441 type = visibility_types[vis];
93638d7a 11442
25fdb4dc
RH
11443 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11444 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 11445 }
25fdb4dc
RH
11446 else
11447 default_assemble_visibility (decl, vis);
93638d7a
AM
11448}
11449#endif
301d03af 11450\f
39a10a29 11451enum rtx_code
a2369ed3 11452rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
11453{
11454 /* Reversal of FP compares takes care -- an ordered compare
11455 becomes an unordered compare and vice versa. */
f676971a 11456 if (mode == CCFPmode
bc9ec0e0
GK
11457 && (!flag_finite_math_only
11458 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11459 || code == UNEQ || code == LTGT))
bab6226b 11460 return reverse_condition_maybe_unordered (code);
39a10a29 11461 else
bab6226b 11462 return reverse_condition (code);
39a10a29
GK
11463}
11464
39a10a29
GK
11465/* Generate a compare for CODE. Return a brand-new rtx that
11466 represents the result of the compare. */
a4f6c312 11467
39a10a29 11468static rtx
a2369ed3 11469rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
11470{
11471 enum machine_mode comp_mode;
11472 rtx compare_result;
11473
11474 if (rs6000_compare_fp_p)
11475 comp_mode = CCFPmode;
11476 else if (code == GTU || code == LTU
c4ad648e 11477 || code == GEU || code == LEU)
39a10a29 11478 comp_mode = CCUNSmode;
60934f9c
NS
11479 else if ((code == EQ || code == NE)
11480 && GET_CODE (rs6000_compare_op0) == SUBREG
11481 && GET_CODE (rs6000_compare_op1) == SUBREG
11482 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
11483 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
11484 /* These are unsigned values, perhaps there will be a later
11485 ordering compare that can be shared with this one.
11486 Unfortunately we cannot detect the signedness of the operands
11487 for non-subregs. */
11488 comp_mode = CCUNSmode;
39a10a29
GK
11489 else
11490 comp_mode = CCmode;
11491
11492 /* First, the compare. */
11493 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 11494
cef6b86c 11495 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 11496 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 11497 && rs6000_compare_fp_p)
a3170dc6 11498 {
64022b5d 11499 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
11500 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
11501
11502 if (op_mode == VOIDmode)
11503 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 11504
cef6b86c
EB
11505 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
11506 This explains the following mess. */
423c1189 11507
a3170dc6
AH
11508 switch (code)
11509 {
423c1189 11510 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
11511 switch (op_mode)
11512 {
11513 case SFmode:
11514 cmp = flag_unsafe_math_optimizations
11515 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
11516 rs6000_compare_op1)
11517 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
11518 rs6000_compare_op1);
11519 break;
11520
11521 case DFmode:
11522 cmp = flag_unsafe_math_optimizations
11523 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
11524 rs6000_compare_op1)
11525 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
11526 rs6000_compare_op1);
11527 break;
11528
17caeff2
JM
11529 case TFmode:
11530 cmp = flag_unsafe_math_optimizations
11531 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
11532 rs6000_compare_op1)
11533 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
11534 rs6000_compare_op1);
11535 break;
11536
37409796
NS
11537 default:
11538 gcc_unreachable ();
11539 }
a3170dc6 11540 break;
bb8df8a6 11541
423c1189 11542 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
11543 switch (op_mode)
11544 {
11545 case SFmode:
11546 cmp = flag_unsafe_math_optimizations
11547 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
11548 rs6000_compare_op1)
11549 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
11550 rs6000_compare_op1);
11551 break;
bb8df8a6 11552
37409796
NS
11553 case DFmode:
11554 cmp = flag_unsafe_math_optimizations
11555 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
11556 rs6000_compare_op1)
11557 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
11558 rs6000_compare_op1);
11559 break;
11560
17caeff2
JM
11561 case TFmode:
11562 cmp = flag_unsafe_math_optimizations
11563 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
11564 rs6000_compare_op1)
11565 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
11566 rs6000_compare_op1);
11567 break;
11568
37409796
NS
11569 default:
11570 gcc_unreachable ();
11571 }
a3170dc6 11572 break;
bb8df8a6 11573
423c1189 11574 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
11575 switch (op_mode)
11576 {
11577 case SFmode:
11578 cmp = flag_unsafe_math_optimizations
11579 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11580 rs6000_compare_op1)
11581 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11582 rs6000_compare_op1);
11583 break;
bb8df8a6 11584
37409796
NS
11585 case DFmode:
11586 cmp = flag_unsafe_math_optimizations
11587 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11588 rs6000_compare_op1)
11589 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11590 rs6000_compare_op1);
11591 break;
11592
17caeff2
JM
11593 case TFmode:
11594 cmp = flag_unsafe_math_optimizations
11595 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
11596 rs6000_compare_op1)
11597 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
11598 rs6000_compare_op1);
11599 break;
11600
37409796
NS
11601 default:
11602 gcc_unreachable ();
11603 }
a3170dc6 11604 break;
4d4cbc0e 11605 default:
37409796 11606 gcc_unreachable ();
a3170dc6
AH
11607 }
11608
11609 /* Synthesize LE and GE from LT/GT || EQ. */
11610 if (code == LE || code == GE || code == LEU || code == GEU)
11611 {
a3170dc6
AH
11612 emit_insn (cmp);
11613
11614 switch (code)
11615 {
11616 case LE: code = LT; break;
11617 case GE: code = GT; break;
11618 case LEU: code = LT; break;
11619 case GEU: code = GT; break;
37409796 11620 default: gcc_unreachable ();
a3170dc6
AH
11621 }
11622
a3170dc6
AH
11623 compare_result2 = gen_reg_rtx (CCFPmode);
11624
11625 /* Do the EQ. */
37409796
NS
11626 switch (op_mode)
11627 {
11628 case SFmode:
11629 cmp = flag_unsafe_math_optimizations
11630 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11631 rs6000_compare_op1)
11632 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11633 rs6000_compare_op1);
11634 break;
11635
11636 case DFmode:
11637 cmp = flag_unsafe_math_optimizations
11638 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11639 rs6000_compare_op1)
11640 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11641 rs6000_compare_op1);
11642 break;
11643
17caeff2
JM
11644 case TFmode:
11645 cmp = flag_unsafe_math_optimizations
11646 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
11647 rs6000_compare_op1)
11648 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
11649 rs6000_compare_op1);
11650 break;
11651
37409796
NS
11652 default:
11653 gcc_unreachable ();
11654 }
a3170dc6
AH
11655 emit_insn (cmp);
11656
a3170dc6 11657 /* OR them together. */
64022b5d
AH
11658 or_result = gen_reg_rtx (CCFPmode);
11659 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11660 compare_result2);
a3170dc6
AH
11661 compare_result = or_result;
11662 code = EQ;
11663 }
11664 else
11665 {
a3170dc6 11666 if (code == NE || code == LTGT)
a3170dc6 11667 code = NE;
423c1189
AH
11668 else
11669 code = EQ;
a3170dc6
AH
11670 }
11671
11672 emit_insn (cmp);
11673 }
11674 else
de17c25f
DE
11675 {
11676 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11677 CLOBBERs to match cmptf_internal2 pattern. */
11678 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11679 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 11680 && !TARGET_IEEEQUAD
de17c25f
DE
11681 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11682 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11683 gen_rtvec (9,
11684 gen_rtx_SET (VOIDmode,
11685 compare_result,
11686 gen_rtx_COMPARE (comp_mode,
11687 rs6000_compare_op0,
11688 rs6000_compare_op1)),
11689 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11690 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11691 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11692 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11693 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11694 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11695 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11696 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11697 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11698 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11699 {
11700 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11701 comp_mode = CCEQmode;
11702 compare_result = gen_reg_rtx (CCEQmode);
11703 if (TARGET_64BIT)
11704 emit_insn (gen_stack_protect_testdi (compare_result,
11705 rs6000_compare_op0, op1));
11706 else
11707 emit_insn (gen_stack_protect_testsi (compare_result,
11708 rs6000_compare_op0, op1));
11709 }
de17c25f
DE
11710 else
11711 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11712 gen_rtx_COMPARE (comp_mode,
11713 rs6000_compare_op0,
11714 rs6000_compare_op1)));
11715 }
f676971a 11716
ca5adc63 11717 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11718 under flag_finite_math_only we don't bother. */
39a10a29 11719 if (rs6000_compare_fp_p
e7108df9 11720 && !flag_finite_math_only
8ef65e3d 11721 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
11722 && (code == LE || code == GE
11723 || code == UNEQ || code == LTGT
11724 || code == UNGT || code == UNLT))
11725 {
11726 enum rtx_code or1, or2;
11727 rtx or1_rtx, or2_rtx, compare2_rtx;
11728 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11729
39a10a29
GK
11730 switch (code)
11731 {
11732 case LE: or1 = LT; or2 = EQ; break;
11733 case GE: or1 = GT; or2 = EQ; break;
11734 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11735 case LTGT: or1 = LT; or2 = GT; break;
11736 case UNGT: or1 = UNORDERED; or2 = GT; break;
11737 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11738 default: gcc_unreachable ();
39a10a29
GK
11739 }
11740 validate_condition_mode (or1, comp_mode);
11741 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11742 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11743 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11744 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11745 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11746 const_true_rtx);
11747 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11748
11749 compare_result = or_result;
11750 code = EQ;
11751 }
11752
11753 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11754
1c563bed 11755 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11756}
11757
11758
11759/* Emit the RTL for an sCOND pattern. */
11760
11761void
a2369ed3 11762rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
11763{
11764 rtx condition_rtx;
11765 enum machine_mode op_mode;
b7053a3f 11766 enum rtx_code cond_code;
39a10a29
GK
11767
11768 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
11769 cond_code = GET_CODE (condition_rtx);
11770
8ef65e3d 11771 if (rs6000_compare_fp_p
423c1189
AH
11772 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11773 {
11774 rtx t;
11775
11776 PUT_MODE (condition_rtx, SImode);
11777 t = XEXP (condition_rtx, 0);
11778
37409796 11779 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
11780
11781 if (cond_code == NE)
64022b5d 11782 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 11783
64022b5d 11784 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
11785 return;
11786 }
11787
b7053a3f
GK
11788 if (cond_code == NE
11789 || cond_code == GE || cond_code == LE
11790 || cond_code == GEU || cond_code == LEU
11791 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11792 {
11793 rtx not_result = gen_reg_rtx (CCEQmode);
11794 rtx not_op, rev_cond_rtx;
11795 enum machine_mode cc_mode;
f676971a 11796
b7053a3f
GK
11797 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11798
1c563bed 11799 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 11800 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
11801 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11802 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11803 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11804 }
39a10a29
GK
11805
11806 op_mode = GET_MODE (rs6000_compare_op0);
11807 if (op_mode == VOIDmode)
11808 op_mode = GET_MODE (rs6000_compare_op1);
11809
11810 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11811 {
11812 PUT_MODE (condition_rtx, DImode);
11813 convert_move (result, condition_rtx, 0);
11814 }
11815 else
11816 {
11817 PUT_MODE (condition_rtx, SImode);
11818 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11819 }
11820}
11821
39a10a29
GK
11822/* Emit a branch of kind CODE to location LOC. */
11823
11824void
a2369ed3 11825rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
11826{
11827 rtx condition_rtx, loc_ref;
11828
11829 condition_rtx = rs6000_generate_compare (code);
11830 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11831 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11832 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11833 loc_ref, pc_rtx)));
11834}
11835
12a4e8c5
GK
11836/* Return the string to output a conditional branch to LABEL, which is
11837 the operand number of the label, or -1 if the branch is really a
f676971a 11838 conditional return.
12a4e8c5
GK
11839
11840 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11841 condition code register and its mode specifies what kind of
11842 comparison we made.
11843
a0ab749a 11844 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
11845
11846 INSN is the insn. */
11847
11848char *
a2369ed3 11849output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
11850{
11851 static char string[64];
11852 enum rtx_code code = GET_CODE (op);
11853 rtx cc_reg = XEXP (op, 0);
11854 enum machine_mode mode = GET_MODE (cc_reg);
11855 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 11856 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
11857 int really_reversed = reversed ^ need_longbranch;
11858 char *s = string;
11859 const char *ccode;
11860 const char *pred;
11861 rtx note;
11862
39a10a29
GK
11863 validate_condition_mode (code, mode);
11864
11865 /* Work out which way this really branches. We could use
11866 reverse_condition_maybe_unordered here always but this
11867 makes the resulting assembler clearer. */
12a4e8c5 11868 if (really_reversed)
de40e1df
DJ
11869 {
11870 /* Reversal of FP compares takes care -- an ordered compare
11871 becomes an unordered compare and vice versa. */
11872 if (mode == CCFPmode)
11873 code = reverse_condition_maybe_unordered (code);
11874 else
11875 code = reverse_condition (code);
11876 }
12a4e8c5 11877
8ef65e3d 11878 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
11879 {
11880 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11881 to the GT bit. */
37409796
NS
11882 switch (code)
11883 {
11884 case EQ:
11885 /* Opposite of GT. */
11886 code = GT;
11887 break;
11888
11889 case NE:
11890 code = UNLE;
11891 break;
11892
11893 default:
11894 gcc_unreachable ();
11895 }
a3170dc6
AH
11896 }
11897
39a10a29 11898 switch (code)
12a4e8c5
GK
11899 {
11900 /* Not all of these are actually distinct opcodes, but
11901 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
11902 case NE: case LTGT:
11903 ccode = "ne"; break;
11904 case EQ: case UNEQ:
11905 ccode = "eq"; break;
f676971a 11906 case GE: case GEU:
50a0b056 11907 ccode = "ge"; break;
f676971a 11908 case GT: case GTU: case UNGT:
50a0b056 11909 ccode = "gt"; break;
f676971a 11910 case LE: case LEU:
50a0b056 11911 ccode = "le"; break;
f676971a 11912 case LT: case LTU: case UNLT:
50a0b056 11913 ccode = "lt"; break;
12a4e8c5
GK
11914 case UNORDERED: ccode = "un"; break;
11915 case ORDERED: ccode = "nu"; break;
11916 case UNGE: ccode = "nl"; break;
11917 case UNLE: ccode = "ng"; break;
11918 default:
37409796 11919 gcc_unreachable ();
12a4e8c5 11920 }
f676971a
EC
11921
11922 /* Maybe we have a guess as to how likely the branch is.
94a54f47 11923 The old mnemonics don't have a way to specify this information. */
f4857b9b 11924 pred = "";
12a4e8c5
GK
11925 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
11926 if (note != NULL_RTX)
11927 {
11928 /* PROB is the difference from 50%. */
11929 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
11930
11931 /* Only hint for highly probable/improbable branches on newer
11932 cpus as static prediction overrides processor dynamic
11933 prediction. For older cpus we may as well always hint, but
11934 assume not taken for branches that are very close to 50% as a
11935 mispredicted taken branch is more expensive than a
f676971a 11936 mispredicted not-taken branch. */
ec507f2d 11937 if (rs6000_always_hint
2c9e13f3
JH
11938 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
11939 && br_prob_note_reliable_p (note)))
f4857b9b
AM
11940 {
11941 if (abs (prob) > REG_BR_PROB_BASE / 20
11942 && ((prob > 0) ^ need_longbranch))
c4ad648e 11943 pred = "+";
f4857b9b
AM
11944 else
11945 pred = "-";
11946 }
12a4e8c5 11947 }
12a4e8c5
GK
11948
11949 if (label == NULL)
94a54f47 11950 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 11951 else
94a54f47 11952 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 11953
37c67319 11954 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 11955 Assume they'd only be the first character.... */
37c67319
GK
11956 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
11957 *s++ = '%';
94a54f47 11958 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
11959
11960 if (label != NULL)
11961 {
11962 /* If the branch distance was too far, we may have to use an
11963 unconditional branch to go the distance. */
11964 if (need_longbranch)
44518ddd 11965 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
11966 else
11967 s += sprintf (s, ",%s", label);
11968 }
11969
11970 return string;
11971}
50a0b056 11972
64022b5d 11973/* Return the string to flip the GT bit on a CR. */
423c1189 11974char *
64022b5d 11975output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
11976{
11977 static char string[64];
11978 int a, b;
11979
37409796
NS
11980 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
11981 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 11982
64022b5d
AH
11983 /* GT bit. */
11984 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
11985 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
11986
11987 sprintf (string, "crnot %d,%d", a, b);
11988 return string;
11989}
11990
21213b4c
DP
11991/* Return insn index for the vector compare instruction for given CODE,
11992 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
11993 not available. */
11994
11995static int
94ff898d 11996get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
11997 enum machine_mode dest_mode,
11998 enum machine_mode op_mode)
11999{
12000 if (!TARGET_ALTIVEC)
12001 return INSN_NOT_AVAILABLE;
12002
12003 switch (code)
12004 {
12005 case EQ:
12006 if (dest_mode == V16QImode && op_mode == V16QImode)
12007 return UNSPEC_VCMPEQUB;
12008 if (dest_mode == V8HImode && op_mode == V8HImode)
12009 return UNSPEC_VCMPEQUH;
12010 if (dest_mode == V4SImode && op_mode == V4SImode)
12011 return UNSPEC_VCMPEQUW;
12012 if (dest_mode == V4SImode && op_mode == V4SFmode)
12013 return UNSPEC_VCMPEQFP;
12014 break;
12015 case GE:
12016 if (dest_mode == V4SImode && op_mode == V4SFmode)
12017 return UNSPEC_VCMPGEFP;
12018 case GT:
12019 if (dest_mode == V16QImode && op_mode == V16QImode)
12020 return UNSPEC_VCMPGTSB;
12021 if (dest_mode == V8HImode && op_mode == V8HImode)
12022 return UNSPEC_VCMPGTSH;
12023 if (dest_mode == V4SImode && op_mode == V4SImode)
12024 return UNSPEC_VCMPGTSW;
12025 if (dest_mode == V4SImode && op_mode == V4SFmode)
12026 return UNSPEC_VCMPGTFP;
12027 break;
12028 case GTU:
12029 if (dest_mode == V16QImode && op_mode == V16QImode)
12030 return UNSPEC_VCMPGTUB;
12031 if (dest_mode == V8HImode && op_mode == V8HImode)
12032 return UNSPEC_VCMPGTUH;
12033 if (dest_mode == V4SImode && op_mode == V4SImode)
12034 return UNSPEC_VCMPGTUW;
12035 break;
12036 default:
12037 break;
12038 }
12039 return INSN_NOT_AVAILABLE;
12040}
12041
12042/* Emit vector compare for operands OP0 and OP1 using code RCODE.
12043 DMODE is expected destination mode. This is a recursive function. */
12044
12045static rtx
12046rs6000_emit_vector_compare (enum rtx_code rcode,
12047 rtx op0, rtx op1,
12048 enum machine_mode dmode)
12049{
12050 int vec_cmp_insn;
12051 rtx mask;
12052 enum machine_mode dest_mode;
12053 enum machine_mode op_mode = GET_MODE (op1);
12054
37409796
NS
12055 gcc_assert (TARGET_ALTIVEC);
12056 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12057
12058 /* Floating point vector compare instructions uses destination V4SImode.
12059 Move destination to appropriate mode later. */
12060 if (dmode == V4SFmode)
12061 dest_mode = V4SImode;
12062 else
12063 dest_mode = dmode;
12064
12065 mask = gen_reg_rtx (dest_mode);
12066 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12067
12068 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12069 {
12070 bool swap_operands = false;
12071 bool try_again = false;
12072 switch (rcode)
12073 {
12074 case LT:
12075 rcode = GT;
12076 swap_operands = true;
12077 try_again = true;
12078 break;
12079 case LTU:
12080 rcode = GTU;
12081 swap_operands = true;
12082 try_again = true;
12083 break;
12084 case NE:
370df7db
JC
12085 case UNLE:
12086 case UNLT:
12087 case UNGE:
12088 case UNGT:
12089 /* Invert condition and try again.
12090 e.g., A != B becomes ~(A==B). */
21213b4c 12091 {
370df7db 12092 enum rtx_code rev_code;
21213b4c 12093 enum insn_code nor_code;
d1123cde 12094 rtx eq_rtx;
370df7db
JC
12095
12096 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12097 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12098 dest_mode);
94ff898d 12099
21213b4c 12100 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
37409796 12101 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12102 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12103
12104 if (dmode != dest_mode)
12105 {
12106 rtx temp = gen_reg_rtx (dest_mode);
12107 convert_move (temp, mask, 0);
12108 return temp;
12109 }
12110 return mask;
12111 }
12112 break;
12113 case GE:
12114 case GEU:
12115 case LE:
12116 case LEU:
12117 /* Try GT/GTU/LT/LTU OR EQ */
12118 {
12119 rtx c_rtx, eq_rtx;
12120 enum insn_code ior_code;
12121 enum rtx_code new_code;
12122
37409796
NS
12123 switch (rcode)
12124 {
12125 case GE:
12126 new_code = GT;
12127 break;
12128
12129 case GEU:
12130 new_code = GTU;
12131 break;
12132
12133 case LE:
12134 new_code = LT;
12135 break;
12136
12137 case LEU:
12138 new_code = LTU;
12139 break;
12140
12141 default:
12142 gcc_unreachable ();
12143 }
21213b4c
DP
12144
12145 c_rtx = rs6000_emit_vector_compare (new_code,
12146 op0, op1, dest_mode);
12147 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12148 dest_mode);
12149
12150 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
37409796 12151 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12152 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12153 if (dmode != dest_mode)
12154 {
12155 rtx temp = gen_reg_rtx (dest_mode);
12156 convert_move (temp, mask, 0);
12157 return temp;
12158 }
12159 return mask;
12160 }
12161 break;
12162 default:
37409796 12163 gcc_unreachable ();
21213b4c
DP
12164 }
12165
12166 if (try_again)
12167 {
12168 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12169 /* You only get two chances. */
12170 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12171 }
12172
12173 if (swap_operands)
12174 {
12175 rtx tmp;
12176 tmp = op0;
12177 op0 = op1;
12178 op1 = tmp;
12179 }
12180 }
12181
915167f5
GK
12182 emit_insn (gen_rtx_SET (VOIDmode, mask,
12183 gen_rtx_UNSPEC (dest_mode,
12184 gen_rtvec (2, op0, op1),
12185 vec_cmp_insn)));
21213b4c
DP
12186 if (dmode != dest_mode)
12187 {
12188 rtx temp = gen_reg_rtx (dest_mode);
12189 convert_move (temp, mask, 0);
12190 return temp;
12191 }
12192 return mask;
12193}
12194
12195/* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
12196 valid insn doesn exist for given mode. */
12197
12198static int
12199get_vsel_insn (enum machine_mode mode)
12200{
12201 switch (mode)
12202 {
12203 case V4SImode:
12204 return UNSPEC_VSEL4SI;
12205 break;
12206 case V4SFmode:
12207 return UNSPEC_VSEL4SF;
12208 break;
12209 case V8HImode:
12210 return UNSPEC_VSEL8HI;
12211 break;
12212 case V16QImode:
12213 return UNSPEC_VSEL16QI;
12214 break;
12215 default:
12216 return INSN_NOT_AVAILABLE;
12217 break;
12218 }
12219 return INSN_NOT_AVAILABLE;
12220}
12221
12222/* Emit vector select insn where DEST is destination using
12223 operands OP1, OP2 and MASK. */
12224
12225static void
12226rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12227{
12228 rtx t, temp;
12229 enum machine_mode dest_mode = GET_MODE (dest);
12230 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12231
12232 temp = gen_reg_rtx (dest_mode);
94ff898d 12233
bb8df8a6 12234 /* For each vector element, select op1 when mask is 1 otherwise
19f1ebc7 12235 select op2. */
915167f5
GK
12236 t = gen_rtx_SET (VOIDmode, temp,
12237 gen_rtx_UNSPEC (dest_mode,
12238 gen_rtvec (3, op2, op1, mask),
12239 vsel_insn_index));
21213b4c
DP
12240 emit_insn (t);
12241 emit_move_insn (dest, temp);
12242 return;
12243}
12244
94ff898d 12245/* Emit vector conditional expression.
21213b4c
DP
12246 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
12247 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
12248
12249int
12250rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12251 rtx cond, rtx cc_op0, rtx cc_op1)
12252{
12253 enum machine_mode dest_mode = GET_MODE (dest);
12254 enum rtx_code rcode = GET_CODE (cond);
12255 rtx mask;
12256
12257 if (!TARGET_ALTIVEC)
12258 return 0;
12259
12260 /* Get the vector mask for the given relational operations. */
12261 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12262
12263 rs6000_emit_vector_select (dest, op1, op2, mask);
12264
12265 return 1;
12266}
12267
50a0b056
GK
12268/* Emit a conditional move: move TRUE_COND to DEST if OP of the
12269 operands of the last comparison is nonzero/true, FALSE_COND if it
12270 is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 12271
50a0b056 12272int
a2369ed3 12273rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
12274{
12275 enum rtx_code code = GET_CODE (op);
12276 rtx op0 = rs6000_compare_op0;
12277 rtx op1 = rs6000_compare_op1;
12278 REAL_VALUE_TYPE c1;
3148ad6d
DJ
12279 enum machine_mode compare_mode = GET_MODE (op0);
12280 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 12281 rtx temp;
add2402e 12282 bool is_against_zero;
50a0b056 12283
a3c9585f 12284 /* These modes should always match. */
a3170dc6
AH
12285 if (GET_MODE (op1) != compare_mode
12286 /* In the isel case however, we can use a compare immediate, so
12287 op1 may be a small constant. */
12288 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 12289 return 0;
178c3eff 12290 if (GET_MODE (true_cond) != result_mode)
3148ad6d 12291 return 0;
178c3eff 12292 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
12293 return 0;
12294
50a0b056 12295 /* First, work out if the hardware can do this at all, or
a3c9585f 12296 if it's too slow.... */
50a0b056 12297 if (! rs6000_compare_fp_p)
a3170dc6
AH
12298 {
12299 if (TARGET_ISEL)
12300 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
12301 return 0;
12302 }
8ef65e3d 12303 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 12304 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 12305 return 0;
50a0b056 12306
add2402e 12307 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 12308
add2402e
GK
12309 /* A floating-point subtract might overflow, underflow, or produce
12310 an inexact result, thus changing the floating-point flags, so it
12311 can't be generated if we care about that. It's safe if one side
12312 of the construct is zero, since then no subtract will be
12313 generated. */
ebb109ad 12314 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
12315 && flag_trapping_math && ! is_against_zero)
12316 return 0;
12317
50a0b056
GK
12318 /* Eliminate half of the comparisons by switching operands, this
12319 makes the remaining code simpler. */
12320 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 12321 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
12322 {
12323 code = reverse_condition_maybe_unordered (code);
12324 temp = true_cond;
12325 true_cond = false_cond;
12326 false_cond = temp;
12327 }
12328
12329 /* UNEQ and LTGT take four instructions for a comparison with zero,
12330 it'll probably be faster to use a branch here too. */
bc9ec0e0 12331 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 12332 return 0;
f676971a 12333
50a0b056
GK
12334 if (GET_CODE (op1) == CONST_DOUBLE)
12335 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 12336
b6d08ca1 12337 /* We're going to try to implement comparisons by performing
50a0b056
GK
12338 a subtract, then comparing against zero. Unfortunately,
12339 Inf - Inf is NaN which is not zero, and so if we don't
27d30956 12340 know that the operand is finite and the comparison
50a0b056 12341 would treat EQ different to UNORDERED, we can't do it. */
bc9ec0e0 12342 if (HONOR_INFINITIES (compare_mode)
50a0b056 12343 && code != GT && code != UNGE
045572c7 12344 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
12345 /* Constructs of the form (a OP b ? a : b) are safe. */
12346 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 12347 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
12348 && ! rtx_equal_p (op1, true_cond))))
12349 return 0;
add2402e 12350
50a0b056
GK
12351 /* At this point we know we can use fsel. */
12352
12353 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
12354 if (! is_against_zero)
12355 {
12356 temp = gen_reg_rtx (compare_mode);
12357 emit_insn (gen_rtx_SET (VOIDmode, temp,
12358 gen_rtx_MINUS (compare_mode, op0, op1)));
12359 op0 = temp;
12360 op1 = CONST0_RTX (compare_mode);
12361 }
50a0b056
GK
12362
12363 /* If we don't care about NaNs we can reduce some of the comparisons
12364 down to faster ones. */
bc9ec0e0 12365 if (! HONOR_NANS (compare_mode))
50a0b056
GK
12366 switch (code)
12367 {
12368 case GT:
12369 code = LE;
12370 temp = true_cond;
12371 true_cond = false_cond;
12372 false_cond = temp;
12373 break;
12374 case UNGE:
12375 code = GE;
12376 break;
12377 case UNEQ:
12378 code = EQ;
12379 break;
12380 default:
12381 break;
12382 }
12383
12384 /* Now, reduce everything down to a GE. */
12385 switch (code)
12386 {
12387 case GE:
12388 break;
12389
12390 case LE:
3148ad6d
DJ
12391 temp = gen_reg_rtx (compare_mode);
12392 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12393 op0 = temp;
12394 break;
12395
12396 case ORDERED:
3148ad6d
DJ
12397 temp = gen_reg_rtx (compare_mode);
12398 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
12399 op0 = temp;
12400 break;
12401
12402 case EQ:
3148ad6d 12403 temp = gen_reg_rtx (compare_mode);
f676971a 12404 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
12405 gen_rtx_NEG (compare_mode,
12406 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
12407 op0 = temp;
12408 break;
12409
12410 case UNGE:
bc9ec0e0 12411 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 12412 temp = gen_reg_rtx (result_mode);
50a0b056 12413 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 12414 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12415 gen_rtx_GE (VOIDmode,
12416 op0, op1),
12417 true_cond, false_cond)));
bc9ec0e0
GK
12418 false_cond = true_cond;
12419 true_cond = temp;
50a0b056 12420
3148ad6d
DJ
12421 temp = gen_reg_rtx (compare_mode);
12422 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12423 op0 = temp;
12424 break;
12425
12426 case GT:
bc9ec0e0 12427 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 12428 temp = gen_reg_rtx (result_mode);
50a0b056 12429 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 12430 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12431 gen_rtx_GE (VOIDmode,
12432 op0, op1),
12433 true_cond, false_cond)));
bc9ec0e0
GK
12434 true_cond = false_cond;
12435 false_cond = temp;
50a0b056 12436
3148ad6d
DJ
12437 temp = gen_reg_rtx (compare_mode);
12438 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12439 op0 = temp;
12440 break;
12441
12442 default:
37409796 12443 gcc_unreachable ();
50a0b056
GK
12444 }
12445
12446 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 12447 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12448 gen_rtx_GE (VOIDmode,
12449 op0, op1),
12450 true_cond, false_cond)));
12451 return 1;
12452}
12453
a3170dc6
AH
12454/* Same as above, but for ints (isel). */
12455
12456static int
a2369ed3 12457rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
12458{
12459 rtx condition_rtx, cr;
12460
12461 /* All isel implementations thus far are 32-bits. */
12462 if (GET_MODE (rs6000_compare_op0) != SImode)
12463 return 0;
12464
12465 /* We still have to do the compare, because isel doesn't do a
12466 compare, it just looks at the CRx bits set by a previous compare
12467 instruction. */
12468 condition_rtx = rs6000_generate_compare (GET_CODE (op));
12469 cr = XEXP (condition_rtx, 0);
12470
12471 if (GET_MODE (cr) == CCmode)
12472 emit_insn (gen_isel_signed (dest, condition_rtx,
12473 true_cond, false_cond, cr));
12474 else
12475 emit_insn (gen_isel_unsigned (dest, condition_rtx,
12476 true_cond, false_cond, cr));
12477
12478 return 1;
12479}
12480
12481const char *
a2369ed3 12482output_isel (rtx *operands)
a3170dc6
AH
12483{
12484 enum rtx_code code;
12485
12486 code = GET_CODE (operands[1]);
12487 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12488 {
12489 PUT_CODE (operands[1], reverse_condition (code));
12490 return "isel %0,%3,%2,%j1";
12491 }
12492 else
12493 return "isel %0,%2,%3,%j1";
12494}
12495
50a0b056 12496void
a2369ed3 12497rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
12498{
12499 enum machine_mode mode = GET_MODE (op0);
5dc8d536 12500 enum rtx_code c;
50a0b056 12501 rtx target;
5dc8d536
AH
12502
12503 if (code == SMAX || code == SMIN)
12504 c = GE;
12505 else
12506 c = GEU;
12507
50a0b056 12508 if (code == SMAX || code == UMAX)
f676971a 12509 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
12510 op0, op1, mode, 0);
12511 else
f676971a 12512 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 12513 op1, op0, mode, 0);
37409796 12514 gcc_assert (target);
50a0b056
GK
12515 if (target != dest)
12516 emit_move_insn (dest, target);
12517}
46c07df8 12518
915167f5
GK
12519/* Emit instructions to perform a load-reserved/store-conditional operation.
12520 The operation performed is an atomic
12521 (set M (CODE:MODE M OP))
12522 If not NULL, BEFORE is atomically set to M before the operation, and
12523 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 12524 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
12525 Either OP or M may be wrapped in a NOT operation. */
12526
12527void
12528rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
12529 rtx m, rtx op, rtx before_param, rtx after_param,
12530 bool sync_p)
12531{
12532 enum machine_mode used_mode;
12533 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
12534 rtx used_m;
12535 rtvec vec;
12536 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12537 rtx shift = NULL_RTX;
bb8df8a6 12538
915167f5
GK
12539 if (sync_p)
12540 emit_insn (gen_memory_barrier ());
bb8df8a6 12541
915167f5
GK
12542 if (GET_CODE (m) == NOT)
12543 used_m = XEXP (m, 0);
12544 else
12545 used_m = m;
12546
12547 /* If this is smaller than SImode, we'll have to use SImode with
12548 adjustments. */
12549 if (mode == QImode || mode == HImode)
12550 {
12551 rtx newop, oldop;
12552
12553 if (MEM_ALIGN (used_m) >= 32)
12554 {
12555 int ishift = 0;
12556 if (BYTES_BIG_ENDIAN)
12557 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 12558
915167f5
GK
12559 shift = GEN_INT (ishift);
12560 }
12561 else
12562 {
12563 rtx addrSI, aligned_addr;
a9c9d3fa 12564 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 12565
915167f5
GK
12566 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
12567 XEXP (used_m, 0)));
12568 shift = gen_reg_rtx (SImode);
12569
12570 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
12571 GEN_INT (shift_mask)));
12572 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
12573
12574 aligned_addr = expand_binop (Pmode, and_optab,
12575 XEXP (used_m, 0),
12576 GEN_INT (-4), NULL_RTX,
12577 1, OPTAB_LIB_WIDEN);
12578 used_m = change_address (used_m, SImode, aligned_addr);
12579 set_mem_align (used_m, 32);
12580 /* It's safe to keep the old alias set of USED_M, because
12581 the operation is atomic and only affects the original
12582 USED_M. */
12583 if (GET_CODE (m) == NOT)
12584 m = gen_rtx_NOT (SImode, used_m);
12585 else
12586 m = used_m;
12587 }
12588
12589 if (GET_CODE (op) == NOT)
12590 {
12591 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
12592 oldop = gen_rtx_NOT (SImode, oldop);
12593 }
12594 else
12595 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 12596
915167f5
GK
12597 switch (code)
12598 {
12599 case IOR:
12600 case XOR:
12601 newop = expand_binop (SImode, and_optab,
12602 oldop, GEN_INT (imask), NULL_RTX,
12603 1, OPTAB_LIB_WIDEN);
12604 emit_insn (gen_ashlsi3 (newop, newop, shift));
12605 break;
12606
12607 case AND:
12608 newop = expand_binop (SImode, ior_optab,
12609 oldop, GEN_INT (~imask), NULL_RTX,
12610 1, OPTAB_LIB_WIDEN);
a9c9d3fa 12611 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
12612 break;
12613
12614 case PLUS:
9f0076e5 12615 case MINUS:
915167f5
GK
12616 {
12617 rtx mask;
bb8df8a6 12618
915167f5
GK
12619 newop = expand_binop (SImode, and_optab,
12620 oldop, GEN_INT (imask), NULL_RTX,
12621 1, OPTAB_LIB_WIDEN);
12622 emit_insn (gen_ashlsi3 (newop, newop, shift));
12623
12624 mask = gen_reg_rtx (SImode);
12625 emit_move_insn (mask, GEN_INT (imask));
12626 emit_insn (gen_ashlsi3 (mask, mask, shift));
12627
9f0076e5
DE
12628 if (code == PLUS)
12629 newop = gen_rtx_PLUS (SImode, m, newop);
12630 else
12631 newop = gen_rtx_MINUS (SImode, m, newop);
12632 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
12633 newop = gen_rtx_IOR (SImode, newop,
12634 gen_rtx_AND (SImode,
12635 gen_rtx_NOT (SImode, mask),
12636 m));
12637 break;
12638 }
12639
12640 default:
12641 gcc_unreachable ();
12642 }
12643
a9c9d3fa
GK
12644 if (GET_CODE (m) == NOT)
12645 {
12646 rtx mask, xorm;
12647
12648 mask = gen_reg_rtx (SImode);
12649 emit_move_insn (mask, GEN_INT (imask));
12650 emit_insn (gen_ashlsi3 (mask, mask, shift));
12651
12652 xorm = gen_rtx_XOR (SImode, used_m, mask);
12653 /* Depending on the value of 'op', the XOR or the operation might
12654 be able to be simplified away. */
12655 newop = simplify_gen_binary (code, SImode, xorm, newop);
12656 }
915167f5
GK
12657 op = newop;
12658 used_mode = SImode;
12659 before = gen_reg_rtx (used_mode);
12660 after = gen_reg_rtx (used_mode);
12661 }
12662 else
12663 {
12664 used_mode = mode;
12665 before = before_param;
12666 after = after_param;
12667
12668 if (before == NULL_RTX)
12669 before = gen_reg_rtx (used_mode);
12670 if (after == NULL_RTX)
12671 after = gen_reg_rtx (used_mode);
12672 }
bb8df8a6 12673
9f0076e5
DE
12674 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12675 && used_mode != mode)
915167f5
GK
12676 the_op = op; /* Computed above. */
12677 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12678 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12679 else
12680 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12681
12682 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12683 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12684 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12685 gen_rtx_UNSPEC (used_mode,
12686 gen_rtvec (1, the_op),
12687 UNSPEC_SYNC_OP));
915167f5
GK
12688 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12689
9f0076e5 12690 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12691 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12692 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12693 else
12694 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12695 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12696
12697 /* Shift and mask the return values properly. */
12698 if (used_mode != mode && before_param)
12699 {
12700 emit_insn (gen_lshrsi3 (before, before, shift));
12701 convert_move (before_param, before, 1);
12702 }
12703
12704 if (used_mode != mode && after_param)
12705 {
12706 emit_insn (gen_lshrsi3 (after, after, shift));
12707 convert_move (after_param, after, 1);
12708 }
12709
12710 /* The previous sequence will end with a branch that's dependent on
12711 the conditional store, so placing an isync will ensure that no
12712 other instructions (especially, no load or store instructions)
12713 can start before the atomic operation completes. */
12714 if (sync_p)
12715 emit_insn (gen_isync ());
12716}
12717
b52110d4
DE
12718/* A subroutine of the atomic operation splitters. Jump to LABEL if
12719 COND is true. Mark the jump as unlikely to be taken. */
12720
12721static void
12722emit_unlikely_jump (rtx cond, rtx label)
12723{
12724 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12725 rtx x;
12726
12727 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12728 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12729 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12730}
12731
12732/* A subroutine of the atomic operation splitters. Emit a load-locked
12733 instruction in MODE. */
12734
12735static void
12736emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12737{
12738 rtx (*fn) (rtx, rtx) = NULL;
12739 if (mode == SImode)
12740 fn = gen_load_locked_si;
12741 else if (mode == DImode)
12742 fn = gen_load_locked_di;
12743 emit_insn (fn (reg, mem));
12744}
12745
12746/* A subroutine of the atomic operation splitters. Emit a store-conditional
12747 instruction in MODE. */
12748
12749static void
12750emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12751{
12752 rtx (*fn) (rtx, rtx, rtx) = NULL;
12753 if (mode == SImode)
12754 fn = gen_store_conditional_si;
12755 else if (mode == DImode)
12756 fn = gen_store_conditional_di;
12757
9f0076e5 12758 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12759 if (PPC405_ERRATUM77)
12760 emit_insn (gen_memory_barrier ());
12761
12762 emit_insn (fn (res, mem, val));
12763}
12764
ea2c620c 12765/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 12766 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12767 operand of the binary operator. BEFORE and AFTER are optional locations to
12768 return the value of MEM either before of after the operation. SCRATCH is
12769 a scratch register. */
12770
12771void
12772rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12773 rtx before, rtx after, rtx scratch)
12774{
12775 enum machine_mode mode = GET_MODE (mem);
12776 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12777
12778 emit_insn (gen_memory_barrier ());
12779
12780 label = gen_label_rtx ();
12781 emit_label (label);
12782 label = gen_rtx_LABEL_REF (VOIDmode, label);
12783
12784 if (before == NULL_RTX)
12785 before = scratch;
12786 emit_load_locked (mode, before, mem);
12787
12788 if (code == NOT)
12789 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12790 else if (code == AND)
12791 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12792 else
12793 x = gen_rtx_fmt_ee (code, mode, before, val);
12794
12795 if (after != NULL_RTX)
12796 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12797 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12798
12799 emit_store_conditional (mode, cond, mem, scratch);
12800
12801 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12802 emit_unlikely_jump (x, label);
12803
12804 emit_insn (gen_isync ());
12805}
12806
b52110d4
DE
12807/* Expand an atomic compare and swap operation. MEM is the memory on which
12808 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12809 value to be stored. SCRATCH is a scratch GPR. */
12810
12811void
12812rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12813 rtx scratch)
12814{
12815 enum machine_mode mode = GET_MODE (mem);
12816 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12817
12818 emit_insn (gen_memory_barrier ());
12819
12820 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12821 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12822 emit_label (XEXP (label1, 0));
12823
12824 emit_load_locked (mode, retval, mem);
12825
12826 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12827 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12828
12829 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12830 emit_unlikely_jump (x, label2);
12831
12832 emit_move_insn (scratch, newval);
12833 emit_store_conditional (mode, cond, mem, scratch);
12834
12835 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12836 emit_unlikely_jump (x, label1);
12837
12838 emit_insn (gen_isync ());
12839 emit_label (XEXP (label2, 0));
12840}
12841
12842/* Expand an atomic test and set operation. MEM is the memory on which
12843 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
12844
12845void
12846rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
12847{
12848 enum machine_mode mode = GET_MODE (mem);
12849 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12850
12851 emit_insn (gen_memory_barrier ());
12852
12853 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12854 emit_label (XEXP (label, 0));
12855
12856 emit_load_locked (mode, retval, mem);
12857 emit_move_insn (scratch, val);
12858 emit_store_conditional (mode, cond, mem, scratch);
12859
12860 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12861 emit_unlikely_jump (x, label);
12862
12863 emit_insn (gen_isync ());
12864}
12865
9fc75b97
DE
12866void
12867rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
12868{
12869 enum machine_mode mode = GET_MODE (mem);
12870 rtx addrSI, align, wdst, shift, mask;
12871 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
12872 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12873
12874 /* Shift amount for subword relative to aligned word. */
12875 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
12876 shift = gen_reg_rtx (SImode);
12877 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
12878 GEN_INT (shift_mask)));
12879 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
12880
12881 /* Shift and mask old value into position within word. */
12882 oldval = convert_modes (SImode, mode, oldval, 1);
12883 oldval = expand_binop (SImode, and_optab,
12884 oldval, GEN_INT (imask), NULL_RTX,
12885 1, OPTAB_LIB_WIDEN);
12886 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
12887
12888 /* Shift and mask new value into position within word. */
12889 newval = convert_modes (SImode, mode, newval, 1);
12890 newval = expand_binop (SImode, and_optab,
12891 newval, GEN_INT (imask), NULL_RTX,
12892 1, OPTAB_LIB_WIDEN);
12893 emit_insn (gen_ashlsi3 (newval, newval, shift));
12894
12895 /* Mask for insertion. */
12896 mask = gen_reg_rtx (SImode);
12897 emit_move_insn (mask, GEN_INT (imask));
12898 emit_insn (gen_ashlsi3 (mask, mask, shift));
12899
12900 /* Address of aligned word containing subword. */
12901 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
12902 NULL_RTX, 1, OPTAB_LIB_WIDEN);
12903 mem = change_address (mem, SImode, align);
12904 set_mem_align (mem, 32);
12905 MEM_VOLATILE_P (mem) = 1;
12906
12907 wdst = gen_reg_rtx (SImode);
12908 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
12909 oldval, newval, mem));
12910
12911 emit_move_insn (dst, gen_lowpart (mode, wdst));
12912}
12913
12914void
12915rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
12916 rtx oldval, rtx newval, rtx mem,
12917 rtx scratch)
12918{
12919 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12920
12921 emit_insn (gen_memory_barrier ());
12922 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12923 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12924 emit_label (XEXP (label1, 0));
12925
12926 emit_load_locked (SImode, scratch, mem);
12927
12928 /* Mask subword within loaded value for comparison with oldval.
12929 Use UNSPEC_AND to avoid clobber.*/
12930 emit_insn (gen_rtx_SET (SImode, dest,
12931 gen_rtx_UNSPEC (SImode,
12932 gen_rtvec (2, scratch, mask),
12933 UNSPEC_AND)));
12934
12935 x = gen_rtx_COMPARE (CCmode, dest, oldval);
12936 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12937
12938 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12939 emit_unlikely_jump (x, label2);
12940
12941 /* Clear subword within loaded value for insertion of new value. */
12942 emit_insn (gen_rtx_SET (SImode, scratch,
12943 gen_rtx_AND (SImode,
12944 gen_rtx_NOT (SImode, mask), scratch)));
12945 emit_insn (gen_iorsi3 (scratch, scratch, newval));
12946 emit_store_conditional (SImode, cond, mem, scratch);
12947
12948 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12949 emit_unlikely_jump (x, label1);
12950
12951 emit_insn (gen_isync ());
12952 emit_label (XEXP (label2, 0));
12953}
12954
12955
b52110d4 12956 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
12957 multi-register moves. It will emit at most one instruction for
12958 each register that is accessed; that is, it won't emit li/lis pairs
12959 (or equivalent for 64-bit code). One of SRC or DST must be a hard
12960 register. */
46c07df8 12961
46c07df8 12962void
a9baceb1 12963rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 12964{
a9baceb1
GK
12965 /* The register number of the first register being moved. */
12966 int reg;
12967 /* The mode that is to be moved. */
12968 enum machine_mode mode;
12969 /* The mode that the move is being done in, and its size. */
12970 enum machine_mode reg_mode;
12971 int reg_mode_size;
12972 /* The number of registers that will be moved. */
12973 int nregs;
12974
12975 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
12976 mode = GET_MODE (dst);
c8b622ff 12977 nregs = hard_regno_nregs[reg][mode];
a9baceb1 12978 if (FP_REGNO_P (reg))
7393f7f8 12979 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
12980 else if (ALTIVEC_REGNO_P (reg))
12981 reg_mode = V16QImode;
8521c414
JM
12982 else if (TARGET_E500_DOUBLE && mode == TFmode)
12983 reg_mode = DFmode;
a9baceb1
GK
12984 else
12985 reg_mode = word_mode;
12986 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 12987
37409796 12988 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 12989
a9baceb1
GK
12990 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
12991 {
12992 /* Move register range backwards, if we might have destructive
12993 overlap. */
12994 int i;
12995 for (i = nregs - 1; i >= 0; i--)
f676971a 12996 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
12997 simplify_gen_subreg (reg_mode, dst, mode,
12998 i * reg_mode_size),
12999 simplify_gen_subreg (reg_mode, src, mode,
13000 i * reg_mode_size)));
13001 }
46c07df8
HP
13002 else
13003 {
a9baceb1
GK
13004 int i;
13005 int j = -1;
13006 bool used_update = false;
46c07df8 13007
c1e55850 13008 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13009 {
13010 rtx breg;
3a1f863f 13011
a9baceb1
GK
13012 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13013 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13014 {
13015 rtx delta_rtx;
a9baceb1 13016 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13017 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13018 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13019 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13020 emit_insn (TARGET_32BIT
13021 ? gen_addsi3 (breg, breg, delta_rtx)
13022 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13023 src = replace_equiv_address (src, breg);
3a1f863f 13024 }
d04b6e6e 13025 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13026 {
13e2e16e 13027 rtx basereg;
c1e55850
GK
13028 basereg = gen_rtx_REG (Pmode, reg);
13029 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13030 src = replace_equiv_address (src, basereg);
c1e55850 13031 }
3a1f863f 13032
0423421f
AM
13033 breg = XEXP (src, 0);
13034 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13035 breg = XEXP (breg, 0);
13036
13037 /* If the base register we are using to address memory is
13038 also a destination reg, then change that register last. */
13039 if (REG_P (breg)
13040 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13041 && REGNO (breg) < REGNO (dst) + nregs)
13042 j = REGNO (breg) - REGNO (dst);
c4ad648e 13043 }
46c07df8 13044
a9baceb1 13045 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13046 {
13047 rtx breg;
13048
a9baceb1
GK
13049 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13050 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13051 {
13052 rtx delta_rtx;
a9baceb1 13053 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13054 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13055 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13056 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13057
13058 /* We have to update the breg before doing the store.
13059 Use store with update, if available. */
13060
13061 if (TARGET_UPDATE)
13062 {
a9baceb1 13063 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13064 emit_insn (TARGET_32BIT
13065 ? (TARGET_POWERPC64
13066 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13067 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13068 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13069 used_update = true;
3a1f863f
DE
13070 }
13071 else
a9baceb1
GK
13072 emit_insn (TARGET_32BIT
13073 ? gen_addsi3 (breg, breg, delta_rtx)
13074 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13075 dst = replace_equiv_address (dst, breg);
3a1f863f 13076 }
37409796 13077 else
d04b6e6e 13078 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13079 }
13080
46c07df8 13081 for (i = 0; i < nregs; i++)
f676971a 13082 {
3a1f863f
DE
13083 /* Calculate index to next subword. */
13084 ++j;
f676971a 13085 if (j == nregs)
3a1f863f 13086 j = 0;
46c07df8 13087
112cdef5 13088 /* If compiler already emitted move of first word by
a9baceb1 13089 store with update, no need to do anything. */
3a1f863f 13090 if (j == 0 && used_update)
a9baceb1 13091 continue;
f676971a 13092
a9baceb1
GK
13093 emit_insn (gen_rtx_SET (VOIDmode,
13094 simplify_gen_subreg (reg_mode, dst, mode,
13095 j * reg_mode_size),
13096 simplify_gen_subreg (reg_mode, src, mode,
13097 j * reg_mode_size)));
3a1f863f 13098 }
46c07df8
HP
13099 }
13100}
13101
12a4e8c5 13102\f
a4f6c312
SS
13103/* This page contains routines that are used to determine what the
13104 function prologue and epilogue code will do and write them out. */
9878760c 13105
a4f6c312
SS
13106/* Return the first fixed-point register that is required to be
13107 saved. 32 if none. */
9878760c
RK
13108
13109int
863d938c 13110first_reg_to_save (void)
9878760c
RK
13111{
13112 int first_reg;
13113
13114 /* Find lowest numbered live register. */
13115 for (first_reg = 13; first_reg <= 31; first_reg++)
f676971a 13116 if (regs_ever_live[first_reg]
a38d360d 13117 && (! call_used_regs[first_reg]
1db02437 13118 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13119 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13120 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13121 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13122 break;
13123
ee890fe2 13124#if TARGET_MACHO
93638d7a
AM
13125 if (flag_pic
13126 && current_function_uses_pic_offset_table
13127 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13128 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13129#endif
13130
9878760c
RK
13131 return first_reg;
13132}
13133
13134/* Similar, for FP regs. */
13135
13136int
863d938c 13137first_fp_reg_to_save (void)
9878760c
RK
13138{
13139 int first_reg;
13140
13141 /* Find lowest numbered live register. */
13142 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
13143 if (regs_ever_live[first_reg])
13144 break;
13145
13146 return first_reg;
13147}
00b960c7
AH
13148
13149/* Similar, for AltiVec regs. */
13150
13151static int
863d938c 13152first_altivec_reg_to_save (void)
00b960c7
AH
13153{
13154 int i;
13155
13156 /* Stack frame remains as is unless we are in AltiVec ABI. */
13157 if (! TARGET_ALTIVEC_ABI)
13158 return LAST_ALTIVEC_REGNO + 1;
13159
22fa69da 13160 /* On Darwin, the unwind routines are compiled without
982afe02 13161 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13162 altivec registers when necessary. */
13163 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13164 && ! TARGET_ALTIVEC)
13165 return FIRST_ALTIVEC_REGNO + 20;
13166
00b960c7
AH
13167 /* Find lowest numbered live register. */
13168 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
13169 if (regs_ever_live[i])
13170 break;
13171
13172 return i;
13173}
13174
13175/* Return a 32-bit mask of the AltiVec registers we need to set in
13176 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
13177 the 32-bit word is 0. */
13178
13179static unsigned int
863d938c 13180compute_vrsave_mask (void)
00b960c7
AH
13181{
13182 unsigned int i, mask = 0;
13183
22fa69da 13184 /* On Darwin, the unwind routines are compiled without
982afe02 13185 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13186 call-saved altivec registers when necessary. */
13187 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13188 && ! TARGET_ALTIVEC)
13189 mask |= 0xFFF;
13190
00b960c7
AH
13191 /* First, find out if we use _any_ altivec registers. */
13192 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
13193 if (regs_ever_live[i])
13194 mask |= ALTIVEC_REG_BIT (i);
13195
13196 if (mask == 0)
13197 return mask;
13198
00b960c7
AH
13199 /* Next, remove the argument registers from the set. These must
13200 be in the VRSAVE mask set by the caller, so we don't need to add
13201 them in again. More importantly, the mask we compute here is
13202 used to generate CLOBBERs in the set_vrsave insn, and we do not
13203 wish the argument registers to die. */
a6cf80f2 13204 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
13205 mask &= ~ALTIVEC_REG_BIT (i);
13206
13207 /* Similarly, remove the return value from the set. */
13208 {
13209 bool yes = false;
13210 diddle_return_value (is_altivec_return_reg, &yes);
13211 if (yes)
13212 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
13213 }
13214
13215 return mask;
13216}
13217
d62294f5 13218/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
13219 size of prologues/epilogues by calling our own save/restore-the-world
13220 routines. */
d62294f5
FJ
13221
13222static void
f57fe068
AM
13223compute_save_world_info (rs6000_stack_t *info_ptr)
13224{
13225 info_ptr->world_save_p = 1;
13226 info_ptr->world_save_p
13227 = (WORLD_SAVE_P (info_ptr)
13228 && DEFAULT_ABI == ABI_DARWIN
13229 && ! (current_function_calls_setjmp && flag_exceptions)
13230 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13231 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13232 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13233 && info_ptr->cr_save_p);
f676971a 13234
d62294f5
FJ
13235 /* This will not work in conjunction with sibcalls. Make sure there
13236 are none. (This check is expensive, but seldom executed.) */
f57fe068 13237 if (WORLD_SAVE_P (info_ptr))
f676971a 13238 {
d62294f5
FJ
13239 rtx insn;
13240 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13241 if ( GET_CODE (insn) == CALL_INSN
13242 && SIBLING_CALL_P (insn))
13243 {
13244 info_ptr->world_save_p = 0;
13245 break;
13246 }
d62294f5 13247 }
f676971a 13248
f57fe068 13249 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13250 {
13251 /* Even if we're not touching VRsave, make sure there's room on the
13252 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13253 will attempt to save it. */
d62294f5
FJ
13254 info_ptr->vrsave_size = 4;
13255
13256 /* "Save" the VRsave register too if we're saving the world. */
13257 if (info_ptr->vrsave_mask == 0)
c4ad648e 13258 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13259
13260 /* Because the Darwin register save/restore routines only handle
c4ad648e 13261 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 13262 check. */
37409796
NS
13263 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
13264 && (info_ptr->first_altivec_reg_save
13265 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 13266 }
f676971a 13267 return;
d62294f5
FJ
13268}
13269
13270
00b960c7 13271static void
a2369ed3 13272is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
13273{
13274 bool *yes = (bool *) xyes;
13275 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
13276 *yes = true;
13277}
13278
4697a36c
MM
13279\f
13280/* Calculate the stack information for the current function. This is
13281 complicated by having two separate calling sequences, the AIX calling
13282 sequence and the V.4 calling sequence.
13283
592696dd 13284 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 13285 32-bit 64-bit
4697a36c 13286 SP----> +---------------------------------------+
a260abc9 13287 | back chain to caller | 0 0
4697a36c 13288 +---------------------------------------+
a260abc9 13289 | saved CR | 4 8 (8-11)
4697a36c 13290 +---------------------------------------+
a260abc9 13291 | saved LR | 8 16
4697a36c 13292 +---------------------------------------+
a260abc9 13293 | reserved for compilers | 12 24
4697a36c 13294 +---------------------------------------+
a260abc9 13295 | reserved for binders | 16 32
4697a36c 13296 +---------------------------------------+
a260abc9 13297 | saved TOC pointer | 20 40
4697a36c 13298 +---------------------------------------+
a260abc9 13299 | Parameter save area (P) | 24 48
4697a36c 13300 +---------------------------------------+
a260abc9 13301 | Alloca space (A) | 24+P etc.
802a0058 13302 +---------------------------------------+
a7df97e6 13303 | Local variable space (L) | 24+P+A
4697a36c 13304 +---------------------------------------+
a7df97e6 13305 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 13306 +---------------------------------------+
00b960c7
AH
13307 | Save area for AltiVec registers (W) | 24+P+A+L+X
13308 +---------------------------------------+
13309 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
13310 +---------------------------------------+
13311 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 13312 +---------------------------------------+
00b960c7
AH
13313 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
13314 +---------------------------------------+
13315 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
4697a36c
MM
13316 +---------------------------------------+
13317 old SP->| back chain to caller's caller |
13318 +---------------------------------------+
13319
5376a30c
KR
13320 The required alignment for AIX configurations is two words (i.e., 8
13321 or 16 bytes).
13322
13323
4697a36c
MM
13324 V.4 stack frames look like:
13325
13326 SP----> +---------------------------------------+
13327 | back chain to caller | 0
13328 +---------------------------------------+
5eb387b8 13329 | caller's saved LR | 4
4697a36c
MM
13330 +---------------------------------------+
13331 | Parameter save area (P) | 8
13332 +---------------------------------------+
a7df97e6 13333 | Alloca space (A) | 8+P
f676971a 13334 +---------------------------------------+
a7df97e6 13335 | Varargs save area (V) | 8+P+A
f676971a 13336 +---------------------------------------+
a7df97e6 13337 | Local variable space (L) | 8+P+A+V
f676971a 13338 +---------------------------------------+
a7df97e6 13339 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 13340 +---------------------------------------+
00b960c7
AH
13341 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
13342 +---------------------------------------+
13343 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
13344 +---------------------------------------+
13345 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
13346 +---------------------------------------+
c4ad648e
AM
13347 | SPE: area for 64-bit GP registers |
13348 +---------------------------------------+
13349 | SPE alignment padding |
13350 +---------------------------------------+
00b960c7 13351 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 13352 +---------------------------------------+
00b960c7 13353 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 13354 +---------------------------------------+
00b960c7 13355 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
13356 +---------------------------------------+
13357 old SP->| back chain to caller's caller |
13358 +---------------------------------------+
b6c9286a 13359
5376a30c
KR
13360 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
13361 given. (But note below and in sysv4.h that we require only 8 and
13362 may round up the size of our stack frame anyways. The historical
13363 reason is early versions of powerpc-linux which didn't properly
13364 align the stack at program startup. A happy side-effect is that
13365 -mno-eabi libraries can be used with -meabi programs.)
13366
50d440bc 13367 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
13368 the stack alignment requirements may differ. If -mno-eabi is not
13369 given, the required stack alignment is 8 bytes; if -mno-eabi is
13370 given, the required alignment is 16 bytes. (But see V.4 comment
13371 above.) */
4697a36c 13372
61b2fbe7
MM
13373#ifndef ABI_STACK_BOUNDARY
13374#define ABI_STACK_BOUNDARY STACK_BOUNDARY
13375#endif
13376
d1d0c603 13377static rs6000_stack_t *
863d938c 13378rs6000_stack_info (void)
4697a36c 13379{
022123e6 13380 static rs6000_stack_t info;
4697a36c 13381 rs6000_stack_t *info_ptr = &info;
327e5343 13382 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 13383 int ehrd_size;
64045029 13384 int save_align;
44688022 13385 HOST_WIDE_INT non_fixed_size;
4697a36c 13386
022123e6 13387 memset (&info, 0, sizeof (info));
4697a36c 13388
c19de7aa
AH
13389 if (TARGET_SPE)
13390 {
13391 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 13392 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
13393 cfun->machine->insn_chain_scanned_p
13394 = spe_func_has_64bit_regs_p () + 1;
13395 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
13396 }
13397
a4f6c312 13398 /* Select which calling sequence. */
178274da 13399 info_ptr->abi = DEFAULT_ABI;
9878760c 13400
a4f6c312 13401 /* Calculate which registers need to be saved & save area size. */
4697a36c 13402 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 13403 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 13404 even if it currently looks like we won't. */
2bfcf297 13405 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
13406 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
13407 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
13408 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
13409 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
13410 else
13411 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 13412
a3170dc6
AH
13413 /* For the SPE, we have an additional upper 32-bits on each GPR.
13414 Ideally we should save the entire 64-bits only when the upper
13415 half is used in SIMD instructions. Since we only record
13416 registers live (not the size they are used in), this proves
13417 difficult because we'd have to traverse the instruction chain at
13418 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
13419 so we opt to save the GPRs in 64-bits always if but one register
13420 gets used in 64-bits. Otherwise, all the registers in the frame
13421 get saved in 32-bits.
a3170dc6 13422
c19de7aa 13423 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 13424 traditional GP save area will be empty. */
c19de7aa 13425 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13426 info_ptr->gp_size = 0;
13427
4697a36c
MM
13428 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13429 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13430
00b960c7
AH
13431 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13432 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13433 - info_ptr->first_altivec_reg_save);
13434
592696dd 13435 /* Does this function call anything? */
71f123ca
FS
13436 info_ptr->calls_p = (! current_function_is_leaf
13437 || cfun->machine->ra_needs_full_frame);
b6c9286a 13438
a4f6c312 13439 /* Determine if we need to save the link register. */
022123e6
AM
13440 if ((DEFAULT_ABI == ABI_AIX
13441 && current_function_profile
13442 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
13443#ifdef TARGET_RELOCATABLE
13444 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13445#endif
13446 || (info_ptr->first_fp_reg_save != 64
13447 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 13448 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 13449 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
13450 || info_ptr->calls_p
13451 || rs6000_ra_ever_killed ())
4697a36c
MM
13452 {
13453 info_ptr->lr_save_p = 1;
9ebbca7d 13454 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
4697a36c
MM
13455 }
13456
9ebbca7d 13457 /* Determine if we need to save the condition code registers. */
f676971a 13458 if (regs_ever_live[CR2_REGNO]
9ebbca7d
GK
13459 || regs_ever_live[CR3_REGNO]
13460 || regs_ever_live[CR4_REGNO])
4697a36c
MM
13461 {
13462 info_ptr->cr_save_p = 1;
178274da 13463 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
13464 info_ptr->cr_size = reg_size;
13465 }
13466
83720594
RH
13467 /* If the current function calls __builtin_eh_return, then we need
13468 to allocate stack space for registers that will hold data for
13469 the exception handler. */
13470 if (current_function_calls_eh_return)
13471 {
13472 unsigned int i;
13473 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
13474 continue;
a3170dc6
AH
13475
13476 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
13477 ehrd_size = i * (TARGET_SPE_ABI
13478 && info_ptr->spe_64bit_regs_used != 0
13479 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
13480 }
13481 else
13482 ehrd_size = 0;
13483
592696dd 13484 /* Determine various sizes. */
4697a36c
MM
13485 info_ptr->reg_size = reg_size;
13486 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 13487 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 13488 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 13489 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
13490 if (FRAME_GROWS_DOWNWARD)
13491 info_ptr->vars_size
5b667039
JJ
13492 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
13493 + info_ptr->parm_size,
7d5175e1 13494 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
13495 - (info_ptr->fixed_size + info_ptr->vars_size
13496 + info_ptr->parm_size);
00b960c7 13497
c19de7aa 13498 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13499 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
13500 else
13501 info_ptr->spe_gp_size = 0;
13502
4d774ff8
HP
13503 if (TARGET_ALTIVEC_ABI)
13504 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 13505 else
4d774ff8
HP
13506 info_ptr->vrsave_mask = 0;
13507
13508 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
13509 info_ptr->vrsave_size = 4;
13510 else
13511 info_ptr->vrsave_size = 0;
b6c9286a 13512
d62294f5
FJ
13513 compute_save_world_info (info_ptr);
13514
592696dd 13515 /* Calculate the offsets. */
178274da 13516 switch (DEFAULT_ABI)
4697a36c 13517 {
b6c9286a 13518 case ABI_NONE:
24d304eb 13519 default:
37409796 13520 gcc_unreachable ();
b6c9286a
MM
13521
13522 case ABI_AIX:
ee890fe2 13523 case ABI_DARWIN:
b6c9286a
MM
13524 info_ptr->fp_save_offset = - info_ptr->fp_size;
13525 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
13526
13527 if (TARGET_ALTIVEC_ABI)
13528 {
13529 info_ptr->vrsave_save_offset
13530 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
13531
982afe02 13532 /* Align stack so vector save area is on a quadword boundary.
9278121c 13533 The padding goes above the vectors. */
00b960c7
AH
13534 if (info_ptr->altivec_size != 0)
13535 info_ptr->altivec_padding_size
9278121c 13536 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
13537 else
13538 info_ptr->altivec_padding_size = 0;
13539
13540 info_ptr->altivec_save_offset
13541 = info_ptr->vrsave_save_offset
13542 - info_ptr->altivec_padding_size
13543 - info_ptr->altivec_size;
9278121c
GK
13544 gcc_assert (info_ptr->altivec_size == 0
13545 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
13546
13547 /* Adjust for AltiVec case. */
13548 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
13549 }
13550 else
13551 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
13552 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
13553 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
13554 break;
13555
13556 case ABI_V4:
b6c9286a
MM
13557 info_ptr->fp_save_offset = - info_ptr->fp_size;
13558 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 13559 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 13560
c19de7aa 13561 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
13562 {
13563 /* Align stack so SPE GPR save area is aligned on a
13564 double-word boundary. */
13565 if (info_ptr->spe_gp_size != 0)
13566 info_ptr->spe_padding_size
13567 = 8 - (-info_ptr->cr_save_offset % 8);
13568 else
13569 info_ptr->spe_padding_size = 0;
13570
13571 info_ptr->spe_gp_save_offset
13572 = info_ptr->cr_save_offset
13573 - info_ptr->spe_padding_size
13574 - info_ptr->spe_gp_size;
13575
13576 /* Adjust for SPE case. */
022123e6 13577 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 13578 }
a3170dc6 13579 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
13580 {
13581 info_ptr->vrsave_save_offset
13582 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
13583
13584 /* Align stack so vector save area is on a quadword boundary. */
13585 if (info_ptr->altivec_size != 0)
13586 info_ptr->altivec_padding_size
13587 = 16 - (-info_ptr->vrsave_save_offset % 16);
13588 else
13589 info_ptr->altivec_padding_size = 0;
13590
13591 info_ptr->altivec_save_offset
13592 = info_ptr->vrsave_save_offset
13593 - info_ptr->altivec_padding_size
13594 - info_ptr->altivec_size;
13595
13596 /* Adjust for AltiVec case. */
022123e6 13597 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
13598 }
13599 else
022123e6
AM
13600 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
13601 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
13602 info_ptr->lr_save_offset = reg_size;
13603 break;
4697a36c
MM
13604 }
13605
64045029 13606 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
13607 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
13608 + info_ptr->gp_size
13609 + info_ptr->altivec_size
13610 + info_ptr->altivec_padding_size
a3170dc6
AH
13611 + info_ptr->spe_gp_size
13612 + info_ptr->spe_padding_size
00b960c7
AH
13613 + ehrd_size
13614 + info_ptr->cr_size
022123e6 13615 + info_ptr->vrsave_size,
64045029 13616 save_align);
00b960c7 13617
44688022 13618 non_fixed_size = (info_ptr->vars_size
ff381587 13619 + info_ptr->parm_size
5b667039 13620 + info_ptr->save_size);
ff381587 13621
44688022
AM
13622 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
13623 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
13624
13625 /* Determine if we need to allocate any stack frame:
13626
a4f6c312
SS
13627 For AIX we need to push the stack if a frame pointer is needed
13628 (because the stack might be dynamically adjusted), if we are
13629 debugging, if we make calls, or if the sum of fp_save, gp_save,
13630 and local variables are more than the space needed to save all
13631 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
13632 + 18*8 = 288 (GPR13 reserved).
ff381587 13633
a4f6c312
SS
13634 For V.4 we don't have the stack cushion that AIX uses, but assume
13635 that the debugger can handle stackless frames. */
ff381587
MM
13636
13637 if (info_ptr->calls_p)
13638 info_ptr->push_p = 1;
13639
178274da 13640 else if (DEFAULT_ABI == ABI_V4)
44688022 13641 info_ptr->push_p = non_fixed_size != 0;
ff381587 13642
178274da
AM
13643 else if (frame_pointer_needed)
13644 info_ptr->push_p = 1;
13645
13646 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
13647 info_ptr->push_p = 1;
13648
ff381587 13649 else
44688022 13650 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 13651
a4f6c312 13652 /* Zero offsets if we're not saving those registers. */
8dda1a21 13653 if (info_ptr->fp_size == 0)
4697a36c
MM
13654 info_ptr->fp_save_offset = 0;
13655
8dda1a21 13656 if (info_ptr->gp_size == 0)
4697a36c
MM
13657 info_ptr->gp_save_offset = 0;
13658
00b960c7
AH
13659 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
13660 info_ptr->altivec_save_offset = 0;
13661
13662 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
13663 info_ptr->vrsave_save_offset = 0;
13664
c19de7aa
AH
13665 if (! TARGET_SPE_ABI
13666 || info_ptr->spe_64bit_regs_used == 0
13667 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
13668 info_ptr->spe_gp_save_offset = 0;
13669
c81fc13e 13670 if (! info_ptr->lr_save_p)
4697a36c
MM
13671 info_ptr->lr_save_offset = 0;
13672
c81fc13e 13673 if (! info_ptr->cr_save_p)
4697a36c
MM
13674 info_ptr->cr_save_offset = 0;
13675
13676 return info_ptr;
13677}
13678
c19de7aa
AH
13679/* Return true if the current function uses any GPRs in 64-bit SIMD
13680 mode. */
13681
13682static bool
863d938c 13683spe_func_has_64bit_regs_p (void)
c19de7aa
AH
13684{
13685 rtx insns, insn;
13686
13687 /* Functions that save and restore all the call-saved registers will
13688 need to save/restore the registers in 64-bits. */
13689 if (current_function_calls_eh_return
13690 || current_function_calls_setjmp
13691 || current_function_has_nonlocal_goto)
13692 return true;
13693
13694 insns = get_insns ();
13695
13696 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13697 {
13698 if (INSN_P (insn))
13699 {
13700 rtx i;
13701
b5a5beb9
AH
13702 /* FIXME: This should be implemented with attributes...
13703
13704 (set_attr "spe64" "true")....then,
13705 if (get_spe64(insn)) return true;
13706
13707 It's the only reliable way to do the stuff below. */
13708
c19de7aa 13709 i = PATTERN (insn);
f82f556d
AH
13710 if (GET_CODE (i) == SET)
13711 {
13712 enum machine_mode mode = GET_MODE (SET_SRC (i));
13713
13714 if (SPE_VECTOR_MODE (mode))
13715 return true;
17caeff2 13716 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
13717 return true;
13718 }
c19de7aa
AH
13719 }
13720 }
13721
13722 return false;
13723}
13724
d1d0c603 13725static void
a2369ed3 13726debug_stack_info (rs6000_stack_t *info)
9878760c 13727{
d330fd93 13728 const char *abi_string;
24d304eb 13729
c81fc13e 13730 if (! info)
4697a36c
MM
13731 info = rs6000_stack_info ();
13732
13733 fprintf (stderr, "\nStack information for function %s:\n",
13734 ((current_function_decl && DECL_NAME (current_function_decl))
13735 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13736 : "<unknown>"));
13737
24d304eb
RK
13738 switch (info->abi)
13739 {
b6c9286a
MM
13740 default: abi_string = "Unknown"; break;
13741 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 13742 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 13743 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 13744 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
13745 }
13746
13747 fprintf (stderr, "\tABI = %5s\n", abi_string);
13748
00b960c7
AH
13749 if (TARGET_ALTIVEC_ABI)
13750 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13751
a3170dc6
AH
13752 if (TARGET_SPE_ABI)
13753 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13754
4697a36c
MM
13755 if (info->first_gp_reg_save != 32)
13756 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13757
13758 if (info->first_fp_reg_save != 64)
13759 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 13760
00b960c7
AH
13761 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13762 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13763 info->first_altivec_reg_save);
13764
4697a36c
MM
13765 if (info->lr_save_p)
13766 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 13767
4697a36c
MM
13768 if (info->cr_save_p)
13769 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13770
00b960c7
AH
13771 if (info->vrsave_mask)
13772 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13773
4697a36c
MM
13774 if (info->push_p)
13775 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13776
13777 if (info->calls_p)
13778 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13779
4697a36c
MM
13780 if (info->gp_save_offset)
13781 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13782
13783 if (info->fp_save_offset)
13784 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13785
00b960c7
AH
13786 if (info->altivec_save_offset)
13787 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13788 info->altivec_save_offset);
13789
a3170dc6
AH
13790 if (info->spe_gp_save_offset)
13791 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13792 info->spe_gp_save_offset);
13793
00b960c7
AH
13794 if (info->vrsave_save_offset)
13795 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13796 info->vrsave_save_offset);
13797
4697a36c
MM
13798 if (info->lr_save_offset)
13799 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13800
13801 if (info->cr_save_offset)
13802 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13803
13804 if (info->varargs_save_offset)
13805 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13806
13807 if (info->total_size)
d1d0c603
JJ
13808 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13809 info->total_size);
4697a36c 13810
4697a36c 13811 if (info->vars_size)
d1d0c603
JJ
13812 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13813 info->vars_size);
4697a36c
MM
13814
13815 if (info->parm_size)
13816 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13817
13818 if (info->fixed_size)
13819 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13820
13821 if (info->gp_size)
13822 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13823
a3170dc6
AH
13824 if (info->spe_gp_size)
13825 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13826
4697a36c
MM
13827 if (info->fp_size)
13828 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13829
00b960c7
AH
13830 if (info->altivec_size)
13831 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13832
13833 if (info->vrsave_size)
13834 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13835
13836 if (info->altivec_padding_size)
13837 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13838 info->altivec_padding_size);
13839
a3170dc6
AH
13840 if (info->spe_padding_size)
13841 fprintf (stderr, "\tspe_padding_size = %5d\n",
13842 info->spe_padding_size);
13843
4697a36c
MM
13844 if (info->cr_size)
13845 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
13846
13847 if (info->save_size)
13848 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
13849
13850 if (info->reg_size != 4)
13851 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
13852
13853 fprintf (stderr, "\n");
9878760c 13854}
71f123ca
FS
13855
13856rtx
a2369ed3 13857rs6000_return_addr (int count, rtx frame)
71f123ca 13858{
a4f6c312
SS
13859 /* Currently we don't optimize very well between prolog and body
13860 code and for PIC code the code can be actually quite bad, so
13861 don't try to be too clever here. */
f1384257 13862 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
13863 {
13864 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
13865
13866 return
13867 gen_rtx_MEM
13868 (Pmode,
13869 memory_address
13870 (Pmode,
13871 plus_constant (copy_to_reg
13872 (gen_rtx_MEM (Pmode,
13873 memory_address (Pmode, frame))),
13874 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
13875 }
13876
8c29550d 13877 cfun->machine->ra_need_lr = 1;
9e2f7ec7 13878 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
71f123ca
FS
13879}
13880
5e1bf043
DJ
13881/* Say whether a function is a candidate for sibcall handling or not.
13882 We do not allow indirect calls to be optimized into sibling calls.
13883 Also, we can't do it if there are any vector parameters; there's
13884 nowhere to put the VRsave code so it works; note that functions with
13885 vector parameters are required to have a prototype, so the argument
13886 type info must be available here. (The tail recursion case can work
13887 with vector parameters, but there's no way to distinguish here.) */
4977bab6 13888static bool
a2369ed3 13889rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
13890{
13891 tree type;
4977bab6 13892 if (decl)
5e1bf043
DJ
13893 {
13894 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 13895 {
4977bab6 13896 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
13897 type; type = TREE_CHAIN (type))
13898 {
c15b529f 13899 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 13900 return false;
5e1bf043 13901 }
c4ad648e 13902 }
5e1bf043 13903 if (DEFAULT_ABI == ABI_DARWIN
4977bab6 13904 || (*targetm.binds_local_p) (decl))
2bcc50d0 13905 {
4977bab6 13906 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
13907
13908 if (!lookup_attribute ("longcall", attr_list)
13909 || lookup_attribute ("shortcall", attr_list))
4977bab6 13910 return true;
2bcc50d0 13911 }
5e1bf043 13912 }
4977bab6 13913 return false;
5e1bf043
DJ
13914}
13915
e7e64a25
AS
13916/* NULL if INSN insn is valid within a low-overhead loop.
13917 Otherwise return why doloop cannot be applied.
9419649c
DE
13918 PowerPC uses the COUNT register for branch on table instructions. */
13919
e7e64a25
AS
13920static const char *
13921rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
13922{
13923 if (CALL_P (insn))
e7e64a25 13924 return "Function call in the loop.";
9419649c
DE
13925
13926 if (JUMP_P (insn)
13927 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
13928 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 13929 return "Computed branch in the loop.";
9419649c 13930
e7e64a25 13931 return NULL;
9419649c
DE
13932}
13933
71f123ca 13934static int
863d938c 13935rs6000_ra_ever_killed (void)
71f123ca
FS
13936{
13937 rtx top;
5e1bf043
DJ
13938 rtx reg;
13939 rtx insn;
71f123ca 13940
dd292d0a 13941 if (current_function_is_thunk)
71f123ca 13942 return 0;
eb0424da 13943
36f7e964
AH
13944 /* regs_ever_live has LR marked as used if any sibcalls are present,
13945 but this should not force saving and restoring in the
13946 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 13947 clobbers LR, so that is inappropriate. */
36f7e964 13948
5e1bf043
DJ
13949 /* Also, the prologue can generate a store into LR that
13950 doesn't really count, like this:
36f7e964 13951
5e1bf043
DJ
13952 move LR->R0
13953 bcl to set PIC register
13954 move LR->R31
13955 move R0->LR
36f7e964
AH
13956
13957 When we're called from the epilogue, we need to avoid counting
13958 this as a store. */
f676971a 13959
71f123ca
FS
13960 push_topmost_sequence ();
13961 top = get_insns ();
13962 pop_topmost_sequence ();
5e1bf043 13963 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
71f123ca 13964
5e1bf043
DJ
13965 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
13966 {
13967 if (INSN_P (insn))
13968 {
022123e6
AM
13969 if (CALL_P (insn))
13970 {
13971 if (!SIBLING_CALL_P (insn))
13972 return 1;
13973 }
13974 else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
5e1bf043 13975 return 1;
36f7e964
AH
13976 else if (set_of (reg, insn) != NULL_RTX
13977 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
13978 return 1;
13979 }
13980 }
13981 return 0;
71f123ca 13982}
4697a36c 13983\f
8cd8f856
GK
13984/* Add a REG_MAYBE_DEAD note to the insn. */
13985static void
a2369ed3 13986rs6000_maybe_dead (rtx insn)
8cd8f856
GK
13987{
13988 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
13989 const0_rtx,
13990 REG_NOTES (insn));
13991}
13992
9ebbca7d 13993/* Emit instructions needed to load the TOC register.
c7ca610e 13994 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 13995 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
13996
13997void
a2369ed3 13998rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 13999{
027fbf43 14000 rtx dest, insn;
1db02437 14001 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14002
7f970b70 14003 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14004 {
7f970b70 14005 char buf[30];
e65a3857 14006 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14007
14008 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14009 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14010 if (flag_pic == 2)
14011 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14012 else
14013 got = rs6000_got_sym ();
14014 tmp1 = tmp2 = dest;
14015 if (!fromprolog)
14016 {
14017 tmp1 = gen_reg_rtx (Pmode);
14018 tmp2 = gen_reg_rtx (Pmode);
14019 }
e65a3857 14020 insn = emit_insn (gen_load_toc_v4_PIC_1 (lab));
7f970b70
AM
14021 if (fromprolog)
14022 rs6000_maybe_dead (insn);
e65a3857
DE
14023 insn = emit_move_insn (tmp1,
14024 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7f970b70
AM
14025 if (fromprolog)
14026 rs6000_maybe_dead (insn);
14027 insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14028 if (fromprolog)
14029 rs6000_maybe_dead (insn);
14030 insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
14031 if (fromprolog)
14032 rs6000_maybe_dead (insn);
14033 }
14034 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14035 {
e65a3857 14036 insn = emit_insn (gen_load_toc_v4_pic_si ());
027fbf43
JJ
14037 if (fromprolog)
14038 rs6000_maybe_dead (insn);
e65a3857
DE
14039 insn = emit_move_insn (dest,
14040 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
027fbf43
JJ
14041 if (fromprolog)
14042 rs6000_maybe_dead (insn);
20b71b17
AM
14043 }
14044 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14045 {
14046 char buf[30];
20b71b17
AM
14047 rtx temp0 = (fromprolog
14048 ? gen_rtx_REG (Pmode, 0)
14049 : gen_reg_rtx (Pmode));
20b71b17 14050
20b71b17
AM
14051 if (fromprolog)
14052 {
ccbca5e4 14053 rtx symF, symL;
38c1f2d7 14054
20b71b17
AM
14055 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14056 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14057
20b71b17
AM
14058 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14059 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14060
e65a3857
DE
14061 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (symF)));
14062 rs6000_maybe_dead (emit_move_insn (dest,
14063 gen_rtx_REG (Pmode,
14064 LINK_REGISTER_REGNUM)));
20b71b17
AM
14065 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
14066 symL,
14067 symF)));
9ebbca7d
GK
14068 }
14069 else
20b71b17
AM
14070 {
14071 rtx tocsym;
20b71b17
AM
14072
14073 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14074 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14075 emit_move_insn (dest,
14076 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
027fbf43 14077 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14078 }
027fbf43
JJ
14079 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
14080 if (fromprolog)
14081 rs6000_maybe_dead (insn);
9ebbca7d 14082 }
20b71b17
AM
14083 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14084 {
14085 /* This is for AIX code running in non-PIC ELF32. */
14086 char buf[30];
14087 rtx realsym;
14088 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14089 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14090
027fbf43
JJ
14091 insn = emit_insn (gen_elf_high (dest, realsym));
14092 if (fromprolog)
14093 rs6000_maybe_dead (insn);
14094 insn = emit_insn (gen_elf_low (dest, dest, realsym));
14095 if (fromprolog)
14096 rs6000_maybe_dead (insn);
20b71b17 14097 }
37409796 14098 else
9ebbca7d 14099 {
37409796 14100 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14101
9ebbca7d 14102 if (TARGET_32BIT)
027fbf43 14103 insn = emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14104 else
027fbf43
JJ
14105 insn = emit_insn (gen_load_toc_aix_di (dest));
14106 if (fromprolog)
14107 rs6000_maybe_dead (insn);
9ebbca7d
GK
14108 }
14109}
14110
d1d0c603
JJ
14111/* Emit instructions to restore the link register after determining where
14112 its value has been stored. */
14113
14114void
14115rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14116{
14117 rs6000_stack_t *info = rs6000_stack_info ();
14118 rtx operands[2];
14119
14120 operands[0] = source;
14121 operands[1] = scratch;
14122
14123 if (info->lr_save_p)
14124 {
14125 rtx frame_rtx = stack_pointer_rtx;
14126 HOST_WIDE_INT sp_offset = 0;
14127 rtx tmp;
14128
14129 if (frame_pointer_needed
14130 || current_function_calls_alloca
14131 || info->total_size > 32767)
14132 {
0be76840 14133 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14134 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14135 frame_rtx = operands[1];
14136 }
14137 else if (info->push_p)
14138 sp_offset = info->total_size;
14139
14140 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14141 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14142 emit_move_insn (tmp, operands[0]);
14143 }
14144 else
14145 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
14146}
14147
f103e34d
GK
14148static GTY(()) int set = -1;
14149
f676971a 14150int
863d938c 14151get_TOC_alias_set (void)
9ebbca7d 14152{
f103e34d
GK
14153 if (set == -1)
14154 set = new_alias_set ();
14155 return set;
f676971a 14156}
9ebbca7d 14157
c1207243 14158/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14159 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14160 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14161#if TARGET_ELF
3c9eb5f4 14162static int
f676971a 14163uses_TOC (void)
9ebbca7d 14164{
c4501e62 14165 rtx insn;
38c1f2d7 14166
c4501e62
JJ
14167 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14168 if (INSN_P (insn))
14169 {
14170 rtx pat = PATTERN (insn);
14171 int i;
9ebbca7d 14172
f676971a 14173 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14174 for (i = 0; i < XVECLEN (pat, 0); i++)
14175 {
14176 rtx sub = XVECEXP (pat, 0, i);
14177 if (GET_CODE (sub) == USE)
14178 {
14179 sub = XEXP (sub, 0);
14180 if (GET_CODE (sub) == UNSPEC
14181 && XINT (sub, 1) == UNSPEC_TOC)
14182 return 1;
14183 }
14184 }
14185 }
14186 return 0;
9ebbca7d 14187}
c954844a 14188#endif
38c1f2d7 14189
9ebbca7d 14190rtx
f676971a 14191create_TOC_reference (rtx symbol)
9ebbca7d 14192{
b69542f7
AM
14193 if (no_new_pseudos)
14194 regs_ever_live[TOC_REGISTER] = 1;
f676971a 14195 return gen_rtx_PLUS (Pmode,
a8a05998 14196 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14197 gen_rtx_CONST (Pmode,
14198 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14199 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14200}
38c1f2d7 14201
fc4767bb
JJ
14202/* If _Unwind_* has been called from within the same module,
14203 toc register is not guaranteed to be saved to 40(1) on function
14204 entry. Save it there in that case. */
c7ca610e 14205
9ebbca7d 14206void
863d938c 14207rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14208{
14209 rtx mem;
14210 rtx stack_top = gen_reg_rtx (Pmode);
14211 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14212 rtx opcode = gen_reg_rtx (SImode);
14213 rtx tocompare = gen_reg_rtx (SImode);
14214 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14215
8308679f 14216 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14217 emit_move_insn (stack_top, mem);
14218
8308679f
DE
14219 mem = gen_frame_mem (Pmode,
14220 gen_rtx_PLUS (Pmode, stack_top,
14221 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14222 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14223 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14224 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14225 : 0xE8410028, SImode));
9ebbca7d 14226
fc4767bb 14227 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14228 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14229 no_toc_save_needed);
9ebbca7d 14230
8308679f
DE
14231 mem = gen_frame_mem (Pmode,
14232 gen_rtx_PLUS (Pmode, stack_top,
14233 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14234 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14235 emit_label (no_toc_save_needed);
9ebbca7d 14236}
38c1f2d7 14237\f
0be76840
DE
14238/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14239 and the change to the stack pointer. */
ba4828e0 14240
9ebbca7d 14241static void
863d938c 14242rs6000_emit_stack_tie (void)
9ebbca7d 14243{
0be76840
DE
14244 rtx mem = gen_frame_mem (BLKmode,
14245 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14246
9ebbca7d
GK
14247 emit_insn (gen_stack_tie (mem));
14248}
38c1f2d7 14249
9ebbca7d
GK
14250/* Emit the correct code for allocating stack space, as insns.
14251 If COPY_R12, make sure a copy of the old frame is left in r12.
14252 The generated code may use hard register 0 as a temporary. */
14253
14254static void
a2369ed3 14255rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14256{
9ebbca7d
GK
14257 rtx insn;
14258 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14259 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14260 rtx todec = gen_int_mode (-size, Pmode);
14261
14262 if (INTVAL (todec) != -size)
14263 {
d4ee4d25 14264 warning (0, "stack frame too large");
61168ff1
RS
14265 emit_insn (gen_trap ());
14266 return;
14267 }
a157febd
GK
14268
14269 if (current_function_limit_stack)
14270 {
14271 if (REG_P (stack_limit_rtx)
f676971a 14272 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14273 && REGNO (stack_limit_rtx) <= 31)
14274 {
5b71a4e7 14275 emit_insn (TARGET_32BIT
9ebbca7d
GK
14276 ? gen_addsi3 (tmp_reg,
14277 stack_limit_rtx,
14278 GEN_INT (size))
14279 : gen_adddi3 (tmp_reg,
14280 stack_limit_rtx,
14281 GEN_INT (size)));
5b71a4e7 14282
9ebbca7d
GK
14283 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14284 const0_rtx));
a157febd
GK
14285 }
14286 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 14287 && TARGET_32BIT
f607bc57 14288 && DEFAULT_ABI == ABI_V4)
a157febd 14289 {
9ebbca7d 14290 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
14291 gen_rtx_PLUS (Pmode,
14292 stack_limit_rtx,
9ebbca7d 14293 GEN_INT (size)));
5b71a4e7 14294
9ebbca7d
GK
14295 emit_insn (gen_elf_high (tmp_reg, toload));
14296 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
14297 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14298 const0_rtx));
a157febd
GK
14299 }
14300 else
d4ee4d25 14301 warning (0, "stack limit expression is not supported");
a157febd
GK
14302 }
14303
9ebbca7d
GK
14304 if (copy_r12 || ! TARGET_UPDATE)
14305 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
14306
38c1f2d7
MM
14307 if (TARGET_UPDATE)
14308 {
9ebbca7d 14309 if (size > 32767)
38c1f2d7 14310 {
9ebbca7d 14311 /* Need a note here so that try_split doesn't get confused. */
9390387d 14312 if (get_last_insn () == NULL_RTX)
2e040219 14313 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
14314 insn = emit_move_insn (tmp_reg, todec);
14315 try_split (PATTERN (insn), insn, 0);
14316 todec = tmp_reg;
38c1f2d7 14317 }
5b71a4e7
DE
14318
14319 insn = emit_insn (TARGET_32BIT
14320 ? gen_movsi_update (stack_reg, stack_reg,
14321 todec, stack_reg)
c4ad648e 14322 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 14323 todec, stack_reg));
38c1f2d7
MM
14324 }
14325 else
14326 {
5b71a4e7
DE
14327 insn = emit_insn (TARGET_32BIT
14328 ? gen_addsi3 (stack_reg, stack_reg, todec)
14329 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
14330 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
14331 gen_rtx_REG (Pmode, 12));
14332 }
f676971a 14333
9ebbca7d 14334 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 14335 REG_NOTES (insn) =
9ebbca7d 14336 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 14337 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
14338 gen_rtx_PLUS (Pmode, stack_reg,
14339 GEN_INT (-size))),
14340 REG_NOTES (insn));
14341}
14342
a4f6c312
SS
14343/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14344 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14345 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
14346 deduce these equivalences by itself so it wasn't necessary to hold
14347 its hand so much. */
9ebbca7d
GK
14348
14349static void
f676971a 14350rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 14351 rtx reg2, rtx rreg)
9ebbca7d
GK
14352{
14353 rtx real, temp;
14354
e56c4463
JL
14355 /* copy_rtx will not make unique copies of registers, so we need to
14356 ensure we don't have unwanted sharing here. */
14357 if (reg == reg2)
14358 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14359
14360 if (reg == rreg)
14361 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14362
9ebbca7d
GK
14363 real = copy_rtx (PATTERN (insn));
14364
89e7058f
AH
14365 if (reg2 != NULL_RTX)
14366 real = replace_rtx (real, reg2, rreg);
f676971a
EC
14367
14368 real = replace_rtx (real, reg,
9ebbca7d
GK
14369 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14370 STACK_POINTER_REGNUM),
14371 GEN_INT (val)));
f676971a 14372
9ebbca7d
GK
14373 /* We expect that 'real' is either a SET or a PARALLEL containing
14374 SETs (and possibly other stuff). In a PARALLEL, all the SETs
14375 are important so they all have to be marked RTX_FRAME_RELATED_P. */
14376
14377 if (GET_CODE (real) == SET)
14378 {
14379 rtx set = real;
f676971a 14380
9ebbca7d
GK
14381 temp = simplify_rtx (SET_SRC (set));
14382 if (temp)
14383 SET_SRC (set) = temp;
14384 temp = simplify_rtx (SET_DEST (set));
14385 if (temp)
14386 SET_DEST (set) = temp;
14387 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 14388 {
9ebbca7d
GK
14389 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14390 if (temp)
14391 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 14392 }
38c1f2d7 14393 }
37409796 14394 else
9ebbca7d
GK
14395 {
14396 int i;
37409796
NS
14397
14398 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
14399 for (i = 0; i < XVECLEN (real, 0); i++)
14400 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14401 {
14402 rtx set = XVECEXP (real, 0, i);
f676971a 14403
9ebbca7d
GK
14404 temp = simplify_rtx (SET_SRC (set));
14405 if (temp)
14406 SET_SRC (set) = temp;
14407 temp = simplify_rtx (SET_DEST (set));
14408 if (temp)
14409 SET_DEST (set) = temp;
14410 if (GET_CODE (SET_DEST (set)) == MEM)
14411 {
14412 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14413 if (temp)
14414 XEXP (SET_DEST (set), 0) = temp;
14415 }
14416 RTX_FRAME_RELATED_P (set) = 1;
14417 }
14418 }
c19de7aa
AH
14419
14420 if (TARGET_SPE)
14421 real = spe_synthesize_frame_save (real);
14422
9ebbca7d
GK
14423 RTX_FRAME_RELATED_P (insn) = 1;
14424 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14425 real,
14426 REG_NOTES (insn));
38c1f2d7
MM
14427}
14428
c19de7aa
AH
14429/* Given an SPE frame note, return a PARALLEL of SETs with the
14430 original note, plus a synthetic register save. */
14431
14432static rtx
a2369ed3 14433spe_synthesize_frame_save (rtx real)
c19de7aa
AH
14434{
14435 rtx synth, offset, reg, real2;
14436
14437 if (GET_CODE (real) != SET
14438 || GET_MODE (SET_SRC (real)) != V2SImode)
14439 return real;
14440
14441 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
14442 frame related note. The parallel contains a set of the register
41f3a930 14443 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
14444 This is so we can differentiate between 64-bit and 32-bit saves.
14445 Words cannot describe this nastiness. */
14446
37409796
NS
14447 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14448 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14449 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
14450
14451 /* Transform:
14452 (set (mem (plus (reg x) (const y)))
14453 (reg z))
14454 into:
14455 (set (mem (plus (reg x) (const y+4)))
41f3a930 14456 (reg z+1200))
c19de7aa
AH
14457 */
14458
14459 real2 = copy_rtx (real);
14460 PUT_MODE (SET_DEST (real2), SImode);
14461 reg = SET_SRC (real2);
14462 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14463 synth = copy_rtx (real2);
14464
14465 if (BYTES_BIG_ENDIAN)
14466 {
14467 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14468 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14469 }
14470
14471 reg = SET_SRC (synth);
41f3a930 14472
c19de7aa 14473 synth = replace_rtx (synth, reg,
41f3a930 14474 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
14475
14476 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14477 synth = replace_rtx (synth, offset,
14478 GEN_INT (INTVAL (offset)
14479 + (BYTES_BIG_ENDIAN ? 0 : 4)));
14480
14481 RTX_FRAME_RELATED_P (synth) = 1;
14482 RTX_FRAME_RELATED_P (real2) = 1;
14483 if (BYTES_BIG_ENDIAN)
14484 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14485 else
14486 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14487
14488 return real;
14489}
14490
00b960c7
AH
14491/* Returns an insn that has a vrsave set operation with the
14492 appropriate CLOBBERs. */
14493
14494static rtx
a2369ed3 14495generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
14496{
14497 int nclobs, i;
14498 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 14499 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 14500
a004eb82
AH
14501 clobs[0]
14502 = gen_rtx_SET (VOIDmode,
14503 vrsave,
14504 gen_rtx_UNSPEC_VOLATILE (SImode,
14505 gen_rtvec (2, reg, vrsave),
3aca4bff 14506 UNSPECV_SET_VRSAVE));
00b960c7
AH
14507
14508 nclobs = 1;
14509
9aa86737
AH
14510 /* We need to clobber the registers in the mask so the scheduler
14511 does not move sets to VRSAVE before sets of AltiVec registers.
14512
14513 However, if the function receives nonlocal gotos, reload will set
14514 all call saved registers live. We will end up with:
14515
14516 (set (reg 999) (mem))
14517 (parallel [ (set (reg vrsave) (unspec blah))
14518 (clobber (reg 999))])
14519
14520 The clobber will cause the store into reg 999 to be dead, and
14521 flow will attempt to delete an epilogue insn. In this case, we
14522 need an unspec use/set of the register. */
00b960c7
AH
14523
14524 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 14525 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
14526 {
14527 if (!epiloguep || call_used_regs [i])
14528 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14529 gen_rtx_REG (V4SImode, i));
14530 else
14531 {
14532 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
14533
14534 clobs[nclobs++]
a004eb82
AH
14535 = gen_rtx_SET (VOIDmode,
14536 reg,
14537 gen_rtx_UNSPEC (V4SImode,
14538 gen_rtvec (1, reg), 27));
9aa86737
AH
14539 }
14540 }
00b960c7
AH
14541
14542 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14543
14544 for (i = 0; i < nclobs; ++i)
14545 XVECEXP (insn, 0, i) = clobs[i];
14546
14547 return insn;
14548}
14549
89e7058f
AH
14550/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14551 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
14552
14553static void
f676971a 14554emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 14555 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
14556{
14557 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14558 rtx replacea, replaceb;
14559
14560 int_rtx = GEN_INT (offset);
14561
14562 /* Some cases that need register indexed addressing. */
14563 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 14564 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
14565 || (TARGET_SPE_ABI
14566 && SPE_VECTOR_MODE (mode)
14567 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
14568 {
14569 /* Whomever calls us must make sure r11 is available in the
c4ad648e 14570 flow path of instructions in the prologue. */
89e7058f
AH
14571 offset_rtx = gen_rtx_REG (Pmode, 11);
14572 emit_move_insn (offset_rtx, int_rtx);
14573
14574 replacea = offset_rtx;
14575 replaceb = int_rtx;
14576 }
14577 else
14578 {
14579 offset_rtx = int_rtx;
14580 replacea = NULL_RTX;
14581 replaceb = NULL_RTX;
14582 }
14583
14584 reg = gen_rtx_REG (mode, regno);
14585 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 14586 mem = gen_frame_mem (mode, addr);
89e7058f
AH
14587
14588 insn = emit_move_insn (mem, reg);
14589
14590 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14591}
14592
a3170dc6
AH
14593/* Emit an offset memory reference suitable for a frame store, while
14594 converting to a valid addressing mode. */
14595
14596static rtx
a2369ed3 14597gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
14598{
14599 rtx int_rtx, offset_rtx;
14600
14601 int_rtx = GEN_INT (offset);
14602
4d4cbc0e
AH
14603 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14604 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
14605 {
14606 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14607 emit_move_insn (offset_rtx, int_rtx);
14608 }
14609 else
14610 offset_rtx = int_rtx;
14611
0be76840 14612 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
14613}
14614
6d0a8091
DJ
14615/* Look for user-defined global regs. We should not save and restore these,
14616 and cannot use stmw/lmw if there are any in its range. */
14617
14618static bool
14619no_global_regs_above (int first_greg)
14620{
14621 int i;
14622 for (i = 0; i < 32 - first_greg; i++)
14623 if (global_regs[first_greg + i])
14624 return false;
14625 return true;
14626}
14627
699c914a
MS
14628#ifndef TARGET_FIX_AND_CONTINUE
14629#define TARGET_FIX_AND_CONTINUE 0
14630#endif
14631
9ebbca7d
GK
14632/* Emit function prologue as insns. */
14633
9878760c 14634void
863d938c 14635rs6000_emit_prologue (void)
9878760c 14636{
4697a36c 14637 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 14638 enum machine_mode reg_mode = Pmode;
327e5343 14639 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14640 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14641 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14642 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 14643 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
14644 rtx insn;
14645 int saving_FPRs_inline;
14646 int using_store_multiple;
14647 HOST_WIDE_INT sp_offset = 0;
f676971a 14648
699c914a
MS
14649 if (TARGET_FIX_AND_CONTINUE)
14650 {
14651 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 14652 address by modifying the first 5 instructions of the function
699c914a
MS
14653 to branch to the overriding function. This is necessary to
14654 permit function pointers that point to the old function to
14655 actually forward to the new function. */
14656 emit_insn (gen_nop ());
14657 emit_insn (gen_nop ());
de2ab0ca 14658 emit_insn (gen_nop ());
699c914a
MS
14659 emit_insn (gen_nop ());
14660 emit_insn (gen_nop ());
14661 }
14662
14663 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14664 {
14665 reg_mode = V2SImode;
14666 reg_size = 8;
14667 }
a3170dc6 14668
9ebbca7d 14669 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14670 && (!TARGET_SPE_ABI
14671 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14672 && info->first_gp_reg_save < 31
14673 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14674 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 14675 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 14676 || current_function_calls_eh_return
8c29550d 14677 || cfun->machine->ra_need_lr);
9ebbca7d
GK
14678
14679 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
14680 if (! WORLD_SAVE_P (info)
14681 && info->push_p
acd0b319
AM
14682 && (DEFAULT_ABI == ABI_V4
14683 || current_function_calls_eh_return))
9ebbca7d
GK
14684 {
14685 if (info->total_size < 32767)
14686 sp_offset = info->total_size;
14687 else
14688 frame_reg_rtx = frame_ptr_rtx;
f676971a 14689 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
14690 (frame_reg_rtx != sp_reg_rtx
14691 && (info->cr_save_p
14692 || info->lr_save_p
14693 || info->first_fp_reg_save < 64
14694 || info->first_gp_reg_save < 32
14695 )));
14696 if (frame_reg_rtx != sp_reg_rtx)
14697 rs6000_emit_stack_tie ();
14698 }
14699
d62294f5 14700 /* Handle world saves specially here. */
f57fe068 14701 if (WORLD_SAVE_P (info))
d62294f5
FJ
14702 {
14703 int i, j, sz;
14704 rtx treg;
14705 rtvec p;
22fa69da 14706 rtx reg0;
d62294f5
FJ
14707
14708 /* save_world expects lr in r0. */
22fa69da 14709 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 14710 if (info->lr_save_p)
c4ad648e 14711 {
22fa69da 14712 insn = emit_move_insn (reg0,
c4ad648e
AM
14713 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14714 RTX_FRAME_RELATED_P (insn) = 1;
14715 }
d62294f5
FJ
14716
14717 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 14718 assumptions about the offsets of various bits of the stack
992d08b1 14719 frame. */
37409796
NS
14720 gcc_assert (info->gp_save_offset == -220
14721 && info->fp_save_offset == -144
14722 && info->lr_save_offset == 8
14723 && info->cr_save_offset == 4
14724 && info->push_p
14725 && info->lr_save_p
14726 && (!current_function_calls_eh_return
14727 || info->ehrd_offset == -432)
14728 && info->vrsave_save_offset == -224
22fa69da 14729 && info->altivec_save_offset == -416);
d62294f5
FJ
14730
14731 treg = gen_rtx_REG (SImode, 11);
14732 emit_move_insn (treg, GEN_INT (-info->total_size));
14733
14734 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 14735 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
14736
14737 /* Preserve CR2 for save_world prologues */
22fa69da 14738 sz = 5;
d62294f5
FJ
14739 sz += 32 - info->first_gp_reg_save;
14740 sz += 64 - info->first_fp_reg_save;
14741 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14742 p = rtvec_alloc (sz);
14743 j = 0;
14744 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 14745 gen_rtx_REG (SImode,
c4ad648e 14746 LINK_REGISTER_REGNUM));
d62294f5 14747 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14748 gen_rtx_SYMBOL_REF (Pmode,
14749 "*save_world"));
d62294f5 14750 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
14751 properly. */
14752 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14753 {
14754 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14755 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14756 GEN_INT (info->fp_save_offset
14757 + sp_offset + 8 * i));
0be76840 14758 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
14759
14760 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14761 }
d62294f5 14762 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14763 {
14764 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14765 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14766 GEN_INT (info->altivec_save_offset
14767 + sp_offset + 16 * i));
0be76840 14768 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
14769
14770 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14771 }
d62294f5 14772 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14773 {
14774 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14775 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14776 GEN_INT (info->gp_save_offset
14777 + sp_offset + reg_size * i));
0be76840 14778 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14779
14780 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14781 }
14782
14783 {
14784 /* CR register traditionally saved as CR2. */
14785 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14786 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14787 GEN_INT (info->cr_save_offset
14788 + sp_offset));
0be76840 14789 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14790
14791 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14792 }
22fa69da
GK
14793 /* Explain about use of R0. */
14794 if (info->lr_save_p)
14795 {
14796 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14797 GEN_INT (info->lr_save_offset
14798 + sp_offset));
14799 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 14800
22fa69da
GK
14801 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
14802 }
14803 /* Explain what happens to the stack pointer. */
14804 {
14805 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
14806 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
14807 }
d62294f5
FJ
14808
14809 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14810 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
14811 treg, GEN_INT (-info->total_size));
14812 sp_offset = info->total_size;
d62294f5
FJ
14813 }
14814
9ebbca7d 14815 /* If we use the link register, get it into r0. */
f57fe068 14816 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8
GK
14817 {
14818 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
14819 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
14820 RTX_FRAME_RELATED_P (insn) = 1;
14821 }
9ebbca7d
GK
14822
14823 /* If we need to save CR, put it into r12. */
f57fe068 14824 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 14825 {
f8a57be8 14826 rtx set;
f676971a 14827
9ebbca7d 14828 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
14829 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14830 RTX_FRAME_RELATED_P (insn) = 1;
14831 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14832 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14833 But that's OK. All we have to do is specify that _one_ condition
14834 code register is saved in this stack slot. The thrower's epilogue
14835 will then restore all the call-saved registers.
14836 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14837 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14838 gen_rtx_REG (SImode, CR2_REGNO));
14839 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14840 set,
14841 REG_NOTES (insn));
9ebbca7d
GK
14842 }
14843
a4f6c312
SS
14844 /* Do any required saving of fpr's. If only one or two to save, do
14845 it ourselves. Otherwise, call function. */
f57fe068 14846 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
14847 {
14848 int i;
14849 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 14850 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d 14851 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
14852 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
14853 info->first_fp_reg_save + i,
14854 info->fp_save_offset + sp_offset + 8 * i,
14855 info->total_size);
9ebbca7d 14856 }
f57fe068 14857 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
14858 {
14859 int i;
14860 char rname[30];
520a57c8 14861 const char *alloc_rname;
9ebbca7d
GK
14862 rtvec p;
14863 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
14864
14865 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
14866 gen_rtx_REG (Pmode,
9ebbca7d
GK
14867 LINK_REGISTER_REGNUM));
14868 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
14869 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 14870 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
14871 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
14872 gen_rtx_SYMBOL_REF (Pmode,
14873 alloc_rname));
14874 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14875 {
14876 rtx addr, reg, mem;
14877 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14878 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 14879 GEN_INT (info->fp_save_offset
9ebbca7d 14880 + sp_offset + 8*i));
0be76840 14881 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
14882
14883 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
14884 }
14885 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14886 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
14887 NULL_RTX, NULL_RTX);
14888 }
b6c9286a 14889
9ebbca7d
GK
14890 /* Save GPRs. This is done as a PARALLEL if we are using
14891 the store-multiple instructions. */
f57fe068 14892 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 14893 {
308c142a 14894 rtvec p;
9ebbca7d
GK
14895 int i;
14896 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
14897 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
14898 {
14899 rtx addr, reg, mem;
14900 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
14901 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14902 GEN_INT (info->gp_save_offset
14903 + sp_offset
9ebbca7d 14904 + reg_size * i));
0be76840 14905 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
14906
14907 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
14908 }
14909 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 14910 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 14911 NULL_RTX, NULL_RTX);
b6c9286a 14912 }
f57fe068 14913 else if (!WORLD_SAVE_P (info))
b6c9286a 14914 {
9ebbca7d
GK
14915 int i;
14916 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
14917 if ((regs_ever_live[info->first_gp_reg_save + i]
14918 && (!call_used_regs[info->first_gp_reg_save + i]
14919 || (i + info->first_gp_reg_save
b4db40bf
JJ
14920 == RS6000_PIC_OFFSET_TABLE_REGNUM
14921 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 14922 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14923 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 14924 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
a3170dc6
AH
14925 {
14926 rtx addr, reg, mem;
14927 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14928
c19de7aa 14929 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
14930 {
14931 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
14932 rtx b;
14933
14934 if (!SPE_CONST_OFFSET_OK (offset))
14935 {
14936 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14937 emit_move_insn (b, GEN_INT (offset));
14938 }
14939 else
14940 b = GEN_INT (offset);
14941
14942 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
0be76840 14943 mem = gen_frame_mem (V2SImode, addr);
a3170dc6
AH
14944 insn = emit_move_insn (mem, reg);
14945
14946 if (GET_CODE (b) == CONST_INT)
14947 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14948 NULL_RTX, NULL_RTX);
14949 else
14950 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14951 b, GEN_INT (offset));
14952 }
14953 else
14954 {
f676971a
EC
14955 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14956 GEN_INT (info->gp_save_offset
14957 + sp_offset
a3170dc6 14958 + reg_size * i));
0be76840 14959 mem = gen_frame_mem (reg_mode, addr);
a3170dc6
AH
14960
14961 insn = emit_move_insn (mem, reg);
f676971a 14962 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
a3170dc6
AH
14963 NULL_RTX, NULL_RTX);
14964 }
14965 }
9ebbca7d
GK
14966 }
14967
83720594
RH
14968 /* ??? There's no need to emit actual instructions here, but it's the
14969 easiest way to get the frame unwind information emitted. */
22fa69da 14970 if (current_function_calls_eh_return)
83720594 14971 {
78e1b90d
DE
14972 unsigned int i, regno;
14973
fc4767bb
JJ
14974 /* In AIX ABI we need to pretend we save r2 here. */
14975 if (TARGET_AIX)
14976 {
14977 rtx addr, reg, mem;
14978
14979 reg = gen_rtx_REG (reg_mode, 2);
14980 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14981 GEN_INT (sp_offset + 5 * reg_size));
0be76840 14982 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
14983
14984 insn = emit_move_insn (mem, reg);
f676971a 14985 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
14986 NULL_RTX, NULL_RTX);
14987 PATTERN (insn) = gen_blockage ();
14988 }
14989
83720594
RH
14990 for (i = 0; ; ++i)
14991 {
83720594
RH
14992 regno = EH_RETURN_DATA_REGNO (i);
14993 if (regno == INVALID_REGNUM)
14994 break;
14995
89e7058f
AH
14996 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
14997 info->ehrd_offset + sp_offset
14998 + reg_size * (int) i,
14999 info->total_size);
83720594
RH
15000 }
15001 }
15002
9ebbca7d 15003 /* Save lr if we used it. */
f57fe068 15004 if (!WORLD_SAVE_P (info) && info->lr_save_p)
9ebbca7d
GK
15005 {
15006 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15007 GEN_INT (info->lr_save_offset + sp_offset));
15008 rtx reg = gen_rtx_REG (Pmode, 0);
15009 rtx mem = gen_rtx_MEM (Pmode, addr);
0be76840 15010 /* This should not be of frame_alias_set, because of
9ebbca7d 15011 __builtin_return_address. */
f676971a 15012
9ebbca7d 15013 insn = emit_move_insn (mem, reg);
f676971a 15014 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15015 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15016 }
15017
15018 /* Save CR if we use any that must be preserved. */
f57fe068 15019 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15020 {
15021 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15022 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15023 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15024 /* See the large comment above about why CR2_REGNO is used. */
15025 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15026
9ebbca7d
GK
15027 /* If r12 was used to hold the original sp, copy cr into r0 now
15028 that it's free. */
15029 if (REGNO (frame_reg_rtx) == 12)
15030 {
f8a57be8
GK
15031 rtx set;
15032
9ebbca7d 15033 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15034 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15035 RTX_FRAME_RELATED_P (insn) = 1;
15036 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15037 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15038 set,
15039 REG_NOTES (insn));
f676971a 15040
9ebbca7d
GK
15041 }
15042 insn = emit_move_insn (mem, cr_save_rtx);
15043
f676971a 15044 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15045 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15046 }
15047
f676971a 15048 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15049 for which it was done previously. */
f57fe068 15050 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15051 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5
EC
15052 {
15053 rs6000_emit_allocate_stack (info->total_size, FALSE);
15054 sp_offset = info->total_size;
15055 }
9ebbca7d
GK
15056
15057 /* Set frame pointer, if needed. */
15058 if (frame_pointer_needed)
15059 {
7d5175e1 15060 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15061 sp_reg_rtx);
15062 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15063 }
9878760c 15064
2b2c2fe5
EC
15065 /* Save AltiVec registers if needed. Save here because the red zone does
15066 not include AltiVec registers. */
15067 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15068 {
15069 int i;
15070
15071 /* There should be a non inline version of this, for when we
15072 are saving lots of vector registers. */
15073 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15074 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15075 {
15076 rtx areg, savereg, mem;
15077 int offset;
15078
15079 offset = info->altivec_save_offset + sp_offset
15080 + 16 * (i - info->first_altivec_reg_save);
15081
15082 savereg = gen_rtx_REG (V4SImode, i);
15083
15084 areg = gen_rtx_REG (Pmode, 0);
15085 emit_move_insn (areg, GEN_INT (offset));
15086
15087 /* AltiVec addressing mode is [reg+reg]. */
15088 mem = gen_frame_mem (V4SImode,
15089 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15090
15091 insn = emit_move_insn (mem, savereg);
15092
15093 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15094 areg, GEN_INT (offset));
15095 }
15096 }
15097
15098 /* VRSAVE is a bit vector representing which AltiVec registers
15099 are used. The OS uses this to determine which vector
15100 registers to save on a context switch. We need to save
15101 VRSAVE on the stack frame, add whatever AltiVec registers we
15102 used in this function, and do the corresponding magic in the
15103 epilogue. */
15104
15105 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15106 && info->vrsave_mask != 0)
15107 {
15108 rtx reg, mem, vrsave;
15109 int offset;
15110
15111 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15112 as frame_reg_rtx and r11 as the static chain pointer for
15113 nested functions. */
15114 reg = gen_rtx_REG (SImode, 0);
15115 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15116 if (TARGET_MACHO)
15117 emit_insn (gen_get_vrsave_internal (reg));
15118 else
15119 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15120
15121 if (!WORLD_SAVE_P (info))
15122 {
15123 /* Save VRSAVE. */
15124 offset = info->vrsave_save_offset + sp_offset;
15125 mem = gen_frame_mem (SImode,
15126 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15127 GEN_INT (offset)));
15128 insn = emit_move_insn (mem, reg);
15129 }
15130
15131 /* Include the registers in the mask. */
15132 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15133
15134 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15135 }
15136
1db02437 15137 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15138 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15139 || (DEFAULT_ABI == ABI_V4
15140 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
1db02437 15141 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
c4ad648e
AM
15142 {
15143 /* If emit_load_toc_table will use the link register, we need to save
15144 it. We use R12 for this purpose because emit_load_toc_table
15145 can use register 0. This allows us to use a plain 'blr' to return
15146 from the procedure more often. */
15147 int save_LR_around_toc_setup = (TARGET_ELF
15148 && DEFAULT_ABI != ABI_AIX
15149 && flag_pic
15150 && ! info->lr_save_p
15151 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15152 if (save_LR_around_toc_setup)
15153 {
15154 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
f8a57be8 15155
c4ad648e
AM
15156 insn = emit_move_insn (frame_ptr_rtx, lr);
15157 rs6000_maybe_dead (insn);
15158 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15159
c4ad648e 15160 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15161
c4ad648e
AM
15162 insn = emit_move_insn (lr, frame_ptr_rtx);
15163 rs6000_maybe_dead (insn);
15164 RTX_FRAME_RELATED_P (insn) = 1;
15165 }
15166 else
15167 rs6000_emit_load_toc_table (TRUE);
15168 }
ee890fe2 15169
fcce224d 15170#if TARGET_MACHO
ee890fe2
SS
15171 if (DEFAULT_ABI == ABI_DARWIN
15172 && flag_pic && current_function_uses_pic_offset_table)
15173 {
f8a57be8 15174 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11abc112 15175 rtx src = machopic_function_base_sym ();
ee890fe2 15176
6d0a8091
DJ
15177 /* Save and restore LR locally around this call (in R0). */
15178 if (!info->lr_save_p)
15179 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));
15180
316fbf19 15181 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (src)));
ee890fe2 15182
f676971a 15183 insn = emit_move_insn (gen_rtx_REG (Pmode,
f8a57be8
GK
15184 RS6000_PIC_OFFSET_TABLE_REGNUM),
15185 lr);
15186 rs6000_maybe_dead (insn);
6d0a8091
DJ
15187
15188 if (!info->lr_save_p)
15189 rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
ee890fe2 15190 }
fcce224d 15191#endif
9ebbca7d
GK
15192}
15193
9ebbca7d 15194/* Write function prologue. */
a4f6c312 15195
08c148a8 15196static void
f676971a 15197rs6000_output_function_prologue (FILE *file,
a2369ed3 15198 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15199{
15200 rs6000_stack_t *info = rs6000_stack_info ();
15201
4697a36c
MM
15202 if (TARGET_DEBUG_STACK)
15203 debug_stack_info (info);
9878760c 15204
a4f6c312
SS
15205 /* Write .extern for any function we will call to save and restore
15206 fp values. */
15207 if (info->first_fp_reg_save < 64
15208 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15209 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15210 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15211 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15212 RESTORE_FP_SUFFIX);
9878760c 15213
c764f757
RK
15214 /* Write .extern for AIX common mode routines, if needed. */
15215 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15216 {
f6709c70
JW
15217 fputs ("\t.extern __mulh\n", file);
15218 fputs ("\t.extern __mull\n", file);
15219 fputs ("\t.extern __divss\n", file);
15220 fputs ("\t.extern __divus\n", file);
15221 fputs ("\t.extern __quoss\n", file);
15222 fputs ("\t.extern __quous\n", file);
c764f757
RK
15223 common_mode_defined = 1;
15224 }
9878760c 15225
9ebbca7d 15226 if (! HAVE_prologue)
979721f8 15227 {
9ebbca7d 15228 start_sequence ();
9dda4cc8 15229
a4f6c312
SS
15230 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
15231 the "toplevel" insn chain. */
2e040219 15232 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15233 rs6000_emit_prologue ();
2e040219 15234 emit_note (NOTE_INSN_DELETED);
178c3eff 15235
a3c9585f 15236 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15237 {
15238 rtx insn;
15239 unsigned addr = 0;
15240 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15241 {
15242 INSN_ADDRESSES_NEW (insn, addr);
15243 addr += 4;
15244 }
15245 }
9dda4cc8 15246
9ebbca7d 15247 if (TARGET_DEBUG_STACK)
a4f6c312 15248 debug_rtx_list (get_insns (), 100);
c9d691e9 15249 final (get_insns (), file, FALSE);
9ebbca7d 15250 end_sequence ();
979721f8
MM
15251 }
15252
9ebbca7d
GK
15253 rs6000_pic_labelno++;
15254}
f676971a 15255
9ebbca7d 15256/* Emit function epilogue as insns.
9878760c 15257
9ebbca7d
GK
15258 At present, dwarf2out_frame_debug_expr doesn't understand
15259 register restores, so we don't bother setting RTX_FRAME_RELATED_P
15260 anywhere in the epilogue. Most of the insns below would in any case
15261 need special notes to explain where r11 is in relation to the stack. */
9878760c 15262
9ebbca7d 15263void
a2369ed3 15264rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
15265{
15266 rs6000_stack_t *info;
15267 int restoring_FPRs_inline;
15268 int using_load_multiple;
d296e02e 15269 int using_mtcr_multiple;
9ebbca7d
GK
15270 int use_backchain_to_restore_sp;
15271 int sp_offset = 0;
15272 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
15273 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 15274 enum machine_mode reg_mode = Pmode;
327e5343 15275 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15276 int i;
15277
c19de7aa
AH
15278 info = rs6000_stack_info ();
15279
15280 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15281 {
15282 reg_mode = V2SImode;
15283 reg_size = 8;
15284 }
15285
9ebbca7d 15286 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15287 && (!TARGET_SPE_ABI
15288 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15289 && info->first_gp_reg_save < 31
15290 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15291 restoring_FPRs_inline = (sibcall
83720594 15292 || current_function_calls_eh_return
9ebbca7d
GK
15293 || info->first_fp_reg_save == 64
15294 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 15295 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
15296 || current_function_calls_alloca
15297 || info->total_size > 32767);
d296e02e 15298 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
15299 || rs6000_cpu == PROCESSOR_PPC603
15300 || rs6000_cpu == PROCESSOR_PPC750
15301 || optimize_size);
15302
f57fe068 15303 if (WORLD_SAVE_P (info))
d62294f5
FJ
15304 {
15305 int i, j;
15306 char rname[30];
15307 const char *alloc_rname;
15308 rtvec p;
15309
15310 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
15311 stack slot (which is not likely to be our caller.)
15312 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
15313 rest_world is similar, except any R10 parameter is ignored.
15314 The exception-handling stuff that was here in 2.95 is no
15315 longer necessary. */
d62294f5
FJ
15316
15317 p = rtvec_alloc (9
15318 + 1
f676971a 15319 + 32 - info->first_gp_reg_save
c4ad648e
AM
15320 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
15321 + 63 + 1 - info->first_fp_reg_save);
d62294f5 15322
c4ad648e
AM
15323 strcpy (rname, ((current_function_calls_eh_return) ?
15324 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
15325 alloc_rname = ggc_strdup (rname);
15326
15327 j = 0;
15328 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
15329 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
15330 gen_rtx_REG (Pmode,
15331 LINK_REGISTER_REGNUM));
d62294f5 15332 RTVEC_ELT (p, j++)
c4ad648e 15333 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 15334 /* The instruction pattern requires a clobber here;
c4ad648e 15335 it is shared with the restVEC helper. */
d62294f5 15336 RTVEC_ELT (p, j++)
c4ad648e 15337 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
15338
15339 {
c4ad648e
AM
15340 /* CR register traditionally saved as CR2. */
15341 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15342 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15343 GEN_INT (info->cr_save_offset));
0be76840 15344 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15345
15346 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
15347 }
15348
15349 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15350 {
15351 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15352 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15353 GEN_INT (info->gp_save_offset
15354 + reg_size * i));
0be76840 15355 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15356
15357 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15358 }
d62294f5 15359 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15360 {
15361 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15362 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15363 GEN_INT (info->altivec_save_offset
15364 + 16 * i));
0be76840 15365 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15366
15367 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15368 }
d62294f5 15369 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
15370 {
15371 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15372 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15373 GEN_INT (info->fp_save_offset
15374 + 8 * i));
0be76840 15375 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15376
15377 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15378 }
d62294f5 15379 RTVEC_ELT (p, j++)
c4ad648e 15380 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 15381 RTVEC_ELT (p, j++)
c4ad648e 15382 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 15383 RTVEC_ELT (p, j++)
c4ad648e 15384 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 15385 RTVEC_ELT (p, j++)
c4ad648e 15386 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 15387 RTVEC_ELT (p, j++)
c4ad648e 15388 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
15389 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15390
15391 return;
15392 }
15393
2b2c2fe5
EC
15394 /* Set sp_offset based on the stack push from the prologue. */
15395 if ((DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return)
15396 && info->total_size < 32767)
15397 sp_offset = info->total_size;
f676971a 15398
9aa86737
AH
15399 /* Restore AltiVec registers if needed. */
15400 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15401 {
15402 int i;
15403
15404 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15405 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15406 {
15407 rtx addr, areg, mem;
15408
15409 areg = gen_rtx_REG (Pmode, 0);
15410 emit_move_insn
15411 (areg, GEN_INT (info->altivec_save_offset
15412 + sp_offset
15413 + 16 * (i - info->first_altivec_reg_save)));
15414
15415 /* AltiVec addressing mode is [reg+reg]. */
15416 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 15417 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
15418
15419 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15420 }
15421 }
15422
15423 /* Restore VRSAVE if needed. */
44688022 15424 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 15425 && info->vrsave_mask != 0)
9aa86737
AH
15426 {
15427 rtx addr, mem, reg;
15428
15429 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15430 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 15431 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
15432 reg = gen_rtx_REG (SImode, 12);
15433 emit_move_insn (reg, mem);
15434
15435 emit_insn (generate_set_vrsave (reg, info, 1));
15436 }
15437
2b2c2fe5
EC
15438 sp_offset = 0;
15439
15440 /* If we have a frame pointer, a call to alloca, or a large stack
15441 frame, restore the old stack pointer using the backchain. Otherwise,
15442 we know what size to update it with. */
15443 if (use_backchain_to_restore_sp)
15444 {
15445 /* Under V.4, don't reset the stack pointer until after we're done
15446 loading the saved registers. */
15447 if (DEFAULT_ABI == ABI_V4)
15448 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
15449
15450 emit_move_insn (frame_reg_rtx,
15451 gen_rtx_MEM (Pmode, sp_reg_rtx));
15452 }
15453 else if (info->push_p)
15454 {
15455 if (DEFAULT_ABI == ABI_V4
15456 || current_function_calls_eh_return)
15457 sp_offset = info->total_size;
15458 else
15459 {
15460 emit_insn (TARGET_32BIT
15461 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15462 GEN_INT (info->total_size))
15463 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15464 GEN_INT (info->total_size)));
15465 }
15466 }
15467
9ebbca7d
GK
15468 /* Get the old lr if we saved it. */
15469 if (info->lr_save_p)
b6c9286a 15470 {
a3170dc6
AH
15471 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15472 info->lr_save_offset + sp_offset);
ba4828e0 15473
9ebbca7d 15474 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 15475 }
f676971a 15476
9ebbca7d
GK
15477 /* Get the old cr if we saved it. */
15478 if (info->cr_save_p)
15479 {
15480 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15481 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15482 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 15483
9ebbca7d
GK
15484 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15485 }
f676971a 15486
9ebbca7d 15487 /* Set LR here to try to overlap restores below. */
4697a36c 15488 if (info->lr_save_p)
9ebbca7d
GK
15489 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
15490 gen_rtx_REG (Pmode, 0));
f676971a 15491
83720594
RH
15492 /* Load exception handler data registers, if needed. */
15493 if (current_function_calls_eh_return)
15494 {
78e1b90d
DE
15495 unsigned int i, regno;
15496
fc4767bb
JJ
15497 if (TARGET_AIX)
15498 {
15499 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15500 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15501 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15502
15503 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15504 }
15505
83720594
RH
15506 for (i = 0; ; ++i)
15507 {
a3170dc6 15508 rtx mem;
83720594
RH
15509
15510 regno = EH_RETURN_DATA_REGNO (i);
15511 if (regno == INVALID_REGNUM)
15512 break;
15513
a3170dc6
AH
15514 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15515 info->ehrd_offset + sp_offset
15516 + reg_size * (int) i);
83720594
RH
15517
15518 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15519 }
15520 }
f676971a 15521
9ebbca7d
GK
15522 /* Restore GPRs. This is done as a PARALLEL if we are using
15523 the load-multiple instructions. */
15524 if (using_load_multiple)
979721f8 15525 {
9ebbca7d
GK
15526 rtvec p;
15527 p = rtvec_alloc (32 - info->first_gp_reg_save);
15528 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 15529 {
f676971a
EC
15530 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15531 GEN_INT (info->gp_save_offset
15532 + sp_offset
9ebbca7d 15533 + reg_size * i));
0be76840 15534 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 15535
f676971a 15536 RTVEC_ELT (p, i) =
9ebbca7d
GK
15537 gen_rtx_SET (VOIDmode,
15538 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15539 mem);
979721f8 15540 }
9ebbca7d 15541 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 15542 }
9ebbca7d
GK
15543 else
15544 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
6d0a8091
DJ
15545 if ((regs_ever_live[info->first_gp_reg_save + i]
15546 && (!call_used_regs[info->first_gp_reg_save + i]
15547 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
b4db40bf 15548 && TARGET_TOC && TARGET_MINIMAL_TOC)))
6d0a8091 15549 || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 15550 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
f607bc57 15551 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9ebbca7d 15552 {
f676971a
EC
15553 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15554 GEN_INT (info->gp_save_offset
15555 + sp_offset
9ebbca7d 15556 + reg_size * i));
0be76840 15557 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 15558
a3170dc6 15559 /* Restore 64-bit quantities for SPE. */
c19de7aa 15560 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15561 {
15562 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
15563 rtx b;
15564
15565 if (!SPE_CONST_OFFSET_OK (offset))
15566 {
15567 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15568 emit_move_insn (b, GEN_INT (offset));
15569 }
15570 else
15571 b = GEN_INT (offset);
15572
15573 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
0be76840 15574 mem = gen_frame_mem (V2SImode, addr);
a3170dc6
AH
15575 }
15576
f676971a 15577 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 15578 info->first_gp_reg_save + i), mem);
9ebbca7d 15579 }
9878760c 15580
9ebbca7d
GK
15581 /* Restore fpr's if we need to do it without calling a function. */
15582 if (restoring_FPRs_inline)
15583 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
f676971a 15584 if ((regs_ever_live[info->first_fp_reg_save+i]
9ebbca7d
GK
15585 && ! call_used_regs[info->first_fp_reg_save+i]))
15586 {
15587 rtx addr, mem;
15588 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
15589 GEN_INT (info->fp_save_offset
15590 + sp_offset
a4f6c312 15591 + 8 * i));
0be76840 15592 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15593
f676971a 15594 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
15595 info->first_fp_reg_save + i),
15596 mem);
15597 }
8d30c4ee 15598
9ebbca7d
GK
15599 /* If we saved cr, restore it here. Just those that were used. */
15600 if (info->cr_save_p)
979721f8 15601 {
9ebbca7d 15602 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 15603 int count = 0;
f676971a 15604
d296e02e 15605 if (using_mtcr_multiple)
979721f8 15606 {
9ebbca7d
GK
15607 for (i = 0; i < 8; i++)
15608 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
e35b9579 15609 count++;
37409796 15610 gcc_assert (count);
e35b9579
GK
15611 }
15612
d296e02e 15613 if (using_mtcr_multiple && count > 1)
e35b9579
GK
15614 {
15615 rtvec p;
15616 int ndx;
f676971a 15617
e35b9579 15618 p = rtvec_alloc (count);
9ebbca7d 15619
e35b9579 15620 ndx = 0;
9ebbca7d
GK
15621 for (i = 0; i < 8; i++)
15622 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
15623 {
15624 rtvec r = rtvec_alloc (2);
15625 RTVEC_ELT (r, 0) = r12_rtx;
15626 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 15627 RTVEC_ELT (p, ndx) =
f676971a 15628 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 15629 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 15630 ndx++;
9ebbca7d
GK
15631 }
15632 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 15633 gcc_assert (ndx == count);
979721f8
MM
15634 }
15635 else
9ebbca7d
GK
15636 for (i = 0; i < 8; i++)
15637 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
979721f8 15638 {
f676971a 15639 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
15640 CR0_REGNO+i),
15641 r12_rtx));
979721f8 15642 }
979721f8
MM
15643 }
15644
9ebbca7d 15645 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
15646 have been done. */
15647 if (frame_reg_rtx != sp_reg_rtx)
15648 {
15649 /* This blockage is needed so that sched doesn't decide to move
15650 the sp change before the register restores. */
15651 rs6000_emit_stack_tie ();
15652 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15653 }
15654 else if (sp_offset != 0)
15655 emit_insn (TARGET_32BIT
15656 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15657 GEN_INT (sp_offset))
15658 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15659 GEN_INT (sp_offset)));
b6c9286a 15660
83720594
RH
15661 if (current_function_calls_eh_return)
15662 {
15663 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 15664 emit_insn (TARGET_32BIT
83720594
RH
15665 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15666 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15667 }
15668
9ebbca7d
GK
15669 if (!sibcall)
15670 {
15671 rtvec p;
15672 if (! restoring_FPRs_inline)
15673 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15674 else
15675 p = rtvec_alloc (2);
b6c9286a 15676
e35b9579 15677 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
15678 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15679 gen_rtx_REG (Pmode,
9ebbca7d 15680 LINK_REGISTER_REGNUM));
9ebbca7d
GK
15681
15682 /* If we have to restore more than two FP registers, branch to the
15683 restore function. It will return to our caller. */
15684 if (! restoring_FPRs_inline)
15685 {
15686 int i;
15687 char rname[30];
520a57c8 15688 const char *alloc_rname;
979721f8 15689
f676971a 15690 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 15691 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 15692 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15693 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15694 gen_rtx_SYMBOL_REF (Pmode,
15695 alloc_rname));
b6c9286a 15696
9ebbca7d
GK
15697 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15698 {
15699 rtx addr, mem;
15700 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15701 GEN_INT (info->fp_save_offset + 8*i));
0be76840 15702 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15703
f676971a 15704 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
15705 gen_rtx_SET (VOIDmode,
15706 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15707 mem);
b6c9286a
MM
15708 }
15709 }
f676971a 15710
9ebbca7d 15711 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 15712 }
9878760c
RK
15713}
15714
15715/* Write function epilogue. */
15716
08c148a8 15717static void
f676971a 15718rs6000_output_function_epilogue (FILE *file,
a2369ed3 15719 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 15720{
9ebbca7d 15721 if (! HAVE_epilogue)
9878760c 15722 {
9ebbca7d
GK
15723 rtx insn = get_last_insn ();
15724 /* If the last insn was a BARRIER, we don't have to write anything except
15725 the trace table. */
15726 if (GET_CODE (insn) == NOTE)
15727 insn = prev_nonnote_insn (insn);
15728 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15729 {
9ebbca7d
GK
15730 /* This is slightly ugly, but at least we don't have two
15731 copies of the epilogue-emitting code. */
15732 start_sequence ();
15733
15734 /* A NOTE_INSN_DELETED is supposed to be at the start
15735 and end of the "toplevel" insn chain. */
2e040219 15736 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15737 rs6000_emit_epilogue (FALSE);
2e040219 15738 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15739
a3c9585f 15740 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15741 {
15742 rtx insn;
15743 unsigned addr = 0;
15744 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15745 {
15746 INSN_ADDRESSES_NEW (insn, addr);
15747 addr += 4;
15748 }
15749 }
15750
9ebbca7d 15751 if (TARGET_DEBUG_STACK)
a4f6c312 15752 debug_rtx_list (get_insns (), 100);
c9d691e9 15753 final (get_insns (), file, FALSE);
9ebbca7d 15754 end_sequence ();
4697a36c 15755 }
9878760c 15756 }
b4ac57ab 15757
efdba735
SH
15758#if TARGET_MACHO
15759 macho_branch_islands ();
0e5da0be
GK
15760 /* Mach-O doesn't support labels at the end of objects, so if
15761 it looks like we might want one, insert a NOP. */
15762 {
15763 rtx insn = get_last_insn ();
15764 while (insn
15765 && NOTE_P (insn)
a38e7aa5 15766 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 15767 insn = PREV_INSN (insn);
f676971a
EC
15768 if (insn
15769 && (LABEL_P (insn)
0e5da0be 15770 || (NOTE_P (insn)
a38e7aa5 15771 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
15772 fputs ("\tnop\n", file);
15773 }
15774#endif
15775
9b30bae2 15776 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
15777 on its format.
15778
15779 We don't output a traceback table if -finhibit-size-directive was
15780 used. The documentation for -finhibit-size-directive reads
15781 ``don't output a @code{.size} assembler directive, or anything
15782 else that would cause trouble if the function is split in the
15783 middle, and the two halves are placed at locations far apart in
15784 memory.'' The traceback table has this property, since it
15785 includes the offset from the start of the function to the
4d30c363
MM
15786 traceback table itself.
15787
15788 System V.4 Powerpc's (and the embedded ABI derived from it) use a
b6c9286a 15789 different traceback table. */
57ac7be9 15790 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 15791 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 15792 {
69c75916 15793 const char *fname = NULL;
3ac88239 15794 const char *language_string = lang_hooks.name;
6041bf2f 15795 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 15796 int i;
57ac7be9 15797 int optional_tbtab;
8097c268 15798 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
15799
15800 if (rs6000_traceback == traceback_full)
15801 optional_tbtab = 1;
15802 else if (rs6000_traceback == traceback_part)
15803 optional_tbtab = 0;
15804 else
15805 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 15806
69c75916
AM
15807 if (optional_tbtab)
15808 {
15809 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
15810 while (*fname == '.') /* V.4 encodes . in the name */
15811 fname++;
15812
15813 /* Need label immediately before tbtab, so we can compute
15814 its offset from the function start. */
15815 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
15816 ASM_OUTPUT_LABEL (file, fname);
15817 }
314fc5a9
ILT
15818
15819 /* The .tbtab pseudo-op can only be used for the first eight
15820 expressions, since it can't handle the possibly variable
15821 length fields that follow. However, if you omit the optional
15822 fields, the assembler outputs zeros for all optional fields
15823 anyways, giving each variable length field is minimum length
15824 (as defined in sys/debug.h). Thus we can not use the .tbtab
15825 pseudo-op at all. */
15826
15827 /* An all-zero word flags the start of the tbtab, for debuggers
15828 that have to find it by searching forward from the entry
15829 point or from the current pc. */
19d2d16f 15830 fputs ("\t.long 0\n", file);
314fc5a9
ILT
15831
15832 /* Tbtab format type. Use format type 0. */
19d2d16f 15833 fputs ("\t.byte 0,", file);
314fc5a9 15834
5fc921c1
DE
15835 /* Language type. Unfortunately, there does not seem to be any
15836 official way to discover the language being compiled, so we
15837 use language_string.
15838 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
15839 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
15840 a number, so for now use 9. */
5fc921c1 15841 if (! strcmp (language_string, "GNU C"))
314fc5a9 15842 i = 0;
6de9cd9a
DN
15843 else if (! strcmp (language_string, "GNU F77")
15844 || ! strcmp (language_string, "GNU F95"))
314fc5a9 15845 i = 1;
8b83775b 15846 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 15847 i = 2;
5fc921c1
DE
15848 else if (! strcmp (language_string, "GNU Ada"))
15849 i = 3;
56438901
AM
15850 else if (! strcmp (language_string, "GNU C++")
15851 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 15852 i = 9;
9517ead8
AG
15853 else if (! strcmp (language_string, "GNU Java"))
15854 i = 13;
5fc921c1
DE
15855 else if (! strcmp (language_string, "GNU Objective-C"))
15856 i = 14;
314fc5a9 15857 else
37409796 15858 gcc_unreachable ();
314fc5a9
ILT
15859 fprintf (file, "%d,", i);
15860
15861 /* 8 single bit fields: global linkage (not set for C extern linkage,
15862 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
15863 from start of procedure stored in tbtab, internal function, function
15864 has controlled storage, function has no toc, function uses fp,
15865 function logs/aborts fp operations. */
15866 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
15867 fprintf (file, "%d,",
15868 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
15869
15870 /* 6 bitfields: function is interrupt handler, name present in
15871 proc table, function calls alloca, on condition directives
15872 (controls stack walks, 3 bits), saves condition reg, saves
15873 link reg. */
15874 /* The `function calls alloca' bit seems to be set whenever reg 31 is
15875 set up as a frame pointer, even when there is no alloca call. */
15876 fprintf (file, "%d,",
6041bf2f
DE
15877 ((optional_tbtab << 6)
15878 | ((optional_tbtab & frame_pointer_needed) << 5)
15879 | (info->cr_save_p << 1)
15880 | (info->lr_save_p)));
314fc5a9 15881
6041bf2f 15882 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
15883 (6 bits). */
15884 fprintf (file, "%d,",
4697a36c 15885 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
15886
15887 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
15888 fprintf (file, "%d,", (32 - first_reg_to_save ()));
15889
6041bf2f
DE
15890 if (optional_tbtab)
15891 {
15892 /* Compute the parameter info from the function decl argument
15893 list. */
15894 tree decl;
15895 int next_parm_info_bit = 31;
314fc5a9 15896
6041bf2f
DE
15897 for (decl = DECL_ARGUMENTS (current_function_decl);
15898 decl; decl = TREE_CHAIN (decl))
15899 {
15900 rtx parameter = DECL_INCOMING_RTL (decl);
15901 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 15902
6041bf2f
DE
15903 if (GET_CODE (parameter) == REG)
15904 {
ebb109ad 15905 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
15906 {
15907 int bits;
15908
15909 float_parms++;
15910
37409796
NS
15911 switch (mode)
15912 {
15913 case SFmode:
15914 bits = 0x2;
15915 break;
15916
15917 case DFmode:
7393f7f8 15918 case DDmode:
37409796 15919 case TFmode:
7393f7f8 15920 case TDmode:
37409796
NS
15921 bits = 0x3;
15922 break;
15923
15924 default:
15925 gcc_unreachable ();
15926 }
6041bf2f
DE
15927
15928 /* If only one bit will fit, don't or in this entry. */
15929 if (next_parm_info_bit > 0)
15930 parm_info |= (bits << (next_parm_info_bit - 1));
15931 next_parm_info_bit -= 2;
15932 }
15933 else
15934 {
15935 fixed_parms += ((GET_MODE_SIZE (mode)
15936 + (UNITS_PER_WORD - 1))
15937 / UNITS_PER_WORD);
15938 next_parm_info_bit -= 1;
15939 }
15940 }
15941 }
15942 }
314fc5a9
ILT
15943
15944 /* Number of fixed point parameters. */
15945 /* This is actually the number of words of fixed point parameters; thus
15946 an 8 byte struct counts as 2; and thus the maximum value is 8. */
15947 fprintf (file, "%d,", fixed_parms);
15948
15949 /* 2 bitfields: number of floating point parameters (7 bits), parameters
15950 all on stack. */
15951 /* This is actually the number of fp registers that hold parameters;
15952 and thus the maximum value is 13. */
15953 /* Set parameters on stack bit if parameters are not in their original
15954 registers, regardless of whether they are on the stack? Xlc
15955 seems to set the bit when not optimizing. */
15956 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
15957
6041bf2f
DE
15958 if (! optional_tbtab)
15959 return;
15960
314fc5a9
ILT
15961 /* Optional fields follow. Some are variable length. */
15962
15963 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
15964 11 double float. */
15965 /* There is an entry for each parameter in a register, in the order that
15966 they occur in the parameter list. Any intervening arguments on the
15967 stack are ignored. If the list overflows a long (max possible length
15968 34 bits) then completely leave off all elements that don't fit. */
15969 /* Only emit this long if there was at least one parameter. */
15970 if (fixed_parms || float_parms)
15971 fprintf (file, "\t.long %d\n", parm_info);
15972
15973 /* Offset from start of code to tb table. */
19d2d16f 15974 fputs ("\t.long ", file);
314fc5a9 15975 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
15976 if (TARGET_AIX)
15977 RS6000_OUTPUT_BASENAME (file, fname);
15978 else
15979 assemble_name (file, fname);
15980 putc ('-', file);
15981 rs6000_output_function_entry (file, fname);
19d2d16f 15982 putc ('\n', file);
314fc5a9
ILT
15983
15984 /* Interrupt handler mask. */
15985 /* Omit this long, since we never set the interrupt handler bit
15986 above. */
15987
15988 /* Number of CTL (controlled storage) anchors. */
15989 /* Omit this long, since the has_ctl bit is never set above. */
15990
15991 /* Displacement into stack of each CTL anchor. */
15992 /* Omit this list of longs, because there are no CTL anchors. */
15993
15994 /* Length of function name. */
69c75916
AM
15995 if (*fname == '*')
15996 ++fname;
296b8152 15997 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
15998
15999 /* Function name. */
16000 assemble_string (fname, strlen (fname));
16001
16002 /* Register for alloca automatic storage; this is always reg 31.
16003 Only emit this if the alloca bit was set above. */
16004 if (frame_pointer_needed)
19d2d16f 16005 fputs ("\t.byte 31\n", file);
b1765bde
DE
16006
16007 fputs ("\t.align 2\n", file);
9b30bae2 16008 }
9878760c 16009}
17167fd8 16010\f
a4f6c312
SS
16011/* A C compound statement that outputs the assembler code for a thunk
16012 function, used to implement C++ virtual function calls with
16013 multiple inheritance. The thunk acts as a wrapper around a virtual
16014 function, adjusting the implicit object parameter before handing
16015 control off to the real function.
16016
16017 First, emit code to add the integer DELTA to the location that
16018 contains the incoming first argument. Assume that this argument
16019 contains a pointer, and is the one used to pass the `this' pointer
16020 in C++. This is the incoming argument *before* the function
16021 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16022 values of all other incoming arguments.
17167fd8
MM
16023
16024 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16025 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16026 not touch the return address. Hence returning from FUNCTION will
16027 return to whoever called the current `thunk'.
17167fd8 16028
a4f6c312
SS
16029 The effect must be as if FUNCTION had been called directly with the
16030 adjusted first argument. This macro is responsible for emitting
16031 all of the code for a thunk function; output_function_prologue()
16032 and output_function_epilogue() are not invoked.
17167fd8 16033
a4f6c312
SS
16034 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16035 been extracted from it.) It might possibly be useful on some
16036 targets, but probably not.
17167fd8 16037
a4f6c312
SS
16038 If you do not define this macro, the target-independent code in the
16039 C++ frontend will generate a less efficient heavyweight thunk that
16040 calls FUNCTION instead of jumping to it. The generic approach does
16041 not support varargs. */
17167fd8 16042
3961e8fe 16043static void
f676971a
EC
16044rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16045 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16046 tree function)
17167fd8 16047{
5b71a4e7 16048 rtx this, insn, funexp;
17167fd8 16049
5b71a4e7 16050 reload_completed = 1;
fe3ad572 16051 epilogue_completed = 1;
5b71a4e7 16052 no_new_pseudos = 1;
56a7189a 16053
5b71a4e7 16054 /* Mark the end of the (empty) prologue. */
2e040219 16055 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16056
5b71a4e7
DE
16057 /* Find the "this" pointer. If the function returns a structure,
16058 the structure return pointer is in r3. */
61f71b34 16059 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16060 this = gen_rtx_REG (Pmode, 4);
56a7189a 16061 else
5b71a4e7 16062 this = gen_rtx_REG (Pmode, 3);
17167fd8 16063
5b71a4e7
DE
16064 /* Apply the constant offset, if required. */
16065 if (delta)
16066 {
16067 rtx delta_rtx = GEN_INT (delta);
16068 emit_insn (TARGET_32BIT
16069 ? gen_addsi3 (this, this, delta_rtx)
16070 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16071 }
16072
5b71a4e7
DE
16073 /* Apply the offset from the vtable, if required. */
16074 if (vcall_offset)
17167fd8 16075 {
5b71a4e7
DE
16076 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16077 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16078
5b71a4e7 16079 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16080 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16081 {
16082 emit_insn (TARGET_32BIT
16083 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16084 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16085 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16086 }
16087 else
16088 {
16089 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16090
16091 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16092 }
5b71a4e7
DE
16093 emit_insn (TARGET_32BIT
16094 ? gen_addsi3 (this, this, tmp)
16095 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16096 }
16097
5b71a4e7
DE
16098 /* Generate a tail call to the target function. */
16099 if (!TREE_USED (function))
16100 {
16101 assemble_external (function);
16102 TREE_USED (function) = 1;
16103 }
16104 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16105 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16106
16107#if TARGET_MACHO
ab82a49f 16108 if (MACHOPIC_INDIRECT)
5b71a4e7 16109 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16110#endif
5b71a4e7
DE
16111
16112 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16113 generate sibcall RTL explicitly. */
5b71a4e7
DE
16114 insn = emit_call_insn (
16115 gen_rtx_PARALLEL (VOIDmode,
16116 gen_rtvec (4,
16117 gen_rtx_CALL (VOIDmode,
16118 funexp, const0_rtx),
16119 gen_rtx_USE (VOIDmode, const0_rtx),
16120 gen_rtx_USE (VOIDmode,
16121 gen_rtx_REG (SImode,
16122 LINK_REGISTER_REGNUM)),
16123 gen_rtx_RETURN (VOIDmode))));
16124 SIBLING_CALL_P (insn) = 1;
16125 emit_barrier ();
16126
16127 /* Run just enough of rest_of_compilation to get the insns emitted.
16128 There's not really enough bulk here to make other passes such as
16129 instruction scheduling worth while. Note that use_thunk calls
16130 assemble_start_function and assemble_end_function. */
16131 insn = get_insns ();
55e092c4 16132 insn_locators_alloc ();
5b71a4e7
DE
16133 shorten_branches (insn);
16134 final_start_function (insn, file, 1);
c9d691e9 16135 final (insn, file, 1);
5b71a4e7
DE
16136 final_end_function ();
16137
16138 reload_completed = 0;
fe3ad572 16139 epilogue_completed = 0;
5b71a4e7 16140 no_new_pseudos = 0;
9ebbca7d 16141}
9ebbca7d
GK
16142\f
16143/* A quick summary of the various types of 'constant-pool tables'
16144 under PowerPC:
16145
f676971a 16146 Target Flags Name One table per
9ebbca7d
GK
16147 AIX (none) AIX TOC object file
16148 AIX -mfull-toc AIX TOC object file
16149 AIX -mminimal-toc AIX minimal TOC translation unit
16150 SVR4/EABI (none) SVR4 SDATA object file
16151 SVR4/EABI -fpic SVR4 pic object file
16152 SVR4/EABI -fPIC SVR4 PIC translation unit
16153 SVR4/EABI -mrelocatable EABI TOC function
16154 SVR4/EABI -maix AIX TOC object file
f676971a 16155 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
16156 AIX minimal TOC translation unit
16157
16158 Name Reg. Set by entries contains:
16159 made by addrs? fp? sum?
16160
16161 AIX TOC 2 crt0 as Y option option
16162 AIX minimal TOC 30 prolog gcc Y Y option
16163 SVR4 SDATA 13 crt0 gcc N Y N
16164 SVR4 pic 30 prolog ld Y not yet N
16165 SVR4 PIC 30 prolog gcc Y option option
16166 EABI TOC 30 prolog gcc Y option option
16167
16168*/
16169
9ebbca7d
GK
16170/* Hash functions for the hash table. */
16171
16172static unsigned
a2369ed3 16173rs6000_hash_constant (rtx k)
9ebbca7d 16174{
46b33600
RH
16175 enum rtx_code code = GET_CODE (k);
16176 enum machine_mode mode = GET_MODE (k);
16177 unsigned result = (code << 3) ^ mode;
16178 const char *format;
16179 int flen, fidx;
f676971a 16180
46b33600
RH
16181 format = GET_RTX_FORMAT (code);
16182 flen = strlen (format);
16183 fidx = 0;
9ebbca7d 16184
46b33600
RH
16185 switch (code)
16186 {
16187 case LABEL_REF:
16188 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16189
16190 case CONST_DOUBLE:
16191 if (mode != VOIDmode)
16192 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16193 flen = 2;
16194 break;
16195
16196 case CODE_LABEL:
16197 fidx = 3;
16198 break;
16199
16200 default:
16201 break;
16202 }
9ebbca7d
GK
16203
16204 for (; fidx < flen; fidx++)
16205 switch (format[fidx])
16206 {
16207 case 's':
16208 {
16209 unsigned i, len;
16210 const char *str = XSTR (k, fidx);
16211 len = strlen (str);
16212 result = result * 613 + len;
16213 for (i = 0; i < len; i++)
16214 result = result * 613 + (unsigned) str[i];
17167fd8
MM
16215 break;
16216 }
9ebbca7d
GK
16217 case 'u':
16218 case 'e':
16219 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
16220 break;
16221 case 'i':
16222 case 'n':
16223 result = result * 613 + (unsigned) XINT (k, fidx);
16224 break;
16225 case 'w':
16226 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
16227 result = result * 613 + (unsigned) XWINT (k, fidx);
16228 else
16229 {
16230 size_t i;
9390387d 16231 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
16232 result = result * 613 + (unsigned) (XWINT (k, fidx)
16233 >> CHAR_BIT * i);
16234 }
16235 break;
09501938
DE
16236 case '0':
16237 break;
9ebbca7d 16238 default:
37409796 16239 gcc_unreachable ();
9ebbca7d 16240 }
46b33600 16241
9ebbca7d
GK
16242 return result;
16243}
16244
16245static unsigned
a2369ed3 16246toc_hash_function (const void *hash_entry)
9ebbca7d 16247{
f676971a 16248 const struct toc_hash_struct *thc =
a9098fd0
GK
16249 (const struct toc_hash_struct *) hash_entry;
16250 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
16251}
16252
16253/* Compare H1 and H2 for equivalence. */
16254
16255static int
a2369ed3 16256toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
16257{
16258 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
16259 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
16260
a9098fd0
GK
16261 if (((const struct toc_hash_struct *) h1)->key_mode
16262 != ((const struct toc_hash_struct *) h2)->key_mode)
16263 return 0;
16264
5692c7bc 16265 return rtx_equal_p (r1, r2);
9ebbca7d
GK
16266}
16267
28e510bd
MM
16268/* These are the names given by the C++ front-end to vtables, and
16269 vtable-like objects. Ideally, this logic should not be here;
16270 instead, there should be some programmatic way of inquiring as
16271 to whether or not an object is a vtable. */
16272
16273#define VTABLE_NAME_P(NAME) \
9390387d 16274 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
16275 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
16276 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 16277 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 16278 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
16279
16280void
a2369ed3 16281rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
16282{
16283 /* Currently C++ toc references to vtables can be emitted before it
16284 is decided whether the vtable is public or private. If this is
16285 the case, then the linker will eventually complain that there is
f676971a 16286 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
16287 we emit the TOC reference to reference the symbol and not the
16288 section. */
16289 const char *name = XSTR (x, 0);
54ee9799 16290
f676971a 16291 if (VTABLE_NAME_P (name))
54ee9799
DE
16292 {
16293 RS6000_OUTPUT_BASENAME (file, name);
16294 }
16295 else
16296 assemble_name (file, name);
28e510bd
MM
16297}
16298
a4f6c312
SS
16299/* Output a TOC entry. We derive the entry name from what is being
16300 written. */
9878760c
RK
16301
16302void
a2369ed3 16303output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
16304{
16305 char buf[256];
3cce094d 16306 const char *name = buf;
ec940faa 16307 const char *real_name;
9878760c 16308 rtx base = x;
16fdeb48 16309 HOST_WIDE_INT offset = 0;
9878760c 16310
37409796 16311 gcc_assert (!TARGET_NO_TOC);
4697a36c 16312
9ebbca7d
GK
16313 /* When the linker won't eliminate them, don't output duplicate
16314 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
16315 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
16316 CODE_LABELs. */
16317 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
16318 {
16319 struct toc_hash_struct *h;
16320 void * * found;
f676971a 16321
17211ab5 16322 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 16323 time because GGC is not initialized at that point. */
17211ab5 16324 if (toc_hash_table == NULL)
f676971a 16325 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
16326 toc_hash_eq, NULL);
16327
9ebbca7d
GK
16328 h = ggc_alloc (sizeof (*h));
16329 h->key = x;
a9098fd0 16330 h->key_mode = mode;
9ebbca7d 16331 h->labelno = labelno;
f676971a 16332
9ebbca7d
GK
16333 found = htab_find_slot (toc_hash_table, h, 1);
16334 if (*found == NULL)
16335 *found = h;
f676971a 16336 else /* This is indeed a duplicate.
9ebbca7d
GK
16337 Set this label equal to that label. */
16338 {
16339 fputs ("\t.set ", file);
16340 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
16341 fprintf (file, "%d,", labelno);
16342 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 16343 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
16344 found)->labelno));
16345 return;
16346 }
16347 }
16348
16349 /* If we're going to put a double constant in the TOC, make sure it's
16350 aligned properly when strict alignment is on. */
ff1720ed
RK
16351 if (GET_CODE (x) == CONST_DOUBLE
16352 && STRICT_ALIGNMENT
a9098fd0 16353 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
16354 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
16355 ASM_OUTPUT_ALIGN (file, 3);
16356 }
16357
4977bab6 16358 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 16359
37c37a57
RK
16360 /* Handle FP constants specially. Note that if we have a minimal
16361 TOC, things we put here aren't actually in the TOC, so we can allow
16362 FP constants. */
00b79d54
BE
16363 if (GET_CODE (x) == CONST_DOUBLE &&
16364 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
16365 {
16366 REAL_VALUE_TYPE rv;
16367 long k[4];
16368
16369 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16370 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16371 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16372 else
16373 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
16374
16375 if (TARGET_64BIT)
16376 {
16377 if (TARGET_MINIMAL_TOC)
16378 fputs (DOUBLE_INT_ASM_OP, file);
16379 else
16380 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16381 k[0] & 0xffffffff, k[1] & 0xffffffff,
16382 k[2] & 0xffffffff, k[3] & 0xffffffff);
16383 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16384 k[0] & 0xffffffff, k[1] & 0xffffffff,
16385 k[2] & 0xffffffff, k[3] & 0xffffffff);
16386 return;
16387 }
16388 else
16389 {
16390 if (TARGET_MINIMAL_TOC)
16391 fputs ("\t.long ", file);
16392 else
16393 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16394 k[0] & 0xffffffff, k[1] & 0xffffffff,
16395 k[2] & 0xffffffff, k[3] & 0xffffffff);
16396 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16397 k[0] & 0xffffffff, k[1] & 0xffffffff,
16398 k[2] & 0xffffffff, k[3] & 0xffffffff);
16399 return;
16400 }
16401 }
00b79d54
BE
16402 else if (GET_CODE (x) == CONST_DOUBLE &&
16403 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 16404 {
042259f2
DE
16405 REAL_VALUE_TYPE rv;
16406 long k[2];
0adc764e 16407
042259f2 16408 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16409
16410 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16411 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16412 else
16413 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 16414
13ded975
DE
16415 if (TARGET_64BIT)
16416 {
16417 if (TARGET_MINIMAL_TOC)
2bfcf297 16418 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16419 else
2f0552b6
AM
16420 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16421 k[0] & 0xffffffff, k[1] & 0xffffffff);
16422 fprintf (file, "0x%lx%08lx\n",
16423 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16424 return;
16425 }
1875cc88 16426 else
13ded975
DE
16427 {
16428 if (TARGET_MINIMAL_TOC)
2bfcf297 16429 fputs ("\t.long ", file);
13ded975 16430 else
2f0552b6
AM
16431 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16432 k[0] & 0xffffffff, k[1] & 0xffffffff);
16433 fprintf (file, "0x%lx,0x%lx\n",
16434 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16435 return;
16436 }
9878760c 16437 }
00b79d54
BE
16438 else if (GET_CODE (x) == CONST_DOUBLE &&
16439 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 16440 {
042259f2
DE
16441 REAL_VALUE_TYPE rv;
16442 long l;
9878760c 16443
042259f2 16444 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16445 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16446 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16447 else
16448 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 16449
31bfaa0b
DE
16450 if (TARGET_64BIT)
16451 {
16452 if (TARGET_MINIMAL_TOC)
2bfcf297 16453 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 16454 else
2f0552b6
AM
16455 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16456 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
16457 return;
16458 }
042259f2 16459 else
31bfaa0b
DE
16460 {
16461 if (TARGET_MINIMAL_TOC)
2bfcf297 16462 fputs ("\t.long ", file);
31bfaa0b 16463 else
2f0552b6
AM
16464 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16465 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
16466 return;
16467 }
042259f2 16468 }
f176e826 16469 else if (GET_MODE (x) == VOIDmode
a9098fd0 16470 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 16471 {
e2c953b6 16472 unsigned HOST_WIDE_INT low;
042259f2
DE
16473 HOST_WIDE_INT high;
16474
16475 if (GET_CODE (x) == CONST_DOUBLE)
16476 {
16477 low = CONST_DOUBLE_LOW (x);
16478 high = CONST_DOUBLE_HIGH (x);
16479 }
16480 else
16481#if HOST_BITS_PER_WIDE_INT == 32
16482 {
16483 low = INTVAL (x);
0858c623 16484 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
16485 }
16486#else
16487 {
c4ad648e
AM
16488 low = INTVAL (x) & 0xffffffff;
16489 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
16490 }
16491#endif
9878760c 16492
a9098fd0
GK
16493 /* TOC entries are always Pmode-sized, but since this
16494 is a bigendian machine then if we're putting smaller
16495 integer constants in the TOC we have to pad them.
16496 (This is still a win over putting the constants in
16497 a separate constant pool, because then we'd have
02a4ec28
FS
16498 to have both a TOC entry _and_ the actual constant.)
16499
16500 For a 32-bit target, CONST_INT values are loaded and shifted
16501 entirely within `low' and can be stored in one TOC entry. */
16502
37409796
NS
16503 /* It would be easy to make this work, but it doesn't now. */
16504 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
16505
16506 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
16507 {
16508#if HOST_BITS_PER_WIDE_INT == 32
16509 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16510 POINTER_SIZE, &low, &high, 0);
16511#else
16512 low |= high << 32;
16513 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16514 high = (HOST_WIDE_INT) low >> 32;
16515 low &= 0xffffffff;
16516#endif
16517 }
a9098fd0 16518
13ded975
DE
16519 if (TARGET_64BIT)
16520 {
16521 if (TARGET_MINIMAL_TOC)
2bfcf297 16522 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16523 else
2f0552b6
AM
16524 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16525 (long) high & 0xffffffff, (long) low & 0xffffffff);
16526 fprintf (file, "0x%lx%08lx\n",
16527 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
16528 return;
16529 }
1875cc88 16530 else
13ded975 16531 {
02a4ec28
FS
16532 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16533 {
16534 if (TARGET_MINIMAL_TOC)
2bfcf297 16535 fputs ("\t.long ", file);
02a4ec28 16536 else
2bfcf297 16537 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
16538 (long) high & 0xffffffff, (long) low & 0xffffffff);
16539 fprintf (file, "0x%lx,0x%lx\n",
16540 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 16541 }
13ded975 16542 else
02a4ec28
FS
16543 {
16544 if (TARGET_MINIMAL_TOC)
2bfcf297 16545 fputs ("\t.long ", file);
02a4ec28 16546 else
2f0552b6
AM
16547 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16548 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 16549 }
13ded975
DE
16550 return;
16551 }
9878760c
RK
16552 }
16553
16554 if (GET_CODE (x) == CONST)
16555 {
37409796 16556 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 16557
9878760c
RK
16558 base = XEXP (XEXP (x, 0), 0);
16559 offset = INTVAL (XEXP (XEXP (x, 0), 1));
16560 }
f676971a 16561
37409796
NS
16562 switch (GET_CODE (base))
16563 {
16564 case SYMBOL_REF:
16565 name = XSTR (base, 0);
16566 break;
16567
16568 case LABEL_REF:
16569 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16570 CODE_LABEL_NUMBER (XEXP (base, 0)));
16571 break;
16572
16573 case CODE_LABEL:
16574 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16575 break;
16576
16577 default:
16578 gcc_unreachable ();
16579 }
9878760c 16580
772c5265 16581 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 16582 if (TARGET_MINIMAL_TOC)
2bfcf297 16583 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
16584 else
16585 {
b6c9286a 16586 fprintf (file, "\t.tc %s", real_name);
9878760c 16587
1875cc88 16588 if (offset < 0)
16fdeb48 16589 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 16590 else if (offset)
16fdeb48 16591 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 16592
19d2d16f 16593 fputs ("[TC],", file);
1875cc88 16594 }
581bc4de
MM
16595
16596 /* Currently C++ toc references to vtables can be emitted before it
16597 is decided whether the vtable is public or private. If this is
16598 the case, then the linker will eventually complain that there is
16599 a TOC reference to an unknown section. Thus, for vtables only,
16600 we emit the TOC reference to reference the symbol and not the
16601 section. */
28e510bd 16602 if (VTABLE_NAME_P (name))
581bc4de 16603 {
54ee9799 16604 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 16605 if (offset < 0)
16fdeb48 16606 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 16607 else if (offset > 0)
16fdeb48 16608 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
16609 }
16610 else
16611 output_addr_const (file, x);
19d2d16f 16612 putc ('\n', file);
9878760c
RK
16613}
16614\f
16615/* Output an assembler pseudo-op to write an ASCII string of N characters
16616 starting at P to FILE.
16617
16618 On the RS/6000, we have to do this using the .byte operation and
16619 write out special characters outside the quoted string.
16620 Also, the assembler is broken; very long strings are truncated,
a4f6c312 16621 so we must artificially break them up early. */
9878760c
RK
16622
16623void
a2369ed3 16624output_ascii (FILE *file, const char *p, int n)
9878760c
RK
16625{
16626 char c;
16627 int i, count_string;
d330fd93
KG
16628 const char *for_string = "\t.byte \"";
16629 const char *for_decimal = "\t.byte ";
16630 const char *to_close = NULL;
9878760c
RK
16631
16632 count_string = 0;
16633 for (i = 0; i < n; i++)
16634 {
16635 c = *p++;
16636 if (c >= ' ' && c < 0177)
16637 {
16638 if (for_string)
16639 fputs (for_string, file);
16640 putc (c, file);
16641
16642 /* Write two quotes to get one. */
16643 if (c == '"')
16644 {
16645 putc (c, file);
16646 ++count_string;
16647 }
16648
16649 for_string = NULL;
16650 for_decimal = "\"\n\t.byte ";
16651 to_close = "\"\n";
16652 ++count_string;
16653
16654 if (count_string >= 512)
16655 {
16656 fputs (to_close, file);
16657
16658 for_string = "\t.byte \"";
16659 for_decimal = "\t.byte ";
16660 to_close = NULL;
16661 count_string = 0;
16662 }
16663 }
16664 else
16665 {
16666 if (for_decimal)
16667 fputs (for_decimal, file);
16668 fprintf (file, "%d", c);
16669
16670 for_string = "\n\t.byte \"";
16671 for_decimal = ", ";
16672 to_close = "\n";
16673 count_string = 0;
16674 }
16675 }
16676
16677 /* Now close the string if we have written one. Then end the line. */
16678 if (to_close)
9ebbca7d 16679 fputs (to_close, file);
9878760c
RK
16680}
16681\f
16682/* Generate a unique section name for FILENAME for a section type
16683 represented by SECTION_DESC. Output goes into BUF.
16684
16685 SECTION_DESC can be any string, as long as it is different for each
16686 possible section type.
16687
16688 We name the section in the same manner as xlc. The name begins with an
16689 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
16690 names) with the last period replaced by the string SECTION_DESC. If
16691 FILENAME does not contain a period, SECTION_DESC is appended to the end of
16692 the name. */
9878760c
RK
16693
16694void
f676971a 16695rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 16696 const char *section_desc)
9878760c 16697{
9ebbca7d 16698 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
16699 char *p;
16700 int len;
9878760c
RK
16701
16702 after_last_slash = filename;
16703 for (q = filename; *q; q++)
11e5fe42
RK
16704 {
16705 if (*q == '/')
16706 after_last_slash = q + 1;
16707 else if (*q == '.')
16708 last_period = q;
16709 }
9878760c 16710
11e5fe42 16711 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 16712 *buf = (char *) xmalloc (len);
9878760c
RK
16713
16714 p = *buf;
16715 *p++ = '_';
16716
16717 for (q = after_last_slash; *q; q++)
16718 {
11e5fe42 16719 if (q == last_period)
c4ad648e 16720 {
9878760c
RK
16721 strcpy (p, section_desc);
16722 p += strlen (section_desc);
e3981aab 16723 break;
c4ad648e 16724 }
9878760c 16725
e9a780ec 16726 else if (ISALNUM (*q))
c4ad648e 16727 *p++ = *q;
9878760c
RK
16728 }
16729
11e5fe42 16730 if (last_period == 0)
9878760c
RK
16731 strcpy (p, section_desc);
16732 else
16733 *p = '\0';
16734}
e165f3f0 16735\f
a4f6c312 16736/* Emit profile function. */
411707f4 16737
411707f4 16738void
a2369ed3 16739output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16740{
858081ad
AH
16741 /* Non-standard profiling for kernels, which just saves LR then calls
16742 _mcount without worrying about arg saves. The idea is to change
16743 the function prologue as little as possible as it isn't easy to
16744 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16745 if (TARGET_PROFILE_KERNEL)
16746 return;
16747
8480e480
CC
16748 if (DEFAULT_ABI == ABI_AIX)
16749 {
9739c90c
JJ
16750#ifndef NO_PROFILE_COUNTERS
16751# define NO_PROFILE_COUNTERS 0
16752#endif
f676971a 16753 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
16754 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16755 else
16756 {
16757 char buf[30];
16758 const char *label_name;
16759 rtx fun;
411707f4 16760
9739c90c
JJ
16761 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16762 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16763 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 16764
9739c90c
JJ
16765 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16766 fun, Pmode);
16767 }
8480e480 16768 }
ee890fe2
SS
16769 else if (DEFAULT_ABI == ABI_DARWIN)
16770 {
d5fa86ba 16771 const char *mcount_name = RS6000_MCOUNT;
ee890fe2
SS
16772 int caller_addr_regno = LINK_REGISTER_REGNUM;
16773
16774 /* Be conservative and always set this, at least for now. */
16775 current_function_uses_pic_offset_table = 1;
16776
16777#if TARGET_MACHO
16778 /* For PIC code, set up a stub and collect the caller's address
16779 from r0, which is where the prologue puts it. */
11abc112
MM
16780 if (MACHOPIC_INDIRECT
16781 && current_function_uses_pic_offset_table)
16782 caller_addr_regno = 0;
ee890fe2
SS
16783#endif
16784 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16785 0, VOIDmode, 1,
16786 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16787 }
411707f4
CC
16788}
16789
a4f6c312 16790/* Write function profiler code. */
e165f3f0
RK
16791
16792void
a2369ed3 16793output_function_profiler (FILE *file, int labelno)
e165f3f0 16794{
3daf36a4 16795 char buf[100];
e165f3f0 16796
38c1f2d7 16797 switch (DEFAULT_ABI)
3daf36a4 16798 {
38c1f2d7 16799 default:
37409796 16800 gcc_unreachable ();
38c1f2d7
MM
16801
16802 case ABI_V4:
09eeeacb
AM
16803 if (!TARGET_32BIT)
16804 {
d4ee4d25 16805 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
16806 return;
16807 }
ffcfcb5f 16808 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 16809 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
16810 if (NO_PROFILE_COUNTERS)
16811 {
16812 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16813 reg_names[0], reg_names[1]);
16814 }
16815 else if (TARGET_SECURE_PLT && flag_pic)
16816 {
16817 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
16818 reg_names[0], reg_names[1]);
16819 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
16820 asm_fprintf (file, "\t{cau|addis} %s,%s,",
16821 reg_names[12], reg_names[12]);
16822 assemble_name (file, buf);
16823 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
16824 assemble_name (file, buf);
16825 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
16826 }
16827 else if (flag_pic == 1)
38c1f2d7 16828 {
dfdfa60f 16829 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
16830 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16831 reg_names[0], reg_names[1]);
17167fd8 16832 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 16833 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 16834 assemble_name (file, buf);
17167fd8 16835 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 16836 }
9ebbca7d 16837 else if (flag_pic > 1)
38c1f2d7 16838 {
71625f3d
AM
16839 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16840 reg_names[0], reg_names[1]);
9ebbca7d 16841 /* Now, we need to get the address of the label. */
71625f3d 16842 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 16843 assemble_name (file, buf);
9ebbca7d
GK
16844 fputs ("-.\n1:", file);
16845 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 16846 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
16847 reg_names[0], reg_names[11]);
16848 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
16849 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 16850 }
38c1f2d7
MM
16851 else
16852 {
17167fd8 16853 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 16854 assemble_name (file, buf);
dfdfa60f 16855 fputs ("@ha\n", file);
71625f3d
AM
16856 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
16857 reg_names[0], reg_names[1]);
a260abc9 16858 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 16859 assemble_name (file, buf);
17167fd8 16860 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
16861 }
16862
50d440bc 16863 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
16864 fprintf (file, "\tbl %s%s\n",
16865 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
16866 break;
16867
16868 case ABI_AIX:
ee890fe2 16869 case ABI_DARWIN:
ffcfcb5f
AM
16870 if (!TARGET_PROFILE_KERNEL)
16871 {
a3c9585f 16872 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
16873 }
16874 else
16875 {
37409796 16876 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
16877
16878 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
16879 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
16880
6de9cd9a 16881 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
16882 {
16883 asm_fprintf (file, "\tstd %s,24(%s)\n",
16884 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16885 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16886 asm_fprintf (file, "\tld %s,24(%s)\n",
16887 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
16888 }
16889 else
16890 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
16891 }
38c1f2d7
MM
16892 break;
16893 }
e165f3f0 16894}
a251ffd0 16895
b54cf83a 16896\f
44cd321e
PS
16897
16898/* The following variable value is the last issued insn. */
16899
16900static rtx last_scheduled_insn;
16901
16902/* The following variable helps to balance issuing of load and
16903 store instructions */
16904
16905static int load_store_pendulum;
16906
b54cf83a
DE
16907/* Power4 load update and store update instructions are cracked into a
16908 load or store and an integer insn which are executed in the same cycle.
16909 Branches have their own dispatch slot which does not count against the
16910 GCC issue rate, but it changes the program flow so there are no other
16911 instructions to issue in this cycle. */
16912
16913static int
f676971a
EC
16914rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
16915 int verbose ATTRIBUTE_UNUSED,
a2369ed3 16916 rtx insn, int more)
b54cf83a 16917{
44cd321e 16918 last_scheduled_insn = insn;
b54cf83a
DE
16919 if (GET_CODE (PATTERN (insn)) == USE
16920 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
16921 {
16922 cached_can_issue_more = more;
16923 return cached_can_issue_more;
16924 }
16925
16926 if (insn_terminates_group_p (insn, current_group))
16927 {
16928 cached_can_issue_more = 0;
16929 return cached_can_issue_more;
16930 }
b54cf83a 16931
d296e02e
AP
16932 /* If no reservation, but reach here */
16933 if (recog_memoized (insn) < 0)
16934 return more;
16935
ec507f2d 16936 if (rs6000_sched_groups)
b54cf83a 16937 {
cbe26ab8 16938 if (is_microcoded_insn (insn))
44cd321e 16939 cached_can_issue_more = 0;
cbe26ab8 16940 else if (is_cracked_insn (insn))
44cd321e
PS
16941 cached_can_issue_more = more > 2 ? more - 2 : 0;
16942 else
16943 cached_can_issue_more = more - 1;
16944
16945 return cached_can_issue_more;
b54cf83a 16946 }
165b263e 16947
d296e02e
AP
16948 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
16949 return 0;
16950
44cd321e
PS
16951 cached_can_issue_more = more - 1;
16952 return cached_can_issue_more;
b54cf83a
DE
16953}
16954
a251ffd0
TG
16955/* Adjust the cost of a scheduling dependency. Return the new cost of
16956 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
16957
c237e94a 16958static int
0a4f0294 16959rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 16960{
44cd321e 16961 enum attr_type attr_type;
a251ffd0 16962
44cd321e 16963 if (! recog_memoized (insn))
a251ffd0
TG
16964 return 0;
16965
44cd321e 16966 switch (REG_NOTE_KIND (link))
a251ffd0 16967 {
44cd321e
PS
16968 case REG_DEP_TRUE:
16969 {
16970 /* Data dependency; DEP_INSN writes a register that INSN reads
16971 some cycles later. */
16972
16973 /* Separate a load from a narrower, dependent store. */
16974 if (rs6000_sched_groups
16975 && GET_CODE (PATTERN (insn)) == SET
16976 && GET_CODE (PATTERN (dep_insn)) == SET
16977 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
16978 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
16979 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
16980 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
16981 return cost + 14;
16982
16983 attr_type = get_attr_type (insn);
16984
16985 switch (attr_type)
16986 {
16987 case TYPE_JMPREG:
16988 /* Tell the first scheduling pass about the latency between
16989 a mtctr and bctr (and mtlr and br/blr). The first
16990 scheduling pass will not know about this latency since
16991 the mtctr instruction, which has the latency associated
16992 to it, will be generated by reload. */
16993 return TARGET_POWER ? 5 : 4;
16994 case TYPE_BRANCH:
16995 /* Leave some extra cycles between a compare and its
16996 dependent branch, to inhibit expensive mispredicts. */
16997 if ((rs6000_cpu_attr == CPU_PPC603
16998 || rs6000_cpu_attr == CPU_PPC604
16999 || rs6000_cpu_attr == CPU_PPC604E
17000 || rs6000_cpu_attr == CPU_PPC620
17001 || rs6000_cpu_attr == CPU_PPC630
17002 || rs6000_cpu_attr == CPU_PPC750
17003 || rs6000_cpu_attr == CPU_PPC7400
17004 || rs6000_cpu_attr == CPU_PPC7450
17005 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17006 || rs6000_cpu_attr == CPU_POWER5
17007 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17008 && recog_memoized (dep_insn)
17009 && (INSN_CODE (dep_insn) >= 0))
982afe02 17010
44cd321e
PS
17011 switch (get_attr_type (dep_insn))
17012 {
17013 case TYPE_CMP:
17014 case TYPE_COMPARE:
17015 case TYPE_DELAYED_COMPARE:
17016 case TYPE_IMUL_COMPARE:
17017 case TYPE_LMUL_COMPARE:
17018 case TYPE_FPCOMPARE:
17019 case TYPE_CR_LOGICAL:
17020 case TYPE_DELAYED_CR:
17021 return cost + 2;
17022 default:
17023 break;
17024 }
17025 break;
17026
17027 case TYPE_STORE:
17028 case TYPE_STORE_U:
17029 case TYPE_STORE_UX:
17030 case TYPE_FPSTORE:
17031 case TYPE_FPSTORE_U:
17032 case TYPE_FPSTORE_UX:
17033 if ((rs6000_cpu == PROCESSOR_POWER6)
17034 && recog_memoized (dep_insn)
17035 && (INSN_CODE (dep_insn) >= 0))
17036 {
17037
17038 if (GET_CODE (PATTERN (insn)) != SET)
17039 /* If this happens, we have to extend this to schedule
17040 optimally. Return default for now. */
17041 return cost;
17042
17043 /* Adjust the cost for the case where the value written
17044 by a fixed point operation is used as the address
17045 gen value on a store. */
17046 switch (get_attr_type (dep_insn))
17047 {
17048 case TYPE_LOAD:
17049 case TYPE_LOAD_U:
17050 case TYPE_LOAD_UX:
17051 case TYPE_CNTLZ:
17052 {
17053 if (! store_data_bypass_p (dep_insn, insn))
17054 return 4;
17055 break;
17056 }
17057 case TYPE_LOAD_EXT:
17058 case TYPE_LOAD_EXT_U:
17059 case TYPE_LOAD_EXT_UX:
17060 case TYPE_VAR_SHIFT_ROTATE:
17061 case TYPE_VAR_DELAYED_COMPARE:
17062 {
17063 if (! store_data_bypass_p (dep_insn, insn))
17064 return 6;
17065 break;
17066 }
17067 case TYPE_INTEGER:
17068 case TYPE_COMPARE:
17069 case TYPE_FAST_COMPARE:
17070 case TYPE_EXTS:
17071 case TYPE_SHIFT:
17072 case TYPE_INSERT_WORD:
17073 case TYPE_INSERT_DWORD:
17074 case TYPE_FPLOAD_U:
17075 case TYPE_FPLOAD_UX:
17076 case TYPE_STORE_U:
17077 case TYPE_STORE_UX:
17078 case TYPE_FPSTORE_U:
17079 case TYPE_FPSTORE_UX:
17080 {
17081 if (! store_data_bypass_p (dep_insn, insn))
17082 return 3;
17083 break;
17084 }
17085 case TYPE_IMUL:
17086 case TYPE_IMUL2:
17087 case TYPE_IMUL3:
17088 case TYPE_LMUL:
17089 case TYPE_IMUL_COMPARE:
17090 case TYPE_LMUL_COMPARE:
17091 {
17092 if (! store_data_bypass_p (dep_insn, insn))
17093 return 17;
17094 break;
17095 }
17096 case TYPE_IDIV:
17097 {
17098 if (! store_data_bypass_p (dep_insn, insn))
17099 return 45;
17100 break;
17101 }
17102 case TYPE_LDIV:
17103 {
17104 if (! store_data_bypass_p (dep_insn, insn))
17105 return 57;
17106 break;
17107 }
17108 default:
17109 break;
17110 }
17111 }
17112 break;
17113
17114 case TYPE_LOAD:
17115 case TYPE_LOAD_U:
17116 case TYPE_LOAD_UX:
17117 case TYPE_LOAD_EXT:
17118 case TYPE_LOAD_EXT_U:
17119 case TYPE_LOAD_EXT_UX:
17120 if ((rs6000_cpu == PROCESSOR_POWER6)
17121 && recog_memoized (dep_insn)
17122 && (INSN_CODE (dep_insn) >= 0))
17123 {
17124
17125 /* Adjust the cost for the case where the value written
17126 by a fixed point instruction is used within the address
17127 gen portion of a subsequent load(u)(x) */
17128 switch (get_attr_type (dep_insn))
17129 {
17130 case TYPE_LOAD:
17131 case TYPE_LOAD_U:
17132 case TYPE_LOAD_UX:
17133 case TYPE_CNTLZ:
17134 {
17135 if (set_to_load_agen (dep_insn, insn))
17136 return 4;
17137 break;
17138 }
17139 case TYPE_LOAD_EXT:
17140 case TYPE_LOAD_EXT_U:
17141 case TYPE_LOAD_EXT_UX:
17142 case TYPE_VAR_SHIFT_ROTATE:
17143 case TYPE_VAR_DELAYED_COMPARE:
17144 {
17145 if (set_to_load_agen (dep_insn, insn))
17146 return 6;
17147 break;
17148 }
17149 case TYPE_INTEGER:
17150 case TYPE_COMPARE:
17151 case TYPE_FAST_COMPARE:
17152 case TYPE_EXTS:
17153 case TYPE_SHIFT:
17154 case TYPE_INSERT_WORD:
17155 case TYPE_INSERT_DWORD:
17156 case TYPE_FPLOAD_U:
17157 case TYPE_FPLOAD_UX:
17158 case TYPE_STORE_U:
17159 case TYPE_STORE_UX:
17160 case TYPE_FPSTORE_U:
17161 case TYPE_FPSTORE_UX:
17162 {
17163 if (set_to_load_agen (dep_insn, insn))
17164 return 3;
17165 break;
17166 }
17167 case TYPE_IMUL:
17168 case TYPE_IMUL2:
17169 case TYPE_IMUL3:
17170 case TYPE_LMUL:
17171 case TYPE_IMUL_COMPARE:
17172 case TYPE_LMUL_COMPARE:
17173 {
17174 if (set_to_load_agen (dep_insn, insn))
17175 return 17;
17176 break;
17177 }
17178 case TYPE_IDIV:
17179 {
17180 if (set_to_load_agen (dep_insn, insn))
17181 return 45;
17182 break;
17183 }
17184 case TYPE_LDIV:
17185 {
17186 if (set_to_load_agen (dep_insn, insn))
17187 return 57;
17188 break;
17189 }
17190 default:
17191 break;
17192 }
17193 }
17194 break;
17195
17196 case TYPE_FPLOAD:
17197 if ((rs6000_cpu == PROCESSOR_POWER6)
17198 && recog_memoized (dep_insn)
17199 && (INSN_CODE (dep_insn) >= 0)
17200 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17201 return 2;
17202
17203 default:
17204 break;
17205 }
c9dbf840 17206
a251ffd0 17207 /* Fall out to return default cost. */
44cd321e
PS
17208 }
17209 break;
17210
17211 case REG_DEP_OUTPUT:
17212 /* Output dependency; DEP_INSN writes a register that INSN writes some
17213 cycles later. */
17214 if ((rs6000_cpu == PROCESSOR_POWER6)
17215 && recog_memoized (dep_insn)
17216 && (INSN_CODE (dep_insn) >= 0))
17217 {
17218 attr_type = get_attr_type (insn);
17219
17220 switch (attr_type)
17221 {
17222 case TYPE_FP:
17223 if (get_attr_type (dep_insn) == TYPE_FP)
17224 return 1;
17225 break;
17226 case TYPE_FPLOAD:
17227 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
17228 return 2;
17229 break;
17230 default:
17231 break;
17232 }
17233 }
17234 case REG_DEP_ANTI:
17235 /* Anti dependency; DEP_INSN reads a register that INSN writes some
17236 cycles later. */
17237 return 0;
17238
17239 default:
17240 gcc_unreachable ();
a251ffd0
TG
17241 }
17242
17243 return cost;
17244}
b6c9286a 17245
cbe26ab8 17246/* The function returns a true if INSN is microcoded.
839a4992 17247 Return false otherwise. */
cbe26ab8
DN
17248
17249static bool
17250is_microcoded_insn (rtx insn)
17251{
17252 if (!insn || !INSN_P (insn)
17253 || GET_CODE (PATTERN (insn)) == USE
17254 || GET_CODE (PATTERN (insn)) == CLOBBER)
17255 return false;
17256
d296e02e
AP
17257 if (rs6000_cpu_attr == CPU_CELL)
17258 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
17259
ec507f2d 17260 if (rs6000_sched_groups)
cbe26ab8
DN
17261 {
17262 enum attr_type type = get_attr_type (insn);
17263 if (type == TYPE_LOAD_EXT_U
17264 || type == TYPE_LOAD_EXT_UX
17265 || type == TYPE_LOAD_UX
17266 || type == TYPE_STORE_UX
17267 || type == TYPE_MFCR)
c4ad648e 17268 return true;
cbe26ab8
DN
17269 }
17270
17271 return false;
17272}
17273
cbe26ab8
DN
17274/* The function returns true if INSN is cracked into 2 instructions
17275 by the processor (and therefore occupies 2 issue slots). */
17276
17277static bool
17278is_cracked_insn (rtx insn)
17279{
17280 if (!insn || !INSN_P (insn)
17281 || GET_CODE (PATTERN (insn)) == USE
17282 || GET_CODE (PATTERN (insn)) == CLOBBER)
17283 return false;
17284
ec507f2d 17285 if (rs6000_sched_groups)
cbe26ab8
DN
17286 {
17287 enum attr_type type = get_attr_type (insn);
17288 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
17289 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
17290 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
17291 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
17292 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
17293 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
17294 || type == TYPE_IDIV || type == TYPE_LDIV
17295 || type == TYPE_INSERT_WORD)
17296 return true;
cbe26ab8
DN
17297 }
17298
17299 return false;
17300}
17301
17302/* The function returns true if INSN can be issued only from
a3c9585f 17303 the branch slot. */
cbe26ab8
DN
17304
17305static bool
17306is_branch_slot_insn (rtx insn)
17307{
17308 if (!insn || !INSN_P (insn)
17309 || GET_CODE (PATTERN (insn)) == USE
17310 || GET_CODE (PATTERN (insn)) == CLOBBER)
17311 return false;
17312
ec507f2d 17313 if (rs6000_sched_groups)
cbe26ab8
DN
17314 {
17315 enum attr_type type = get_attr_type (insn);
17316 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 17317 return true;
cbe26ab8
DN
17318 return false;
17319 }
17320
17321 return false;
17322}
79ae11c4 17323
44cd321e
PS
17324/* The function returns true if out_inst sets a value that is
17325 used in the address generation computation of in_insn */
17326static bool
17327set_to_load_agen (rtx out_insn, rtx in_insn)
17328{
17329 rtx out_set, in_set;
17330
17331 /* For performance reasons, only handle the simple case where
17332 both loads are a single_set. */
17333 out_set = single_set (out_insn);
17334 if (out_set)
17335 {
17336 in_set = single_set (in_insn);
17337 if (in_set)
17338 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
17339 }
17340
17341 return false;
17342}
17343
17344/* The function returns true if the target storage location of
17345 out_insn is adjacent to the target storage location of in_insn */
17346/* Return 1 if memory locations are adjacent. */
17347
17348static bool
17349adjacent_mem_locations (rtx insn1, rtx insn2)
17350{
17351
e3a0e200
PB
17352 rtx a = get_store_dest (PATTERN (insn1));
17353 rtx b = get_store_dest (PATTERN (insn2));
17354
44cd321e
PS
17355 if ((GET_CODE (XEXP (a, 0)) == REG
17356 || (GET_CODE (XEXP (a, 0)) == PLUS
17357 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
17358 && (GET_CODE (XEXP (b, 0)) == REG
17359 || (GET_CODE (XEXP (b, 0)) == PLUS
17360 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
17361 {
17362 HOST_WIDE_INT val0 = 0, val1 = 0;
17363 rtx reg0, reg1;
17364 int val_diff;
17365
17366 if (GET_CODE (XEXP (a, 0)) == PLUS)
17367 {
17368 reg0 = XEXP (XEXP (a, 0), 0);
17369 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
17370 }
17371 else
17372 reg0 = XEXP (a, 0);
17373
17374 if (GET_CODE (XEXP (b, 0)) == PLUS)
17375 {
17376 reg1 = XEXP (XEXP (b, 0), 0);
17377 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
17378 }
17379 else
17380 reg1 = XEXP (b, 0);
17381
17382 val_diff = val1 - val0;
17383
17384 return ((REGNO (reg0) == REGNO (reg1))
17385 && (val_diff == INTVAL (MEM_SIZE (a))
17386 || val_diff == -INTVAL (MEM_SIZE (b))));
17387 }
17388
17389 return false;
17390}
17391
a4f6c312 17392/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
17393 priority INSN_PRIORITY (INSN). Increase the priority to execute the
17394 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
17395 define this macro if you do not need to adjust the scheduling
17396 priorities of insns. */
bef84347 17397
c237e94a 17398static int
a2369ed3 17399rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 17400{
a4f6c312
SS
17401 /* On machines (like the 750) which have asymmetric integer units,
17402 where one integer unit can do multiply and divides and the other
17403 can't, reduce the priority of multiply/divide so it is scheduled
17404 before other integer operations. */
bef84347
VM
17405
17406#if 0
2c3c49de 17407 if (! INSN_P (insn))
bef84347
VM
17408 return priority;
17409
17410 if (GET_CODE (PATTERN (insn)) == USE)
17411 return priority;
17412
17413 switch (rs6000_cpu_attr) {
17414 case CPU_PPC750:
17415 switch (get_attr_type (insn))
17416 {
17417 default:
17418 break;
17419
17420 case TYPE_IMUL:
17421 case TYPE_IDIV:
3cb999d8
DE
17422 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
17423 priority, priority);
bef84347
VM
17424 if (priority >= 0 && priority < 0x01000000)
17425 priority >>= 3;
17426 break;
17427 }
17428 }
17429#endif
17430
44cd321e 17431 if (insn_must_be_first_in_group (insn)
79ae11c4 17432 && reload_completed
f676971a 17433 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
17434 && rs6000_sched_restricted_insns_priority)
17435 {
17436
c4ad648e
AM
17437 /* Prioritize insns that can be dispatched only in the first
17438 dispatch slot. */
79ae11c4 17439 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
17440 /* Attach highest priority to insn. This means that in
17441 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 17442 precede 'priority' (critical path) considerations. */
f676971a 17443 return current_sched_info->sched_max_insns_priority;
79ae11c4 17444 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 17445 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
17446 haifa-sched.c:ready_sort(), only 'priority' (critical path)
17447 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
17448 return (priority + 1);
17449 }
79ae11c4 17450
44cd321e
PS
17451 if (rs6000_cpu == PROCESSOR_POWER6
17452 && ((load_store_pendulum == -2 && is_load_insn (insn))
17453 || (load_store_pendulum == 2 && is_store_insn (insn))))
17454 /* Attach highest priority to insn if the scheduler has just issued two
17455 stores and this instruction is a load, or two loads and this instruction
17456 is a store. Power6 wants loads and stores scheduled alternately
17457 when possible */
17458 return current_sched_info->sched_max_insns_priority;
17459
bef84347
VM
17460 return priority;
17461}
17462
d296e02e
AP
17463/* Return true if the instruction is nonpipelined on the Cell. */
17464static bool
17465is_nonpipeline_insn (rtx insn)
17466{
17467 enum attr_type type;
17468 if (!insn || !INSN_P (insn)
17469 || GET_CODE (PATTERN (insn)) == USE
17470 || GET_CODE (PATTERN (insn)) == CLOBBER)
17471 return false;
17472
17473 type = get_attr_type (insn);
17474 if (type == TYPE_IMUL
17475 || type == TYPE_IMUL2
17476 || type == TYPE_IMUL3
17477 || type == TYPE_LMUL
17478 || type == TYPE_IDIV
17479 || type == TYPE_LDIV
17480 || type == TYPE_SDIV
17481 || type == TYPE_DDIV
17482 || type == TYPE_SSQRT
17483 || type == TYPE_DSQRT
17484 || type == TYPE_MFCR
17485 || type == TYPE_MFCRF
17486 || type == TYPE_MFJMPR)
17487 {
17488 return true;
17489 }
17490 return false;
17491}
17492
17493
a4f6c312
SS
17494/* Return how many instructions the machine can issue per cycle. */
17495
c237e94a 17496static int
863d938c 17497rs6000_issue_rate (void)
b6c9286a 17498{
3317bab1
DE
17499 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
17500 if (!reload_completed)
17501 return 1;
17502
b6c9286a 17503 switch (rs6000_cpu_attr) {
3cb999d8
DE
17504 case CPU_RIOS1: /* ? */
17505 case CPU_RS64A:
17506 case CPU_PPC601: /* ? */
ed947a96 17507 case CPU_PPC7450:
3cb999d8 17508 return 3;
b54cf83a 17509 case CPU_PPC440:
b6c9286a 17510 case CPU_PPC603:
bef84347 17511 case CPU_PPC750:
ed947a96 17512 case CPU_PPC7400:
be12c2b0 17513 case CPU_PPC8540:
d296e02e 17514 case CPU_CELL:
f676971a 17515 return 2;
3cb999d8 17516 case CPU_RIOS2:
b6c9286a 17517 case CPU_PPC604:
19684119 17518 case CPU_PPC604E:
b6c9286a 17519 case CPU_PPC620:
3cb999d8 17520 case CPU_PPC630:
b6c9286a 17521 return 4;
cbe26ab8 17522 case CPU_POWER4:
ec507f2d 17523 case CPU_POWER5:
44cd321e 17524 case CPU_POWER6:
cbe26ab8 17525 return 5;
b6c9286a
MM
17526 default:
17527 return 1;
17528 }
17529}
17530
be12c2b0
VM
17531/* Return how many instructions to look ahead for better insn
17532 scheduling. */
17533
17534static int
863d938c 17535rs6000_use_sched_lookahead (void)
be12c2b0
VM
17536{
17537 if (rs6000_cpu_attr == CPU_PPC8540)
17538 return 4;
d296e02e
AP
17539 if (rs6000_cpu_attr == CPU_CELL)
17540 return (reload_completed ? 8 : 0);
be12c2b0
VM
17541 return 0;
17542}
17543
d296e02e
AP
17544/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
17545static int
17546rs6000_use_sched_lookahead_guard (rtx insn)
17547{
17548 if (rs6000_cpu_attr != CPU_CELL)
17549 return 1;
17550
17551 if (insn == NULL_RTX || !INSN_P (insn))
17552 abort ();
982afe02 17553
d296e02e
AP
17554 if (!reload_completed
17555 || is_nonpipeline_insn (insn)
17556 || is_microcoded_insn (insn))
17557 return 0;
17558
17559 return 1;
17560}
17561
569fa502
DN
17562/* Determine is PAT refers to memory. */
17563
17564static bool
17565is_mem_ref (rtx pat)
17566{
17567 const char * fmt;
17568 int i, j;
17569 bool ret = false;
17570
17571 if (GET_CODE (pat) == MEM)
17572 return true;
17573
17574 /* Recursively process the pattern. */
17575 fmt = GET_RTX_FORMAT (GET_CODE (pat));
17576
17577 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
17578 {
17579 if (fmt[i] == 'e')
17580 ret |= is_mem_ref (XEXP (pat, i));
17581 else if (fmt[i] == 'E')
17582 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
17583 ret |= is_mem_ref (XVECEXP (pat, i, j));
17584 }
17585
17586 return ret;
17587}
17588
17589/* Determine if PAT is a PATTERN of a load insn. */
f676971a 17590
569fa502
DN
17591static bool
17592is_load_insn1 (rtx pat)
17593{
17594 if (!pat || pat == NULL_RTX)
17595 return false;
17596
17597 if (GET_CODE (pat) == SET)
17598 return is_mem_ref (SET_SRC (pat));
17599
17600 if (GET_CODE (pat) == PARALLEL)
17601 {
17602 int i;
17603
17604 for (i = 0; i < XVECLEN (pat, 0); i++)
17605 if (is_load_insn1 (XVECEXP (pat, 0, i)))
17606 return true;
17607 }
17608
17609 return false;
17610}
17611
17612/* Determine if INSN loads from memory. */
17613
17614static bool
17615is_load_insn (rtx insn)
17616{
17617 if (!insn || !INSN_P (insn))
17618 return false;
17619
17620 if (GET_CODE (insn) == CALL_INSN)
17621 return false;
17622
17623 return is_load_insn1 (PATTERN (insn));
17624}
17625
17626/* Determine if PAT is a PATTERN of a store insn. */
17627
17628static bool
17629is_store_insn1 (rtx pat)
17630{
17631 if (!pat || pat == NULL_RTX)
17632 return false;
17633
17634 if (GET_CODE (pat) == SET)
17635 return is_mem_ref (SET_DEST (pat));
17636
17637 if (GET_CODE (pat) == PARALLEL)
17638 {
17639 int i;
17640
17641 for (i = 0; i < XVECLEN (pat, 0); i++)
17642 if (is_store_insn1 (XVECEXP (pat, 0, i)))
17643 return true;
17644 }
17645
17646 return false;
17647}
17648
17649/* Determine if INSN stores to memory. */
17650
17651static bool
17652is_store_insn (rtx insn)
17653{
17654 if (!insn || !INSN_P (insn))
17655 return false;
17656
17657 return is_store_insn1 (PATTERN (insn));
17658}
17659
e3a0e200
PB
17660/* Return the dest of a store insn. */
17661
17662static rtx
17663get_store_dest (rtx pat)
17664{
17665 gcc_assert (is_store_insn1 (pat));
17666
17667 if (GET_CODE (pat) == SET)
17668 return SET_DEST (pat);
17669 else if (GET_CODE (pat) == PARALLEL)
17670 {
17671 int i;
17672
17673 for (i = 0; i < XVECLEN (pat, 0); i++)
17674 {
17675 rtx inner_pat = XVECEXP (pat, 0, i);
17676 if (GET_CODE (inner_pat) == SET
17677 && is_mem_ref (SET_DEST (inner_pat)))
17678 return inner_pat;
17679 }
17680 }
17681 /* We shouldn't get here, because we should have either a simple
17682 store insn or a store with update which are covered above. */
17683 gcc_unreachable();
17684}
17685
569fa502
DN
17686/* Returns whether the dependence between INSN and NEXT is considered
17687 costly by the given target. */
17688
17689static bool
b198261f 17690rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 17691{
b198261f
MK
17692 rtx insn;
17693 rtx next;
17694
aabcd309 17695 /* If the flag is not enabled - no dependence is considered costly;
f676971a 17696 allow all dependent insns in the same group.
569fa502
DN
17697 This is the most aggressive option. */
17698 if (rs6000_sched_costly_dep == no_dep_costly)
17699 return false;
17700
f676971a 17701 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
17702 do not allow dependent instructions in the same group.
17703 This is the most conservative option. */
17704 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 17705 return true;
569fa502 17706
b198261f
MK
17707 insn = DEP_PRO (dep);
17708 next = DEP_CON (dep);
17709
f676971a
EC
17710 if (rs6000_sched_costly_dep == store_to_load_dep_costly
17711 && is_load_insn (next)
569fa502
DN
17712 && is_store_insn (insn))
17713 /* Prevent load after store in the same group. */
17714 return true;
17715
17716 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 17717 && is_load_insn (next)
569fa502 17718 && is_store_insn (insn)
b198261f 17719 && DEP_KIND (dep) == REG_DEP_TRUE)
c4ad648e
AM
17720 /* Prevent load after store in the same group if it is a true
17721 dependence. */
569fa502 17722 return true;
f676971a
EC
17723
17724 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
17725 and will not be scheduled in the same group. */
17726 if (rs6000_sched_costly_dep <= max_dep_latency
17727 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
17728 return true;
17729
17730 return false;
17731}
17732
f676971a 17733/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
17734 skipping any "non-active" insns - insns that will not actually occupy
17735 an issue slot. Return NULL_RTX if such an insn is not found. */
17736
17737static rtx
17738get_next_active_insn (rtx insn, rtx tail)
17739{
f489aff8 17740 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
17741 return NULL_RTX;
17742
f489aff8 17743 while (1)
cbe26ab8 17744 {
f489aff8
AM
17745 insn = NEXT_INSN (insn);
17746 if (insn == NULL_RTX || insn == tail)
17747 return NULL_RTX;
cbe26ab8 17748
f489aff8
AM
17749 if (CALL_P (insn)
17750 || JUMP_P (insn)
17751 || (NONJUMP_INSN_P (insn)
17752 && GET_CODE (PATTERN (insn)) != USE
17753 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 17754 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
17755 break;
17756 }
17757 return insn;
cbe26ab8
DN
17758}
17759
44cd321e
PS
17760/* We are about to begin issuing insns for this clock cycle. */
17761
17762static int
17763rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
17764 rtx *ready ATTRIBUTE_UNUSED,
17765 int *pn_ready ATTRIBUTE_UNUSED,
17766 int clock_var ATTRIBUTE_UNUSED)
17767{
d296e02e
AP
17768 int n_ready = *pn_ready;
17769
44cd321e
PS
17770 if (sched_verbose)
17771 fprintf (dump, "// rs6000_sched_reorder :\n");
17772
d296e02e
AP
17773 /* Reorder the ready list, if the second to last ready insn
17774 is a nonepipeline insn. */
17775 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
17776 {
17777 if (is_nonpipeline_insn (ready[n_ready - 1])
17778 && (recog_memoized (ready[n_ready - 2]) > 0))
17779 /* Simply swap first two insns. */
17780 {
17781 rtx tmp = ready[n_ready - 1];
17782 ready[n_ready - 1] = ready[n_ready - 2];
17783 ready[n_ready - 2] = tmp;
17784 }
17785 }
17786
44cd321e
PS
17787 if (rs6000_cpu == PROCESSOR_POWER6)
17788 load_store_pendulum = 0;
17789
17790 return rs6000_issue_rate ();
17791}
17792
17793/* Like rs6000_sched_reorder, but called after issuing each insn. */
17794
17795static int
17796rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
17797 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
17798{
17799 if (sched_verbose)
17800 fprintf (dump, "// rs6000_sched_reorder2 :\n");
17801
17802 /* For Power6, we need to handle some special cases to try and keep the
17803 store queue from overflowing and triggering expensive flushes.
17804
17805 This code monitors how load and store instructions are being issued
17806 and skews the ready list one way or the other to increase the likelihood
17807 that a desired instruction is issued at the proper time.
17808
17809 A couple of things are done. First, we maintain a "load_store_pendulum"
17810 to track the current state of load/store issue.
17811
17812 - If the pendulum is at zero, then no loads or stores have been
17813 issued in the current cycle so we do nothing.
17814
17815 - If the pendulum is 1, then a single load has been issued in this
17816 cycle and we attempt to locate another load in the ready list to
17817 issue with it.
17818
2f8e468b 17819 - If the pendulum is -2, then two stores have already been
44cd321e
PS
17820 issued in this cycle, so we increase the priority of the first load
17821 in the ready list to increase it's likelihood of being chosen first
17822 in the next cycle.
17823
17824 - If the pendulum is -1, then a single store has been issued in this
17825 cycle and we attempt to locate another store in the ready list to
17826 issue with it, preferring a store to an adjacent memory location to
17827 facilitate store pairing in the store queue.
17828
17829 - If the pendulum is 2, then two loads have already been
17830 issued in this cycle, so we increase the priority of the first store
17831 in the ready list to increase it's likelihood of being chosen first
17832 in the next cycle.
17833
17834 - If the pendulum < -2 or > 2, then do nothing.
17835
17836 Note: This code covers the most common scenarios. There exist non
17837 load/store instructions which make use of the LSU and which
17838 would need to be accounted for to strictly model the behavior
17839 of the machine. Those instructions are currently unaccounted
17840 for to help minimize compile time overhead of this code.
17841 */
17842 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
17843 {
17844 int pos;
17845 int i;
17846 rtx tmp;
17847
17848 if (is_store_insn (last_scheduled_insn))
17849 /* Issuing a store, swing the load_store_pendulum to the left */
17850 load_store_pendulum--;
17851 else if (is_load_insn (last_scheduled_insn))
17852 /* Issuing a load, swing the load_store_pendulum to the right */
17853 load_store_pendulum++;
17854 else
17855 return cached_can_issue_more;
17856
17857 /* If the pendulum is balanced, or there is only one instruction on
17858 the ready list, then all is well, so return. */
17859 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
17860 return cached_can_issue_more;
17861
17862 if (load_store_pendulum == 1)
17863 {
17864 /* A load has been issued in this cycle. Scan the ready list
17865 for another load to issue with it */
17866 pos = *pn_ready-1;
17867
17868 while (pos >= 0)
17869 {
17870 if (is_load_insn (ready[pos]))
17871 {
17872 /* Found a load. Move it to the head of the ready list,
17873 and adjust it's priority so that it is more likely to
17874 stay there */
17875 tmp = ready[pos];
17876 for (i=pos; i<*pn_ready-1; i++)
17877 ready[i] = ready[i + 1];
17878 ready[*pn_ready-1] = tmp;
17879 if INSN_PRIORITY_KNOWN (tmp)
17880 INSN_PRIORITY (tmp)++;
17881 break;
17882 }
17883 pos--;
17884 }
17885 }
17886 else if (load_store_pendulum == -2)
17887 {
17888 /* Two stores have been issued in this cycle. Increase the
17889 priority of the first load in the ready list to favor it for
17890 issuing in the next cycle. */
17891 pos = *pn_ready-1;
17892
17893 while (pos >= 0)
17894 {
17895 if (is_load_insn (ready[pos])
17896 && INSN_PRIORITY_KNOWN (ready[pos]))
17897 {
17898 INSN_PRIORITY (ready[pos])++;
17899
17900 /* Adjust the pendulum to account for the fact that a load
17901 was found and increased in priority. This is to prevent
17902 increasing the priority of multiple loads */
17903 load_store_pendulum--;
17904
17905 break;
17906 }
17907 pos--;
17908 }
17909 }
17910 else if (load_store_pendulum == -1)
17911 {
17912 /* A store has been issued in this cycle. Scan the ready list for
17913 another store to issue with it, preferring a store to an adjacent
17914 memory location */
17915 int first_store_pos = -1;
17916
17917 pos = *pn_ready-1;
17918
17919 while (pos >= 0)
17920 {
17921 if (is_store_insn (ready[pos]))
17922 {
17923 /* Maintain the index of the first store found on the
17924 list */
17925 if (first_store_pos == -1)
17926 first_store_pos = pos;
17927
17928 if (is_store_insn (last_scheduled_insn)
17929 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
17930 {
17931 /* Found an adjacent store. Move it to the head of the
17932 ready list, and adjust it's priority so that it is
17933 more likely to stay there */
17934 tmp = ready[pos];
17935 for (i=pos; i<*pn_ready-1; i++)
17936 ready[i] = ready[i + 1];
17937 ready[*pn_ready-1] = tmp;
17938 if INSN_PRIORITY_KNOWN (tmp)
17939 INSN_PRIORITY (tmp)++;
17940 first_store_pos = -1;
17941
17942 break;
17943 };
17944 }
17945 pos--;
17946 }
17947
17948 if (first_store_pos >= 0)
17949 {
17950 /* An adjacent store wasn't found, but a non-adjacent store was,
17951 so move the non-adjacent store to the front of the ready
17952 list, and adjust its priority so that it is more likely to
17953 stay there. */
17954 tmp = ready[first_store_pos];
17955 for (i=first_store_pos; i<*pn_ready-1; i++)
17956 ready[i] = ready[i + 1];
17957 ready[*pn_ready-1] = tmp;
17958 if INSN_PRIORITY_KNOWN (tmp)
17959 INSN_PRIORITY (tmp)++;
17960 }
17961 }
17962 else if (load_store_pendulum == 2)
17963 {
17964 /* Two loads have been issued in this cycle. Increase the priority
17965 of the first store in the ready list to favor it for issuing in
17966 the next cycle. */
17967 pos = *pn_ready-1;
17968
17969 while (pos >= 0)
17970 {
17971 if (is_store_insn (ready[pos])
17972 && INSN_PRIORITY_KNOWN (ready[pos]))
17973 {
17974 INSN_PRIORITY (ready[pos])++;
17975
17976 /* Adjust the pendulum to account for the fact that a store
17977 was found and increased in priority. This is to prevent
17978 increasing the priority of multiple stores */
17979 load_store_pendulum++;
17980
17981 break;
17982 }
17983 pos--;
17984 }
17985 }
17986 }
17987
17988 return cached_can_issue_more;
17989}
17990
839a4992 17991/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
17992 of group WHICH_GROUP.
17993
17994 If WHICH_GROUP == current_group, this function will return true if INSN
17995 causes the termination of the current group (i.e, the dispatch group to
17996 which INSN belongs). This means that INSN will be the last insn in the
17997 group it belongs to.
17998
17999 If WHICH_GROUP == previous_group, this function will return true if INSN
18000 causes the termination of the previous group (i.e, the dispatch group that
18001 precedes the group to which INSN belongs). This means that INSN will be
18002 the first insn in the group it belongs to). */
18003
18004static bool
18005insn_terminates_group_p (rtx insn, enum group_termination which_group)
18006{
44cd321e 18007 bool first, last;
cbe26ab8
DN
18008
18009 if (! insn)
18010 return false;
569fa502 18011
44cd321e
PS
18012 first = insn_must_be_first_in_group (insn);
18013 last = insn_must_be_last_in_group (insn);
cbe26ab8 18014
44cd321e 18015 if (first && last)
cbe26ab8
DN
18016 return true;
18017
18018 if (which_group == current_group)
44cd321e 18019 return last;
cbe26ab8 18020 else if (which_group == previous_group)
44cd321e
PS
18021 return first;
18022
18023 return false;
18024}
18025
18026
18027static bool
18028insn_must_be_first_in_group (rtx insn)
18029{
18030 enum attr_type type;
18031
18032 if (!insn
18033 || insn == NULL_RTX
18034 || GET_CODE (insn) == NOTE
18035 || GET_CODE (PATTERN (insn)) == USE
18036 || GET_CODE (PATTERN (insn)) == CLOBBER)
18037 return false;
18038
18039 switch (rs6000_cpu)
cbe26ab8 18040 {
44cd321e
PS
18041 case PROCESSOR_POWER5:
18042 if (is_cracked_insn (insn))
18043 return true;
18044 case PROCESSOR_POWER4:
18045 if (is_microcoded_insn (insn))
18046 return true;
18047
18048 if (!rs6000_sched_groups)
18049 return false;
18050
18051 type = get_attr_type (insn);
18052
18053 switch (type)
18054 {
18055 case TYPE_MFCR:
18056 case TYPE_MFCRF:
18057 case TYPE_MTCR:
18058 case TYPE_DELAYED_CR:
18059 case TYPE_CR_LOGICAL:
18060 case TYPE_MTJMPR:
18061 case TYPE_MFJMPR:
18062 case TYPE_IDIV:
18063 case TYPE_LDIV:
18064 case TYPE_LOAD_L:
18065 case TYPE_STORE_C:
18066 case TYPE_ISYNC:
18067 case TYPE_SYNC:
18068 return true;
18069 default:
18070 break;
18071 }
18072 break;
18073 case PROCESSOR_POWER6:
18074 type = get_attr_type (insn);
18075
18076 switch (type)
18077 {
18078 case TYPE_INSERT_DWORD:
18079 case TYPE_EXTS:
18080 case TYPE_CNTLZ:
18081 case TYPE_SHIFT:
18082 case TYPE_VAR_SHIFT_ROTATE:
18083 case TYPE_TRAP:
18084 case TYPE_IMUL:
18085 case TYPE_IMUL2:
18086 case TYPE_IMUL3:
18087 case TYPE_LMUL:
18088 case TYPE_IDIV:
18089 case TYPE_INSERT_WORD:
18090 case TYPE_DELAYED_COMPARE:
18091 case TYPE_IMUL_COMPARE:
18092 case TYPE_LMUL_COMPARE:
18093 case TYPE_FPCOMPARE:
18094 case TYPE_MFCR:
18095 case TYPE_MTCR:
18096 case TYPE_MFJMPR:
18097 case TYPE_MTJMPR:
18098 case TYPE_ISYNC:
18099 case TYPE_SYNC:
18100 case TYPE_LOAD_L:
18101 case TYPE_STORE_C:
18102 case TYPE_LOAD_U:
18103 case TYPE_LOAD_UX:
18104 case TYPE_LOAD_EXT_UX:
18105 case TYPE_STORE_U:
18106 case TYPE_STORE_UX:
18107 case TYPE_FPLOAD_U:
18108 case TYPE_FPLOAD_UX:
18109 case TYPE_FPSTORE_U:
18110 case TYPE_FPSTORE_UX:
18111 return true;
18112 default:
18113 break;
18114 }
18115 break;
18116 default:
18117 break;
18118 }
18119
18120 return false;
18121}
18122
18123static bool
18124insn_must_be_last_in_group (rtx insn)
18125{
18126 enum attr_type type;
18127
18128 if (!insn
18129 || insn == NULL_RTX
18130 || GET_CODE (insn) == NOTE
18131 || GET_CODE (PATTERN (insn)) == USE
18132 || GET_CODE (PATTERN (insn)) == CLOBBER)
18133 return false;
18134
18135 switch (rs6000_cpu) {
18136 case PROCESSOR_POWER4:
18137 case PROCESSOR_POWER5:
18138 if (is_microcoded_insn (insn))
18139 return true;
18140
18141 if (is_branch_slot_insn (insn))
18142 return true;
18143
18144 break;
18145 case PROCESSOR_POWER6:
18146 type = get_attr_type (insn);
18147
18148 switch (type)
18149 {
18150 case TYPE_EXTS:
18151 case TYPE_CNTLZ:
18152 case TYPE_SHIFT:
18153 case TYPE_VAR_SHIFT_ROTATE:
18154 case TYPE_TRAP:
18155 case TYPE_IMUL:
18156 case TYPE_IMUL2:
18157 case TYPE_IMUL3:
18158 case TYPE_LMUL:
18159 case TYPE_IDIV:
18160 case TYPE_DELAYED_COMPARE:
18161 case TYPE_IMUL_COMPARE:
18162 case TYPE_LMUL_COMPARE:
18163 case TYPE_FPCOMPARE:
18164 case TYPE_MFCR:
18165 case TYPE_MTCR:
18166 case TYPE_MFJMPR:
18167 case TYPE_MTJMPR:
18168 case TYPE_ISYNC:
18169 case TYPE_SYNC:
18170 case TYPE_LOAD_L:
18171 case TYPE_STORE_C:
18172 return true;
18173 default:
18174 break;
cbe26ab8 18175 }
44cd321e
PS
18176 break;
18177 default:
18178 break;
18179 }
cbe26ab8
DN
18180
18181 return false;
18182}
18183
839a4992 18184/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18185 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18186
18187static bool
18188is_costly_group (rtx *group_insns, rtx next_insn)
18189{
18190 int i;
cbe26ab8
DN
18191 int issue_rate = rs6000_issue_rate ();
18192
18193 for (i = 0; i < issue_rate; i++)
18194 {
b198261f 18195 dep_link_t link;
cbe26ab8 18196 rtx insn = group_insns[i];
b198261f 18197
cbe26ab8 18198 if (!insn)
c4ad648e 18199 continue;
b198261f
MK
18200
18201 FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
c4ad648e 18202 {
b198261f
MK
18203 dep_t dep = DEP_LINK_DEP (link);
18204 rtx next = DEP_CON (dep);
18205
18206 if (next == next_insn
18207 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
18208 return true;
c4ad648e 18209 }
cbe26ab8
DN
18210 }
18211
18212 return false;
18213}
18214
f676971a 18215/* Utility of the function redefine_groups.
cbe26ab8
DN
18216 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
18217 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
18218 to keep it "far" (in a separate group) from GROUP_INSNS, following
18219 one of the following schemes, depending on the value of the flag
18220 -minsert_sched_nops = X:
18221 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 18222 in order to force NEXT_INSN into a separate group.
f676971a
EC
18223 (2) X < sched_finish_regroup_exact: insert exactly X nops.
18224 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
18225 insertion (has a group just ended, how many vacant issue slots remain in the
18226 last group, and how many dispatch groups were encountered so far). */
18227
f676971a 18228static int
c4ad648e
AM
18229force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
18230 rtx next_insn, bool *group_end, int can_issue_more,
18231 int *group_count)
cbe26ab8
DN
18232{
18233 rtx nop;
18234 bool force;
18235 int issue_rate = rs6000_issue_rate ();
18236 bool end = *group_end;
18237 int i;
18238
18239 if (next_insn == NULL_RTX)
18240 return can_issue_more;
18241
18242 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
18243 return can_issue_more;
18244
18245 force = is_costly_group (group_insns, next_insn);
18246 if (!force)
18247 return can_issue_more;
18248
18249 if (sched_verbose > 6)
18250 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
c4ad648e 18251 *group_count ,can_issue_more);
cbe26ab8
DN
18252
18253 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
18254 {
18255 if (*group_end)
c4ad648e 18256 can_issue_more = 0;
cbe26ab8
DN
18257
18258 /* Since only a branch can be issued in the last issue_slot, it is
18259 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
18260 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
18261 in this case the last nop will start a new group and the branch
18262 will be forced to the new group. */
cbe26ab8 18263 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 18264 can_issue_more--;
cbe26ab8
DN
18265
18266 while (can_issue_more > 0)
c4ad648e 18267 {
9390387d 18268 nop = gen_nop ();
c4ad648e
AM
18269 emit_insn_before (nop, next_insn);
18270 can_issue_more--;
18271 }
cbe26ab8
DN
18272
18273 *group_end = true;
18274 return 0;
f676971a 18275 }
cbe26ab8
DN
18276
18277 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
18278 {
18279 int n_nops = rs6000_sched_insert_nops;
18280
f676971a 18281 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 18282 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 18283 if (can_issue_more == 0)
c4ad648e 18284 can_issue_more = issue_rate;
cbe26ab8
DN
18285 can_issue_more--;
18286 if (can_issue_more == 0)
c4ad648e
AM
18287 {
18288 can_issue_more = issue_rate - 1;
18289 (*group_count)++;
18290 end = true;
18291 for (i = 0; i < issue_rate; i++)
18292 {
18293 group_insns[i] = 0;
18294 }
18295 }
cbe26ab8
DN
18296
18297 while (n_nops > 0)
c4ad648e
AM
18298 {
18299 nop = gen_nop ();
18300 emit_insn_before (nop, next_insn);
18301 if (can_issue_more == issue_rate - 1) /* new group begins */
18302 end = false;
18303 can_issue_more--;
18304 if (can_issue_more == 0)
18305 {
18306 can_issue_more = issue_rate - 1;
18307 (*group_count)++;
18308 end = true;
18309 for (i = 0; i < issue_rate; i++)
18310 {
18311 group_insns[i] = 0;
18312 }
18313 }
18314 n_nops--;
18315 }
cbe26ab8
DN
18316
18317 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 18318 can_issue_more++;
cbe26ab8 18319
c4ad648e
AM
18320 /* Is next_insn going to start a new group? */
18321 *group_end
18322 = (end
cbe26ab8
DN
18323 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18324 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18325 || (can_issue_more < issue_rate &&
c4ad648e 18326 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18327 if (*group_end && end)
c4ad648e 18328 (*group_count)--;
cbe26ab8
DN
18329
18330 if (sched_verbose > 6)
c4ad648e
AM
18331 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
18332 *group_count, can_issue_more);
f676971a
EC
18333 return can_issue_more;
18334 }
cbe26ab8
DN
18335
18336 return can_issue_more;
18337}
18338
18339/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 18340 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
18341 form in practice. It tries to achieve this synchronization by forcing the
18342 estimated processor grouping on the compiler (as opposed to the function
18343 'pad_goups' which tries to force the scheduler's grouping on the processor).
18344
18345 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
18346 examines the (estimated) dispatch groups that will be formed by the processor
18347 dispatcher. It marks these group boundaries to reflect the estimated
18348 processor grouping, overriding the grouping that the scheduler had marked.
18349 Depending on the value of the flag '-minsert-sched-nops' this function can
18350 force certain insns into separate groups or force a certain distance between
18351 them by inserting nops, for example, if there exists a "costly dependence"
18352 between the insns.
18353
18354 The function estimates the group boundaries that the processor will form as
0fa2e4df 18355 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
18356 each insn. A subsequent insn will start a new group if one of the following
18357 4 cases applies:
18358 - no more vacant issue slots remain in the current dispatch group.
18359 - only the last issue slot, which is the branch slot, is vacant, but the next
18360 insn is not a branch.
18361 - only the last 2 or less issue slots, including the branch slot, are vacant,
18362 which means that a cracked insn (which occupies two issue slots) can't be
18363 issued in this group.
f676971a 18364 - less than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
18365 start a new group. */
18366
18367static int
18368redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18369{
18370 rtx insn, next_insn;
18371 int issue_rate;
18372 int can_issue_more;
18373 int slot, i;
18374 bool group_end;
18375 int group_count = 0;
18376 rtx *group_insns;
18377
18378 /* Initialize. */
18379 issue_rate = rs6000_issue_rate ();
18380 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 18381 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
18382 {
18383 group_insns[i] = 0;
18384 }
18385 can_issue_more = issue_rate;
18386 slot = 0;
18387 insn = get_next_active_insn (prev_head_insn, tail);
18388 group_end = false;
18389
18390 while (insn != NULL_RTX)
18391 {
18392 slot = (issue_rate - can_issue_more);
18393 group_insns[slot] = insn;
18394 can_issue_more =
c4ad648e 18395 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 18396 if (insn_terminates_group_p (insn, current_group))
c4ad648e 18397 can_issue_more = 0;
cbe26ab8
DN
18398
18399 next_insn = get_next_active_insn (insn, tail);
18400 if (next_insn == NULL_RTX)
c4ad648e 18401 return group_count + 1;
cbe26ab8 18402
c4ad648e
AM
18403 /* Is next_insn going to start a new group? */
18404 group_end
18405 = (can_issue_more == 0
18406 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18407 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18408 || (can_issue_more < issue_rate &&
18409 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18410
f676971a 18411 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
18412 next_insn, &group_end, can_issue_more,
18413 &group_count);
cbe26ab8
DN
18414
18415 if (group_end)
c4ad648e
AM
18416 {
18417 group_count++;
18418 can_issue_more = 0;
18419 for (i = 0; i < issue_rate; i++)
18420 {
18421 group_insns[i] = 0;
18422 }
18423 }
cbe26ab8
DN
18424
18425 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 18426 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 18427 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 18428 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
18429
18430 insn = next_insn;
18431 if (can_issue_more == 0)
c4ad648e
AM
18432 can_issue_more = issue_rate;
18433 } /* while */
cbe26ab8
DN
18434
18435 return group_count;
18436}
18437
18438/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
18439 dispatch group boundaries that the scheduler had marked. Pad with nops
18440 any dispatch groups which have vacant issue slots, in order to force the
18441 scheduler's grouping on the processor dispatcher. The function
18442 returns the number of dispatch groups found. */
18443
18444static int
18445pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18446{
18447 rtx insn, next_insn;
18448 rtx nop;
18449 int issue_rate;
18450 int can_issue_more;
18451 int group_end;
18452 int group_count = 0;
18453
18454 /* Initialize issue_rate. */
18455 issue_rate = rs6000_issue_rate ();
18456 can_issue_more = issue_rate;
18457
18458 insn = get_next_active_insn (prev_head_insn, tail);
18459 next_insn = get_next_active_insn (insn, tail);
18460
18461 while (insn != NULL_RTX)
18462 {
18463 can_issue_more =
18464 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
18465
18466 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
18467
18468 if (next_insn == NULL_RTX)
c4ad648e 18469 break;
cbe26ab8
DN
18470
18471 if (group_end)
c4ad648e
AM
18472 {
18473 /* If the scheduler had marked group termination at this location
18474 (between insn and next_indn), and neither insn nor next_insn will
18475 force group termination, pad the group with nops to force group
18476 termination. */
18477 if (can_issue_more
18478 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
18479 && !insn_terminates_group_p (insn, current_group)
18480 && !insn_terminates_group_p (next_insn, previous_group))
18481 {
9390387d 18482 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
18483 can_issue_more--;
18484
18485 while (can_issue_more)
18486 {
18487 nop = gen_nop ();
18488 emit_insn_before (nop, next_insn);
18489 can_issue_more--;
18490 }
18491 }
18492
18493 can_issue_more = issue_rate;
18494 group_count++;
18495 }
cbe26ab8
DN
18496
18497 insn = next_insn;
18498 next_insn = get_next_active_insn (insn, tail);
18499 }
18500
18501 return group_count;
18502}
18503
44cd321e
PS
18504/* We're beginning a new block. Initialize data structures as necessary. */
18505
18506static void
18507rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
18508 int sched_verbose ATTRIBUTE_UNUSED,
18509 int max_ready ATTRIBUTE_UNUSED)
982afe02 18510{
44cd321e
PS
18511 last_scheduled_insn = NULL_RTX;
18512 load_store_pendulum = 0;
18513}
18514
cbe26ab8
DN
18515/* The following function is called at the end of scheduling BB.
18516 After reload, it inserts nops at insn group bundling. */
18517
18518static void
38f391a5 18519rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
18520{
18521 int n_groups;
18522
18523 if (sched_verbose)
18524 fprintf (dump, "=== Finishing schedule.\n");
18525
ec507f2d 18526 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
18527 {
18528 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 18529 return;
cbe26ab8
DN
18530
18531 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
18532 n_groups = pad_groups (dump, sched_verbose,
18533 current_sched_info->prev_head,
18534 current_sched_info->next_tail);
cbe26ab8 18535 else
c4ad648e
AM
18536 n_groups = redefine_groups (dump, sched_verbose,
18537 current_sched_info->prev_head,
18538 current_sched_info->next_tail);
cbe26ab8
DN
18539
18540 if (sched_verbose >= 6)
18541 {
18542 fprintf (dump, "ngroups = %d\n", n_groups);
18543 print_rtl (dump, current_sched_info->prev_head);
18544 fprintf (dump, "Done finish_sched\n");
18545 }
18546 }
18547}
b6c9286a 18548\f
b6c9286a
MM
18549/* Length in units of the trampoline for entering a nested function. */
18550
18551int
863d938c 18552rs6000_trampoline_size (void)
b6c9286a
MM
18553{
18554 int ret = 0;
18555
18556 switch (DEFAULT_ABI)
18557 {
18558 default:
37409796 18559 gcc_unreachable ();
b6c9286a
MM
18560
18561 case ABI_AIX:
8f802bfb 18562 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
18563 break;
18564
4dabc42d 18565 case ABI_DARWIN:
b6c9286a 18566 case ABI_V4:
03a7e1a5 18567 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 18568 break;
b6c9286a
MM
18569 }
18570
18571 return ret;
18572}
18573
18574/* Emit RTL insns to initialize the variable parts of a trampoline.
18575 FNADDR is an RTX for the address of the function's pure code.
18576 CXT is an RTX for the static chain value for the function. */
18577
18578void
a2369ed3 18579rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 18580{
8bd04c56 18581 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 18582 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
18583
18584 switch (DEFAULT_ABI)
18585 {
18586 default:
37409796 18587 gcc_unreachable ();
b6c9286a 18588
8bd04c56 18589/* Macros to shorten the code expansions below. */
9613eaff 18590#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 18591#define MEM_PLUS(addr,offset) \
9613eaff 18592 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 18593
b6c9286a
MM
18594 /* Under AIX, just build the 3 word function descriptor */
18595 case ABI_AIX:
8bd04c56 18596 {
9613eaff
SH
18597 rtx fn_reg = gen_reg_rtx (Pmode);
18598 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 18599 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 18600 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
18601 emit_move_insn (MEM_DEREF (addr), fn_reg);
18602 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
18603 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
18604 }
b6c9286a
MM
18605 break;
18606
4dabc42d
TC
18607 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
18608 case ABI_DARWIN:
b6c9286a 18609 case ABI_V4:
9613eaff 18610 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 18611 FALSE, VOIDmode, 4,
9613eaff 18612 addr, Pmode,
eaf1bcf1 18613 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
18614 fnaddr, Pmode,
18615 ctx_reg, Pmode);
b6c9286a 18616 break;
b6c9286a
MM
18617 }
18618
18619 return;
18620}
7509c759
MM
18621
18622\f
91d231cb 18623/* Table of valid machine attributes. */
a4f6c312 18624
91d231cb 18625const struct attribute_spec rs6000_attribute_table[] =
7509c759 18626{
91d231cb 18627 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 18628 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
18629 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
18630 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
18631 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
18632 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
18633#ifdef SUBTARGET_ATTRIBUTE_TABLE
18634 SUBTARGET_ATTRIBUTE_TABLE,
18635#endif
a5c76ee6 18636 { NULL, 0, 0, false, false, false, NULL }
91d231cb 18637};
7509c759 18638
8bb418a3
ZL
18639/* Handle the "altivec" attribute. The attribute may have
18640 arguments as follows:
f676971a 18641
8bb418a3
ZL
18642 __attribute__((altivec(vector__)))
18643 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
18644 __attribute__((altivec(bool__))) (always followed by 'unsigned')
18645
18646 and may appear more than once (e.g., 'vector bool char') in a
18647 given declaration. */
18648
18649static tree
f90ac3f0
UP
18650rs6000_handle_altivec_attribute (tree *node,
18651 tree name ATTRIBUTE_UNUSED,
18652 tree args,
8bb418a3
ZL
18653 int flags ATTRIBUTE_UNUSED,
18654 bool *no_add_attrs)
18655{
18656 tree type = *node, result = NULL_TREE;
18657 enum machine_mode mode;
18658 int unsigned_p;
18659 char altivec_type
18660 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
18661 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
18662 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 18663 : '?');
8bb418a3
ZL
18664
18665 while (POINTER_TYPE_P (type)
18666 || TREE_CODE (type) == FUNCTION_TYPE
18667 || TREE_CODE (type) == METHOD_TYPE
18668 || TREE_CODE (type) == ARRAY_TYPE)
18669 type = TREE_TYPE (type);
18670
18671 mode = TYPE_MODE (type);
18672
f90ac3f0
UP
18673 /* Check for invalid AltiVec type qualifiers. */
18674 if (type == long_unsigned_type_node || type == long_integer_type_node)
18675 {
18676 if (TARGET_64BIT)
18677 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
18678 else if (rs6000_warn_altivec_long)
d4ee4d25 18679 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
18680 }
18681 else if (type == long_long_unsigned_type_node
18682 || type == long_long_integer_type_node)
18683 error ("use of %<long long%> in AltiVec types is invalid");
18684 else if (type == double_type_node)
18685 error ("use of %<double%> in AltiVec types is invalid");
18686 else if (type == long_double_type_node)
18687 error ("use of %<long double%> in AltiVec types is invalid");
18688 else if (type == boolean_type_node)
18689 error ("use of boolean types in AltiVec types is invalid");
18690 else if (TREE_CODE (type) == COMPLEX_TYPE)
18691 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
18692 else if (DECIMAL_FLOAT_MODE_P (mode))
18693 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
18694
18695 switch (altivec_type)
18696 {
18697 case 'v':
8df83eae 18698 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
18699 switch (mode)
18700 {
c4ad648e
AM
18701 case SImode:
18702 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
18703 break;
18704 case HImode:
18705 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
18706 break;
18707 case QImode:
18708 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
18709 break;
18710 case SFmode: result = V4SF_type_node; break;
18711 /* If the user says 'vector int bool', we may be handed the 'bool'
18712 attribute _before_ the 'vector' attribute, and so select the
18713 proper type in the 'b' case below. */
18714 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
18715 result = type;
18716 default: break;
8bb418a3
ZL
18717 }
18718 break;
18719 case 'b':
18720 switch (mode)
18721 {
c4ad648e
AM
18722 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
18723 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
18724 case QImode: case V16QImode: result = bool_V16QI_type_node;
18725 default: break;
8bb418a3
ZL
18726 }
18727 break;
18728 case 'p':
18729 switch (mode)
18730 {
c4ad648e
AM
18731 case V8HImode: result = pixel_V8HI_type_node;
18732 default: break;
8bb418a3
ZL
18733 }
18734 default: break;
18735 }
18736
7958a2a6
FJ
18737 if (result && result != type && TYPE_READONLY (type))
18738 result = build_qualified_type (result, TYPE_QUAL_CONST);
18739
8bb418a3
ZL
18740 *no_add_attrs = true; /* No need to hang on to the attribute. */
18741
f90ac3f0 18742 if (result)
8bb418a3
ZL
18743 *node = reconstruct_complex_type (*node, result);
18744
18745 return NULL_TREE;
18746}
18747
f18eca82
ZL
18748/* AltiVec defines four built-in scalar types that serve as vector
18749 elements; we must teach the compiler how to mangle them. */
18750
18751static const char *
18752rs6000_mangle_fundamental_type (tree type)
18753{
18754 if (type == bool_char_type_node) return "U6__boolc";
18755 if (type == bool_short_type_node) return "U6__bools";
18756 if (type == pixel_type_node) return "u7__pixel";
18757 if (type == bool_int_type_node) return "U6__booli";
18758
337bde91
DE
18759 /* Mangle IBM extended float long double as `g' (__float128) on
18760 powerpc*-linux where long-double-64 previously was the default. */
18761 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
18762 && TARGET_ELF
18763 && TARGET_LONG_DOUBLE_128
18764 && !TARGET_IEEEQUAD)
18765 return "g";
18766
f18eca82
ZL
18767 /* For all other types, use normal C++ mangling. */
18768 return NULL;
18769}
18770
a5c76ee6
ZW
18771/* Handle a "longcall" or "shortcall" attribute; arguments as in
18772 struct attribute_spec.handler. */
a4f6c312 18773
91d231cb 18774static tree
f676971a
EC
18775rs6000_handle_longcall_attribute (tree *node, tree name,
18776 tree args ATTRIBUTE_UNUSED,
18777 int flags ATTRIBUTE_UNUSED,
a2369ed3 18778 bool *no_add_attrs)
91d231cb
JM
18779{
18780 if (TREE_CODE (*node) != FUNCTION_TYPE
18781 && TREE_CODE (*node) != FIELD_DECL
18782 && TREE_CODE (*node) != TYPE_DECL)
18783 {
5c498b10 18784 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
18785 IDENTIFIER_POINTER (name));
18786 *no_add_attrs = true;
18787 }
6a4cee5f 18788
91d231cb 18789 return NULL_TREE;
7509c759
MM
18790}
18791
a5c76ee6
ZW
18792/* Set longcall attributes on all functions declared when
18793 rs6000_default_long_calls is true. */
18794static void
a2369ed3 18795rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
18796{
18797 if (rs6000_default_long_calls
18798 && (TREE_CODE (type) == FUNCTION_TYPE
18799 || TREE_CODE (type) == METHOD_TYPE))
18800 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
18801 NULL_TREE,
18802 TYPE_ATTRIBUTES (type));
16d6f994
EC
18803
18804#if TARGET_MACHO
18805 darwin_set_default_type_attributes (type);
18806#endif
a5c76ee6
ZW
18807}
18808
3cb999d8
DE
18809/* Return a reference suitable for calling a function with the
18810 longcall attribute. */
a4f6c312 18811
9390387d 18812rtx
a2369ed3 18813rs6000_longcall_ref (rtx call_ref)
6a4cee5f 18814{
d330fd93 18815 const char *call_name;
6a4cee5f
MM
18816 tree node;
18817
18818 if (GET_CODE (call_ref) != SYMBOL_REF)
18819 return call_ref;
18820
18821 /* System V adds '.' to the internal name, so skip them. */
18822 call_name = XSTR (call_ref, 0);
18823 if (*call_name == '.')
18824 {
18825 while (*call_name == '.')
18826 call_name++;
18827
18828 node = get_identifier (call_name);
39403d82 18829 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
18830 }
18831
18832 return force_reg (Pmode, call_ref);
18833}
7509c759 18834\f
77ccdfed
EC
18835#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
18836#define TARGET_USE_MS_BITFIELD_LAYOUT 0
18837#endif
18838
18839/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
18840 struct attribute_spec.handler. */
18841static tree
18842rs6000_handle_struct_attribute (tree *node, tree name,
18843 tree args ATTRIBUTE_UNUSED,
18844 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
18845{
18846 tree *type = NULL;
18847 if (DECL_P (*node))
18848 {
18849 if (TREE_CODE (*node) == TYPE_DECL)
18850 type = &TREE_TYPE (*node);
18851 }
18852 else
18853 type = node;
18854
18855 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
18856 || TREE_CODE (*type) == UNION_TYPE)))
18857 {
18858 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
18859 *no_add_attrs = true;
18860 }
18861
18862 else if ((is_attribute_p ("ms_struct", name)
18863 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
18864 || ((is_attribute_p ("gcc_struct", name)
18865 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
18866 {
18867 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
18868 IDENTIFIER_POINTER (name));
18869 *no_add_attrs = true;
18870 }
18871
18872 return NULL_TREE;
18873}
18874
18875static bool
18876rs6000_ms_bitfield_layout_p (tree record_type)
18877{
18878 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
18879 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
18880 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
18881}
18882\f
b64a1b53
RH
18883#ifdef USING_ELFOS_H
18884
d6b5193b 18885/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 18886
d6b5193b
RS
18887static void
18888rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
18889{
18890 if (DEFAULT_ABI == ABI_AIX
18891 && TARGET_MINIMAL_TOC
18892 && !TARGET_RELOCATABLE)
18893 {
18894 if (!toc_initialized)
18895 {
18896 toc_initialized = 1;
18897 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
18898 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
18899 fprintf (asm_out_file, "\t.tc ");
18900 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
18901 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
18902 fprintf (asm_out_file, "\n");
18903
18904 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18905 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
18906 fprintf (asm_out_file, " = .+32768\n");
18907 }
18908 else
18909 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18910 }
18911 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
18912 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
18913 else
18914 {
18915 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
18916 if (!toc_initialized)
18917 {
18918 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
18919 fprintf (asm_out_file, " = .+32768\n");
18920 toc_initialized = 1;
18921 }
18922 }
18923}
18924
18925/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 18926
b64a1b53 18927static void
d6b5193b
RS
18928rs6000_elf_asm_init_sections (void)
18929{
18930 toc_section
18931 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
18932
18933 sdata2_section
18934 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
18935 SDATA2_SECTION_ASM_OP);
18936}
18937
18938/* Implement TARGET_SELECT_RTX_SECTION. */
18939
18940static section *
f676971a 18941rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 18942 unsigned HOST_WIDE_INT align)
7509c759 18943{
a9098fd0 18944 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 18945 return toc_section;
7509c759 18946 else
d6b5193b 18947 return default_elf_select_rtx_section (mode, x, align);
7509c759 18948}
d9407988 18949\f
d1908feb
JJ
18950/* For a SYMBOL_REF, set generic flags and then perform some
18951 target-specific processing.
18952
d1908feb
JJ
18953 When the AIX ABI is requested on a non-AIX system, replace the
18954 function name with the real name (with a leading .) rather than the
18955 function descriptor name. This saves a lot of overriding code to
18956 read the prefixes. */
d9407988 18957
fb49053f 18958static void
a2369ed3 18959rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 18960{
d1908feb 18961 default_encode_section_info (decl, rtl, first);
b2003250 18962
d1908feb
JJ
18963 if (first
18964 && TREE_CODE (decl) == FUNCTION_DECL
18965 && !TARGET_AIX
18966 && DEFAULT_ABI == ABI_AIX)
d9407988 18967 {
c6a2438a 18968 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
18969 size_t len = strlen (XSTR (sym_ref, 0));
18970 char *str = alloca (len + 2);
18971 str[0] = '.';
18972 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
18973 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 18974 }
d9407988
MM
18975}
18976
c1b7d95a 18977bool
a2369ed3 18978rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
18979{
18980 if (rs6000_sdata == SDATA_NONE)
18981 return false;
18982
7482ad25
AF
18983 /* We want to merge strings, so we never consider them small data. */
18984 if (TREE_CODE (decl) == STRING_CST)
18985 return false;
18986
18987 /* Functions are never in the small data area. */
18988 if (TREE_CODE (decl) == FUNCTION_DECL)
18989 return false;
18990
0e5dbd9b
DE
18991 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
18992 {
18993 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
18994 if (strcmp (section, ".sdata") == 0
18995 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
18996 || strcmp (section, ".sbss") == 0
18997 || strcmp (section, ".sbss2") == 0
18998 || strcmp (section, ".PPC.EMB.sdata0") == 0
18999 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19000 return true;
19001 }
19002 else
19003 {
19004 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19005
19006 if (size > 0
307b599c 19007 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19008 /* If it's not public, and we're not going to reference it there,
19009 there's no need to put it in the small data section. */
0e5dbd9b
DE
19010 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19011 return true;
19012 }
19013
19014 return false;
19015}
19016
b91da81f 19017#endif /* USING_ELFOS_H */
aacd3885
RS
19018\f
19019/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19020
aacd3885
RS
19021static bool
19022rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
19023{
19024 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19025}
a6c2a102 19026\f
000034eb 19027/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19028 ADDR can be effectively incremented by incrementing REG.
19029
19030 r0 is special and we must not select it as an address
19031 register by this routine since our caller will try to
19032 increment the returned register via an "la" instruction. */
000034eb 19033
9390387d 19034rtx
a2369ed3 19035find_addr_reg (rtx addr)
000034eb
DE
19036{
19037 while (GET_CODE (addr) == PLUS)
19038 {
02441cd6
JL
19039 if (GET_CODE (XEXP (addr, 0)) == REG
19040 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19041 addr = XEXP (addr, 0);
02441cd6
JL
19042 else if (GET_CODE (XEXP (addr, 1)) == REG
19043 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19044 addr = XEXP (addr, 1);
19045 else if (CONSTANT_P (XEXP (addr, 0)))
19046 addr = XEXP (addr, 1);
19047 else if (CONSTANT_P (XEXP (addr, 1)))
19048 addr = XEXP (addr, 0);
19049 else
37409796 19050 gcc_unreachable ();
000034eb 19051 }
37409796
NS
19052 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19053 return addr;
000034eb
DE
19054}
19055
a6c2a102 19056void
a2369ed3 19057rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19058{
19059 fatal_insn ("bad address", op);
19060}
c8023011 19061
ee890fe2
SS
19062#if TARGET_MACHO
19063
efdba735 19064static tree branch_island_list = 0;
ee890fe2 19065
efdba735
SH
19066/* Remember to generate a branch island for far calls to the given
19067 function. */
ee890fe2 19068
f676971a 19069static void
c4ad648e
AM
19070add_compiler_branch_island (tree label_name, tree function_name,
19071 int line_number)
ee890fe2 19072{
efdba735 19073 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19074 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19075 TREE_CHAIN (branch_island) = branch_island_list;
19076 branch_island_list = branch_island;
ee890fe2
SS
19077}
19078
efdba735
SH
19079#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19080#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19081#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19082 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19083
efdba735
SH
19084/* Generate far-jump branch islands for everything on the
19085 branch_island_list. Invoked immediately after the last instruction
19086 of the epilogue has been emitted; the branch-islands must be
19087 appended to, and contiguous with, the function body. Mach-O stubs
19088 are generated in machopic_output_stub(). */
ee890fe2 19089
efdba735
SH
19090static void
19091macho_branch_islands (void)
19092{
19093 char tmp_buf[512];
19094 tree branch_island;
19095
19096 for (branch_island = branch_island_list;
19097 branch_island;
19098 branch_island = TREE_CHAIN (branch_island))
19099 {
19100 const char *label =
19101 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19102 const char *name =
11abc112 19103 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19104 char name_buf[512];
19105 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19106 if (name[0] == '*' || name[0] == '&')
19107 strcpy (name_buf, name+1);
19108 else
19109 {
19110 name_buf[0] = '_';
19111 strcpy (name_buf+1, name);
19112 }
19113 strcpy (tmp_buf, "\n");
19114 strcat (tmp_buf, label);
ee890fe2 19115#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19116 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19117 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19118#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19119 if (flag_pic)
19120 {
19121 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19122 strcat (tmp_buf, label);
19123 strcat (tmp_buf, "_pic\n");
19124 strcat (tmp_buf, label);
19125 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19126
efdba735
SH
19127 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19128 strcat (tmp_buf, name_buf);
19129 strcat (tmp_buf, " - ");
19130 strcat (tmp_buf, label);
19131 strcat (tmp_buf, "_pic)\n");
f676971a 19132
efdba735 19133 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19134
efdba735
SH
19135 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19136 strcat (tmp_buf, name_buf);
19137 strcat (tmp_buf, " - ");
19138 strcat (tmp_buf, label);
19139 strcat (tmp_buf, "_pic)\n");
f676971a 19140
efdba735
SH
19141 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19142 }
19143 else
19144 {
19145 strcat (tmp_buf, ":\nlis r12,hi16(");
19146 strcat (tmp_buf, name_buf);
19147 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19148 strcat (tmp_buf, name_buf);
19149 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19150 }
19151 output_asm_insn (tmp_buf, 0);
ee890fe2 19152#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19153 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19154 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19155#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19156 }
ee890fe2 19157
efdba735 19158 branch_island_list = 0;
ee890fe2
SS
19159}
19160
19161/* NO_PREVIOUS_DEF checks in the link list whether the function name is
19162 already there or not. */
19163
efdba735 19164static int
a2369ed3 19165no_previous_def (tree function_name)
ee890fe2 19166{
efdba735
SH
19167 tree branch_island;
19168 for (branch_island = branch_island_list;
19169 branch_island;
19170 branch_island = TREE_CHAIN (branch_island))
19171 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19172 return 0;
19173 return 1;
19174}
19175
19176/* GET_PREV_LABEL gets the label name from the previous definition of
19177 the function. */
19178
efdba735 19179static tree
a2369ed3 19180get_prev_label (tree function_name)
ee890fe2 19181{
efdba735
SH
19182 tree branch_island;
19183 for (branch_island = branch_island_list;
19184 branch_island;
19185 branch_island = TREE_CHAIN (branch_island))
19186 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19187 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19188 return 0;
19189}
19190
75b1b789
MS
19191#ifndef DARWIN_LINKER_GENERATES_ISLANDS
19192#define DARWIN_LINKER_GENERATES_ISLANDS 0
19193#endif
19194
19195/* KEXTs still need branch islands. */
19196#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
19197 || flag_mkernel || flag_apple_kext)
19198
ee890fe2 19199/* INSN is either a function call or a millicode call. It may have an
f676971a 19200 unconditional jump in its delay slot.
ee890fe2
SS
19201
19202 CALL_DEST is the routine we are calling. */
19203
19204char *
c4ad648e
AM
19205output_call (rtx insn, rtx *operands, int dest_operand_number,
19206 int cookie_operand_number)
ee890fe2
SS
19207{
19208 static char buf[256];
75b1b789
MS
19209 if (DARWIN_GENERATE_ISLANDS
19210 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 19211 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
19212 {
19213 tree labelname;
efdba735 19214 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 19215
ee890fe2
SS
19216 if (no_previous_def (funname))
19217 {
ee890fe2
SS
19218 rtx label_rtx = gen_label_rtx ();
19219 char *label_buf, temp_buf[256];
19220 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
19221 CODE_LABEL_NUMBER (label_rtx));
19222 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
19223 labelname = get_identifier (label_buf);
a38e7aa5 19224 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
19225 }
19226 else
19227 labelname = get_prev_label (funname);
19228
efdba735
SH
19229 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
19230 instruction will reach 'foo', otherwise link as 'bl L42'".
19231 "L42" should be a 'branch island', that will do a far jump to
19232 'foo'. Branch islands are generated in
19233 macho_branch_islands(). */
ee890fe2 19234 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 19235 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
19236 }
19237 else
efdba735
SH
19238 sprintf (buf, "bl %%z%d", dest_operand_number);
19239 return buf;
ee890fe2
SS
19240}
19241
ee890fe2
SS
19242/* Generate PIC and indirect symbol stubs. */
19243
19244void
a2369ed3 19245machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
19246{
19247 unsigned int length;
a4f6c312
SS
19248 char *symbol_name, *lazy_ptr_name;
19249 char *local_label_0;
ee890fe2
SS
19250 static int label = 0;
19251
df56a27f 19252 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 19253 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 19254
ee890fe2 19255
ee890fe2
SS
19256 length = strlen (symb);
19257 symbol_name = alloca (length + 32);
19258 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
19259
19260 lazy_ptr_name = alloca (length + 32);
19261 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
19262
ee890fe2 19263 if (flag_pic == 2)
56c779bc 19264 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 19265 else
56c779bc 19266 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
19267
19268 if (flag_pic == 2)
19269 {
d974312d
DJ
19270 fprintf (file, "\t.align 5\n");
19271
19272 fprintf (file, "%s:\n", stub);
19273 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19274
876455fa 19275 label++;
89da1f32 19276 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 19277 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 19278
ee890fe2
SS
19279 fprintf (file, "\tmflr r0\n");
19280 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
19281 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
19282 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
19283 lazy_ptr_name, local_label_0);
19284 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
19285 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
19286 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
19287 lazy_ptr_name, local_label_0);
19288 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
19289 fprintf (file, "\tbctr\n");
19290 }
19291 else
d974312d
DJ
19292 {
19293 fprintf (file, "\t.align 4\n");
19294
19295 fprintf (file, "%s:\n", stub);
19296 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19297
19298 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
19299 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
19300 (TARGET_64BIT ? "ldu" : "lwzu"),
19301 lazy_ptr_name);
d974312d
DJ
19302 fprintf (file, "\tmtctr r12\n");
19303 fprintf (file, "\tbctr\n");
19304 }
f676971a 19305
56c779bc 19306 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
19307 fprintf (file, "%s:\n", lazy_ptr_name);
19308 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
19309 fprintf (file, "%sdyld_stub_binding_helper\n",
19310 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
19311}
19312
19313/* Legitimize PIC addresses. If the address is already
19314 position-independent, we return ORIG. Newly generated
19315 position-independent addresses go into a reg. This is REG if non
19316 zero, otherwise we allocate register(s) as necessary. */
19317
4fbbe694 19318#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
19319
19320rtx
f676971a 19321rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 19322 rtx reg)
ee890fe2
SS
19323{
19324 rtx base, offset;
19325
19326 if (reg == NULL && ! reload_in_progress && ! reload_completed)
19327 reg = gen_reg_rtx (Pmode);
19328
19329 if (GET_CODE (orig) == CONST)
19330 {
37409796
NS
19331 rtx reg_temp;
19332
ee890fe2
SS
19333 if (GET_CODE (XEXP (orig, 0)) == PLUS
19334 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
19335 return orig;
19336
37409796 19337 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 19338
37409796
NS
19339 /* Use a different reg for the intermediate value, as
19340 it will be marked UNCHANGING. */
19341 reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
19342 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
19343 Pmode, reg_temp);
19344 offset =
19345 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
19346 Pmode, reg);
bb8df8a6 19347
ee890fe2
SS
19348 if (GET_CODE (offset) == CONST_INT)
19349 {
19350 if (SMALL_INT (offset))
ed8908e7 19351 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
19352 else if (! reload_in_progress && ! reload_completed)
19353 offset = force_reg (Pmode, offset);
19354 else
c859cda6
DJ
19355 {
19356 rtx mem = force_const_mem (Pmode, orig);
19357 return machopic_legitimize_pic_address (mem, Pmode, reg);
19358 }
ee890fe2 19359 }
f1c25d3b 19360 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
19361 }
19362
19363 /* Fall back on generic machopic code. */
19364 return machopic_legitimize_pic_address (orig, mode, reg);
19365}
19366
c4e18b1c
GK
19367/* Output a .machine directive for the Darwin assembler, and call
19368 the generic start_file routine. */
19369
19370static void
19371rs6000_darwin_file_start (void)
19372{
94ff898d 19373 static const struct
c4e18b1c
GK
19374 {
19375 const char *arg;
19376 const char *name;
19377 int if_set;
19378 } mapping[] = {
55dbfb48 19379 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
19380 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
19381 { "power4", "ppc970", 0 },
19382 { "G5", "ppc970", 0 },
19383 { "7450", "ppc7450", 0 },
19384 { "7400", "ppc7400", MASK_ALTIVEC },
19385 { "G4", "ppc7400", 0 },
19386 { "750", "ppc750", 0 },
19387 { "740", "ppc750", 0 },
19388 { "G3", "ppc750", 0 },
19389 { "604e", "ppc604e", 0 },
19390 { "604", "ppc604", 0 },
19391 { "603e", "ppc603", 0 },
19392 { "603", "ppc603", 0 },
19393 { "601", "ppc601", 0 },
19394 { NULL, "ppc", 0 } };
19395 const char *cpu_id = "";
19396 size_t i;
94ff898d 19397
9390387d 19398 rs6000_file_start ();
192d0f89 19399 darwin_file_start ();
c4e18b1c
GK
19400
19401 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
19402 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
19403 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
19404 && rs6000_select[i].string[0] != '\0')
19405 cpu_id = rs6000_select[i].string;
19406
19407 /* Look through the mapping array. Pick the first name that either
19408 matches the argument, has a bit set in IF_SET that is also set
19409 in the target flags, or has a NULL name. */
19410
19411 i = 0;
19412 while (mapping[i].arg != NULL
19413 && strcmp (mapping[i].arg, cpu_id) != 0
19414 && (mapping[i].if_set & target_flags) == 0)
19415 i++;
19416
19417 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
19418}
19419
ee890fe2 19420#endif /* TARGET_MACHO */
7c262518
RH
19421
19422#if TARGET_ELF
9b580a0b
RH
19423static int
19424rs6000_elf_reloc_rw_mask (void)
7c262518 19425{
9b580a0b
RH
19426 if (flag_pic)
19427 return 3;
19428 else if (DEFAULT_ABI == ABI_AIX)
19429 return 2;
19430 else
19431 return 0;
7c262518 19432}
d9f6800d
RH
19433
19434/* Record an element in the table of global constructors. SYMBOL is
19435 a SYMBOL_REF of the function to be called; PRIORITY is a number
19436 between 0 and MAX_INIT_PRIORITY.
19437
19438 This differs from default_named_section_asm_out_constructor in
19439 that we have special handling for -mrelocatable. */
19440
19441static void
a2369ed3 19442rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
19443{
19444 const char *section = ".ctors";
19445 char buf[16];
19446
19447 if (priority != DEFAULT_INIT_PRIORITY)
19448 {
19449 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
19450 /* Invert the numbering so the linker puts us in the proper
19451 order; constructors are run from right to left, and the
19452 linker sorts in increasing order. */
19453 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19454 section = buf;
19455 }
19456
d6b5193b 19457 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19458 assemble_align (POINTER_SIZE);
d9f6800d
RH
19459
19460 if (TARGET_RELOCATABLE)
19461 {
19462 fputs ("\t.long (", asm_out_file);
19463 output_addr_const (asm_out_file, symbol);
19464 fputs (")@fixup\n", asm_out_file);
19465 }
19466 else
c8af3574 19467 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
19468}
19469
19470static void
a2369ed3 19471rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
19472{
19473 const char *section = ".dtors";
19474 char buf[16];
19475
19476 if (priority != DEFAULT_INIT_PRIORITY)
19477 {
19478 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
19479 /* Invert the numbering so the linker puts us in the proper
19480 order; constructors are run from right to left, and the
19481 linker sorts in increasing order. */
19482 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19483 section = buf;
19484 }
19485
d6b5193b 19486 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19487 assemble_align (POINTER_SIZE);
d9f6800d
RH
19488
19489 if (TARGET_RELOCATABLE)
19490 {
19491 fputs ("\t.long (", asm_out_file);
19492 output_addr_const (asm_out_file, symbol);
19493 fputs (")@fixup\n", asm_out_file);
19494 }
19495 else
c8af3574 19496 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 19497}
9739c90c
JJ
19498
19499void
a2369ed3 19500rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
19501{
19502 if (TARGET_64BIT)
19503 {
19504 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
19505 ASM_OUTPUT_LABEL (file, name);
19506 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
19507 rs6000_output_function_entry (file, name);
19508 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
19509 if (DOT_SYMBOLS)
9739c90c 19510 {
85b776df 19511 fputs ("\t.size\t", file);
9739c90c 19512 assemble_name (file, name);
85b776df
AM
19513 fputs (",24\n\t.type\t.", file);
19514 assemble_name (file, name);
19515 fputs (",@function\n", file);
19516 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
19517 {
19518 fputs ("\t.globl\t.", file);
19519 assemble_name (file, name);
19520 putc ('\n', file);
19521 }
9739c90c 19522 }
85b776df
AM
19523 else
19524 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 19525 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
19526 rs6000_output_function_entry (file, name);
19527 fputs (":\n", file);
9739c90c
JJ
19528 return;
19529 }
19530
19531 if (TARGET_RELOCATABLE
7f970b70 19532 && !TARGET_SECURE_PLT
9739c90c 19533 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 19534 && uses_TOC ())
9739c90c
JJ
19535 {
19536 char buf[256];
19537
19538 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
19539
19540 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
19541 fprintf (file, "\t.long ");
19542 assemble_name (file, buf);
19543 putc ('-', file);
19544 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
19545 assemble_name (file, buf);
19546 putc ('\n', file);
19547 }
19548
19549 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
19550 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
19551
19552 if (DEFAULT_ABI == ABI_AIX)
19553 {
19554 const char *desc_name, *orig_name;
19555
19556 orig_name = (*targetm.strip_name_encoding) (name);
19557 desc_name = orig_name;
19558 while (*desc_name == '.')
19559 desc_name++;
19560
19561 if (TREE_PUBLIC (decl))
19562 fprintf (file, "\t.globl %s\n", desc_name);
19563
19564 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19565 fprintf (file, "%s:\n", desc_name);
19566 fprintf (file, "\t.long %s\n", orig_name);
19567 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
19568 if (DEFAULT_ABI == ABI_AIX)
19569 fputs ("\t.long 0\n", file);
19570 fprintf (file, "\t.previous\n");
19571 }
19572 ASM_OUTPUT_LABEL (file, name);
19573}
1334b570
AM
19574
19575static void
19576rs6000_elf_end_indicate_exec_stack (void)
19577{
19578 if (TARGET_32BIT)
19579 file_end_indicate_exec_stack ();
19580}
7c262518
RH
19581#endif
19582
cbaaba19 19583#if TARGET_XCOFF
0d5817b2
DE
19584static void
19585rs6000_xcoff_asm_output_anchor (rtx symbol)
19586{
19587 char buffer[100];
19588
19589 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
19590 SYMBOL_REF_BLOCK_OFFSET (symbol));
19591 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
19592}
19593
7c262518 19594static void
a2369ed3 19595rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
19596{
19597 fputs (GLOBAL_ASM_OP, stream);
19598 RS6000_OUTPUT_BASENAME (stream, name);
19599 putc ('\n', stream);
19600}
19601
d6b5193b
RS
19602/* A get_unnamed_decl callback, used for read-only sections. PTR
19603 points to the section string variable. */
19604
19605static void
19606rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
19607{
19608 fprintf (asm_out_file, "\t.csect %s[RO],3\n",
19609 *(const char *const *) directive);
19610}
19611
19612/* Likewise for read-write sections. */
19613
19614static void
19615rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
19616{
19617 fprintf (asm_out_file, "\t.csect %s[RW],3\n",
19618 *(const char *const *) directive);
19619}
19620
19621/* A get_unnamed_section callback, used for switching to toc_section. */
19622
19623static void
19624rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19625{
19626 if (TARGET_MINIMAL_TOC)
19627 {
19628 /* toc_section is always selected at least once from
19629 rs6000_xcoff_file_start, so this is guaranteed to
19630 always be defined once and only once in each file. */
19631 if (!toc_initialized)
19632 {
19633 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
19634 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
19635 toc_initialized = 1;
19636 }
19637 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
19638 (TARGET_32BIT ? "" : ",3"));
19639 }
19640 else
19641 fputs ("\t.toc\n", asm_out_file);
19642}
19643
19644/* Implement TARGET_ASM_INIT_SECTIONS. */
19645
19646static void
19647rs6000_xcoff_asm_init_sections (void)
19648{
19649 read_only_data_section
19650 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19651 &xcoff_read_only_section_name);
19652
19653 private_data_section
19654 = get_unnamed_section (SECTION_WRITE,
19655 rs6000_xcoff_output_readwrite_section_asm_op,
19656 &xcoff_private_data_section_name);
19657
19658 read_only_private_data_section
19659 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19660 &xcoff_private_data_section_name);
19661
19662 toc_section
19663 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
19664
19665 readonly_data_section = read_only_data_section;
19666 exception_section = data_section;
19667}
19668
9b580a0b
RH
19669static int
19670rs6000_xcoff_reloc_rw_mask (void)
19671{
19672 return 3;
19673}
19674
b275d088 19675static void
c18a5b6c
MM
19676rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
19677 tree decl ATTRIBUTE_UNUSED)
7c262518 19678{
0e5dbd9b
DE
19679 int smclass;
19680 static const char * const suffix[3] = { "PR", "RO", "RW" };
19681
19682 if (flags & SECTION_CODE)
19683 smclass = 0;
19684 else if (flags & SECTION_WRITE)
19685 smclass = 2;
19686 else
19687 smclass = 1;
19688
5b5198f7 19689 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 19690 (flags & SECTION_CODE) ? "." : "",
5b5198f7 19691 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 19692}
ae46c4e0 19693
d6b5193b 19694static section *
f676971a 19695rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 19696 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 19697{
9b580a0b 19698 if (decl_readonly_section (decl, reloc))
ae46c4e0 19699 {
0e5dbd9b 19700 if (TREE_PUBLIC (decl))
d6b5193b 19701 return read_only_data_section;
ae46c4e0 19702 else
d6b5193b 19703 return read_only_private_data_section;
ae46c4e0
RH
19704 }
19705 else
19706 {
0e5dbd9b 19707 if (TREE_PUBLIC (decl))
d6b5193b 19708 return data_section;
ae46c4e0 19709 else
d6b5193b 19710 return private_data_section;
ae46c4e0
RH
19711 }
19712}
19713
19714static void
a2369ed3 19715rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
19716{
19717 const char *name;
ae46c4e0 19718
5b5198f7
DE
19719 /* Use select_section for private and uninitialized data. */
19720 if (!TREE_PUBLIC (decl)
19721 || DECL_COMMON (decl)
0e5dbd9b
DE
19722 || DECL_INITIAL (decl) == NULL_TREE
19723 || DECL_INITIAL (decl) == error_mark_node
19724 || (flag_zero_initialized_in_bss
19725 && initializer_zerop (DECL_INITIAL (decl))))
19726 return;
19727
19728 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
19729 name = (*targetm.strip_name_encoding) (name);
19730 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 19731}
b64a1b53 19732
fb49053f
RH
19733/* Select section for constant in constant pool.
19734
19735 On RS/6000, all constants are in the private read-only data area.
19736 However, if this is being placed in the TOC it must be output as a
19737 toc entry. */
19738
d6b5193b 19739static section *
f676971a 19740rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 19741 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
19742{
19743 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19744 return toc_section;
b64a1b53 19745 else
d6b5193b 19746 return read_only_private_data_section;
b64a1b53 19747}
772c5265
RH
19748
19749/* Remove any trailing [DS] or the like from the symbol name. */
19750
19751static const char *
a2369ed3 19752rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
19753{
19754 size_t len;
19755 if (*name == '*')
19756 name++;
19757 len = strlen (name);
19758 if (name[len - 1] == ']')
19759 return ggc_alloc_string (name, len - 4);
19760 else
19761 return name;
19762}
19763
5add3202
DE
19764/* Section attributes. AIX is always PIC. */
19765
19766static unsigned int
a2369ed3 19767rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 19768{
5b5198f7 19769 unsigned int align;
9b580a0b 19770 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
19771
19772 /* Align to at least UNIT size. */
19773 if (flags & SECTION_CODE)
19774 align = MIN_UNITS_PER_WORD;
19775 else
19776 /* Increase alignment of large objects if not already stricter. */
19777 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
19778 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
19779 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
19780
19781 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 19782}
a5fe455b 19783
1bc7c5b6
ZW
19784/* Output at beginning of assembler file.
19785
19786 Initialize the section names for the RS/6000 at this point.
19787
19788 Specify filename, including full path, to assembler.
19789
19790 We want to go into the TOC section so at least one .toc will be emitted.
19791 Also, in order to output proper .bs/.es pairs, we need at least one static
19792 [RW] section emitted.
19793
19794 Finally, declare mcount when profiling to make the assembler happy. */
19795
19796static void
863d938c 19797rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
19798{
19799 rs6000_gen_section_name (&xcoff_bss_section_name,
19800 main_input_filename, ".bss_");
19801 rs6000_gen_section_name (&xcoff_private_data_section_name,
19802 main_input_filename, ".rw_");
19803 rs6000_gen_section_name (&xcoff_read_only_section_name,
19804 main_input_filename, ".ro_");
19805
19806 fputs ("\t.file\t", asm_out_file);
19807 output_quoted_string (asm_out_file, main_input_filename);
19808 fputc ('\n', asm_out_file);
1bc7c5b6 19809 if (write_symbols != NO_DEBUG)
d6b5193b
RS
19810 switch_to_section (private_data_section);
19811 switch_to_section (text_section);
1bc7c5b6
ZW
19812 if (profile_flag)
19813 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
19814 rs6000_file_start ();
19815}
19816
a5fe455b
ZW
19817/* Output at end of assembler file.
19818 On the RS/6000, referencing data should automatically pull in text. */
19819
19820static void
863d938c 19821rs6000_xcoff_file_end (void)
a5fe455b 19822{
d6b5193b 19823 switch_to_section (text_section);
a5fe455b 19824 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 19825 switch_to_section (data_section);
a5fe455b
ZW
19826 fputs (TARGET_32BIT
19827 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
19828 asm_out_file);
19829}
f1384257 19830#endif /* TARGET_XCOFF */
0e5dbd9b 19831
3c50106f
RH
19832/* Compute a (partial) cost for rtx X. Return true if the complete
19833 cost has been computed, and false if subexpressions should be
19834 scanned. In either case, *TOTAL contains the cost result. */
19835
19836static bool
1494c534 19837rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 19838{
f0517163
RS
19839 enum machine_mode mode = GET_MODE (x);
19840
3c50106f
RH
19841 switch (code)
19842 {
30a555d9 19843 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 19844 case CONST_INT:
066cd967
DE
19845 if (((outer_code == SET
19846 || outer_code == PLUS
19847 || outer_code == MINUS)
279bb624
DE
19848 && (satisfies_constraint_I (x)
19849 || satisfies_constraint_L (x)))
066cd967 19850 || (outer_code == AND
279bb624
DE
19851 && (satisfies_constraint_K (x)
19852 || (mode == SImode
19853 ? satisfies_constraint_L (x)
19854 : satisfies_constraint_J (x))
1990cd79
AM
19855 || mask_operand (x, mode)
19856 || (mode == DImode
19857 && mask64_operand (x, DImode))))
22e54023 19858 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
19859 && (satisfies_constraint_K (x)
19860 || (mode == SImode
19861 ? satisfies_constraint_L (x)
19862 : satisfies_constraint_J (x))))
066cd967
DE
19863 || outer_code == ASHIFT
19864 || outer_code == ASHIFTRT
19865 || outer_code == LSHIFTRT
19866 || outer_code == ROTATE
19867 || outer_code == ROTATERT
d5861a7a 19868 || outer_code == ZERO_EXTRACT
066cd967 19869 || (outer_code == MULT
279bb624 19870 && satisfies_constraint_I (x))
22e54023
DE
19871 || ((outer_code == DIV || outer_code == UDIV
19872 || outer_code == MOD || outer_code == UMOD)
19873 && exact_log2 (INTVAL (x)) >= 0)
066cd967 19874 || (outer_code == COMPARE
279bb624
DE
19875 && (satisfies_constraint_I (x)
19876 || satisfies_constraint_K (x)))
22e54023 19877 || (outer_code == EQ
279bb624
DE
19878 && (satisfies_constraint_I (x)
19879 || satisfies_constraint_K (x)
19880 || (mode == SImode
19881 ? satisfies_constraint_L (x)
19882 : satisfies_constraint_J (x))))
22e54023 19883 || (outer_code == GTU
279bb624 19884 && satisfies_constraint_I (x))
22e54023 19885 || (outer_code == LTU
279bb624 19886 && satisfies_constraint_P (x)))
066cd967
DE
19887 {
19888 *total = 0;
19889 return true;
19890 }
19891 else if ((outer_code == PLUS
4ae234b0 19892 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 19893 || (outer_code == MINUS
4ae234b0 19894 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
19895 || ((outer_code == SET
19896 || outer_code == IOR
19897 || outer_code == XOR)
19898 && (INTVAL (x)
19899 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
19900 {
19901 *total = COSTS_N_INSNS (1);
19902 return true;
19903 }
19904 /* FALLTHRU */
19905
19906 case CONST_DOUBLE:
f6fe3a22 19907 if (mode == DImode && code == CONST_DOUBLE)
066cd967 19908 {
f6fe3a22
DE
19909 if ((outer_code == IOR || outer_code == XOR)
19910 && CONST_DOUBLE_HIGH (x) == 0
19911 && (CONST_DOUBLE_LOW (x)
19912 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
19913 {
19914 *total = 0;
19915 return true;
19916 }
19917 else if ((outer_code == AND && and64_2_operand (x, DImode))
19918 || ((outer_code == SET
19919 || outer_code == IOR
19920 || outer_code == XOR)
19921 && CONST_DOUBLE_HIGH (x) == 0))
19922 {
19923 *total = COSTS_N_INSNS (1);
19924 return true;
19925 }
066cd967
DE
19926 }
19927 /* FALLTHRU */
19928
3c50106f 19929 case CONST:
066cd967 19930 case HIGH:
3c50106f 19931 case SYMBOL_REF:
066cd967
DE
19932 case MEM:
19933 /* When optimizing for size, MEM should be slightly more expensive
19934 than generating address, e.g., (plus (reg) (const)).
c112cf2b 19935 L1 cache latency is about two instructions. */
066cd967 19936 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
19937 return true;
19938
30a555d9
DE
19939 case LABEL_REF:
19940 *total = 0;
19941 return true;
19942
3c50106f 19943 case PLUS:
f0517163 19944 if (mode == DFmode)
066cd967
DE
19945 {
19946 if (GET_CODE (XEXP (x, 0)) == MULT)
19947 {
19948 /* FNMA accounted in outer NEG. */
19949 if (outer_code == NEG)
19950 *total = rs6000_cost->dmul - rs6000_cost->fp;
19951 else
19952 *total = rs6000_cost->dmul;
19953 }
19954 else
19955 *total = rs6000_cost->fp;
19956 }
f0517163 19957 else if (mode == SFmode)
066cd967
DE
19958 {
19959 /* FNMA accounted in outer NEG. */
19960 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
19961 *total = 0;
19962 else
19963 *total = rs6000_cost->fp;
19964 }
f0517163 19965 else
066cd967
DE
19966 *total = COSTS_N_INSNS (1);
19967 return false;
3c50106f 19968
52190329 19969 case MINUS:
f0517163 19970 if (mode == DFmode)
066cd967 19971 {
762c919f
JM
19972 if (GET_CODE (XEXP (x, 0)) == MULT
19973 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
19974 {
19975 /* FNMA accounted in outer NEG. */
19976 if (outer_code == NEG)
762c919f 19977 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
19978 else
19979 *total = rs6000_cost->dmul;
19980 }
19981 else
19982 *total = rs6000_cost->fp;
19983 }
f0517163 19984 else if (mode == SFmode)
066cd967
DE
19985 {
19986 /* FNMA accounted in outer NEG. */
19987 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
19988 *total = 0;
19989 else
19990 *total = rs6000_cost->fp;
19991 }
f0517163 19992 else
c4ad648e 19993 *total = COSTS_N_INSNS (1);
066cd967 19994 return false;
3c50106f
RH
19995
19996 case MULT:
c9dbf840 19997 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 19998 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 19999 {
8b897cfa
RS
20000 if (INTVAL (XEXP (x, 1)) >= -256
20001 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20002 *total = rs6000_cost->mulsi_const9;
8b897cfa 20003 else
06a67bdd 20004 *total = rs6000_cost->mulsi_const;
3c50106f 20005 }
066cd967
DE
20006 /* FMA accounted in outer PLUS/MINUS. */
20007 else if ((mode == DFmode || mode == SFmode)
20008 && (outer_code == PLUS || outer_code == MINUS))
20009 *total = 0;
f0517163 20010 else if (mode == DFmode)
06a67bdd 20011 *total = rs6000_cost->dmul;
f0517163 20012 else if (mode == SFmode)
06a67bdd 20013 *total = rs6000_cost->fp;
f0517163 20014 else if (mode == DImode)
06a67bdd 20015 *total = rs6000_cost->muldi;
8b897cfa 20016 else
06a67bdd 20017 *total = rs6000_cost->mulsi;
066cd967 20018 return false;
3c50106f
RH
20019
20020 case DIV:
20021 case MOD:
f0517163
RS
20022 if (FLOAT_MODE_P (mode))
20023 {
06a67bdd
RS
20024 *total = mode == DFmode ? rs6000_cost->ddiv
20025 : rs6000_cost->sdiv;
066cd967 20026 return false;
f0517163 20027 }
5efb1046 20028 /* FALLTHRU */
3c50106f
RH
20029
20030 case UDIV:
20031 case UMOD:
627b6fe2
DJ
20032 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20033 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20034 {
20035 if (code == DIV || code == MOD)
20036 /* Shift, addze */
20037 *total = COSTS_N_INSNS (2);
20038 else
20039 /* Shift */
20040 *total = COSTS_N_INSNS (1);
20041 }
c4ad648e 20042 else
627b6fe2
DJ
20043 {
20044 if (GET_MODE (XEXP (x, 1)) == DImode)
20045 *total = rs6000_cost->divdi;
20046 else
20047 *total = rs6000_cost->divsi;
20048 }
20049 /* Add in shift and subtract for MOD. */
20050 if (code == MOD || code == UMOD)
20051 *total += COSTS_N_INSNS (2);
066cd967 20052 return false;
3c50106f
RH
20053
20054 case FFS:
20055 *total = COSTS_N_INSNS (4);
066cd967 20056 return false;
3c50106f 20057
06a67bdd 20058 case NOT:
066cd967
DE
20059 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20060 {
20061 *total = 0;
20062 return false;
20063 }
20064 /* FALLTHRU */
20065
20066 case AND:
20067 case IOR:
20068 case XOR:
d5861a7a
DE
20069 case ZERO_EXTRACT:
20070 *total = COSTS_N_INSNS (1);
20071 return false;
20072
066cd967
DE
20073 case ASHIFT:
20074 case ASHIFTRT:
20075 case LSHIFTRT:
20076 case ROTATE:
20077 case ROTATERT:
d5861a7a 20078 /* Handle mul_highpart. */
066cd967
DE
20079 if (outer_code == TRUNCATE
20080 && GET_CODE (XEXP (x, 0)) == MULT)
20081 {
20082 if (mode == DImode)
20083 *total = rs6000_cost->muldi;
20084 else
20085 *total = rs6000_cost->mulsi;
20086 return true;
20087 }
d5861a7a
DE
20088 else if (outer_code == AND)
20089 *total = 0;
20090 else
20091 *total = COSTS_N_INSNS (1);
20092 return false;
20093
20094 case SIGN_EXTEND:
20095 case ZERO_EXTEND:
20096 if (GET_CODE (XEXP (x, 0)) == MEM)
20097 *total = 0;
20098 else
20099 *total = COSTS_N_INSNS (1);
066cd967 20100 return false;
06a67bdd 20101
066cd967
DE
20102 case COMPARE:
20103 case NEG:
20104 case ABS:
20105 if (!FLOAT_MODE_P (mode))
20106 {
20107 *total = COSTS_N_INSNS (1);
20108 return false;
20109 }
20110 /* FALLTHRU */
20111
20112 case FLOAT:
20113 case UNSIGNED_FLOAT:
20114 case FIX:
20115 case UNSIGNED_FIX:
06a67bdd
RS
20116 case FLOAT_TRUNCATE:
20117 *total = rs6000_cost->fp;
066cd967 20118 return false;
06a67bdd 20119
a2af5043
DJ
20120 case FLOAT_EXTEND:
20121 if (mode == DFmode)
20122 *total = 0;
20123 else
20124 *total = rs6000_cost->fp;
20125 return false;
20126
06a67bdd
RS
20127 case UNSPEC:
20128 switch (XINT (x, 1))
20129 {
20130 case UNSPEC_FRSP:
20131 *total = rs6000_cost->fp;
20132 return true;
20133
20134 default:
20135 break;
20136 }
20137 break;
20138
20139 case CALL:
20140 case IF_THEN_ELSE:
20141 if (optimize_size)
20142 {
20143 *total = COSTS_N_INSNS (1);
20144 return true;
20145 }
066cd967
DE
20146 else if (FLOAT_MODE_P (mode)
20147 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20148 {
20149 *total = rs6000_cost->fp;
20150 return false;
20151 }
06a67bdd
RS
20152 break;
20153
c0600ecd
DE
20154 case EQ:
20155 case GTU:
20156 case LTU:
22e54023
DE
20157 /* Carry bit requires mode == Pmode.
20158 NEG or PLUS already counted so only add one. */
20159 if (mode == Pmode
20160 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20161 {
22e54023
DE
20162 *total = COSTS_N_INSNS (1);
20163 return true;
20164 }
20165 if (outer_code == SET)
20166 {
20167 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20168 {
22e54023 20169 *total = COSTS_N_INSNS (2);
c0600ecd 20170 return true;
c0600ecd 20171 }
22e54023
DE
20172 else if (mode == Pmode)
20173 {
20174 *total = COSTS_N_INSNS (3);
20175 return false;
20176 }
20177 }
20178 /* FALLTHRU */
20179
20180 case GT:
20181 case LT:
20182 case UNORDERED:
20183 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
20184 {
20185 *total = COSTS_N_INSNS (2);
20186 return true;
c0600ecd 20187 }
22e54023
DE
20188 /* CC COMPARE. */
20189 if (outer_code == COMPARE)
20190 {
20191 *total = 0;
20192 return true;
20193 }
20194 break;
c0600ecd 20195
3c50106f 20196 default:
06a67bdd 20197 break;
3c50106f 20198 }
06a67bdd
RS
20199
20200 return false;
3c50106f
RH
20201}
20202
34bb030a
DE
20203/* A C expression returning the cost of moving data from a register of class
20204 CLASS1 to one of CLASS2. */
20205
20206int
f676971a 20207rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 20208 enum reg_class from, enum reg_class to)
34bb030a
DE
20209{
20210 /* Moves from/to GENERAL_REGS. */
20211 if (reg_classes_intersect_p (to, GENERAL_REGS)
20212 || reg_classes_intersect_p (from, GENERAL_REGS))
20213 {
20214 if (! reg_classes_intersect_p (to, GENERAL_REGS))
20215 from = to;
20216
20217 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
20218 return (rs6000_memory_move_cost (mode, from, 0)
20219 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
20220
c4ad648e
AM
20221 /* It's more expensive to move CR_REGS than CR0_REGS because of the
20222 shift. */
34bb030a
DE
20223 else if (from == CR_REGS)
20224 return 4;
20225
20226 else
c4ad648e 20227 /* A move will cost one instruction per GPR moved. */
c8b622ff 20228 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
20229 }
20230
c4ad648e 20231 /* Moving between two similar registers is just one instruction. */
34bb030a 20232 else if (reg_classes_intersect_p (to, from))
7393f7f8 20233 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 20234
c4ad648e 20235 /* Everything else has to go through GENERAL_REGS. */
34bb030a 20236 else
f676971a 20237 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
20238 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
20239}
20240
20241/* A C expressions returning the cost of moving data of MODE from a register to
20242 or from memory. */
20243
20244int
f676971a 20245rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 20246 int in ATTRIBUTE_UNUSED)
34bb030a
DE
20247{
20248 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 20249 return 4 * hard_regno_nregs[0][mode];
34bb030a 20250 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 20251 return 4 * hard_regno_nregs[32][mode];
34bb030a 20252 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 20253 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
20254 else
20255 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
20256}
20257
ef765ea9
DE
20258/* Newton-Raphson approximation of single-precision floating point divide n/d.
20259 Assumes no trapping math and finite arguments. */
20260
20261void
20262rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
20263{
20264 rtx x0, e0, e1, y1, u0, v0, one;
20265
20266 x0 = gen_reg_rtx (SFmode);
20267 e0 = gen_reg_rtx (SFmode);
20268 e1 = gen_reg_rtx (SFmode);
20269 y1 = gen_reg_rtx (SFmode);
20270 u0 = gen_reg_rtx (SFmode);
20271 v0 = gen_reg_rtx (SFmode);
20272 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
20273
20274 /* x0 = 1./d estimate */
20275 emit_insn (gen_rtx_SET (VOIDmode, x0,
20276 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
20277 UNSPEC_FRES)));
20278 /* e0 = 1. - d * x0 */
20279 emit_insn (gen_rtx_SET (VOIDmode, e0,
20280 gen_rtx_MINUS (SFmode, one,
20281 gen_rtx_MULT (SFmode, d, x0))));
20282 /* e1 = e0 + e0 * e0 */
20283 emit_insn (gen_rtx_SET (VOIDmode, e1,
20284 gen_rtx_PLUS (SFmode,
20285 gen_rtx_MULT (SFmode, e0, e0), e0)));
20286 /* y1 = x0 + e1 * x0 */
20287 emit_insn (gen_rtx_SET (VOIDmode, y1,
20288 gen_rtx_PLUS (SFmode,
20289 gen_rtx_MULT (SFmode, e1, x0), x0)));
20290 /* u0 = n * y1 */
20291 emit_insn (gen_rtx_SET (VOIDmode, u0,
20292 gen_rtx_MULT (SFmode, n, y1)));
20293 /* v0 = n - d * u0 */
20294 emit_insn (gen_rtx_SET (VOIDmode, v0,
20295 gen_rtx_MINUS (SFmode, n,
20296 gen_rtx_MULT (SFmode, d, u0))));
20297 /* res = u0 + v0 * y1 */
20298 emit_insn (gen_rtx_SET (VOIDmode, res,
20299 gen_rtx_PLUS (SFmode,
20300 gen_rtx_MULT (SFmode, v0, y1), u0)));
20301}
20302
20303/* Newton-Raphson approximation of double-precision floating point divide n/d.
20304 Assumes no trapping math and finite arguments. */
20305
20306void
20307rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
20308{
20309 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
20310
20311 x0 = gen_reg_rtx (DFmode);
20312 e0 = gen_reg_rtx (DFmode);
20313 e1 = gen_reg_rtx (DFmode);
20314 e2 = gen_reg_rtx (DFmode);
20315 y1 = gen_reg_rtx (DFmode);
20316 y2 = gen_reg_rtx (DFmode);
20317 y3 = gen_reg_rtx (DFmode);
20318 u0 = gen_reg_rtx (DFmode);
20319 v0 = gen_reg_rtx (DFmode);
20320 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
20321
20322 /* x0 = 1./d estimate */
20323 emit_insn (gen_rtx_SET (VOIDmode, x0,
20324 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
20325 UNSPEC_FRES)));
20326 /* e0 = 1. - d * x0 */
20327 emit_insn (gen_rtx_SET (VOIDmode, e0,
20328 gen_rtx_MINUS (DFmode, one,
20329 gen_rtx_MULT (SFmode, d, x0))));
20330 /* y1 = x0 + e0 * x0 */
20331 emit_insn (gen_rtx_SET (VOIDmode, y1,
20332 gen_rtx_PLUS (DFmode,
20333 gen_rtx_MULT (DFmode, e0, x0), x0)));
20334 /* e1 = e0 * e0 */
20335 emit_insn (gen_rtx_SET (VOIDmode, e1,
20336 gen_rtx_MULT (DFmode, e0, e0)));
20337 /* y2 = y1 + e1 * y1 */
20338 emit_insn (gen_rtx_SET (VOIDmode, y2,
20339 gen_rtx_PLUS (DFmode,
20340 gen_rtx_MULT (DFmode, e1, y1), y1)));
20341 /* e2 = e1 * e1 */
20342 emit_insn (gen_rtx_SET (VOIDmode, e2,
20343 gen_rtx_MULT (DFmode, e1, e1)));
20344 /* y3 = y2 + e2 * y2 */
20345 emit_insn (gen_rtx_SET (VOIDmode, y3,
20346 gen_rtx_PLUS (DFmode,
20347 gen_rtx_MULT (DFmode, e2, y2), y2)));
20348 /* u0 = n * y3 */
20349 emit_insn (gen_rtx_SET (VOIDmode, u0,
20350 gen_rtx_MULT (DFmode, n, y3)));
20351 /* v0 = n - d * u0 */
20352 emit_insn (gen_rtx_SET (VOIDmode, v0,
20353 gen_rtx_MINUS (DFmode, n,
20354 gen_rtx_MULT (DFmode, d, u0))));
20355 /* res = u0 + v0 * y3 */
20356 emit_insn (gen_rtx_SET (VOIDmode, res,
20357 gen_rtx_PLUS (DFmode,
20358 gen_rtx_MULT (DFmode, v0, y3), u0)));
20359}
20360
565ef4ba
RS
20361
20362/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
20363 target, and SRC is the argument operand. */
20364
20365void
20366rs6000_emit_popcount (rtx dst, rtx src)
20367{
20368 enum machine_mode mode = GET_MODE (dst);
20369 rtx tmp1, tmp2;
20370
20371 tmp1 = gen_reg_rtx (mode);
20372
20373 if (mode == SImode)
20374 {
20375 emit_insn (gen_popcntbsi2 (tmp1, src));
20376 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
20377 NULL_RTX, 0);
20378 tmp2 = force_reg (SImode, tmp2);
20379 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
20380 }
20381 else
20382 {
20383 emit_insn (gen_popcntbdi2 (tmp1, src));
20384 tmp2 = expand_mult (DImode, tmp1,
20385 GEN_INT ((HOST_WIDE_INT)
20386 0x01010101 << 32 | 0x01010101),
20387 NULL_RTX, 0);
20388 tmp2 = force_reg (DImode, tmp2);
20389 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
20390 }
20391}
20392
20393
20394/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
20395 target, and SRC is the argument operand. */
20396
20397void
20398rs6000_emit_parity (rtx dst, rtx src)
20399{
20400 enum machine_mode mode = GET_MODE (dst);
20401 rtx tmp;
20402
20403 tmp = gen_reg_rtx (mode);
20404 if (mode == SImode)
20405 {
20406 /* Is mult+shift >= shift+xor+shift+xor? */
20407 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
20408 {
20409 rtx tmp1, tmp2, tmp3, tmp4;
20410
20411 tmp1 = gen_reg_rtx (SImode);
20412 emit_insn (gen_popcntbsi2 (tmp1, src));
20413
20414 tmp2 = gen_reg_rtx (SImode);
20415 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
20416 tmp3 = gen_reg_rtx (SImode);
20417 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
20418
20419 tmp4 = gen_reg_rtx (SImode);
20420 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
20421 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
20422 }
20423 else
20424 rs6000_emit_popcount (tmp, src);
20425 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
20426 }
20427 else
20428 {
20429 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
20430 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
20431 {
20432 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
20433
20434 tmp1 = gen_reg_rtx (DImode);
20435 emit_insn (gen_popcntbdi2 (tmp1, src));
20436
20437 tmp2 = gen_reg_rtx (DImode);
20438 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
20439 tmp3 = gen_reg_rtx (DImode);
20440 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
20441
20442 tmp4 = gen_reg_rtx (DImode);
20443 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
20444 tmp5 = gen_reg_rtx (DImode);
20445 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
20446
20447 tmp6 = gen_reg_rtx (DImode);
20448 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
20449 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
20450 }
20451 else
20452 rs6000_emit_popcount (tmp, src);
20453 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
20454 }
20455}
20456
ded9bf77
AH
20457/* Return an RTX representing where to find the function value of a
20458 function returning MODE. */
20459static rtx
20460rs6000_complex_function_value (enum machine_mode mode)
20461{
20462 unsigned int regno;
20463 rtx r1, r2;
20464 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 20465 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 20466
18f63bfa
AH
20467 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
20468 regno = FP_ARG_RETURN;
354ed18f
AH
20469 else
20470 {
18f63bfa 20471 regno = GP_ARG_RETURN;
ded9bf77 20472
18f63bfa
AH
20473 /* 32-bit is OK since it'll go in r3/r4. */
20474 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
20475 return gen_rtx_REG (mode, regno);
20476 }
20477
18f63bfa
AH
20478 if (inner_bytes >= 8)
20479 return gen_rtx_REG (mode, regno);
20480
ded9bf77
AH
20481 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
20482 const0_rtx);
20483 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 20484 GEN_INT (inner_bytes));
ded9bf77
AH
20485 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
20486}
20487
a6ebc39a
AH
20488/* Define how to find the value returned by a function.
20489 VALTYPE is the data type of the value (as a tree).
20490 If the precise function being called is known, FUNC is its FUNCTION_DECL;
20491 otherwise, FUNC is 0.
20492
20493 On the SPE, both FPs and vectors are returned in r3.
20494
20495 On RS/6000 an integer value is in r3 and a floating-point value is in
20496 fp1, unless -msoft-float. */
20497
20498rtx
20499rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
20500{
20501 enum machine_mode mode;
2a8fa26c 20502 unsigned int regno;
a6ebc39a 20503
594a51fe
SS
20504 /* Special handling for structs in darwin64. */
20505 if (rs6000_darwin64_abi
20506 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
20507 && TREE_CODE (valtype) == RECORD_TYPE
20508 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
20509 {
20510 CUMULATIVE_ARGS valcum;
20511 rtx valret;
20512
0b5383eb 20513 valcum.words = 0;
594a51fe
SS
20514 valcum.fregno = FP_ARG_MIN_REG;
20515 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
20516 /* Do a trial code generation as if this were going to be passed as
20517 an argument; if any part goes in memory, we return NULL. */
20518 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
20519 if (valret)
20520 return valret;
20521 /* Otherwise fall through to standard ABI rules. */
20522 }
20523
0e67400a
FJ
20524 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
20525 {
20526 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
20527 return gen_rtx_PARALLEL (DImode,
20528 gen_rtvec (2,
20529 gen_rtx_EXPR_LIST (VOIDmode,
20530 gen_rtx_REG (SImode, GP_ARG_RETURN),
20531 const0_rtx),
20532 gen_rtx_EXPR_LIST (VOIDmode,
20533 gen_rtx_REG (SImode,
20534 GP_ARG_RETURN + 1),
20535 GEN_INT (4))));
20536 }
0f086e42
FJ
20537 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
20538 {
20539 return gen_rtx_PARALLEL (DCmode,
20540 gen_rtvec (4,
20541 gen_rtx_EXPR_LIST (VOIDmode,
20542 gen_rtx_REG (SImode, GP_ARG_RETURN),
20543 const0_rtx),
20544 gen_rtx_EXPR_LIST (VOIDmode,
20545 gen_rtx_REG (SImode,
20546 GP_ARG_RETURN + 1),
20547 GEN_INT (4)),
20548 gen_rtx_EXPR_LIST (VOIDmode,
20549 gen_rtx_REG (SImode,
20550 GP_ARG_RETURN + 2),
20551 GEN_INT (8)),
20552 gen_rtx_EXPR_LIST (VOIDmode,
20553 gen_rtx_REG (SImode,
20554 GP_ARG_RETURN + 3),
20555 GEN_INT (12))));
20556 }
602ea4d3 20557
7348aa7f
FXC
20558 mode = TYPE_MODE (valtype);
20559 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 20560 || POINTER_TYPE_P (valtype))
b78d48dd 20561 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 20562
00b79d54 20563 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20564 {
20565 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20566 {
20567 switch (mode)
20568 {
20569 default:
20570 gcc_unreachable ();
20571 case SDmode:
20572 regno = GP_ARG_RETURN;
20573 break;
20574 case DDmode:
20575 regno = FP_ARG_RETURN;
20576 break;
20577 case TDmode:
20578 /* Use f2:f3 specified by the ABI. */
20579 regno = FP_ARG_RETURN + 1;
20580 break;
20581 }
20582 }
20583 else
20584 regno = GP_ARG_RETURN;
20585 }
00b79d54 20586 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 20587 regno = FP_ARG_RETURN;
ded9bf77 20588 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 20589 && targetm.calls.split_complex_arg)
ded9bf77 20590 return rs6000_complex_function_value (mode);
44688022 20591 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 20592 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 20593 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 20594 regno = ALTIVEC_ARG_RETURN;
18f63bfa 20595 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20596 && (mode == DFmode || mode == DCmode
20597 || mode == TFmode || mode == TCmode))
18f63bfa 20598 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
20599 else
20600 regno = GP_ARG_RETURN;
20601
20602 return gen_rtx_REG (mode, regno);
20603}
20604
ded9bf77
AH
20605/* Define how to find the value returned by a library function
20606 assuming the value has mode MODE. */
20607rtx
20608rs6000_libcall_value (enum machine_mode mode)
20609{
20610 unsigned int regno;
20611
2e6c9641
FJ
20612 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
20613 {
20614 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
20615 return gen_rtx_PARALLEL (DImode,
20616 gen_rtvec (2,
20617 gen_rtx_EXPR_LIST (VOIDmode,
20618 gen_rtx_REG (SImode, GP_ARG_RETURN),
20619 const0_rtx),
20620 gen_rtx_EXPR_LIST (VOIDmode,
20621 gen_rtx_REG (SImode,
20622 GP_ARG_RETURN + 1),
20623 GEN_INT (4))));
20624 }
20625
00b79d54 20626 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20627 {
20628 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20629 {
20630 switch (mode)
20631 {
20632 default:
20633 gcc_unreachable ();
20634 case SDmode:
20635 regno = GP_ARG_RETURN;
20636 break;
20637 case DDmode:
20638 regno = FP_ARG_RETURN;
20639 break;
20640 case TDmode:
20641 /* Use f2:f3 specified by the ABI. */
20642 regno = FP_ARG_RETURN + 1;
20643 break;
20644 }
20645 }
20646 else
20647 regno = GP_ARG_RETURN;
20648 }
00b79d54 20649 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
20650 && TARGET_HARD_FLOAT && TARGET_FPRS)
20651 regno = FP_ARG_RETURN;
44688022
AM
20652 else if (ALTIVEC_VECTOR_MODE (mode)
20653 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 20654 regno = ALTIVEC_ARG_RETURN;
42ba5130 20655 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 20656 return rs6000_complex_function_value (mode);
18f63bfa 20657 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20658 && (mode == DFmode || mode == DCmode
20659 || mode == TFmode || mode == TCmode))
18f63bfa 20660 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
20661 else
20662 regno = GP_ARG_RETURN;
20663
20664 return gen_rtx_REG (mode, regno);
20665}
20666
d1d0c603
JJ
20667/* Define the offset between two registers, FROM to be eliminated and its
20668 replacement TO, at the start of a routine. */
20669HOST_WIDE_INT
20670rs6000_initial_elimination_offset (int from, int to)
20671{
20672 rs6000_stack_t *info = rs6000_stack_info ();
20673 HOST_WIDE_INT offset;
20674
7d5175e1 20675 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 20676 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
20677 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20678 {
20679 offset = info->push_p ? 0 : -info->total_size;
20680 if (FRAME_GROWS_DOWNWARD)
5b667039 20681 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
20682 }
20683 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
20684 offset = FRAME_GROWS_DOWNWARD
5b667039 20685 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
20686 : 0;
20687 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
20688 offset = info->total_size;
20689 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20690 offset = info->push_p ? info->total_size : 0;
20691 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
20692 offset = 0;
20693 else
37409796 20694 gcc_unreachable ();
d1d0c603
JJ
20695
20696 return offset;
20697}
20698
58646b77 20699/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 20700
c8e4f0e9 20701static bool
58646b77 20702rs6000_is_opaque_type (tree type)
62e1dfcf 20703{
58646b77 20704 return (type == opaque_V2SI_type_node
2abe3e28 20705 || type == opaque_V2SF_type_node
58646b77
PB
20706 || type == opaque_p_V2SI_type_node
20707 || type == opaque_V4SI_type_node);
62e1dfcf
NC
20708}
20709
96714395 20710static rtx
a2369ed3 20711rs6000_dwarf_register_span (rtx reg)
96714395
AH
20712{
20713 unsigned regno;
20714
4d4cbc0e
AH
20715 if (TARGET_SPE
20716 && (SPE_VECTOR_MODE (GET_MODE (reg))
20717 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
20718 ;
20719 else
96714395
AH
20720 return NULL_RTX;
20721
20722 regno = REGNO (reg);
20723
20724 /* The duality of the SPE register size wreaks all kinds of havoc.
20725 This is a way of distinguishing r0 in 32-bits from r0 in
20726 64-bits. */
20727 return
20728 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
20729 BYTES_BIG_ENDIAN
20730 ? gen_rtvec (2,
20731 gen_rtx_REG (SImode, regno + 1200),
20732 gen_rtx_REG (SImode, regno))
20733 : gen_rtvec (2,
20734 gen_rtx_REG (SImode, regno),
20735 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
20736}
20737
37ea0b7e
JM
20738/* Fill in sizes for SPE register high parts in table used by unwinder. */
20739
20740static void
20741rs6000_init_dwarf_reg_sizes_extra (tree address)
20742{
20743 if (TARGET_SPE)
20744 {
20745 int i;
20746 enum machine_mode mode = TYPE_MODE (char_type_node);
20747 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
20748 rtx mem = gen_rtx_MEM (BLKmode, addr);
20749 rtx value = gen_int_mode (4, mode);
20750
20751 for (i = 1201; i < 1232; i++)
20752 {
20753 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
20754 HOST_WIDE_INT offset
20755 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
20756
20757 emit_move_insn (adjust_address (mem, mode, offset), value);
20758 }
20759 }
20760}
20761
93c9d1ba
AM
20762/* Map internal gcc register numbers to DWARF2 register numbers. */
20763
20764unsigned int
20765rs6000_dbx_register_number (unsigned int regno)
20766{
20767 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
20768 return regno;
20769 if (regno == MQ_REGNO)
20770 return 100;
20771 if (regno == LINK_REGISTER_REGNUM)
20772 return 108;
20773 if (regno == COUNT_REGISTER_REGNUM)
20774 return 109;
20775 if (CR_REGNO_P (regno))
20776 return regno - CR0_REGNO + 86;
20777 if (regno == XER_REGNO)
20778 return 101;
20779 if (ALTIVEC_REGNO_P (regno))
20780 return regno - FIRST_ALTIVEC_REGNO + 1124;
20781 if (regno == VRSAVE_REGNO)
20782 return 356;
20783 if (regno == VSCR_REGNO)
20784 return 67;
20785 if (regno == SPE_ACC_REGNO)
20786 return 99;
20787 if (regno == SPEFSCR_REGNO)
20788 return 612;
20789 /* SPE high reg number. We get these values of regno from
20790 rs6000_dwarf_register_span. */
37409796
NS
20791 gcc_assert (regno >= 1200 && regno < 1232);
20792 return regno;
93c9d1ba
AM
20793}
20794
93f90be6 20795/* target hook eh_return_filter_mode */
f676971a 20796static enum machine_mode
93f90be6
FJ
20797rs6000_eh_return_filter_mode (void)
20798{
20799 return TARGET_32BIT ? SImode : word_mode;
20800}
20801
00b79d54
BE
20802/* Target hook for scalar_mode_supported_p. */
20803static bool
20804rs6000_scalar_mode_supported_p (enum machine_mode mode)
20805{
20806 if (DECIMAL_FLOAT_MODE_P (mode))
20807 return true;
20808 else
20809 return default_scalar_mode_supported_p (mode);
20810}
20811
f676971a
EC
20812/* Target hook for vector_mode_supported_p. */
20813static bool
20814rs6000_vector_mode_supported_p (enum machine_mode mode)
20815{
20816
20817 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
20818 return true;
20819
20820 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
20821 return true;
20822
20823 else
20824 return false;
20825}
20826
bb8df8a6
EC
20827/* Target hook for invalid_arg_for_unprototyped_fn. */
20828static const char *
4d3e6fae
FJ
20829invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
20830{
20831 return (!rs6000_darwin64_abi
20832 && typelist == 0
20833 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
20834 && (funcdecl == NULL_TREE
20835 || (TREE_CODE (funcdecl) == FUNCTION_DECL
20836 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
20837 ? N_("AltiVec argument passed to unprototyped function")
20838 : NULL;
20839}
20840
3aebbe5f
JJ
20841/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
20842 setup by using __stack_chk_fail_local hidden function instead of
20843 calling __stack_chk_fail directly. Otherwise it is better to call
20844 __stack_chk_fail directly. */
20845
20846static tree
20847rs6000_stack_protect_fail (void)
20848{
20849 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
20850 ? default_hidden_stack_protect_fail ()
20851 : default_external_stack_protect_fail ();
20852}
20853
17211ab5 20854#include "gt-rs6000.h"