/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif

#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B) ((A) < (B) ? (A) : (B))
#define max(A,B) ((A) > (B) ? (A) : (B))

/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs  */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;

/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;

/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};

static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi=spe/nospe was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;

struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};

/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache block in bytes.  */
  const int l1_cache_lines;     /* number of lines in L1 cache.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
                                        operations.  */
};

const struct processor_costs *rs6000_cost;
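
/* Illustrative example (added comment): rs6000_cost is pointed at one of
   the per-processor tables below, according to the selected -mtune=
   processor, and the RTX cost hook reads fields such as
   rs6000_cost->mulsi when weighing operations.  A core whose integer
   multiply takes roughly five add-latencies would thus be described with
   COSTS_N_INSNS (5) for mulsi.  */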

/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,
  0,
  0,
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,
  0,
  0,
};

/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  32,
  1024,                 /* cache lines */
  0,                    /* streams */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,
  1024,                 /* cache lines */
  0,                    /* streams */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,
  512,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  128,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,
  512,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,
  256,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  512,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,
  512,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,
  512,                  /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,
  256,                  /* cache lines */
  6,                    /* streams */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* streams */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,
  1024,                 /* cache lines */
  1,                    /* prefetch streams */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,
  256,                  /* cache lines */
  8,                    /* prefetch streams */
};
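
/* Note (added comment): with 128-byte lines and 256 lines, the table
   above corresponds to a 32KB L1 data cache for POWER4/POWER5.  */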

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,
  512,                  /* cache lines */
  16,                   /* prefetch streams */
};


static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
                             int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static bool rs6000_reg_live_or_pic_offset_p (int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
                                               unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
                                             unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
                                      enum rs6000_builtins,
                                      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
                                             const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
                                                      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
                                                        tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
                                              HOST_WIDE_INT,
                                              rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
                                                tree, HOST_WIDE_INT,
                                                rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree,
                                    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
                             enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
                                       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;

/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};

#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif

#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
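/* For example (added comment), ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is
   0x80000000 and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001.  */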

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

       addis   tmp,anchor,high
       add     dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;


/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && (mode != TDmode || (regno % 2) == 0)
       && mode != SDmode
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}

/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}
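
/* Usage sketch (added comment): the table is indexed as
   rs6000_hard_regno_mode_ok_p[MODE][REGNO]; for example,
   rs6000_hard_regno_mode_ok_p[DFmode][32] says whether the first FP
   register may hold a DFmode value.  Precomputing it once here is meant
   to keep the backend's HARD_REGNO_MODE_OK check to a single array
   lookup instead of rerunning the tests above for every query.  */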

#if TARGET_MACHO
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */

static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
        {
          if (flag_pic)
            warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
          flag_pic = 0;
        }
      else if (flag_pic == 1)
        {
          flag_pic = 2;
        }
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targeting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    {
      target_flags |= MASK_ALTIVEC;
    }
}
#endif

/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;           /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;          /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"405fp", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"440", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"440fp", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"cell", PROCESSOR_CELL,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4",  PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"power5+", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
         {"power6", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND | MASK_CMPB | MASK_DFP },
         {"power6x", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP },
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
                     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
                     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
                     | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          for (j = 0; j < ptt_size; j++)
            if (! strcmp (ptr->string, processor_target_table[j].name))
              {
                if (ptr->set_tune_p)
                  rs6000_cpu = processor_target_table[j].processor;
1459
1460 if (ptr->set_arch_p)
1461 {
66188a7e
GK
1462 target_flags &= ~set_masks;
1463 target_flags |= (processor_target_table[j].target_enable
1464 & set_masks);
8e3f41e7
MM
1465 }
1466 break;
1467 }
1468
4406229e 1469 if (j == ptt_size)
8e3f41e7 1470 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1471 }
1472 }
8a61d227 1473
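(To see what the flag merge in the loop above does, here is a small standalone sketch, not GCC code and with made-up mask values: bits the user set explicitly are withheld from set_masks, so only the remaining bits get replaced by the selected table entry's target_enable bits.)

#include <stdio.h>

int main (void)
{
  unsigned set_masks = 0x00ff;      /* bits the table is allowed to change */
  unsigned explicit_bits = 0x0010;  /* a -m switch the user gave explicitly */
  unsigned target_flags = 0x0013;   /* current flag word */
  unsigned table_enable = 0x00a4;   /* the chosen entry's target_enable bits */

  set_masks &= ~explicit_bits;              /* don't override the user's choice */
  target_flags &= ~set_masks;               /* clear the overridable bits */
  target_flags |= table_enable & set_masks; /* fill them from the table */

  printf ("0x%04x\n", target_flags);        /* prints 0x00b4: bit 0x10 survives */
  return 0;
}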
993f19a8 1474 if (TARGET_E500)
a3170dc6
AH
1475 rs6000_isel = 1;
1476
dff9f1b6
DE
1477 /* If we are optimizing big endian systems for space, use the load/store
1478 multiple and string instructions. */
ef792183 1479 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1480 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1481
a4f6c312
SS
1482 /* Don't allow -mmultiple or -mstring on little endian systems
1483 unless the cpu is a 750, because the hardware doesn't support the
1484 instructions used in little endian mode, and causes an alignment
1485 trap. The 750 does not cause an alignment trap (except when the
1486 target is unaligned). */
bef84347 1487
b21fb038 1488 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1489 {
1490 if (TARGET_MULTIPLE)
1491 {
1492 target_flags &= ~MASK_MULTIPLE;
b21fb038 1493 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1494 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1495 }
1496
1497 if (TARGET_STRING)
1498 {
1499 target_flags &= ~MASK_STRING;
b21fb038 1500 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1501 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1502 }
1503 }
3933e0e1 1504
38c1f2d7
MM
1505 /* Set debug flags */
1506 if (rs6000_debug_name)
1507 {
bfc79d3b 1508 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1509 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1510 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1511 rs6000_debug_stack = 1;
bfc79d3b 1512 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1513 rs6000_debug_arg = 1;
1514 else
c725bd79 1515 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1516 }
1517
57ac7be9
AM
1518 if (rs6000_traceback_name)
1519 {
1520 if (! strncmp (rs6000_traceback_name, "full", 4))
1521 rs6000_traceback = traceback_full;
1522 else if (! strncmp (rs6000_traceback_name, "part", 4))
1523 rs6000_traceback = traceback_part;
1524 else if (! strncmp (rs6000_traceback_name, "no", 2))
1525 rs6000_traceback = traceback_none;
1526 else
9e637a26 1527 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1528 rs6000_traceback_name);
1529 }
1530
78f5898b
AH
1531 if (!rs6000_explicit_options.long_double)
1532 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1533
602ea4d3 1534#ifndef POWERPC_LINUX
d3603e8c 1535 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1536 rs6000_ieeequad = 1;
1537#endif
1538
6d0ef01e
HP
1539 /* Set Altivec ABI as default for powerpc64 linux. */
1540 if (TARGET_ELF && TARGET_64BIT)
1541 {
1542 rs6000_altivec_abi = 1;
78f5898b 1543 TARGET_ALTIVEC_VRSAVE = 1;
6d0ef01e
HP
1544 }
1545
594a51fe
SS
1546 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1547 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1548 {
1549 rs6000_darwin64_abi = 1;
9c7956fd 1550#if TARGET_MACHO
6ac49599 1551 darwin_one_byte_bool = 1;
9c7956fd 1552#endif
d9168963
SS
1553 /* Default to natural alignment, for better performance. */
1554 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1555 }
1556
194c524a
DE
1557 /* Place FP constants in the constant pool instead of TOC
1558 if section anchors are enabled. */
1559 if (flag_section_anchors)
1560 TARGET_NO_FP_IN_TOC = 1;
1561
c4501e62
JJ
1562 /* Handle -mtls-size option. */
1563 rs6000_parse_tls_size_option ();
1564
a7ae18e2
AH
1565#ifdef SUBTARGET_OVERRIDE_OPTIONS
1566 SUBTARGET_OVERRIDE_OPTIONS;
1567#endif
1568#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1569 SUBSUBTARGET_OVERRIDE_OPTIONS;
1570#endif
4d4cbc0e
AH
1571#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1572 SUB3TARGET_OVERRIDE_OPTIONS;
1573#endif
a7ae18e2 1574
5da702b1
AH
1575 if (TARGET_E500)
1576 {
1577 /* The e500 does not have string instructions, and we set
1578 MASK_STRING above when optimizing for size. */
1579 if ((target_flags & MASK_STRING) != 0)
1580 target_flags = target_flags & ~MASK_STRING;
1581 }
1582 else if (rs6000_select[1].string != NULL)
1583 {
1584 /* For the powerpc-eabispe configuration, we set all these by
1585 default, so let's unset them if we manually set another
1586 CPU that is not the E500. */
78f5898b 1587 if (!rs6000_explicit_options.abi)
5da702b1 1588 rs6000_spe_abi = 0;
78f5898b 1589 if (!rs6000_explicit_options.spe)
5da702b1 1590 rs6000_spe = 0;
78f5898b 1591 if (!rs6000_explicit_options.float_gprs)
5da702b1 1592 rs6000_float_gprs = 0;
78f5898b 1593 if (!rs6000_explicit_options.isel)
5da702b1
AH
1594 rs6000_isel = 0;
1595 }
b5044283 1596
eca0d5e8
JM
1597 /* Detect invalid option combinations with E500. */
1598 CHECK_E500_OPTIONS;
1599
ec507f2d 1600 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1601 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1602 && rs6000_cpu != PROCESSOR_POWER6
1603 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1604 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1605 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1606 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1607 || rs6000_cpu == PROCESSOR_POWER5
1608 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1609
ec507f2d
DE
1610 rs6000_sched_restricted_insns_priority
1611 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1612
569fa502 1613 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1614 rs6000_sched_costly_dep
1615 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1616
569fa502
DN
1617 if (rs6000_sched_costly_dep_str)
1618 {
f676971a 1619 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1620 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1621 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1622 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1623 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1624 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1625 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1626 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1627 else
c4ad648e 1628 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1629 }
1630
1631 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1632 rs6000_sched_insert_nops
1633 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1634
cbe26ab8
DN
1635 if (rs6000_sched_insert_nops_str)
1636 {
1637 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1638 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1639 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1640 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1641 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1642 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1643 else
c4ad648e 1644 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1645 }
1646
c81bebd7 1647#ifdef TARGET_REGNAMES
a4f6c312
SS
1648 /* If the user desires alternate register names, copy in the
1649 alternate names now. */
c81bebd7 1650 if (TARGET_REGNAMES)
4e135bdd 1651 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1652#endif
1653
df01da37 1654 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1655 If -maix-struct-return or -msvr4-struct-return was explicitly
1656 used, don't override with the ABI default. */
df01da37
DE
1657 if (!rs6000_explicit_options.aix_struct_ret)
1658 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1659
602ea4d3 1660 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1661 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1662
f676971a 1663 if (TARGET_TOC)
9ebbca7d 1664 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1665
301d03af
RS
1666 /* We can only guarantee the availability of DI pseudo-ops when
1667 assembling for 64-bit targets. */
ae6c1efd 1668 if (!TARGET_64BIT)
301d03af
RS
1669 {
1670 targetm.asm_out.aligned_op.di = NULL;
1671 targetm.asm_out.unaligned_op.di = NULL;
1672 }
1673
1494c534
DE
1674 /* Set branch target alignment, if not optimizing for size. */
1675 if (!optimize_size)
1676 {
d296e02e
AP
1677 /* Cell wants to be aligned to 8 bytes for dual issue. */
1678 if (rs6000_cpu == PROCESSOR_CELL)
1679 {
1680 if (align_functions <= 0)
1681 align_functions = 8;
1682 if (align_jumps <= 0)
1683 align_jumps = 8;
1684 if (align_loops <= 0)
1685 align_loops = 8;
1686 }
44cd321e 1687 if (rs6000_align_branch_targets)
1494c534
DE
1688 {
1689 if (align_functions <= 0)
1690 align_functions = 16;
1691 if (align_jumps <= 0)
1692 align_jumps = 16;
1693 if (align_loops <= 0)
1694 align_loops = 16;
1695 }
1696 if (align_jumps_max_skip <= 0)
1697 align_jumps_max_skip = 15;
1698 if (align_loops_max_skip <= 0)
1699 align_loops_max_skip = 15;
1700 }
2792d578 1701
71f123ca
FS
1702 /* Arrange to save and restore machine status around nested functions. */
1703 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1704
1705 /* We should always be splitting complex arguments, but we can't break
1706 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1707 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1708 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1709
1710 /* Initialize rs6000_cost with the appropriate target costs. */
1711 if (optimize_size)
1712 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1713 else
1714 switch (rs6000_cpu)
1715 {
1716 case PROCESSOR_RIOS1:
1717 rs6000_cost = &rios1_cost;
1718 break;
1719
1720 case PROCESSOR_RIOS2:
1721 rs6000_cost = &rios2_cost;
1722 break;
1723
1724 case PROCESSOR_RS64A:
1725 rs6000_cost = &rs64a_cost;
1726 break;
1727
1728 case PROCESSOR_MPCCORE:
1729 rs6000_cost = &mpccore_cost;
1730 break;
1731
1732 case PROCESSOR_PPC403:
1733 rs6000_cost = &ppc403_cost;
1734 break;
1735
1736 case PROCESSOR_PPC405:
1737 rs6000_cost = &ppc405_cost;
1738 break;
1739
1740 case PROCESSOR_PPC440:
1741 rs6000_cost = &ppc440_cost;
1742 break;
1743
1744 case PROCESSOR_PPC601:
1745 rs6000_cost = &ppc601_cost;
1746 break;
1747
1748 case PROCESSOR_PPC603:
1749 rs6000_cost = &ppc603_cost;
1750 break;
1751
1752 case PROCESSOR_PPC604:
1753 rs6000_cost = &ppc604_cost;
1754 break;
1755
1756 case PROCESSOR_PPC604e:
1757 rs6000_cost = &ppc604e_cost;
1758 break;
1759
1760 case PROCESSOR_PPC620:
8b897cfa
RS
1761 rs6000_cost = &ppc620_cost;
1762 break;
1763
f0517163
RS
1764 case PROCESSOR_PPC630:
1765 rs6000_cost = &ppc630_cost;
1766 break;
1767
982afe02 1768 case PROCESSOR_CELL:
d296e02e
AP
1769 rs6000_cost = &ppccell_cost;
1770 break;
1771
8b897cfa
RS
1772 case PROCESSOR_PPC750:
1773 case PROCESSOR_PPC7400:
1774 rs6000_cost = &ppc750_cost;
1775 break;
1776
1777 case PROCESSOR_PPC7450:
1778 rs6000_cost = &ppc7450_cost;
1779 break;
1780
1781 case PROCESSOR_PPC8540:
1782 rs6000_cost = &ppc8540_cost;
1783 break;
1784
1785 case PROCESSOR_POWER4:
1786 case PROCESSOR_POWER5:
1787 rs6000_cost = &power4_cost;
1788 break;
1789
44cd321e
PS
1790 case PROCESSOR_POWER6:
1791 rs6000_cost = &power6_cost;
1792 break;
1793
8b897cfa 1794 default:
37409796 1795 gcc_unreachable ();
8b897cfa 1796 }
0b11da67
DE
1797
1798 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1799 set_param_value ("simultaneous-prefetches",
1800 rs6000_cost->simultaneous_prefetches);
1801 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
1802 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_lines);
1803 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1804 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5248c961 1805}
5accd822 1806
7ccf35ed
DN
1807/* Implement targetm.vectorize.builtin_mask_for_load. */
1808static tree
1809rs6000_builtin_mask_for_load (void)
1810{
1811 if (TARGET_ALTIVEC)
1812 return altivec_builtin_mask_for_load;
1813 else
1814 return 0;
1815}
1816
f57d17f1
TM
1817/* Implement targetm.vectorize.builtin_conversion. */
1818static tree
1819rs6000_builtin_conversion (enum tree_code code, tree type)
1820{
1821 if (!TARGET_ALTIVEC)
1822 return NULL_TREE;
982afe02 1823
f57d17f1
TM
1824 switch (code)
1825 {
1826 case FLOAT_EXPR:
1827 switch (TYPE_MODE (type))
1828 {
1829 case V4SImode:
982afe02 1830 return TYPE_UNSIGNED (type) ?
f57d17f1
TM
1831 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
1832 rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
1833 default:
1834 return NULL_TREE;
1835 }
1836 default:
1837 return NULL_TREE;
1838 }
1839}
1840
89d67cca
DN
1841/* Implement targetm.vectorize.builtin_mul_widen_even. */
1842static tree
1843rs6000_builtin_mul_widen_even (tree type)
1844{
1845 if (!TARGET_ALTIVEC)
1846 return NULL_TREE;
1847
1848 switch (TYPE_MODE (type))
1849 {
1850 case V8HImode:
982afe02 1851 return TYPE_UNSIGNED (type) ?
89d67cca
DN
1852 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
1853 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
1854
1855 case V16QImode:
1856 return TYPE_UNSIGNED (type) ?
1857 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
1858 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
1859 default:
1860 return NULL_TREE;
1861 }
1862}
1863
1864/* Implement targetm.vectorize.builtin_mul_widen_odd. */
1865static tree
1866rs6000_builtin_mul_widen_odd (tree type)
1867{
1868 if (!TARGET_ALTIVEC)
1869 return NULL_TREE;
1870
1871 switch (TYPE_MODE (type))
1872 {
1873 case V8HImode:
1874 return TYPE_UNSIGNED (type) ?
1875 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
1876 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
1877
1878 case V16QImode:
1879 return TYPE_UNSIGNED (type) ?
1880 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
1881 rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
1882 default:
1883 return NULL_TREE;
1884 }
1885}
1886
5b900a4c
DN
1887
1888/* Return true iff a data reference of TYPE can reach vector alignment (16)
1889 after applying N iterations. This routine does not determine
1890 how many iterations are required to reach the desired alignment. */
1891
1892static bool
1893rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
1894{
1895 if (is_packed)
1896 return false;
1897
1898 if (TARGET_32BIT)
1899 {
1900 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
1901 return true;
1902
1903 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
1904 return true;
1905
1906 return false;
1907 }
1908 else
1909 {
1910 if (TARGET_MACHO)
1911 return false;
1912
1913 /* Assuming that all other types are naturally aligned. CHECKME! */
1914 return true;
1915 }
1916}
1917
5da702b1
AH
1918/* Handle generic options of the form -mfoo=yes/no.
1919 NAME is the option name.
1920 VALUE is the option value.
1921 FLAG is a pointer to the flag in which to store 1 or 0, depending on
1922 whether the option value is 'yes' or 'no', respectively. */
993f19a8 1923static void
5da702b1 1924rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 1925{
5da702b1 1926 if (value == 0)
993f19a8 1927 return;
5da702b1
AH
1928 else if (!strcmp (value, "yes"))
1929 *flag = 1;
1930 else if (!strcmp (value, "no"))
1931 *flag = 0;
08b57fb3 1932 else
5da702b1 1933 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
1934}
1935
c4501e62
JJ
1936/* Validate and record the size specified with the -mtls-size option. */
1937
1938static void
863d938c 1939rs6000_parse_tls_size_option (void)
c4501e62
JJ
1940{
1941 if (rs6000_tls_size_string == 0)
1942 return;
1943 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1944 rs6000_tls_size = 16;
1945 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1946 rs6000_tls_size = 32;
1947 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1948 rs6000_tls_size = 64;
1949 else
9e637a26 1950 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
1951}
1952
5accd822 1953void
a2369ed3 1954optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 1955{
2e3f0db6
DJ
1956 if (DEFAULT_ABI == ABI_DARWIN)
1957 /* The Darwin libraries never set errno, so we might as well
1958 avoid calling them when that's the only reason we would. */
1959 flag_errno_math = 0;
59d6560b
DE
1960
1961 /* Double growth factor to counter reduced min jump length. */
1962 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
1963
1964 /* Enable section anchors by default.
1965 Skip section anchors for Objective C and Objective C++
1966 until the front ends are fixed. */
23f99493 1967 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
194c524a 1968 flag_section_anchors = 1;
5accd822 1969}
78f5898b
AH
1970
1971/* Implement TARGET_HANDLE_OPTION. */
1972
1973static bool
1974rs6000_handle_option (size_t code, const char *arg, int value)
1975{
1976 switch (code)
1977 {
1978 case OPT_mno_power:
1979 target_flags &= ~(MASK_POWER | MASK_POWER2
1980 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
1981 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1982 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
1983 break;
1984 case OPT_mno_powerpc:
1985 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1986 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
1987 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1988 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
1989 break;
1990 case OPT_mfull_toc:
d2894ab5
DE
1991 target_flags &= ~MASK_MINIMAL_TOC;
1992 TARGET_NO_FP_IN_TOC = 0;
1993 TARGET_NO_SUM_IN_TOC = 0;
1994 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
1995#ifdef TARGET_USES_SYSV4_OPT
1996 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
1997 just the same as -mminimal-toc. */
1998 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 1999 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2000#endif
2001 break;
2002
2003#ifdef TARGET_USES_SYSV4_OPT
2004 case OPT_mtoc:
2005 /* Make -mtoc behave like -mminimal-toc. */
2006 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2007 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2008 break;
2009#endif
2010
2011#ifdef TARGET_USES_AIX64_OPT
2012 case OPT_maix64:
2013#else
2014 case OPT_m64:
2015#endif
2c9c9afd
AM
2016 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2017 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2018 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2019 break;
2020
2021#ifdef TARGET_USES_AIX64_OPT
2022 case OPT_maix32:
2023#else
2024 case OPT_m32:
2025#endif
2026 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2027 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2028 break;
2029
2030 case OPT_minsert_sched_nops_:
2031 rs6000_sched_insert_nops_str = arg;
2032 break;
2033
2034 case OPT_mminimal_toc:
2035 if (value == 1)
2036 {
d2894ab5
DE
2037 TARGET_NO_FP_IN_TOC = 0;
2038 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2039 }
2040 break;
2041
2042 case OPT_mpower:
2043 if (value == 1)
c2dba4ab
AH
2044 {
2045 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2046 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2047 }
78f5898b
AH
2048 break;
2049
2050 case OPT_mpower2:
2051 if (value == 1)
c2dba4ab
AH
2052 {
2053 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2054 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2055 }
78f5898b
AH
2056 break;
2057
2058 case OPT_mpowerpc_gpopt:
2059 case OPT_mpowerpc_gfxopt:
2060 if (value == 1)
c2dba4ab
AH
2061 {
2062 target_flags |= MASK_POWERPC;
2063 target_flags_explicit |= MASK_POWERPC;
2064 }
78f5898b
AH
2065 break;
2066
df01da37
DE
2067 case OPT_maix_struct_return:
2068 case OPT_msvr4_struct_return:
2069 rs6000_explicit_options.aix_struct_ret = true;
2070 break;
2071
78f5898b
AH
2072 case OPT_mvrsave_:
2073 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2074 break;
78f5898b
AH
2075
2076 case OPT_misel_:
2077 rs6000_explicit_options.isel = true;
2078 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2079 break;
2080
2081 case OPT_mspe_:
2082 rs6000_explicit_options.spe = true;
2083 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2084 break;
2085
2086 case OPT_mdebug_:
2087 rs6000_debug_name = arg;
2088 break;
2089
2090#ifdef TARGET_USES_SYSV4_OPT
2091 case OPT_mcall_:
2092 rs6000_abi_name = arg;
2093 break;
2094
2095 case OPT_msdata_:
2096 rs6000_sdata_name = arg;
2097 break;
2098
2099 case OPT_mtls_size_:
2100 rs6000_tls_size_string = arg;
2101 break;
2102
2103 case OPT_mrelocatable:
2104 if (value == 1)
c2dba4ab 2105 {
e0bf274f
AM
2106 target_flags |= MASK_MINIMAL_TOC;
2107 target_flags_explicit |= MASK_MINIMAL_TOC;
2108 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2109 }
78f5898b
AH
2110 break;
2111
2112 case OPT_mrelocatable_lib:
2113 if (value == 1)
c2dba4ab 2114 {
e0bf274f
AM
2115 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2116 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2117 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2118 }
78f5898b 2119 else
c2dba4ab
AH
2120 {
2121 target_flags &= ~MASK_RELOCATABLE;
2122 target_flags_explicit |= MASK_RELOCATABLE;
2123 }
78f5898b
AH
2124 break;
2125#endif
2126
2127 case OPT_mabi_:
78f5898b
AH
2128 if (!strcmp (arg, "altivec"))
2129 {
d3603e8c 2130 rs6000_explicit_options.abi = true;
78f5898b
AH
2131 rs6000_altivec_abi = 1;
2132 rs6000_spe_abi = 0;
2133 }
2134 else if (! strcmp (arg, "no-altivec"))
d3603e8c
AM
2135 {
2136 /* ??? Don't set rs6000_explicit_options.abi here, to allow
2137 the default for rs6000_spe_abi to be chosen later. */
2138 rs6000_altivec_abi = 0;
2139 }
78f5898b
AH
2140 else if (! strcmp (arg, "spe"))
2141 {
d3603e8c 2142 rs6000_explicit_options.abi = true;
78f5898b
AH
2143 rs6000_spe_abi = 1;
2144 rs6000_altivec_abi = 0;
2145 if (!TARGET_SPE_ABI)
2146 error ("not configured for ABI: '%s'", arg);
2147 }
2148 else if (! strcmp (arg, "no-spe"))
d3603e8c
AM
2149 {
2150 rs6000_explicit_options.abi = true;
2151 rs6000_spe_abi = 0;
2152 }
78f5898b
AH
2153
2154 /* These are here for testing during development only, do not
2155 document in the manual please. */
2156 else if (! strcmp (arg, "d64"))
2157 {
2158 rs6000_darwin64_abi = 1;
2159 warning (0, "Using darwin64 ABI");
2160 }
2161 else if (! strcmp (arg, "d32"))
2162 {
2163 rs6000_darwin64_abi = 0;
2164 warning (0, "Using old darwin ABI");
2165 }
2166
602ea4d3
JJ
2167 else if (! strcmp (arg, "ibmlongdouble"))
2168 {
d3603e8c 2169 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2170 rs6000_ieeequad = 0;
2171 warning (0, "Using IBM extended precision long double");
2172 }
2173 else if (! strcmp (arg, "ieeelongdouble"))
2174 {
d3603e8c 2175 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2176 rs6000_ieeequad = 1;
2177 warning (0, "Using IEEE extended precision long double");
2178 }
2179
78f5898b
AH
2180 else
2181 {
2182 error ("unknown ABI specified: '%s'", arg);
2183 return false;
2184 }
2185 break;
2186
2187 case OPT_mcpu_:
2188 rs6000_select[1].string = arg;
2189 break;
2190
2191 case OPT_mtune_:
2192 rs6000_select[2].string = arg;
2193 break;
2194
2195 case OPT_mtraceback_:
2196 rs6000_traceback_name = arg;
2197 break;
2198
2199 case OPT_mfloat_gprs_:
2200 rs6000_explicit_options.float_gprs = true;
2201 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2202 rs6000_float_gprs = 1;
2203 else if (! strcmp (arg, "double"))
2204 rs6000_float_gprs = 2;
2205 else if (! strcmp (arg, "no"))
2206 rs6000_float_gprs = 0;
2207 else
2208 {
2209 error ("invalid option for -mfloat-gprs: '%s'", arg);
2210 return false;
2211 }
2212 break;
2213
2214 case OPT_mlong_double_:
2215 rs6000_explicit_options.long_double = true;
2216 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2217 if (value != 64 && value != 128)
2218 {
2219 error ("unknown switch -mlong-double-%s", arg);
2220 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2221 return false;
2222 }
2223 else
2224 rs6000_long_double_type_size = value;
2225 break;
2226
2227 case OPT_msched_costly_dep_:
2228 rs6000_sched_costly_dep_str = arg;
2229 break;
2230
2231 case OPT_malign_:
2232 rs6000_explicit_options.alignment = true;
2233 if (! strcmp (arg, "power"))
2234 {
2235 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2236 some C library functions, so warn about it. The flag may be
2237 useful for performance studies from time to time though, so
2238 don't disable it entirely. */
2239 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2240 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2241 " it is incompatible with the installed C and C++ libraries");
2242 rs6000_alignment_flags = MASK_ALIGN_POWER;
2243 }
2244 else if (! strcmp (arg, "natural"))
2245 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2246 else
2247 {
2248 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2249 return false;
2250 }
2251 break;
2252 }
2253 return true;
2254}
3cfa4909
MM
2255\f
2256/* Do anything needed at the start of the asm file. */
2257
1bc7c5b6 2258static void
863d938c 2259rs6000_file_start (void)
3cfa4909 2260{
c4d38ccb 2261 size_t i;
3cfa4909 2262 char buffer[80];
d330fd93 2263 const char *start = buffer;
3cfa4909 2264 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2265 const char *default_cpu = TARGET_CPU_DEFAULT;
2266 FILE *file = asm_out_file;
2267
2268 default_file_start ();
2269
2270#ifdef TARGET_BI_ARCH
2271 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2272 default_cpu = 0;
2273#endif
3cfa4909
MM
2274
2275 if (flag_verbose_asm)
2276 {
2277 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2278 rs6000_select[0].string = default_cpu;
2279
b6a1cbae 2280 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2281 {
2282 ptr = &rs6000_select[i];
2283 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2284 {
2285 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2286 start = "";
2287 }
2288 }
2289
9c6b4ed9 2290 if (PPC405_ERRATUM77)
b0bfee6e 2291 {
9c6b4ed9 2292 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2293 start = "";
2294 }
b0bfee6e 2295
b91da81f 2296#ifdef USING_ELFOS_H
3cfa4909
MM
2297 switch (rs6000_sdata)
2298 {
2299 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2300 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2301 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2302 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2303 }
2304
2305 if (rs6000_sdata && g_switch_value)
2306 {
307b599c
MK
2307 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2308 g_switch_value);
3cfa4909
MM
2309 start = "";
2310 }
2311#endif
2312
2313 if (*start == '\0')
949ea356 2314 putc ('\n', file);
3cfa4909 2315 }
b723e82f 2316
e51917ae
JM
2317#ifdef HAVE_AS_GNU_ATTRIBUTE
2318 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
2319 fprintf (file, "\t.gnu_attribute 4, %d\n",
2320 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2321#endif
2322
b723e82f
JJ
2323 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2324 {
d6b5193b
RS
2325 switch_to_section (toc_section);
2326 switch_to_section (text_section);
b723e82f 2327 }
3cfa4909 2328}
c4e18b1c 2329
5248c961 2330\f
a0ab749a 2331/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2332
2333int
863d938c 2334direct_return (void)
9878760c 2335{
4697a36c
MM
2336 if (reload_completed)
2337 {
2338 rs6000_stack_t *info = rs6000_stack_info ();
2339
2340 if (info->first_gp_reg_save == 32
2341 && info->first_fp_reg_save == 64
00b960c7 2342 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2343 && ! info->lr_save_p
2344 && ! info->cr_save_p
00b960c7 2345 && info->vrsave_mask == 0
c81fc13e 2346 && ! info->push_p)
4697a36c
MM
2347 return 1;
2348 }
2349
2350 return 0;
9878760c
RK
2351}
2352
4e74d8ec
MM
2353/* Return the number of instructions it takes to form a constant in an
2354 integer register. */
2355
48d72335 2356int
a2369ed3 2357num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2358{
2359 /* signed constant loadable with {cal|addi} */
547b216d 2360 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2361 return 1;
2362
4e74d8ec 2363 /* constant loadable with {cau|addis} */
547b216d
DE
2364 else if ((value & 0xffff) == 0
2365 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2366 return 1;
2367
5f59ecb7 2368#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2369 else if (TARGET_POWERPC64)
4e74d8ec 2370 {
a65c591c
DE
2371 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2372 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2373
a65c591c 2374 if (high == 0 || high == -1)
4e74d8ec
MM
2375 return 2;
2376
a65c591c 2377 high >>= 1;
4e74d8ec 2378
a65c591c 2379 if (low == 0)
4e74d8ec 2380 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2381 else
2382 return (num_insns_constant_wide (high)
e396202a 2383 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2384 }
2385#endif
2386
2387 else
2388 return 2;
2389}
2390
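(As a rough standalone illustration of the two single-instruction cases tested above -- fits_one_insn is a made-up name, not a GCC function -- with a few sample constants:)

#include <stdio.h>

static int fits_one_insn (long long value)
{
  /* addi: a signed 16-bit immediate.  */
  if ((unsigned long long) (value + 0x8000) < 0x10000)
    return 1;
  /* addis: low 16 bits zero and the value is a sign-extended 32-bit constant.  */
  if ((value & 0xffff) == 0
      && (value >> 31 == 0 || value >> 31 == -1))
    return 1;
  return 0;
}

int main (void)
{
  printf ("%d %d %d\n",
          fits_one_insn (-42),         /* 1: addi            */
          fits_one_insn (0x12340000),  /* 1: addis           */
          fits_one_insn (0x12345678)); /* 0: needs two insns */
  return 0;
}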
2391int
a2369ed3 2392num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2393{
37409796 2394 HOST_WIDE_INT low, high;
bb8df8a6 2395
37409796 2396 switch (GET_CODE (op))
0d30d435 2397 {
37409796 2398 case CONST_INT:
0d30d435 2399#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2400 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2401 && mask64_operand (op, mode))
c4ad648e 2402 return 2;
0d30d435
DE
2403 else
2404#endif
2405 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2406
37409796
NS
2407 case CONST_DOUBLE:
2408 if (mode == SFmode)
2409 {
2410 long l;
2411 REAL_VALUE_TYPE rv;
bb8df8a6 2412
37409796
NS
2413 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2414 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2415 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2416 }
a260abc9 2417
37409796
NS
2418 if (mode == VOIDmode || mode == DImode)
2419 {
2420 high = CONST_DOUBLE_HIGH (op);
2421 low = CONST_DOUBLE_LOW (op);
2422 }
2423 else
2424 {
2425 long l[2];
2426 REAL_VALUE_TYPE rv;
bb8df8a6 2427
37409796 2428 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2429 if (DECIMAL_FLOAT_MODE_P (mode))
2430 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2431 else
2432 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2433 high = l[WORDS_BIG_ENDIAN == 0];
2434 low = l[WORDS_BIG_ENDIAN != 0];
2435 }
47ad8c61 2436
37409796
NS
2437 if (TARGET_32BIT)
2438 return (num_insns_constant_wide (low)
2439 + num_insns_constant_wide (high));
2440 else
2441 {
2442 if ((high == 0 && low >= 0)
2443 || (high == -1 && low < 0))
2444 return num_insns_constant_wide (low);
bb8df8a6 2445
1990cd79 2446 else if (mask64_operand (op, mode))
37409796 2447 return 2;
bb8df8a6 2448
37409796
NS
2449 else if (low == 0)
2450 return num_insns_constant_wide (high) + 1;
bb8df8a6 2451
37409796
NS
2452 else
2453 return (num_insns_constant_wide (high)
2454 + num_insns_constant_wide (low) + 1);
2455 }
bb8df8a6 2456
37409796
NS
2457 default:
2458 gcc_unreachable ();
4e74d8ec 2459 }
4e74d8ec
MM
2460}
2461
0972012c
RS
2462/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2463 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2464 corresponding element of the vector, but for V4SFmode and V2SFmode,
2465 the corresponding "float" is interpreted as an SImode integer. */
2466
2467static HOST_WIDE_INT
2468const_vector_elt_as_int (rtx op, unsigned int elt)
2469{
2470 rtx tmp = CONST_VECTOR_ELT (op, elt);
2471 if (GET_MODE (op) == V4SFmode
2472 || GET_MODE (op) == V2SFmode)
2473 tmp = gen_lowpart (SImode, tmp);
2474 return INTVAL (tmp);
2475}
452a7d36 2476
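(For readers unfamiliar with the gen_lowpart trick above: it is the float element's bit pattern, not its numeric value, that comes back. A tiny standalone sketch of that reinterpretation in plain C, assuming the usual 32-bit float:)

#include <stdio.h>
#include <string.h>

int main (void)
{
  float f = 1.0f;
  unsigned int bits;
  memcpy (&bits, &f, sizeof bits);  /* take the float's bit pattern, not its value */
  printf ("0x%08x\n", bits);        /* prints 0x3f800000 for 1.0f */
  return 0;
}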
77ccdfed 2477/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2478 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2479 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2480 all items are set to the same value and contain COPIES replicas of the
2481 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2482 operand and the others are set to the value of the operand's msb. */
2483
2484static bool
2485vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2486{
66180ff3
PB
2487 enum machine_mode mode = GET_MODE (op);
2488 enum machine_mode inner = GET_MODE_INNER (mode);
2489
2490 unsigned i;
2491 unsigned nunits = GET_MODE_NUNITS (mode);
2492 unsigned bitsize = GET_MODE_BITSIZE (inner);
2493 unsigned mask = GET_MODE_MASK (inner);
2494
0972012c 2495 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2496 HOST_WIDE_INT splat_val = val;
2497 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2498
2499 /* Construct the value to be splatted, if possible. If not, return 0. */
2500 for (i = 2; i <= copies; i *= 2)
452a7d36 2501 {
66180ff3
PB
2502 HOST_WIDE_INT small_val;
2503 bitsize /= 2;
2504 small_val = splat_val >> bitsize;
2505 mask >>= bitsize;
2506 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2507 return false;
2508 splat_val = small_val;
2509 }
c4ad648e 2510
66180ff3
PB
2511 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2512 if (EASY_VECTOR_15 (splat_val))
2513 ;
2514
2515 /* Also check if we can splat, and then add the result to itself. Do so if
2516 the value is positive, of if the splat instruction is using OP's mode;
2517 for splat_val < 0, the splat and the add should use the same mode. */
2518 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2519 && (splat_val >= 0 || (step == 1 && copies == 1)))
2520 ;
2521
2522 else
2523 return false;
2524
2525 /* Check if VAL is present in every STEP-th element, and the
2526 other elements are filled with its most significant bit. */
2527 for (i = 0; i < nunits - 1; ++i)
2528 {
2529 HOST_WIDE_INT desired_val;
2530 if (((i + 1) & (step - 1)) == 0)
2531 desired_val = val;
2532 else
2533 desired_val = msb_val;
2534
0972012c 2535 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2536 return false;
452a7d36 2537 }
66180ff3
PB
2538
2539 return true;
452a7d36
HP
2540}
2541
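(The halving loop above is the subtle step: each doubling of COPIES requires both halves of the candidate splat value to be identical. A standalone sketch of that single test -- halves_match is a made-up helper, not part of GCC:)

#include <stdio.h>
#include <stdbool.h>

/* One halving step: both halves of VAL must be identical before the
   splat element can be narrowed (and COPIES doubled).  */
static bool halves_match (long long val, unsigned bitsize)
{
  long long mask = (1LL << (bitsize / 2)) - 1;
  long long small = val >> (bitsize / 2);
  return val == ((small << (bitsize / 2)) | (small & mask));
}

int main (void)
{
  printf ("%d %d\n",
          halves_match (0x00050005LL, 32),  /* 1: vspltish 5 can build it */
          halves_match (0x00050006LL, 32)); /* 0: halves differ           */
  return 0;
}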
69ef87e2 2542
77ccdfed 2543/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2544 with a vspltisb, vspltish or vspltisw. */
2545
2546bool
2547easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2548{
66180ff3 2549 unsigned step, copies;
d744e06e 2550
66180ff3
PB
2551 if (mode == VOIDmode)
2552 mode = GET_MODE (op);
2553 else if (mode != GET_MODE (op))
2554 return false;
d744e06e 2555
66180ff3
PB
2556 /* Start with a vspltisw. */
2557 step = GET_MODE_NUNITS (mode) / 4;
2558 copies = 1;
2559
2560 if (vspltis_constant (op, step, copies))
2561 return true;
2562
2563 /* Then try with a vspltish. */
2564 if (step == 1)
2565 copies <<= 1;
2566 else
2567 step >>= 1;
2568
2569 if (vspltis_constant (op, step, copies))
2570 return true;
2571
2572 /* And finally a vspltisb. */
2573 if (step == 1)
2574 copies <<= 1;
2575 else
2576 step >>= 1;
2577
2578 if (vspltis_constant (op, step, copies))
2579 return true;
2580
2581 return false;
d744e06e
AH
2582}
2583
66180ff3
PB
2584/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2585 result is OP. Abort if it is not possible. */
d744e06e 2586
f676971a 2587rtx
66180ff3 2588gen_easy_altivec_constant (rtx op)
452a7d36 2589{
66180ff3
PB
2590 enum machine_mode mode = GET_MODE (op);
2591 int nunits = GET_MODE_NUNITS (mode);
2592 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2593 unsigned step = nunits / 4;
2594 unsigned copies = 1;
2595
2596 /* Start with a vspltisw. */
2597 if (vspltis_constant (op, step, copies))
2598 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2599
2600 /* Then try with a vspltish. */
2601 if (step == 1)
2602 copies <<= 1;
2603 else
2604 step >>= 1;
2605
2606 if (vspltis_constant (op, step, copies))
2607 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2608
2609 /* And finally a vspltisb. */
2610 if (step == 1)
2611 copies <<= 1;
2612 else
2613 step >>= 1;
2614
2615 if (vspltis_constant (op, step, copies))
2616 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2617
2618 gcc_unreachable ();
d744e06e
AH
2619}
2620
2621const char *
a2369ed3 2622output_vec_const_move (rtx *operands)
d744e06e
AH
2623{
2624 int cst, cst2;
2625 enum machine_mode mode;
2626 rtx dest, vec;
2627
2628 dest = operands[0];
2629 vec = operands[1];
d744e06e 2630 mode = GET_MODE (dest);
69ef87e2 2631
d744e06e
AH
2632 if (TARGET_ALTIVEC)
2633 {
66180ff3 2634 rtx splat_vec;
d744e06e
AH
2635 if (zero_constant (vec, mode))
2636 return "vxor %0,%0,%0";
37409796 2637
66180ff3
PB
2638 splat_vec = gen_easy_altivec_constant (vec);
2639 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2640 operands[1] = XEXP (splat_vec, 0);
2641 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2642 return "#";
bb8df8a6 2643
66180ff3 2644 switch (GET_MODE (splat_vec))
98ef3137 2645 {
37409796 2646 case V4SImode:
66180ff3 2647 return "vspltisw %0,%1";
c4ad648e 2648
37409796 2649 case V8HImode:
66180ff3 2650 return "vspltish %0,%1";
c4ad648e 2651
37409796 2652 case V16QImode:
66180ff3 2653 return "vspltisb %0,%1";
bb8df8a6 2654
37409796
NS
2655 default:
2656 gcc_unreachable ();
98ef3137 2657 }
69ef87e2
AH
2658 }
2659
37409796 2660 gcc_assert (TARGET_SPE);
bb8df8a6 2661
37409796
NS
2662 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2663 pattern of V1DI, V4HI, and V2SF.
2664
2665 FIXME: We should probably return # and add post reload
2666 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2667 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2668 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2669 operands[1] = CONST_VECTOR_ELT (vec, 0);
2670 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2671 if (cst == cst2)
2672 return "li %0,%1\n\tevmergelo %0,%0,%0";
2673 else
2674 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2675}
2676
7a4eca66
DE
2677/* Initialize vector TARGET to VALS. */
2678
2679void
2680rs6000_expand_vector_init (rtx target, rtx vals)
2681{
2682 enum machine_mode mode = GET_MODE (target);
2683 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2684 int n_elts = GET_MODE_NUNITS (mode);
2685 int n_var = 0, one_var = -1;
2686 bool all_same = true, all_const_zero = true;
2687 rtx x, mem;
2688 int i;
2689
2690 for (i = 0; i < n_elts; ++i)
2691 {
2692 x = XVECEXP (vals, 0, i);
2693 if (!CONSTANT_P (x))
2694 ++n_var, one_var = i;
2695 else if (x != CONST0_RTX (inner_mode))
2696 all_const_zero = false;
2697
2698 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2699 all_same = false;
2700 }
2701
2702 if (n_var == 0)
2703 {
2704 if (mode != V4SFmode && all_const_zero)
2705 {
2706 /* Zero register. */
2707 emit_insn (gen_rtx_SET (VOIDmode, target,
2708 gen_rtx_XOR (mode, target, target)));
2709 return;
2710 }
66180ff3 2711 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
7a4eca66
DE
2712 {
2713 /* Splat immediate. */
66180ff3 2714 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
7a4eca66
DE
2715 return;
2716 }
2717 else if (all_same)
2718 ; /* Splat vector element. */
2719 else
2720 {
2721 /* Load from constant pool. */
2722 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2723 return;
2724 }
2725 }
2726
2727 /* Store value to stack temp. Load vector element. Splat. */
2728 if (all_same)
2729 {
2730 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2731 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2732 XVECEXP (vals, 0, 0));
2733 x = gen_rtx_UNSPEC (VOIDmode,
2734 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2735 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2736 gen_rtvec (2,
2737 gen_rtx_SET (VOIDmode,
2738 target, mem),
2739 x)));
2740 x = gen_rtx_VEC_SELECT (inner_mode, target,
2741 gen_rtx_PARALLEL (VOIDmode,
2742 gen_rtvec (1, const0_rtx)));
2743 emit_insn (gen_rtx_SET (VOIDmode, target,
2744 gen_rtx_VEC_DUPLICATE (mode, x)));
2745 return;
2746 }
2747
2748 /* One field is non-constant. Load constant then overwrite
2749 varying field. */
2750 if (n_var == 1)
2751 {
2752 rtx copy = copy_rtx (vals);
2753
57b51d4d 2754 /* Load constant part of vector, substitute neighboring value for
7a4eca66
DE
2755 varying element. */
2756 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2757 rs6000_expand_vector_init (target, copy);
2758
2759 /* Insert variable. */
2760 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2761 return;
2762 }
2763
2764 /* Construct the vector in memory one field at a time
2765 and load the whole vector. */
2766 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2767 for (i = 0; i < n_elts; i++)
2768 emit_move_insn (adjust_address_nv (mem, inner_mode,
2769 i * GET_MODE_SIZE (inner_mode)),
2770 XVECEXP (vals, 0, i));
2771 emit_move_insn (target, mem);
2772}
2773
2774/* Set field ELT of TARGET to VAL. */
2775
2776void
2777rs6000_expand_vector_set (rtx target, rtx val, int elt)
2778{
2779 enum machine_mode mode = GET_MODE (target);
2780 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2781 rtx reg = gen_reg_rtx (mode);
2782 rtx mask, mem, x;
2783 int width = GET_MODE_SIZE (inner_mode);
2784 int i;
2785
2786 /* Load single variable value. */
2787 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2788 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2789 x = gen_rtx_UNSPEC (VOIDmode,
2790 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2791 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2792 gen_rtvec (2,
2793 gen_rtx_SET (VOIDmode,
2794 reg, mem),
2795 x)));
2796
2797 /* Linear sequence. */
2798 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2799 for (i = 0; i < 16; ++i)
2800 XVECEXP (mask, 0, i) = GEN_INT (i);
2801
2802 /* Set permute mask to insert element into target. */
2803 for (i = 0; i < width; ++i)
2804 XVECEXP (mask, 0, elt*width + i)
2805 = GEN_INT (i + 0x10);
2806 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2807 x = gen_rtx_UNSPEC (mode,
2808 gen_rtvec (3, target, reg,
2809 force_reg (V16QImode, x)),
2810 UNSPEC_VPERM);
2811 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2812}
2813
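(The permute mask built above is easier to see with concrete numbers. The following standalone sketch, plain C rather than RTL, prints the byte selectors a vperm would use when inserting element 2 of a V4SImode vector: bytes 8-11 are redirected to the new value, selectors 0x10-0x13, and everything else keeps the target's bytes.)

#include <stdio.h>

int main (void)
{
  unsigned char mask[16];
  int elt = 2, width = 4;              /* insert element 2 of a V4SImode vector */
  int i;

  for (i = 0; i < 16; ++i)
    mask[i] = i;                       /* identity: keep the target's bytes */
  for (i = 0; i < width; ++i)
    mask[elt * width + i] = i + 0x10;  /* these four bytes come from the new value */

  for (i = 0; i < 16; ++i)
    printf ("%02x ", mask[i]);         /* 00..07 10 11 12 13 0c..0f */
  printf ("\n");
  return 0;
}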
2814/* Extract field ELT from VEC into TARGET. */
2815
2816void
2817rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2818{
2819 enum machine_mode mode = GET_MODE (vec);
2820 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2821 rtx mem, x;
2822
2823 /* Allocate mode-sized buffer. */
2824 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2825
2826 /* Add offset to field within buffer matching vector element. */
2827 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2828
2829 /* Store single field into mode-sized buffer. */
2830 x = gen_rtx_UNSPEC (VOIDmode,
2831 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2832 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2833 gen_rtvec (2,
2834 gen_rtx_SET (VOIDmode,
2835 mem, vec),
2836 x)));
2837 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2838}
2839
0ba1b2ff
AM
2840/* Generates shifts and masks for a pair of rldicl or rldicr insns to
2841 implement ANDing by the mask IN. */
2842void
a2369ed3 2843build_mask64_2_operands (rtx in, rtx *out)
0ba1b2ff
AM
2844{
2845#if HOST_BITS_PER_WIDE_INT >= 64
2846 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2847 int shift;
2848
37409796 2849 gcc_assert (GET_CODE (in) == CONST_INT);
0ba1b2ff
AM
2850
2851 c = INTVAL (in);
2852 if (c & 1)
2853 {
2854 /* Assume c initially something like 0x00fff000000fffff. The idea
2855 is to rotate the word so that the middle ^^^^^^ group of zeros
2856 is at the MS end and can be cleared with an rldicl mask. We then
2857 rotate back and clear off the MS ^^ group of zeros with a
2858 second rldicl. */
2859 c = ~c; /* c == 0xff000ffffff00000 */
2860 lsb = c & -c; /* lsb == 0x0000000000100000 */
2861 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2862 c = ~c; /* c == 0x00fff000000fffff */
2863 c &= -lsb; /* c == 0x00fff00000000000 */
2864 lsb = c & -c; /* lsb == 0x0000100000000000 */
2865 c = ~c; /* c == 0xff000fffffffffff */
2866 c &= -lsb; /* c == 0xff00000000000000 */
2867 shift = 0;
2868 while ((lsb >>= 1) != 0)
2869 shift++; /* shift == 44 on exit from loop */
2870 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2871 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2872 m2 = ~c; /* m2 == 0x00ffffffffffffff */
a260abc9
DE
2873 }
2874 else
0ba1b2ff
AM
2875 {
2876 /* Assume c initially something like 0xff000f0000000000. The idea
2877 is to rotate the word so that the ^^^ middle group of zeros
2878 is at the LS end and can be cleared with an rldicr mask. We then
2879 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2880 a second rldicr. */
2881 lsb = c & -c; /* lsb == 0x0000010000000000 */
2882 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2883 c = ~c; /* c == 0x00fff0ffffffffff */
2884 c &= -lsb; /* c == 0x00fff00000000000 */
2885 lsb = c & -c; /* lsb == 0x0000100000000000 */
2886 c = ~c; /* c == 0xff000fffffffffff */
2887 c &= -lsb; /* c == 0xff00000000000000 */
2888 shift = 0;
2889 while ((lsb >>= 1) != 0)
2890 shift++; /* shift == 44 on exit from loop */
2891 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2892 m1 >>= shift; /* m1 == 0x0000000000000fff */
2893 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2894 }
2895
2896 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2897 masks will be all 1's. We are guaranteed more than one transition. */
2898 out[0] = GEN_INT (64 - shift);
2899 out[1] = GEN_INT (m1);
2900 out[2] = GEN_INT (shift);
2901 out[3] = GEN_INT (m2);
2902#else
045572c7
GK
2903 (void)in;
2904 (void)out;
37409796 2905 gcc_unreachable ();
0ba1b2ff 2906#endif
a260abc9
DE
2907}
2908
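(The hex walk-through in the comments can be checked mechanically. Below is a self-contained sketch, outside GCC, of the odd-mask branch above run on the comment's example 0x00fff000000fffff; it prints the two rotate/mask pairs the function would hand back.)

#include <stdio.h>

int main (void)
{
  unsigned long long c = 0x00fff000000fffffULL, lsb, m1, m2;
  int shift = 0;

  c = ~c;                       /* 0xff000ffffff00000 */
  lsb = c & -c;                 /* 0x0000000000100000 */
  m1 = -lsb;                    /* 0xfffffffffff00000 */
  c = ~c;                       /* back to the original mask */
  c &= -lsb;                    /* 0x00fff00000000000 */
  lsb = c & -c;                 /* 0x0000100000000000 */
  c = ~c;                       /* 0xff000fffffffffff */
  c &= -lsb;                    /* 0xff00000000000000 */
  while ((lsb >>= 1) != 0)
    shift++;                    /* 44 for this example */
  m1 <<= 64 - shift;
  m1 = ~m1;
  m2 = ~c;

  printf ("rotate %d, mask 0x%016llx; rotate back %d, mask 0x%016llx\n",
          64 - shift, m1, shift, m2);
  return 0;
}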
54b695e7 2909/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
48d72335
DE
2910
2911bool
54b695e7
AH
2912invalid_e500_subreg (rtx op, enum machine_mode mode)
2913{
61c76239
JM
2914 if (TARGET_E500_DOUBLE)
2915 {
17caeff2
JM
2916 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
2917 subreg:TI and reg:TF. */
61c76239 2918 if (GET_CODE (op) == SUBREG
17caeff2 2919 && (mode == SImode || mode == DImode || mode == TImode)
61c76239 2920 && REG_P (SUBREG_REG (op))
17caeff2
JM
2921 && (GET_MODE (SUBREG_REG (op)) == DFmode
2922 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
2923 return true;
2924
17caeff2
JM
2925 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
2926 reg:TI. */
61c76239 2927 if (GET_CODE (op) == SUBREG
17caeff2 2928 && (mode == DFmode || mode == TFmode)
61c76239 2929 && REG_P (SUBREG_REG (op))
17caeff2
JM
2930 && (GET_MODE (SUBREG_REG (op)) == DImode
2931 || GET_MODE (SUBREG_REG (op)) == TImode))
61c76239
JM
2932 return true;
2933 }
54b695e7 2934
61c76239
JM
2935 if (TARGET_SPE
2936 && GET_CODE (op) == SUBREG
2937 && mode == SImode
54b695e7 2938 && REG_P (SUBREG_REG (op))
14502dad 2939 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
2940 return true;
2941
2942 return false;
2943}
2944
58182de3 2945/* AIX increases natural record alignment to doubleword if the first
95727fb8
AP
2946 field is an FP double while the FP fields remain word aligned. */
2947
19d66194 2948unsigned int
fa5b0972
AM
2949rs6000_special_round_type_align (tree type, unsigned int computed,
2950 unsigned int specified)
95727fb8 2951{
fa5b0972 2952 unsigned int align = MAX (computed, specified);
95727fb8 2953 tree field = TYPE_FIELDS (type);
95727fb8 2954
bb8df8a6 2955 /* Skip all non-field decls. */
85962ac8 2956 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
95727fb8
AP
2957 field = TREE_CHAIN (field);
2958
fa5b0972
AM
2959 if (field != NULL && field != type)
2960 {
2961 type = TREE_TYPE (field);
2962 while (TREE_CODE (type) == ARRAY_TYPE)
2963 type = TREE_TYPE (type);
2964
2965 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2966 align = MAX (align, 64);
2967 }
95727fb8 2968
fa5b0972 2969 return align;
95727fb8
AP
2970}
2971
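(To make the AIX rule concrete, here is a minimal standalone sketch -- aix_record_align is a made-up name, not the GCC tree API -- that applies the same decision to a record described only by whether its first field is a double:)

#include <stdio.h>
#include <stdbool.h>

static unsigned int
aix_record_align (bool first_field_is_double, unsigned int computed,
                  unsigned int specified)
{
  unsigned int align = computed > specified ? computed : specified;
  if (first_field_is_double && align < 64)
    align = 64;                 /* raise the whole record to doubleword */
  return align;
}

int main (void)
{
  /* struct { double d; int i; }  vs.  struct { int i; double d; } */
  printf ("%u %u\n",
          aix_record_align (true, 32, 0),    /* 64 */
          aix_record_align (false, 32, 0));  /* 32 */
  return 0;
}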
58182de3
GK
2972/* Darwin increases record alignment to the natural alignment of
2973 the first field. */
2974
2975unsigned int
2976darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
2977 unsigned int specified)
2978{
2979 unsigned int align = MAX (computed, specified);
2980
2981 if (TYPE_PACKED (type))
2982 return align;
2983
2984 /* Find the first field, looking down into aggregates. */
2985 do {
2986 tree field = TYPE_FIELDS (type);
2987 /* Skip all non-field decls. */
2988 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2989 field = TREE_CHAIN (field);
2990 if (! field)
2991 break;
2992 type = TREE_TYPE (field);
2993 while (TREE_CODE (type) == ARRAY_TYPE)
2994 type = TREE_TYPE (type);
2995 } while (AGGREGATE_TYPE_P (type));
2996
2997 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
2998 align = MAX (align, TYPE_ALIGN (type));
2999
3000 return align;
3001}
3002
a4f6c312 3003/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3004
3005int
f676971a 3006small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3007 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3008{
38c1f2d7 3009#if TARGET_ELF
5f59ecb7 3010 rtx sym_ref;
7509c759 3011
d9407988 3012 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3013 return 0;
a54d04b7 3014
f607bc57 3015 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3016 return 0;
3017
88228c4b
MM
3018 if (GET_CODE (op) == SYMBOL_REF)
3019 sym_ref = op;
3020
3021 else if (GET_CODE (op) != CONST
3022 || GET_CODE (XEXP (op, 0)) != PLUS
3023 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3024 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3025 return 0;
3026
88228c4b 3027 else
dbf55e53
MM
3028 {
3029 rtx sum = XEXP (op, 0);
3030 HOST_WIDE_INT summand;
3031
3032 /* We have to be careful here, because it is the referenced address
c4ad648e 3033 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3034 summand = INTVAL (XEXP (sum, 1));
307b599c 3035 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3036 return 0;
dbf55e53
MM
3037
3038 sym_ref = XEXP (sum, 0);
3039 }
88228c4b 3040
20bfcd69 3041 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3042#else
3043 return 0;
3044#endif
7509c759 3045}
46c07df8 3046
3a1f863f 3047/* Return true if either operand is a general purpose register. */
46c07df8 3048
3a1f863f
DE
3049bool
3050gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3051{
3a1f863f
DE
3052 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3053 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3054}
3055
9ebbca7d 3056\f
4d588c14
RH
3057/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3058
f676971a
EC
3059static int
3060constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3061{
9390387d 3062 switch (GET_CODE (op))
9ebbca7d
GK
3063 {
3064 case SYMBOL_REF:
c4501e62
JJ
3065 if (RS6000_SYMBOL_REF_TLS_P (op))
3066 return 0;
3067 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3068 {
3069 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3070 {
3071 *have_sym = 1;
3072 return 1;
3073 }
3074 else
3075 return 0;
3076 }
3077 else if (! strcmp (XSTR (op, 0), toc_label_name))
3078 {
3079 *have_toc = 1;
3080 return 1;
3081 }
3082 else
3083 return 0;
9ebbca7d
GK
3084 case PLUS:
3085 case MINUS:
c1f11548
DE
3086 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3087 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3088 case CONST:
a4f6c312 3089 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3090 case CONST_INT:
a4f6c312 3091 return 1;
9ebbca7d 3092 default:
a4f6c312 3093 return 0;
9ebbca7d
GK
3094 }
3095}
3096
4d588c14 3097static bool
a2369ed3 3098constant_pool_expr_p (rtx op)
9ebbca7d
GK
3099{
3100 int have_sym = 0;
3101 int have_toc = 0;
3102 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3103}
3104
48d72335 3105bool
a2369ed3 3106toc_relative_expr_p (rtx op)
9ebbca7d 3107{
4d588c14
RH
3108 int have_sym = 0;
3109 int have_toc = 0;
3110 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3111}
3112
4d588c14 3113bool
a2369ed3 3114legitimate_constant_pool_address_p (rtx x)
4d588c14
RH
3115{
3116 return (TARGET_TOC
3117 && GET_CODE (x) == PLUS
3118 && GET_CODE (XEXP (x, 0)) == REG
3119 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3120 && constant_pool_expr_p (XEXP (x, 1)));
3121}
3122
d04b6e6e
EB
3123static bool
3124legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3125{
3126 return (DEFAULT_ABI == ABI_V4
3127 && !flag_pic && !TARGET_TOC
3128 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3129 && small_data_operand (x, mode));
3130}
3131
60cdabab
DE
3132/* SPE offset addressing is limited to 5-bits worth of double words. */
3133#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3134
76d2b81d
DJ
3135bool
3136rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3137{
3138 unsigned HOST_WIDE_INT offset, extra;
3139
3140 if (GET_CODE (x) != PLUS)
3141 return false;
3142 if (GET_CODE (XEXP (x, 0)) != REG)
3143 return false;
3144 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3145 return false;
60cdabab
DE
3146 if (legitimate_constant_pool_address_p (x))
3147 return true;
4d588c14
RH
3148 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3149 return false;
3150
3151 offset = INTVAL (XEXP (x, 1));
3152 extra = 0;
3153 switch (mode)
3154 {
3155 case V16QImode:
3156 case V8HImode:
3157 case V4SFmode:
3158 case V4SImode:
7a4eca66
DE
3159 /* AltiVec vector modes. Only reg+reg addressing is valid and
3160 constant offset zero should not occur due to canonicalization.
3161 Allow any offset when not strict before reload. */
3162 return !strict;
4d588c14
RH
3163
3164 case V4HImode:
3165 case V2SImode:
3166 case V1DImode:
3167 case V2SFmode:
3168 /* SPE vector modes. */
3169 return SPE_CONST_OFFSET_OK (offset);
3170
3171 case DFmode:
7393f7f8 3172 case DDmode:
4d4cbc0e
AH
3173 if (TARGET_E500_DOUBLE)
3174 return SPE_CONST_OFFSET_OK (offset);
3175
4d588c14 3176 case DImode:
54b695e7
AH
3177 /* On e500v2, we may have:
3178
3179 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3180
 3181 which is accessed with evldd instructions. */
3182 if (TARGET_E500_DOUBLE)
3183 return SPE_CONST_OFFSET_OK (offset);
3184
7393f7f8 3185 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3186 extra = 4;
3187 else if (offset & 3)
3188 return false;
3189 break;
3190
3191 case TFmode:
17caeff2
JM
3192 if (TARGET_E500_DOUBLE)
3193 return (SPE_CONST_OFFSET_OK (offset)
3194 && SPE_CONST_OFFSET_OK (offset + 8));
3195
4d588c14 3196 case TImode:
7393f7f8
BE
3197 case TDmode:
3198 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3199 extra = 12;
3200 else if (offset & 3)
3201 return false;
3202 else
3203 extra = 8;
3204 break;
3205
3206 default:
3207 break;
3208 }
3209
b1917422
AM
3210 offset += 0x8000;
3211 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3212}
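/* A sketch of the final range test, assuming the usual 16-bit signed
   displacement field: adding the 0x8000 bias maps the valid signed range
   [-0x8000, 0x7fff] onto [0, 0xffff], and both the offset itself and
   offset + extra (the last word touched by a multi-word mode) must stay
   below 0x10000.  For example, with DFmode on 32-bit (extra = 4):

     offset = 0x7ff8:   0xfff8 < 0x10000 and 0xfffc < 0x10000   -> ok
     offset = 0x7ffc:   0xfffc < 0x10000 but 0x10000 is not     -> rejected
     offset = -0x8000:  0x0000 and 0x0004                       -> ok  */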
3213
6fb5fa3c 3214bool
a2369ed3 3215legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3216{
3217 rtx op0, op1;
3218
3219 if (GET_CODE (x) != PLUS)
3220 return false;
850e8d3d 3221
4d588c14
RH
3222 op0 = XEXP (x, 0);
3223 op1 = XEXP (x, 1);
3224
bf00cc0f 3225 /* Recognize the RTL generated by reload, which we know will later be
9024f4b8
AM
3226 replaced with proper base and index regs. */
3227 if (!strict
3228 && reload_in_progress
3229 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3230 && REG_P (op1))
3231 return true;
3232
3233 return (REG_P (op0) && REG_P (op1)
3234 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3235 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3236 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3237 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3238}
3239
48d72335 3240inline bool
a2369ed3 3241legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3242{
3243 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3244}
3245
48d72335 3246bool
4c81e946
FJ
3247macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3248{
c4ad648e 3249 if (!TARGET_MACHO || !flag_pic
9390387d 3250 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3251 return false;
3252 x = XEXP (x, 0);
4c81e946
FJ
3253
3254 if (GET_CODE (x) != LO_SUM)
3255 return false;
3256 if (GET_CODE (XEXP (x, 0)) != REG)
3257 return false;
3258 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3259 return false;
3260 x = XEXP (x, 1);
3261
3262 return CONSTANT_P (x);
3263}
3264
4d588c14 3265static bool
a2369ed3 3266legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3267{
3268 if (GET_CODE (x) != LO_SUM)
3269 return false;
3270 if (GET_CODE (XEXP (x, 0)) != REG)
3271 return false;
3272 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3273 return false;
54b695e7 3274 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3275 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3276 || mode == DImode))
f82f556d 3277 return false;
4d588c14
RH
3278 x = XEXP (x, 1);
3279
8622e235 3280 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3281 {
a29077da 3282 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3283 return false;
3284 if (TARGET_TOC)
3285 return false;
3286 if (GET_MODE_NUNITS (mode) != 1)
3287 return false;
5e5f01b9 3288 if (GET_MODE_BITSIZE (mode) > 64
3c028f65
AM
3289 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3290 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
4d588c14
RH
3291 return false;
3292
3293 return CONSTANT_P (x);
3294 }
3295
3296 return false;
3297}
3298
3299
9ebbca7d
GK
3300/* Try machine-dependent ways of modifying an illegitimate address
3301 to be legitimate. If we find one, return the new, valid address.
3302 This is used from only one place: `memory_address' in explow.c.
3303
a4f6c312
SS
3304 OLDX is the address as it was before break_out_memory_refs was
3305 called. In some cases it is useful to look at this to decide what
3306 needs to be done.
9ebbca7d 3307
a4f6c312 3308 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3309
a4f6c312
SS
3310 It is always safe for this function to do nothing. It exists to
3311 recognize opportunities to optimize the output.
9ebbca7d
GK
3312
3313 On RS/6000, first check for the sum of a register with a constant
3314 integer that is out of range. If so, generate code to add the
3315 constant with the low-order 16 bits masked to the register and force
3316 this result into another register (this can be done with `cau').
3317 Then generate an address of REG+(CONST&0xffff), allowing for the
3318 possibility of bit 16 being a one.
3319
 3320 Then check for the sum of a register and something not constant; try to
 3321 load the non-constant part into a register and return the sum. */
4d588c14 3322
9ebbca7d 3323rtx
a2369ed3
DJ
3324rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3325 enum machine_mode mode)
0ac081f6 3326{
c4501e62
JJ
3327 if (GET_CODE (x) == SYMBOL_REF)
3328 {
3329 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3330 if (model != 0)
3331 return rs6000_legitimize_tls_address (x, model);
3332 }
3333
f676971a 3334 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3335 && GET_CODE (XEXP (x, 0)) == REG
3336 && GET_CODE (XEXP (x, 1)) == CONST_INT
3337 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
f676971a 3338 {
9ebbca7d
GK
3339 HOST_WIDE_INT high_int, low_int;
3340 rtx sum;
a65c591c
DE
3341 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3342 high_int = INTVAL (XEXP (x, 1)) - low_int;
9ebbca7d
GK
3343 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3344 GEN_INT (high_int)), 0);
3345 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3346 }
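  /* Worked example of the split above (a sketch): the xor/subtract
     sign-extends the low 16 bits, so low_int always fits a signed 16-bit
     displacement and high_int is a multiple of 0x10000 suitable for a
     single addis/cau.

       INTVAL = 0x00012345:  low_int =  0x2345,  high_int = 0x10000
       INTVAL = 0x00018000:  low_int = -0x8000,  high_int = 0x20000

     In the second case the returned address is (plus (reg + 0x20000) -0x8000),
     so the out-of-range constant costs one extra add instead of a full
     constant load.  */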
f676971a 3347 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3348 && GET_CODE (XEXP (x, 0)) == REG
3349 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3350 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3351 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3352 || TARGET_POWERPC64
7393f7f8
BE
3353 || (((mode != DImode && mode != DFmode && mode != DDmode)
3354 || TARGET_E500_DOUBLE)
3355 && mode != TFmode && mode != TDmode))
9ebbca7d
GK
3356 && (TARGET_POWERPC64 || mode != DImode)
3357 && mode != TImode)
3358 {
3359 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3360 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3361 }
0ac081f6
AH
3362 else if (ALTIVEC_VECTOR_MODE (mode))
3363 {
3364 rtx reg;
3365
3366 /* Make sure both operands are registers. */
3367 if (GET_CODE (x) == PLUS)
9f85ed45 3368 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3369 force_reg (Pmode, XEXP (x, 1)));
3370
3371 reg = force_reg (Pmode, x);
3372 return reg;
3373 }
4d4cbc0e 3374 else if (SPE_VECTOR_MODE (mode)
17caeff2 3375 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3376 || mode == DDmode || mode == TDmode
54b695e7 3377 || mode == DImode)))
a3170dc6 3378 {
54b695e7
AH
3379 if (mode == DImode)
3380 return NULL_RTX;
a3170dc6
AH
3381 /* We accept [reg + reg] and [reg + OFFSET]. */
3382
3383 if (GET_CODE (x) == PLUS)
c4ad648e
AM
3384 {
3385 rtx op1 = XEXP (x, 0);
3386 rtx op2 = XEXP (x, 1);
a3170dc6 3387
c4ad648e 3388 op1 = force_reg (Pmode, op1);
a3170dc6 3389
c4ad648e
AM
3390 if (GET_CODE (op2) != REG
3391 && (GET_CODE (op2) != CONST_INT
3392 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
3393 op2 = force_reg (Pmode, op2);
a3170dc6 3394
c4ad648e
AM
3395 return gen_rtx_PLUS (Pmode, op1, op2);
3396 }
a3170dc6
AH
3397
3398 return force_reg (Pmode, x);
3399 }
f1384257
AM
3400 else if (TARGET_ELF
3401 && TARGET_32BIT
3402 && TARGET_NO_TOC
3403 && ! flag_pic
9ebbca7d 3404 && GET_CODE (x) != CONST_INT
f676971a 3405 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3406 && CONSTANT_P (x)
6ac7bf2c
GK
3407 && GET_MODE_NUNITS (mode) == 1
3408 && (GET_MODE_BITSIZE (mode) <= 32
a3170dc6 3409 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
9ebbca7d
GK
3410 {
3411 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3412 emit_insn (gen_elf_high (reg, x));
3413 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3414 }
ee890fe2
SS
3415 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3416 && ! flag_pic
ab82a49f
AP
3417#if TARGET_MACHO
3418 && ! MACHO_DYNAMIC_NO_PIC_P
3419#endif
ee890fe2 3420 && GET_CODE (x) != CONST_INT
f676971a 3421 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3422 && CONSTANT_P (x)
f82f556d 3423 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
f676971a 3424 && mode != DImode
ee890fe2
SS
3425 && mode != TImode)
3426 {
3427 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3428 emit_insn (gen_macho_high (reg, x));
3429 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3430 }
f676971a 3431 else if (TARGET_TOC
4d588c14 3432 && constant_pool_expr_p (x)
a9098fd0 3433 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3434 {
3435 return create_TOC_reference (x);
3436 }
3437 else
3438 return NULL_RTX;
3439}
258bfae2 3440
fdbe66f2 3441/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3442 We need to emit DTP-relative relocations. */
3443
fdbe66f2 3444static void
c973d557
JJ
3445rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3446{
3447 switch (size)
3448 {
3449 case 4:
3450 fputs ("\t.long\t", file);
3451 break;
3452 case 8:
3453 fputs (DOUBLE_INT_ASM_OP, file);
3454 break;
3455 default:
37409796 3456 gcc_unreachable ();
c973d557
JJ
3457 }
3458 output_addr_const (file, x);
3459 fputs ("@dtprel+0x8000", file);
3460}
3461
c4501e62
JJ
3462/* Construct the SYMBOL_REF for the tls_get_addr function. */
3463
3464static GTY(()) rtx rs6000_tls_symbol;
3465static rtx
863d938c 3466rs6000_tls_get_addr (void)
c4501e62
JJ
3467{
3468 if (!rs6000_tls_symbol)
3469 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3470
3471 return rs6000_tls_symbol;
3472}
3473
3474/* Construct the SYMBOL_REF for TLS GOT references. */
3475
3476static GTY(()) rtx rs6000_got_symbol;
3477static rtx
863d938c 3478rs6000_got_sym (void)
c4501e62
JJ
3479{
3480 if (!rs6000_got_symbol)
3481 {
3482 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3483 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3484 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3485 }
c4501e62
JJ
3486
3487 return rs6000_got_symbol;
3488}
3489
3490/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3491 this (thread-local) address. */
3492
3493static rtx
a2369ed3 3494rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3495{
3496 rtx dest, insn;
3497
3498 dest = gen_reg_rtx (Pmode);
3499 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3500 {
3501 rtx tlsreg;
3502
3503 if (TARGET_64BIT)
3504 {
3505 tlsreg = gen_rtx_REG (Pmode, 13);
3506 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3507 }
3508 else
3509 {
3510 tlsreg = gen_rtx_REG (Pmode, 2);
3511 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3512 }
3513 emit_insn (insn);
3514 }
3515 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3516 {
3517 rtx tlsreg, tmp;
3518
3519 tmp = gen_reg_rtx (Pmode);
3520 if (TARGET_64BIT)
3521 {
3522 tlsreg = gen_rtx_REG (Pmode, 13);
3523 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3524 }
3525 else
3526 {
3527 tlsreg = gen_rtx_REG (Pmode, 2);
3528 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3529 }
3530 emit_insn (insn);
3531 if (TARGET_64BIT)
3532 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3533 else
3534 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3535 emit_insn (insn);
3536 }
3537 else
3538 {
3539 rtx r3, got, tga, tmp1, tmp2, eqv;
3540
4fed8f8f
AM
3541 /* We currently use relocations like @got@tlsgd for tls, which
3542 means the linker will handle allocation of tls entries, placing
3543 them in the .got section. So use a pointer to the .got section,
3544 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3545 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3546 if (TARGET_64BIT)
972f427b 3547 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3548 else
3549 {
3550 if (flag_pic == 1)
3551 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3552 else
3553 {
3554 rtx gsym = rs6000_got_sym ();
3555 got = gen_reg_rtx (Pmode);
3556 if (flag_pic == 0)
3557 rs6000_emit_move (got, gsym, Pmode);
3558 else
3559 {
e65a3857 3560 rtx tmp3, mem;
c4501e62
JJ
3561 rtx first, last;
3562
c4501e62
JJ
3563 tmp1 = gen_reg_rtx (Pmode);
3564 tmp2 = gen_reg_rtx (Pmode);
3565 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3566 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3567
e65a3857
DE
3568 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3569 emit_move_insn (tmp1,
1de43f85 3570 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3571 emit_move_insn (tmp2, mem);
3572 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3573 last = emit_move_insn (got, tmp3);
bd94cb6e 3574 set_unique_reg_note (last, REG_EQUAL, gsym);
6fb5fa3c 3575 maybe_encapsulate_block (first, last, gsym);
c4501e62
JJ
3576 }
3577 }
3578 }
3579
3580 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3581 {
3582 r3 = gen_rtx_REG (Pmode, 3);
3583 if (TARGET_64BIT)
3584 insn = gen_tls_gd_64 (r3, got, addr);
3585 else
3586 insn = gen_tls_gd_32 (r3, got, addr);
3587 start_sequence ();
3588 emit_insn (insn);
3589 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3590 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3591 insn = emit_call_insn (insn);
3592 CONST_OR_PURE_CALL_P (insn) = 1;
3593 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3594 insn = get_insns ();
3595 end_sequence ();
3596 emit_libcall_block (insn, dest, r3, addr);
3597 }
3598 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3599 {
3600 r3 = gen_rtx_REG (Pmode, 3);
3601 if (TARGET_64BIT)
3602 insn = gen_tls_ld_64 (r3, got);
3603 else
3604 insn = gen_tls_ld_32 (r3, got);
3605 start_sequence ();
3606 emit_insn (insn);
3607 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3608 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3609 insn = emit_call_insn (insn);
3610 CONST_OR_PURE_CALL_P (insn) = 1;
3611 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3612 insn = get_insns ();
3613 end_sequence ();
3614 tmp1 = gen_reg_rtx (Pmode);
3615 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3616 UNSPEC_TLSLD);
3617 emit_libcall_block (insn, tmp1, r3, eqv);
3618 if (rs6000_tls_size == 16)
3619 {
3620 if (TARGET_64BIT)
3621 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3622 else
3623 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3624 }
3625 else if (rs6000_tls_size == 32)
3626 {
3627 tmp2 = gen_reg_rtx (Pmode);
3628 if (TARGET_64BIT)
3629 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3630 else
3631 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3632 emit_insn (insn);
3633 if (TARGET_64BIT)
3634 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3635 else
3636 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3637 }
3638 else
3639 {
3640 tmp2 = gen_reg_rtx (Pmode);
3641 if (TARGET_64BIT)
3642 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3643 else
3644 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3645 emit_insn (insn);
3646 insn = gen_rtx_SET (Pmode, dest,
3647 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3648 }
3649 emit_insn (insn);
3650 }
3651 else
3652 {
a7b376ee 3653 /* IE, or 64-bit offset LE. */
c4501e62
JJ
3654 tmp2 = gen_reg_rtx (Pmode);
3655 if (TARGET_64BIT)
3656 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3657 else
3658 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3659 emit_insn (insn);
3660 if (TARGET_64BIT)
3661 insn = gen_tls_tls_64 (dest, tmp2, addr);
3662 else
3663 insn = gen_tls_tls_32 (dest, tmp2, addr);
3664 emit_insn (insn);
3665 }
3666 }
3667
3668 return dest;
3669}
3670
c4501e62
JJ
3671/* Return 1 if X contains a thread-local symbol. */
3672
3673bool
a2369ed3 3674rs6000_tls_referenced_p (rtx x)
c4501e62 3675{
cd413cab
AP
3676 if (! TARGET_HAVE_TLS)
3677 return false;
3678
c4501e62
JJ
3679 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3680}
3681
3682/* Return 1 if *X is a thread-local symbol. This is the same as
3683 rs6000_tls_symbol_ref except for the type of the unused argument. */
3684
9390387d 3685static int
a2369ed3 3686rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
3687{
3688 return RS6000_SYMBOL_REF_TLS_P (*x);
3689}
3690
24ea750e
DJ
3691/* The convention appears to be to define this wherever it is used.
3692 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3693 is now used here. */
3694#ifndef REG_MODE_OK_FOR_BASE_P
3695#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3696#endif
3697
3698/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3699 replace the input X, or the original X if no replacement is called for.
3700 The output parameter *WIN is 1 if the calling macro should goto WIN,
3701 0 if it should not.
3702
3703 For RS/6000, we wish to handle large displacements off a base
 3704 register by splitting the addend across an addi/addis pair and the mem insn.
 3705 This cuts the number of extra insns needed from 3 to 1.
3706
3707 On Darwin, we use this to generate code for floating point constants.
3708 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3709 The Darwin code is inside #if TARGET_MACHO because only then is
3710 machopic_function_base_name() defined. */
3711rtx
f676971a 3712rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
3713 int opnum, int type,
3714 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 3715{
f676971a 3716 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
3717 if (GET_CODE (x) == PLUS
3718 && GET_CODE (XEXP (x, 0)) == PLUS
3719 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3720 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3721 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3722 {
3723 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3724 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3725 opnum, (enum reload_type)type);
24ea750e
DJ
3726 *win = 1;
3727 return x;
3728 }
3deb2758 3729
24ea750e
DJ
3730#if TARGET_MACHO
3731 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3732 && GET_CODE (x) == LO_SUM
3733 && GET_CODE (XEXP (x, 0)) == PLUS
3734 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3735 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3736 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3737 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3738 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3739 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3740 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3741 {
3742 /* Result of previous invocation of this function on Darwin
6f317ef3 3743 floating point constant. */
24ea750e 3744 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3745 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3746 opnum, (enum reload_type)type);
24ea750e
DJ
3747 *win = 1;
3748 return x;
3749 }
3750#endif
4937d02d
DE
3751
3752 /* Force ld/std non-word aligned offset into base register by wrapping
3753 in offset 0. */
3754 if (GET_CODE (x) == PLUS
3755 && GET_CODE (XEXP (x, 0)) == REG
3756 && REGNO (XEXP (x, 0)) < 32
3757 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3758 && GET_CODE (XEXP (x, 1)) == CONST_INT
3759 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 3760 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
3761 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3762 && TARGET_POWERPC64)
3763 {
3764 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3765 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3766 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3767 opnum, (enum reload_type) type);
3768 *win = 1;
3769 return x;
3770 }
3771
24ea750e
DJ
3772 if (GET_CODE (x) == PLUS
3773 && GET_CODE (XEXP (x, 0)) == REG
3774 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3775 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 3776 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 3777 && !SPE_VECTOR_MODE (mode)
17caeff2 3778 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
54b695e7 3779 || mode == DImode))
78c875e8 3780 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
3781 {
3782 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3783 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3784 HOST_WIDE_INT high
c4ad648e 3785 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
3786
3787 /* Check for 32-bit overflow. */
3788 if (high + low != val)
c4ad648e 3789 {
24ea750e
DJ
3790 *win = 0;
3791 return x;
3792 }
3793
3794 /* Reload the high part into a base reg; leave the low part
c4ad648e 3795 in the mem directly. */
24ea750e
DJ
3796
3797 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
3798 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3799 GEN_INT (high)),
3800 GEN_INT (low));
24ea750e
DJ
3801
3802 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
3803 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3804 opnum, (enum reload_type)type);
24ea750e
DJ
3805 *win = 1;
3806 return x;
3807 }
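  /* Sketch of the 32-bit split above: `low' is the sign-extended bottom
     16 bits and `high' is the rest, sign-extended to 32 bits, so
     high + low == val exactly when val fits in 32 signed bits.

       val = 0x12345678:  low =  0x5678,  high =  0x12340000   -> split
       val = 0x7fff8000:  low = -0x8000,  high = -0x80000000,
                          high + low != val, so the overflow check
                          above leaves the address alone (*win = 0).

     The high part is reloaded into a base register (one addis) and the
     low part stays in the displacement of the memory reference.  */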
4937d02d 3808
24ea750e 3809 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 3810 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 3811 && !SPE_VECTOR_MODE (mode)
8308679f
DE
3812#if TARGET_MACHO
3813 && DEFAULT_ABI == ABI_DARWIN
a29077da 3814 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
3815#else
3816 && DEFAULT_ABI == ABI_V4
3817 && !flag_pic
3818#endif
7393f7f8 3819 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
7b5d92b2
AM
3820 The same goes for DImode without 64-bit gprs and DFmode
3821 without fprs. */
0d8c1c97 3822 && mode != TFmode
7393f7f8 3823 && mode != TDmode
7b5d92b2
AM
3824 && (mode != DImode || TARGET_POWERPC64)
3825 && (mode != DFmode || TARGET_POWERPC64
3826 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 3827 {
8308679f 3828#if TARGET_MACHO
a29077da
GK
3829 if (flag_pic)
3830 {
3831 rtx offset = gen_rtx_CONST (Pmode,
3832 gen_rtx_MINUS (Pmode, x,
11abc112 3833 machopic_function_base_sym ()));
a29077da
GK
3834 x = gen_rtx_LO_SUM (GET_MODE (x),
3835 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3836 gen_rtx_HIGH (Pmode, offset)), offset);
3837 }
3838 else
8308679f 3839#endif
a29077da 3840 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 3841 gen_rtx_HIGH (Pmode, x), x);
a29077da 3842
24ea750e 3843 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
3844 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3845 opnum, (enum reload_type)type);
24ea750e
DJ
3846 *win = 1;
3847 return x;
3848 }
4937d02d 3849
dec1f3aa
DE
3850 /* Reload an offset address wrapped by an AND that represents the
3851 masking of the lower bits. Strip the outer AND and let reload
3852 convert the offset address into an indirect address. */
3853 if (TARGET_ALTIVEC
3854 && ALTIVEC_VECTOR_MODE (mode)
3855 && GET_CODE (x) == AND
3856 && GET_CODE (XEXP (x, 0)) == PLUS
3857 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3858 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3859 && GET_CODE (XEXP (x, 1)) == CONST_INT
3860 && INTVAL (XEXP (x, 1)) == -16)
3861 {
3862 x = XEXP (x, 0);
3863 *win = 1;
3864 return x;
3865 }
3866
24ea750e 3867 if (TARGET_TOC
4d588c14 3868 && constant_pool_expr_p (x)
c1f11548 3869 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 3870 {
194c524a 3871 x = create_TOC_reference (x);
24ea750e
DJ
3872 *win = 1;
3873 return x;
3874 }
3875 *win = 0;
3876 return x;
f676971a 3877}
24ea750e 3878
258bfae2
FS
3879/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3880 that is a valid memory address for an instruction.
3881 The MODE argument is the machine mode for the MEM expression
3882 that wants to use this address.
3883
 3884 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3885 refers to a constant pool entry of an address (or the sum of it
3886 plus a constant), a short (16-bit signed) constant plus a register,
3887 the sum of two registers, or a register indirect, possibly with an
5bdc5878 3888 auto-increment. For DFmode and DImode with a constant plus register,
258bfae2
FS
 3889 we must ensure that both words are addressable, or on PowerPC64 that
 3890 the offset is word aligned.
3891
3892 For modes spanning multiple registers (DFmode in 32-bit GPRs,
7393f7f8
BE
3893 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
3894 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
3895 during assembly output. */
3896int
a2369ed3 3897rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 3898{
850e8d3d
DN
3899 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3900 if (TARGET_ALTIVEC
3901 && ALTIVEC_VECTOR_MODE (mode)
3902 && GET_CODE (x) == AND
3903 && GET_CODE (XEXP (x, 1)) == CONST_INT
3904 && INTVAL (XEXP (x, 1)) == -16)
3905 x = XEXP (x, 0);
3906
c4501e62
JJ
3907 if (RS6000_SYMBOL_REF_TLS_P (x))
3908 return 0;
4d588c14 3909 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
3910 return 1;
3911 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 3912 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 3913 && !SPE_VECTOR_MODE (mode)
429ec7dc 3914 && mode != TFmode
7393f7f8 3915 && mode != TDmode
54b695e7 3916 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2
JM
3917 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3918 || mode == DImode))
258bfae2 3919 && TARGET_UPDATE
4d588c14 3920 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 3921 return 1;
d04b6e6e 3922 if (legitimate_small_data_p (mode, x))
258bfae2 3923 return 1;
4d588c14 3924 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
3925 return 1;
3926 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3927 if (! reg_ok_strict
3928 && GET_CODE (x) == PLUS
3929 && GET_CODE (XEXP (x, 0)) == REG
708d2456 3930 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 3931 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
3932 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3933 return 1;
76d2b81d 3934 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3935 return 1;
3936 if (mode != TImode
76d2b81d 3937 && mode != TFmode
7393f7f8 3938 && mode != TDmode
a3170dc6
AH
3939 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3940 || TARGET_POWERPC64
4d4cbc0e 3941 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
258bfae2 3942 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 3943 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 3944 return 1;
6fb5fa3c
DB
3945 if (GET_CODE (x) == PRE_MODIFY
3946 && mode != TImode
3947 && mode != TFmode
3948 && mode != TDmode
3949 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3950 || TARGET_POWERPC64
3951 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3952 && (TARGET_POWERPC64 || mode != DImode)
3953 && !ALTIVEC_VECTOR_MODE (mode)
3954 && !SPE_VECTOR_MODE (mode)
3955 /* Restrict addressing for DI because of our SUBREG hackery. */
3956 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3957 && TARGET_UPDATE
3958 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
3959 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
3960 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
3961 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3962 return 1;
4d588c14 3963 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
3964 return 1;
3965 return 0;
3966}
4d588c14
RH
3967
3968/* Go to LABEL if ADDR (a legitimate address expression)
3969 has an effect that depends on the machine mode it is used for.
3970
 3971 On the RS/6000 this is true of all integral offsets (since AltiVec
 3972 modes don't allow them) and of any pre-increment or decrement.
3973
3974 ??? Except that due to conceptual problems in offsettable_address_p
3975 we can't really report the problems of integral offsets. So leave
f676971a 3976 this assuming that the adjustable offset must be valid for the
4d588c14
RH
3977 sub-words of a TFmode operand, which is what we had before. */
3978
3979bool
a2369ed3 3980rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
3981{
3982 switch (GET_CODE (addr))
3983 {
3984 case PLUS:
3985 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3986 {
3987 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3988 return val + 12 + 0x8000 >= 0x10000;
3989 }
3990 break;
3991
3992 case LO_SUM:
3993 return true;
3994
6fb5fa3c
DB
3995 case PRE_INC:
3996 case PRE_DEC:
3997 case PRE_MODIFY:
3998 return TARGET_UPDATE;
4d588c14
RH
3999
4000 default:
4001 break;
4002 }
4003
4004 return false;
4005}
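/* Sketch of the PLUS case above: the address is mode dependent when the
   displacement plus 12 (the offset of the last word of a TFmode access,
   per the comment above) no longer fits the signed 16-bit field:

     offset = 0x7ff0:  0x7ff0 + 12 + 0x8000 = 0xfffc  < 0x10000  -> false
     offset = 0x7ff8:  0x7ff8 + 12 + 0x8000 = 0x10004 >= 0x10000 -> true  */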
d8ecbcdb 4006
d04b6e6e
EB
4007/* More elaborate version of recog's offsettable_memref_p predicate
4008 that works around the ??? note of rs6000_mode_dependent_address.
4009 In particular it accepts
4010
4011 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4012
 4013 in 32-bit mode, which the recog predicate rejects. */
4014
4015bool
4016rs6000_offsettable_memref_p (rtx op)
4017{
4018 if (!MEM_P (op))
4019 return false;
4020
4021 /* First mimic offsettable_memref_p. */
4022 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4023 return true;
4024
4025 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4026 the latter predicate knows nothing about the mode of the memory
4027 reference and, therefore, assumes that it is the largest supported
4028 mode (TFmode). As a consequence, legitimate offsettable memory
4029 references are rejected. rs6000_legitimate_offset_address_p contains
4030 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4031 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4032}
4033
d8ecbcdb
AH
4034/* Return number of consecutive hard regs needed starting at reg REGNO
4035 to hold something of mode MODE.
4036 This is ordinarily the length in words of a value of mode MODE
4037 but can be less for certain modes in special long registers.
4038
4039 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4040 scalar instructions. The upper 32 bits are only available to the
4041 SIMD instructions.
4042
4043 POWER and PowerPC GPRs hold 32 bits worth;
 4044 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4045
4046int
4047rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4048{
4049 if (FP_REGNO_P (regno))
4050 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4051
4052 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4053 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4054
4055 if (ALTIVEC_REGNO_P (regno))
4056 return
4057 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4058
8521c414
JM
4059 /* The value returned for SCmode in the E500 double case is 2 for
4060 ABI compatibility; storing an SCmode value in a single register
4061 would require function_arg and rs6000_spe_function_arg to handle
4062 SCmode so as to pass the value correctly in a pair of
4063 registers. */
4064 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
4065 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4066
d8ecbcdb
AH
4067 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4068}
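/* A few illustrative results (a sketch, assuming the usual unit sizes on
   this port: FP and SPE words 8 bytes, AltiVec words 16 bytes, GPR words
   4 bytes on 32-bit and 8 bytes on 64-bit):

     DFmode   in an FPR                  -> 1 register
     DFmode   in GPRs, 32-bit            -> 2 registers
     V4SImode in an AltiVec register     -> 1 register
     TFmode   in GPRs, 64-bit            -> 2 registers  */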
2aa4498c
AH
4069
4070/* Change register usage conditional on target flags. */
4071void
4072rs6000_conditional_register_usage (void)
4073{
4074 int i;
4075
4076 /* Set MQ register fixed (already call_used) if not POWER
4077 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4078 be allocated. */
4079 if (! TARGET_POWER)
4080 fixed_regs[64] = 1;
4081
7c9ac5c0 4082 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4083 if (TARGET_64BIT)
4084 fixed_regs[13] = call_used_regs[13]
4085 = call_really_used_regs[13] = 1;
4086
4087 /* Conditionally disable FPRs. */
4088 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4089 for (i = 32; i < 64; i++)
4090 fixed_regs[i] = call_used_regs[i]
c4ad648e 4091 = call_really_used_regs[i] = 1;
2aa4498c 4092
7c9ac5c0
PH
4093 /* The TOC register is not killed across calls in a way that is
4094 visible to the compiler. */
4095 if (DEFAULT_ABI == ABI_AIX)
4096 call_really_used_regs[2] = 0;
4097
2aa4498c
AH
4098 if (DEFAULT_ABI == ABI_V4
4099 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4100 && flag_pic == 2)
4101 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4102
4103 if (DEFAULT_ABI == ABI_V4
4104 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4105 && flag_pic == 1)
4106 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4107 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4108 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4109
4110 if (DEFAULT_ABI == ABI_DARWIN
4111 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4112 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4113 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4114 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4115
b4db40bf
JJ
4116 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4117 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4118 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4119
2aa4498c
AH
4120 if (TARGET_ALTIVEC)
4121 global_regs[VSCR_REGNO] = 1;
4122
4123 if (TARGET_SPE)
4124 {
4125 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4126 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4127 registers in prologues and epilogues. We no longer use r14
4128 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4129 pool for link-compatibility with older versions of GCC. Once
4130 "old" code has died out, we can return r14 to the allocation
4131 pool. */
4132 fixed_regs[14]
4133 = call_used_regs[14]
4134 = call_really_used_regs[14] = 1;
2aa4498c
AH
4135 }
4136
4137 if (! TARGET_ALTIVEC)
4138 {
4139 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4140 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4141 call_really_used_regs[VRSAVE_REGNO] = 1;
4142 }
4143
4144 if (TARGET_ALTIVEC_ABI)
4145 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4146 call_used_regs[i] = call_really_used_regs[i] = 1;
4147}
fb4d4348 4148\f
a4f6c312
SS
4149/* Try to output insns to set TARGET equal to the constant C if it can
4150 be done in less than N insns. Do all computations in MODE.
4151 Returns the place where the output has been placed if it can be
4152 done and the insns have been emitted. If it would take more than N
 4153 insns, zero is returned and no insns are emitted. */
2bfcf297
DB
4154
4155rtx
f676971a 4156rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4157 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4158{
af8cb5c5 4159 rtx result, insn, set;
2bfcf297
DB
4160 HOST_WIDE_INT c0, c1;
4161
37409796 4162 switch (mode)
2bfcf297 4163 {
37409796
NS
4164 case QImode:
4165 case HImode:
2bfcf297 4166 if (dest == NULL)
c4ad648e 4167 dest = gen_reg_rtx (mode);
2bfcf297
DB
4168 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4169 return dest;
bb8df8a6 4170
37409796 4171 case SImode:
b3a13419 4172 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4173
d448860e 4174 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4175 GEN_INT (INTVAL (source)
4176 & (~ (HOST_WIDE_INT) 0xffff))));
4177 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4178 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4179 GEN_INT (INTVAL (source) & 0xffff))));
4180 result = dest;
37409796
NS
4181 break;
4182
4183 case DImode:
4184 switch (GET_CODE (source))
af8cb5c5 4185 {
37409796 4186 case CONST_INT:
af8cb5c5
DE
4187 c0 = INTVAL (source);
4188 c1 = -(c0 < 0);
37409796 4189 break;
bb8df8a6 4190
37409796 4191 case CONST_DOUBLE:
2bfcf297 4192#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4193 c0 = CONST_DOUBLE_LOW (source);
4194 c1 = -(c0 < 0);
2bfcf297 4195#else
af8cb5c5
DE
4196 c0 = CONST_DOUBLE_LOW (source);
4197 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4198#endif
37409796
NS
4199 break;
4200
4201 default:
4202 gcc_unreachable ();
af8cb5c5 4203 }
af8cb5c5
DE
4204
4205 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4206 break;
4207
4208 default:
4209 gcc_unreachable ();
2bfcf297 4210 }
2bfcf297 4211
af8cb5c5
DE
4212 insn = get_last_insn ();
4213 set = single_set (insn);
4214 if (! CONSTANT_P (SET_SRC (set)))
4215 set_unique_reg_note (insn, REG_EQUAL, source);
4216
4217 return result;
2bfcf297
DB
4218}
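/* Sketch of the SImode path above: the constant is built as a high-part
   set followed by an IOR of the low 16 bits, e.g. for source = 0x12345678

     (set result (const_int 0x12340000))          ; lis
     (set dest (ior result (const_int 0x5678)))   ; ori

   where the lis/ori mapping is the usual one.  QImode and HImode
   constants always fit and are emitted as a single SET.  */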
4219
4220/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
 4221 fall back to a straightforward decomposition. We do this to avoid
4222 exponential run times encountered when looking for longer sequences
4223 with rs6000_emit_set_const. */
4224static rtx
a2369ed3 4225rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4226{
4227 if (!TARGET_POWERPC64)
4228 {
4229 rtx operand1, operand2;
4230
4231 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4232 DImode);
d448860e 4233 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4234 DImode);
4235 emit_move_insn (operand1, GEN_INT (c1));
4236 emit_move_insn (operand2, GEN_INT (c2));
4237 }
4238 else
4239 {
bc06712d 4240 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4241
bc06712d 4242 ud1 = c1 & 0xffff;
f921c9c9 4243 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4244#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4245 c2 = c1 >> 32;
2bfcf297 4246#endif
bc06712d 4247 ud3 = c2 & 0xffff;
f921c9c9 4248 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4249
f676971a 4250 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4251 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4252 {
bc06712d 4253 if (ud1 & 0x8000)
b78d48dd 4254 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4255 else
4256 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4257 }
2bfcf297 4258
f676971a 4259 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4260 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4261 {
bc06712d 4262 if (ud2 & 0x8000)
f676971a 4263 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4264 - 0x80000000));
252b88f7 4265 else
bc06712d
TR
4266 emit_move_insn (dest, GEN_INT (ud2 << 16));
4267 if (ud1 != 0)
d448860e
JH
4268 emit_move_insn (copy_rtx (dest),
4269 gen_rtx_IOR (DImode, copy_rtx (dest),
4270 GEN_INT (ud1)));
252b88f7 4271 }
f676971a 4272 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4273 || (ud4 == 0 && ! (ud3 & 0x8000)))
4274 {
4275 if (ud3 & 0x8000)
f676971a 4276 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4277 - 0x80000000));
4278 else
4279 emit_move_insn (dest, GEN_INT (ud3 << 16));
4280
4281 if (ud2 != 0)
d448860e
JH
4282 emit_move_insn (copy_rtx (dest),
4283 gen_rtx_IOR (DImode, copy_rtx (dest),
4284 GEN_INT (ud2)));
4285 emit_move_insn (copy_rtx (dest),
4286 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4287 GEN_INT (16)));
bc06712d 4288 if (ud1 != 0)
d448860e
JH
4289 emit_move_insn (copy_rtx (dest),
4290 gen_rtx_IOR (DImode, copy_rtx (dest),
4291 GEN_INT (ud1)));
bc06712d 4292 }
f676971a 4293 else
bc06712d
TR
4294 {
4295 if (ud4 & 0x8000)
f676971a 4296 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4297 - 0x80000000));
4298 else
4299 emit_move_insn (dest, GEN_INT (ud4 << 16));
4300
4301 if (ud3 != 0)
d448860e
JH
4302 emit_move_insn (copy_rtx (dest),
4303 gen_rtx_IOR (DImode, copy_rtx (dest),
4304 GEN_INT (ud3)));
2bfcf297 4305
d448860e
JH
4306 emit_move_insn (copy_rtx (dest),
4307 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4308 GEN_INT (32)));
bc06712d 4309 if (ud2 != 0)
d448860e
JH
4310 emit_move_insn (copy_rtx (dest),
4311 gen_rtx_IOR (DImode, copy_rtx (dest),
4312 GEN_INT (ud2 << 16)));
bc06712d 4313 if (ud1 != 0)
d448860e
JH
4314 emit_move_insn (copy_rtx (dest),
4315 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4316 }
4317 }
2bfcf297
DB
4318 return dest;
4319}
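/* Illustrative decomposition (a sketch; the asm mnemonics are the usual
   mapping, not taken from this file): on TARGET_POWERPC64, for
   c1 = 0x1234567890abcdef the 16-bit halves are

     ud4 = 0x1234   ud3 = 0x5678   ud2 = 0x90ab   ud1 = 0xcdef

   and the emitted sequence corresponds roughly to

     lis   rD, 0x1234        # ud4 << 16
     ori   rD, rD, 0x5678    # | ud3
     sldi  rD, rD, 32        # shift left 32
     oris  rD, rD, 0x90ab    # | (ud2 << 16)
     ori   rD, rD, 0xcdef    # | ud1  */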
4320
76d2b81d 4321/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4322 in cases where that addressing form won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4323
4324static void
4325rs6000_eliminate_indexed_memrefs (rtx operands[2])
4326{
4327 if (GET_CODE (operands[0]) == MEM
4328 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4329 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4330 && ! reload_in_progress)
4331 operands[0]
4332 = replace_equiv_address (operands[0],
4333 copy_addr_to_reg (XEXP (operands[0], 0)));
4334
4335 if (GET_CODE (operands[1]) == MEM
4336 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4337 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4338 && ! reload_in_progress)
4339 operands[1]
4340 = replace_equiv_address (operands[1],
4341 copy_addr_to_reg (XEXP (operands[1], 0)));
4342}
4343
fb4d4348
GK
4344/* Emit a move from SOURCE to DEST in mode MODE. */
4345void
a2369ed3 4346rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4347{
4348 rtx operands[2];
4349 operands[0] = dest;
4350 operands[1] = source;
f676971a 4351
fb4d4348
GK
4352 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4353 if (GET_CODE (operands[1]) == CONST_DOUBLE
4354 && ! FLOAT_MODE_P (mode)
4355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4356 {
4357 /* FIXME. This should never happen. */
4358 /* Since it seems that it does, do the safe thing and convert
4359 to a CONST_INT. */
2496c7bd 4360 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4361 }
37409796
NS
4362 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4363 || FLOAT_MODE_P (mode)
4364 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4365 || CONST_DOUBLE_LOW (operands[1]) < 0)
4366 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4367 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4368
c9e8cb32
DD
4369 /* Check if GCC is setting up a block move that will end up using FP
4370 registers as temporaries. We must make sure this is acceptable. */
4371 if (GET_CODE (operands[0]) == MEM
4372 && GET_CODE (operands[1]) == MEM
4373 && mode == DImode
41543739
GK
4374 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4375 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4376 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4377 ? 32 : MEM_ALIGN (operands[0])))
4378 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4379 ? 32
41543739
GK
4380 : MEM_ALIGN (operands[1]))))
4381 && ! MEM_VOLATILE_P (operands [0])
4382 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4383 {
41543739
GK
4384 emit_move_insn (adjust_address (operands[0], SImode, 0),
4385 adjust_address (operands[1], SImode, 0));
d448860e
JH
4386 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4387 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4388 return;
4389 }
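  /* Sketch of the case handled just above: a DImode mem-to-mem copy for
     which a 64-bit access would be slow (under-aligned) but 32-bit
     accesses would not is split into two SImode copies at offsets 0 and 4,
     so no floating-point register is needed as the block-move temporary.  */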
630d42a0 4390
b3a13419 4391 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4392 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4393 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4394
a3170dc6
AH
4395 if (mode == SFmode && ! TARGET_POWERPC
4396 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4397 && GET_CODE (operands[0]) == MEM)
fb4d4348 4398 {
ffc14f31
GK
4399 int regnum;
4400
4401 if (reload_in_progress || reload_completed)
4402 regnum = true_regnum (operands[1]);
4403 else if (GET_CODE (operands[1]) == REG)
4404 regnum = REGNO (operands[1]);
4405 else
4406 regnum = -1;
f676971a 4407
fb4d4348
GK
4408 /* If operands[1] is a register, on POWER it may have
4409 double-precision data in it, so truncate it to single
4410 precision. */
4411 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4412 {
4413 rtx newreg;
b3a13419 4414 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4415 : gen_reg_rtx (mode));
fb4d4348
GK
4416 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4417 operands[1] = newreg;
4418 }
4419 }
4420
c4501e62
JJ
4421 /* Recognize the case where operand[1] is a reference to thread-local
4422 data and load its address to a register. */
84f52ebd 4423 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4424 {
84f52ebd
RH
4425 enum tls_model model;
4426 rtx tmp = operands[1];
4427 rtx addend = NULL;
4428
4429 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4430 {
4431 addend = XEXP (XEXP (tmp, 0), 1);
4432 tmp = XEXP (XEXP (tmp, 0), 0);
4433 }
4434
4435 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4436 model = SYMBOL_REF_TLS_MODEL (tmp);
4437 gcc_assert (model != 0);
4438
4439 tmp = rs6000_legitimize_tls_address (tmp, model);
4440 if (addend)
4441 {
4442 tmp = gen_rtx_PLUS (mode, tmp, addend);
4443 tmp = force_operand (tmp, operands[0]);
4444 }
4445 operands[1] = tmp;
c4501e62
JJ
4446 }
4447
8f4e6caf
RH
4448 /* Handle the case where reload calls us with an invalid address. */
4449 if (reload_in_progress && mode == Pmode
69ef87e2 4450 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4451 || ! nonimmediate_operand (operands[0], mode)))
4452 goto emit_set;
4453
a9baceb1
GK
4454 /* 128-bit constant floating-point values on Darwin should really be
4455 loaded as two parts. */
8521c414 4456 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4457 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4458 {
4459 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4460 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4461 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4462 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4463 simplify_gen_subreg (imode, operands[1], mode, 0),
4464 imode);
4465 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4466 GET_MODE_SIZE (imode)),
4467 simplify_gen_subreg (imode, operands[1], mode,
4468 GET_MODE_SIZE (imode)),
4469 imode);
a9baceb1
GK
4470 return;
4471 }
4472
fb4d4348
GK
4473 /* FIXME: In the long term, this switch statement should go away
4474 and be replaced by a sequence of tests based on things like
4475 mode == Pmode. */
4476 switch (mode)
4477 {
4478 case HImode:
4479 case QImode:
4480 if (CONSTANT_P (operands[1])
4481 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4482 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4483 break;
4484
06f4e019 4485 case TFmode:
7393f7f8 4486 case TDmode:
76d2b81d
DJ
4487 rs6000_eliminate_indexed_memrefs (operands);
4488 /* fall through */
4489
fb4d4348 4490 case DFmode:
7393f7f8 4491 case DDmode:
fb4d4348 4492 case SFmode:
f676971a 4493 if (CONSTANT_P (operands[1])
fb4d4348 4494 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4495 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4496 break;
f676971a 4497
0ac081f6
AH
4498 case V16QImode:
4499 case V8HImode:
4500 case V4SFmode:
4501 case V4SImode:
a3170dc6
AH
4502 case V4HImode:
4503 case V2SFmode:
4504 case V2SImode:
00a892b8 4505 case V1DImode:
69ef87e2 4506 if (CONSTANT_P (operands[1])
d744e06e 4507 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
4508 operands[1] = force_const_mem (mode, operands[1]);
4509 break;
f676971a 4510
fb4d4348 4511 case SImode:
a9098fd0 4512 case DImode:
fb4d4348
GK
4513 /* Use default pattern for address of ELF small data */
4514 if (TARGET_ELF
a9098fd0 4515 && mode == Pmode
f607bc57 4516 && DEFAULT_ABI == ABI_V4
f676971a 4517 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
4518 || GET_CODE (operands[1]) == CONST)
4519 && small_data_operand (operands[1], mode))
fb4d4348
GK
4520 {
4521 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4522 return;
4523 }
4524
f607bc57 4525 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
4526 && mode == Pmode && mode == SImode
4527 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
4528 {
4529 emit_insn (gen_movsi_got (operands[0], operands[1]));
4530 return;
4531 }
4532
ee890fe2 4533 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
4534 && TARGET_NO_TOC
4535 && ! flag_pic
a9098fd0 4536 && mode == Pmode
fb4d4348
GK
4537 && CONSTANT_P (operands[1])
4538 && GET_CODE (operands[1]) != HIGH
4539 && GET_CODE (operands[1]) != CONST_INT)
4540 {
b3a13419
ILT
4541 rtx target = (!can_create_pseudo_p ()
4542 ? operands[0]
4543 : gen_reg_rtx (mode));
fb4d4348
GK
4544
4545 /* If this is a function address on -mcall-aixdesc,
4546 convert it to the address of the descriptor. */
4547 if (DEFAULT_ABI == ABI_AIX
4548 && GET_CODE (operands[1]) == SYMBOL_REF
4549 && XSTR (operands[1], 0)[0] == '.')
4550 {
4551 const char *name = XSTR (operands[1], 0);
4552 rtx new_ref;
4553 while (*name == '.')
4554 name++;
4555 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4556 CONSTANT_POOL_ADDRESS_P (new_ref)
4557 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 4558 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 4559 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 4560 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
4561 operands[1] = new_ref;
4562 }
7509c759 4563
ee890fe2
SS
4564 if (DEFAULT_ABI == ABI_DARWIN)
4565 {
ab82a49f
AP
4566#if TARGET_MACHO
4567 if (MACHO_DYNAMIC_NO_PIC_P)
4568 {
4569 /* Take care of any required data indirection. */
4570 operands[1] = rs6000_machopic_legitimize_pic_address (
4571 operands[1], mode, operands[0]);
4572 if (operands[0] != operands[1])
4573 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 4574 operands[0], operands[1]));
ab82a49f
AP
4575 return;
4576 }
4577#endif
b8a55285
AP
4578 emit_insn (gen_macho_high (target, operands[1]));
4579 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
4580 return;
4581 }
4582
fb4d4348
GK
4583 emit_insn (gen_elf_high (target, operands[1]));
4584 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4585 return;
4586 }
4587
a9098fd0
GK
4588 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4589 and we have put it in the TOC, we just need to make a TOC-relative
4590 reference to it. */
4591 if (TARGET_TOC
4592 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 4593 && constant_pool_expr_p (operands[1])
a9098fd0
GK
4594 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4595 get_pool_mode (operands[1])))
fb4d4348 4596 {
a9098fd0 4597 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 4598 }
a9098fd0
GK
4599 else if (mode == Pmode
4600 && CONSTANT_P (operands[1])
38886f37
AO
4601 && ((GET_CODE (operands[1]) != CONST_INT
4602 && ! easy_fp_constant (operands[1], mode))
4603 || (GET_CODE (operands[1]) == CONST_INT
4604 && num_insns_constant (operands[1], mode) > 2)
4605 || (GET_CODE (operands[0]) == REG
4606 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 4607 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
4608 && ! legitimate_constant_pool_address_p (operands[1])
4609 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
4610 {
4611 /* Emit a USE operation so that the constant isn't deleted if
4612 expensive optimizations are turned on because nobody
4613 references it. This should only be done for operands that
4614 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4615 This should not be done for operands that contain LABEL_REFs.
4616 For now, we just handle the obvious case. */
4617 if (GET_CODE (operands[1]) != LABEL_REF)
4618 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4619
c859cda6 4620#if TARGET_MACHO
ee890fe2 4621 /* Darwin uses a special PIC legitimizer. */
ab82a49f 4622 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 4623 {
ee890fe2
SS
4624 operands[1] =
4625 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
4626 operands[0]);
4627 if (operands[0] != operands[1])
4628 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
4629 return;
4630 }
c859cda6 4631#endif
ee890fe2 4632
fb4d4348
GK
4633 /* If we are to limit the number of things we put in the TOC and
4634 this is a symbol plus a constant we can add in one insn,
4635 just put the symbol in the TOC and add the constant. Don't do
4636 this if reload is in progress. */
4637 if (GET_CODE (operands[1]) == CONST
4638 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4639 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 4640 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
4641 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4642 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4643 && ! side_effects_p (operands[0]))
4644 {
a4f6c312
SS
4645 rtx sym =
4646 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
4647 rtx other = XEXP (XEXP (operands[1], 0), 1);
4648
a9098fd0
GK
4649 sym = force_reg (mode, sym);
4650 if (mode == SImode)
4651 emit_insn (gen_addsi3 (operands[0], sym, other));
4652 else
4653 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
4654 return;
4655 }
4656
a9098fd0 4657 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4658
f676971a 4659 if (TARGET_TOC
4d588c14 4660 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
4661 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4662 get_pool_constant (XEXP (operands[1], 0)),
4663 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 4664 {
ba4828e0 4665 operands[1]
542a8afa 4666 = gen_const_mem (mode,
c4ad648e 4667 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 4668 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 4669 }
fb4d4348
GK
4670 }
4671 break;
a9098fd0 4672
fb4d4348 4673 case TImode:
76d2b81d
DJ
4674 rs6000_eliminate_indexed_memrefs (operands);
4675
27dc0551
DE
4676 if (TARGET_POWER)
4677 {
4678 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4679 gen_rtvec (2,
4680 gen_rtx_SET (VOIDmode,
4681 operands[0], operands[1]),
4682 gen_rtx_CLOBBER (VOIDmode,
4683 gen_rtx_SCRATCH (SImode)))));
4684 return;
4685 }
fb4d4348
GK
4686 break;
4687
4688 default:
37409796 4689 gcc_unreachable ();
fb4d4348
GK
4690 }
4691
a9098fd0
GK
4692 /* Above, we may have called force_const_mem which may have returned
4693 an invalid address. If we can, fix this up; otherwise, reload will
4694 have to deal with it. */
8f4e6caf
RH
4695 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4696 operands[1] = validize_mem (operands[1]);
a9098fd0 4697
8f4e6caf 4698 emit_set:
fb4d4348
GK
4699 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4700}
4697a36c 4701\f
2858f73a
GK
4702/* Nonzero if we can use a floating-point register to pass this arg. */
4703#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 4704 (SCALAR_FLOAT_MODE_P (MODE) \
7393f7f8 4705 && (MODE) != SDmode \
2858f73a
GK
4706 && (CUM)->fregno <= FP_ARG_MAX_REG \
4707 && TARGET_HARD_FLOAT && TARGET_FPRS)
4708
4709/* Nonzero if we can use an AltiVec register to pass this arg. */
4710#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4711 (ALTIVEC_VECTOR_MODE (MODE) \
4712 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4713 && TARGET_ALTIVEC_ABI \
83953138 4714 && (NAMED))
2858f73a 4715
c6e8c921
GK
4716/* Return a nonzero value to say to return the function value in
4717 memory, just as large structures are always returned. TYPE will be
4718 the data type of the value, and FNTYPE will be the type of the
4719 function doing the returning, or @code{NULL} for libcalls.
4720
4721 The AIX ABI for the RS/6000 specifies that all structures are
4722 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4723 specifies that structures <= 8 bytes are returned in r3/r4, but a
4724 draft put them in memory, and GCC used to implement the draft
df01da37 4725 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
4726 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4727 compatibility can change DRAFT_V4_STRUCT_RET to override the
4728 default, and -m switches get the final word. See
4729 rs6000_override_options for more details.
4730
4731 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4732 long double support is enabled. These values are returned in memory.
4733
4734 int_size_in_bytes returns -1 for variable size objects, which go in
4735 memory always. The cast to unsigned makes -1 > 8. */
4736
4737static bool
4738rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4739{
594a51fe
SS
4740 /* In the darwin64 abi, try to use registers for larger structs
4741 if possible. */
0b5383eb 4742 if (rs6000_darwin64_abi
594a51fe 4743 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
4744 && int_size_in_bytes (type) > 0)
4745 {
4746 CUMULATIVE_ARGS valcum;
4747 rtx valret;
4748
4749 valcum.words = 0;
4750 valcum.fregno = FP_ARG_MIN_REG;
4751 valcum.vregno = ALTIVEC_ARG_MIN_REG;
4752 /* Do a trial code generation as if this were going to be passed
4753 as an argument; if any part goes in memory, we return NULL. */
4754 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
4755 if (valret)
4756 return false;
4757 /* Otherwise fall through to more conventional ABI rules. */
4758 }
594a51fe 4759
c6e8c921 4760 if (AGGREGATE_TYPE_P (type)
df01da37 4761 && (aix_struct_return
c6e8c921
GK
4762 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4763 return true;
b693336b 4764
bada2eb8
DE
4765 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
4766 modes only exist for GCC vector types if -maltivec. */
4767 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
4768 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4769 return false;
4770
b693336b
PB
4771 /* Return synthetic vectors in memory. */
4772 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 4773 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
4774 {
4775 static bool warned_for_return_big_vectors = false;
4776 if (!warned_for_return_big_vectors)
4777 {
d4ee4d25 4778 warning (0, "GCC vector returned by reference: "
b693336b
PB
4779 "non-standard ABI extension with no compatibility guarantee");
4780 warned_for_return_big_vectors = true;
4781 }
4782 return true;
4783 }
4784
602ea4d3 4785 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 4786 return true;
ad630bef 4787
c6e8c921
GK
4788 return false;
4789}
4790
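/* A minimal stand-alone sketch (not GCC code) of the aggregate size test
   in rs6000_return_in_memory above: aix_struct_return forces memory, and
   the cast to unsigned makes the -1 that int_size_in_bytes gives for
   variable-size objects compare greater than 8. Names are illustrative. */
#include <stdbool.h>
#include <stdio.h>

static bool
aggregate_returns_in_memory (long size_in_bytes, bool aix_struct_ret)
{
  return aix_struct_ret || (unsigned long) size_in_bytes > 8;
}

int
main (void)
{
  printf ("%d\n", aggregate_returns_in_memory (8, false));   /* 0: r3/r4 under SVR4 */
  printf ("%d\n", aggregate_returns_in_memory (8, true));    /* 1: memory under AIX */
  printf ("%d\n", aggregate_returns_in_memory (-1, false));  /* 1: variable size    */
  return 0;
}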
4697a36c
MM
4791/* Initialize a variable CUM of type CUMULATIVE_ARGS
4792 for a call to a function whose data type is FNTYPE.
4793 For a library call, FNTYPE is 0.
4794
4795 For incoming args we set the number of arguments in the prototype large
1c20ae99 4796 so we never return a PARALLEL. */
4697a36c
MM
4797
4798void
f676971a 4799init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
4800 rtx libname ATTRIBUTE_UNUSED, int incoming,
4801 int libcall, int n_named_args)
4697a36c
MM
4802{
4803 static CUMULATIVE_ARGS zero_cumulative;
4804
4805 *cum = zero_cumulative;
4806 cum->words = 0;
4807 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 4808 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 4809 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
4810 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4811 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 4812 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
4813 cum->stdarg = fntype
4814 && (TYPE_ARG_TYPES (fntype) != 0
4815 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4816 != void_type_node));
4697a36c 4817
0f6937fe
AM
4818 cum->nargs_prototype = 0;
4819 if (incoming || cum->prototype)
4820 cum->nargs_prototype = n_named_args;
4697a36c 4821
a5c76ee6 4822 /* Check for a longcall attribute. */
3eb4e360
AM
4823 if ((!fntype && rs6000_default_long_calls)
4824 || (fntype
4825 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4826 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
4827 cum->call_cookie |= CALL_LONG;
6a4cee5f 4828
4697a36c
MM
4829 if (TARGET_DEBUG_ARG)
4830 {
4831 fprintf (stderr, "\ninit_cumulative_args:");
4832 if (fntype)
4833 {
4834 tree ret_type = TREE_TYPE (fntype);
4835 fprintf (stderr, " ret code = %s,",
4836 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4837 }
4838
6a4cee5f
MM
4839 if (cum->call_cookie & CALL_LONG)
4840 fprintf (stderr, " longcall,");
4841
4697a36c
MM
4842 fprintf (stderr, " proto = %d, nargs = %d\n",
4843 cum->prototype, cum->nargs_prototype);
4844 }
f676971a 4845
c4ad648e
AM
4846 if (fntype
4847 && !TARGET_ALTIVEC
4848 && TARGET_ALTIVEC_ABI
4849 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4850 {
c85ce869 4851 error ("cannot return value in vector register because"
c4ad648e 4852 " altivec instructions are disabled, use -maltivec"
c85ce869 4853 " to enable them");
c4ad648e 4854 }
4697a36c
MM
4855}
4856\f
fe984136
RH
4857/* Return true if TYPE must be passed on the stack and not in registers. */
4858
4859static bool
4860rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4861{
4862 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4863 return must_pass_in_stack_var_size (mode, type);
4864 else
4865 return must_pass_in_stack_var_size_or_pad (mode, type);
4866}
4867
c229cba9
DE
4868/* If defined, a C expression which determines whether, and in which
4869 direction, to pad out an argument with extra space. The value
4870 should be of type `enum direction': either `upward' to pad above
4871 the argument, `downward' to pad below, or `none' to inhibit
4872 padding.
4873
4874 For the AIX ABI, structs are always stored left-shifted in their
4875 argument slot. */
4876
9ebbca7d 4877enum direction
a2369ed3 4878function_arg_padding (enum machine_mode mode, tree type)
c229cba9 4879{
6e985040
AM
4880#ifndef AGGREGATE_PADDING_FIXED
4881#define AGGREGATE_PADDING_FIXED 0
4882#endif
4883#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4884#define AGGREGATES_PAD_UPWARD_ALWAYS 0
4885#endif
4886
4887 if (!AGGREGATE_PADDING_FIXED)
4888 {
4889 /* GCC used to pass structures of the same size as integer types as
4890 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 4891 i.e., structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
4892 passed padded downward, except that -mstrict-align further
4893 muddied the water in that multi-component structures of 2 and 4
4894 bytes in size were passed padded upward.
4895
4896 The following arranges for best compatibility with previous
4897 versions of gcc, but removes the -mstrict-align dependency. */
4898 if (BYTES_BIG_ENDIAN)
4899 {
4900 HOST_WIDE_INT size = 0;
4901
4902 if (mode == BLKmode)
4903 {
4904 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4905 size = int_size_in_bytes (type);
4906 }
4907 else
4908 size = GET_MODE_SIZE (mode);
4909
4910 if (size == 1 || size == 2 || size == 4)
4911 return downward;
4912 }
4913 return upward;
4914 }
4915
4916 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4917 {
4918 if (type != 0 && AGGREGATE_TYPE_P (type))
4919 return upward;
4920 }
c229cba9 4921
d3704c46
KH
4922 /* Fall back to the default. */
4923 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
4924}
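/* A stand-alone sketch of the big-endian rule above, assuming the default
   AGGREGATE_PADDING_FIXED of 0: arguments of size 1, 2 or 4 bytes are
   padded downward, everything else upward. Illustrative only; the names
   below are not from rs6000.c. */
#include <stdio.h>

enum pad_direction { pad_none, pad_upward, pad_downward };

static enum pad_direction
padding_for_size (long size_in_bytes)
{
  if (size_in_bytes == 1 || size_in_bytes == 2 || size_in_bytes == 4)
    return pad_downward;
  return pad_upward;
}

int
main (void)
{
  printf ("%d %d %d\n",
          padding_for_size (2),   /* downward: short or 2-byte struct */
          padding_for_size (3),   /* upward:   3-byte BLKmode struct  */
          padding_for_size (8));  /* upward:   double                 */
  return 0;
}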
4925
b6c9286a 4926/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 4927 of an argument with the specified mode and type. If it is not defined,
b6c9286a 4928 PARM_BOUNDARY is used for all arguments.
f676971a 4929
84e9ad15
AM
4930 V.4 wants long longs and doubles to be double word aligned. Just
4931 testing the mode size is a boneheaded way to do this as it means
4932 that other types such as complex int are also double word aligned.
4933 However, we're stuck with this because changing the ABI might break
4934 existing library interfaces.
4935
b693336b
PB
4936 Doubleword align SPE vectors.
4937 Quadword align Altivec vectors.
4938 Quadword align large synthetic vector types. */
b6c9286a
MM
4939
4940int
b693336b 4941function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 4942{
84e9ad15
AM
4943 if (DEFAULT_ABI == ABI_V4
4944 && (GET_MODE_SIZE (mode) == 8
4945 || (TARGET_HARD_FLOAT
4946 && TARGET_FPRS
7393f7f8 4947 && (mode == TFmode || mode == TDmode))))
4ed78545 4948 return 64;
ad630bef
DE
4949 else if (SPE_VECTOR_MODE (mode)
4950 || (type && TREE_CODE (type) == VECTOR_TYPE
4951 && int_size_in_bytes (type) >= 8
4952 && int_size_in_bytes (type) < 16))
e1f83b4d 4953 return 64;
ad630bef
DE
4954 else if (ALTIVEC_VECTOR_MODE (mode)
4955 || (type && TREE_CODE (type) == VECTOR_TYPE
4956 && int_size_in_bytes (type) >= 16))
0ac081f6 4957 return 128;
0b5383eb
DJ
4958 else if (rs6000_darwin64_abi && mode == BLKmode
4959 && type && TYPE_ALIGN (type) > 64)
4960 return 128;
9ebbca7d 4961 else
b6c9286a 4962 return PARM_BOUNDARY;
b6c9286a 4963}
c53bdcf5 4964
294bd182
AM
4965/* For a function parm of MODE and TYPE, return the starting word in
4966 the parameter area. NWORDS of the parameter area are already used. */
4967
4968static unsigned int
4969rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
4970{
4971 unsigned int align;
4972 unsigned int parm_offset;
4973
4974 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4975 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
4976 return nwords + (-(parm_offset + nwords) & align);
4977}
4978
c53bdcf5
AM
4979/* Compute the size (in words) of a function argument. */
4980
4981static unsigned long
4982rs6000_arg_size (enum machine_mode mode, tree type)
4983{
4984 unsigned long size;
4985
4986 if (mode != BLKmode)
4987 size = GET_MODE_SIZE (mode);
4988 else
4989 size = int_size_in_bytes (type);
4990
4991 if (TARGET_32BIT)
4992 return (size + 3) >> 2;
4993 else
4994 return (size + 7) >> 3;
4995}
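/* A stand-alone sketch of the two helpers above for the 32-bit AIX case:
   4-byte words, PARM_BOUNDARY of 32 bits, and a parameter save area at
   offset 24 from the stack (parm_offset 6). These constants are assumptions
   restated from the surrounding code, not taken from rs6000.h. */
#include <stdio.h>

#define WORD_BYTES  4
#define PARM_BITS   32
#define PARM_OFFSET 6u

static unsigned long
arg_size_words (unsigned long size_in_bytes)
{
  return (size_in_bytes + WORD_BYTES - 1) / WORD_BYTES;
}

static unsigned int
parm_start (unsigned int boundary_bits, unsigned int nwords)
{
  unsigned int align = boundary_bits / PARM_BITS - 1;
  return nwords + (-(PARM_OFFSET + nwords) & align);
}

int
main (void)
{
  /* A 16-byte AltiVec argument with a 128-bit boundary starts at word 2
     (2 mod 4, matching the vector-alignment comments later in this file)
     and occupies 4 words; a following 4-byte int then starts at word 6. */
  printf ("%u %lu %u\n",
          parm_start (128, 0), arg_size_words (16), parm_start (32, 6));
  return 0;
}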
b6c9286a 4996\f
0b5383eb 4997/* Use this to flush pending int fields. */
594a51fe
SS
4998
4999static void
0b5383eb
DJ
5000rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5001 HOST_WIDE_INT bitpos)
594a51fe 5002{
0b5383eb
DJ
5003 unsigned int startbit, endbit;
5004 int intregs, intoffset;
5005 enum machine_mode mode;
594a51fe 5006
0b5383eb
DJ
5007 if (cum->intoffset == -1)
5008 return;
594a51fe 5009
0b5383eb
DJ
5010 intoffset = cum->intoffset;
5011 cum->intoffset = -1;
5012
5013 if (intoffset % BITS_PER_WORD != 0)
5014 {
5015 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5016 MODE_INT, 0);
5017 if (mode == BLKmode)
594a51fe 5018 {
0b5383eb
DJ
5019 /* We couldn't find an appropriate mode, which happens,
5020 e.g., in packed structs when there are 3 bytes to load.
5021 Move intoffset back to the beginning of the word in this
5022 case. */
5023 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5024 }
594a51fe 5025 }
0b5383eb
DJ
5026
5027 startbit = intoffset & -BITS_PER_WORD;
5028 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5029 intregs = (endbit - startbit) / BITS_PER_WORD;
5030 cum->words += intregs;
5031}
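/* A worked example of the startbit/endbit arithmetic above, assuming
   64-bit words as under the darwin64 ABI. Illustrative only. */
#include <stdio.h>

#define BPW 64L   /* assumed BITS_PER_WORD */

static long
pending_int_regs (long intoffset, long bitpos)
{
  long startbit = intoffset & -BPW;
  long endbit   = (bitpos + BPW - 1) & -BPW;
  return (endbit - startbit) / BPW;
}

int
main (void)
{
  /* struct { int i; double d; }: int bits [0,32), double at bit 64;
     the pending int field occupies one GPR. */
  printf ("%ld\n", pending_int_regs (0, 64));    /* 1 */
  /* Int fields from bit 32 up to a field at bit 192 span three words. */
  printf ("%ld\n", pending_int_regs (32, 192));  /* 3 */
  return 0;
}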
5032
5033/* The darwin64 ABI calls for us to recurse down through structs,
5034 looking for elements passed in registers. Unfortunately, we have
5035 to track the int register count here as well, because of misalignments
5036 in powerpc alignment mode. */
5037
5038static void
5039rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5040 tree type,
5041 HOST_WIDE_INT startbitpos)
5042{
5043 tree f;
5044
5045 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5046 if (TREE_CODE (f) == FIELD_DECL)
5047 {
5048 HOST_WIDE_INT bitpos = startbitpos;
5049 tree ftype = TREE_TYPE (f);
70fb00df
AP
5050 enum machine_mode mode;
5051 if (ftype == error_mark_node)
5052 continue;
5053 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5054
5055 if (DECL_SIZE (f) != 0
5056 && host_integerp (bit_position (f), 1))
5057 bitpos += int_bit_position (f);
5058
5059 /* ??? FIXME: else assume zero offset. */
5060
5061 if (TREE_CODE (ftype) == RECORD_TYPE)
5062 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5063 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5064 {
5065 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5066 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5067 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5068 }
5069 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5070 {
5071 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5072 cum->vregno++;
5073 cum->words += 2;
5074 }
5075 else if (cum->intoffset == -1)
5076 cum->intoffset = bitpos;
5077 }
594a51fe
SS
5078}
5079
4697a36c
MM
5080/* Update the data in CUM to advance over an argument
5081 of mode MODE and data type TYPE.
b2d04ecf
AM
5082 (TYPE is null for libcalls where that information may not be available.)
5083
5084 Note that for args passed by reference, function_arg will be called
5085 with MODE and TYPE set to that of the pointer to the arg, not the arg
5086 itself. */
4697a36c
MM
5087
5088void
f676971a 5089function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5090 tree type, int named, int depth)
4697a36c 5091{
0b5383eb
DJ
5092 int size;
5093
594a51fe
SS
5094 /* Only tick off an argument if we're not recursing. */
5095 if (depth == 0)
5096 cum->nargs_prototype--;
4697a36c 5097
ad630bef
DE
5098 if (TARGET_ALTIVEC_ABI
5099 && (ALTIVEC_VECTOR_MODE (mode)
5100 || (type && TREE_CODE (type) == VECTOR_TYPE
5101 && int_size_in_bytes (type) == 16)))
0ac081f6 5102 {
4ed78545
AM
5103 bool stack = false;
5104
2858f73a 5105 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5106 {
6d0ef01e
HP
5107 cum->vregno++;
5108 if (!TARGET_ALTIVEC)
c85ce869 5109 error ("cannot pass argument in vector register because"
6d0ef01e 5110 " altivec instructions are disabled, use -maltivec"
c85ce869 5111 " to enable them");
4ed78545
AM
5112
5113 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5114 even if it is going to be passed in a vector register.
4ed78545
AM
5115 Darwin does the same for variable-argument functions. */
5116 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5117 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5118 stack = true;
6d0ef01e 5119 }
4ed78545
AM
5120 else
5121 stack = true;
5122
5123 if (stack)
c4ad648e 5124 {
a594a19c 5125 int align;
f676971a 5126
2858f73a
GK
5127 /* Vector parameters must be 16-byte aligned. This places
5128 them at 2 mod 4 in terms of words in 32-bit mode, since
5129 the parameter save area starts at offset 24 from the
5130 stack. In 64-bit mode, they just have to start on an
5131 even word, since the parameter save area is 16-byte
5132 aligned. Space for GPRs is reserved even if the argument
5133 will be passed in memory. */
5134 if (TARGET_32BIT)
4ed78545 5135 align = (2 - cum->words) & 3;
2858f73a
GK
5136 else
5137 align = cum->words & 1;
c53bdcf5 5138 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5139
a594a19c
GK
5140 if (TARGET_DEBUG_ARG)
5141 {
f676971a 5142 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5143 cum->words, align);
5144 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5145 cum->nargs_prototype, cum->prototype,
2858f73a 5146 GET_MODE_NAME (mode));
a594a19c
GK
5147 }
5148 }
0ac081f6 5149 }
a4b0320c 5150 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5151 && !cum->stdarg
5152 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5153 cum->sysv_gregno++;
594a51fe
SS
5154
5155 else if (rs6000_darwin64_abi
5156 && mode == BLKmode
0b5383eb
DJ
5157 && TREE_CODE (type) == RECORD_TYPE
5158 && (size = int_size_in_bytes (type)) > 0)
5159 {
5160 /* Variable sized types have size == -1 and are
5161 treated as if consisting entirely of ints.
5162 Pad to 16 byte boundary if needed. */
5163 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5164 && (cum->words % 2) != 0)
5165 cum->words++;
5166 /* For varargs, we can just go up by the size of the struct. */
5167 if (!named)
5168 cum->words += (size + 7) / 8;
5169 else
5170 {
5171 /* It is tempting to say int register count just goes up by
5172 sizeof(type)/8, but this is wrong in a case such as
5173 { int; double; int; } [powerpc alignment]. We have to
5174 grovel through the fields for these too. */
5175 cum->intoffset = 0;
5176 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5177 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5178 size * BITS_PER_UNIT);
5179 }
5180 }
f607bc57 5181 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5182 {
a3170dc6 5183 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5184 && (mode == SFmode || mode == DFmode
7393f7f8 5185 || mode == DDmode || mode == TDmode
602ea4d3 5186 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5187 {
2d83f070
JJ
5188 /* _Decimal128 must use an even/odd register pair. This assumes
5189 that the register number is odd when fregno is odd. */
5190 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5191 cum->fregno++;
5192
5193 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5194 <= FP_ARG_V4_MAX_REG)
602ea4d3 5195 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5196 else
5197 {
602ea4d3 5198 cum->fregno = FP_ARG_V4_MAX_REG + 1;
7393f7f8 5199 if (mode == DFmode || mode == TFmode || mode == DDmode || mode == TDmode)
c4ad648e 5200 cum->words += cum->words & 1;
c53bdcf5 5201 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5202 }
4697a36c 5203 }
4cc833b7
RH
5204 else
5205 {
b2d04ecf 5206 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5207 int gregno = cum->sysv_gregno;
5208
4ed78545
AM
5209 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5210 (r7,r8) or (r9,r10). As does any other 2 word item such
5211 as complex int due to a historical mistake. */
5212 if (n_words == 2)
5213 gregno += (1 - gregno) & 1;
4cc833b7 5214
4ed78545 5215 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5216 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5217 {
4ed78545
AM
5218 /* Long long and SPE vectors are aligned on the stack.
5219 So are other 2 word items such as complex int due to
5220 a historical mistake. */
4cc833b7
RH
5221 if (n_words == 2)
5222 cum->words += cum->words & 1;
5223 cum->words += n_words;
5224 }
4697a36c 5225
4cc833b7
RH
5226 /* Note: we keep accumulating gregno even after we have started
 5227 spilling to the stack; this is how expand_builtin_saveregs
 5228 learns that spilling to the stack has begun. */
5229 cum->sysv_gregno = gregno + n_words;
5230 }
4697a36c 5231
4cc833b7
RH
5232 if (TARGET_DEBUG_ARG)
5233 {
5234 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5235 cum->words, cum->fregno);
5236 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5237 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5238 fprintf (stderr, "mode = %4s, named = %d\n",
5239 GET_MODE_NAME (mode), named);
5240 }
4697a36c
MM
5241 }
5242 else
4cc833b7 5243 {
b2d04ecf 5244 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5245 int start_words = cum->words;
5246 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5247
294bd182 5248 cum->words = align_words + n_words;
4697a36c 5249
ebb109ad 5250 if (SCALAR_FLOAT_MODE_P (mode)
7393f7f8 5251 && mode != SDmode
a3170dc6 5252 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5253 {
5254 /* _Decimal128 must be passed in an even/odd float register pair.
5255 This assumes that the register number is odd when fregno is
5256 odd. */
5257 if (mode == TDmode && (cum->fregno % 2) == 1)
5258 cum->fregno++;
5259 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5260 }
4cc833b7
RH
5261
5262 if (TARGET_DEBUG_ARG)
5263 {
5264 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5265 cum->words, cum->fregno);
5266 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5267 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5268 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5269 named, align_words - start_words, depth);
4cc833b7
RH
5270 }
5271 }
4697a36c 5272}
a6c9bed4 5273
f82f556d
AH
5274static rtx
5275spe_build_register_parallel (enum machine_mode mode, int gregno)
5276{
17caeff2 5277 rtx r1, r3, r5, r7;
f82f556d 5278
37409796 5279 switch (mode)
f82f556d 5280 {
37409796 5281 case DFmode:
54b695e7
AH
5282 r1 = gen_rtx_REG (DImode, gregno);
5283 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5284 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5285
5286 case DCmode:
17caeff2 5287 case TFmode:
54b695e7
AH
5288 r1 = gen_rtx_REG (DImode, gregno);
5289 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5290 r3 = gen_rtx_REG (DImode, gregno + 2);
5291 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5292 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5293
17caeff2
JM
5294 case TCmode:
5295 r1 = gen_rtx_REG (DImode, gregno);
5296 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5297 r3 = gen_rtx_REG (DImode, gregno + 2);
5298 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5299 r5 = gen_rtx_REG (DImode, gregno + 4);
5300 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5301 r7 = gen_rtx_REG (DImode, gregno + 6);
5302 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5303 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5304
37409796
NS
5305 default:
5306 gcc_unreachable ();
f82f556d 5307 }
f82f556d 5308}
b78d48dd 5309
f82f556d 5310/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5311static rtx
f676971a 5312rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5313 tree type)
a6c9bed4 5314{
f82f556d
AH
5315 int gregno = cum->sysv_gregno;
5316
5317 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5318 are passed and returned in a pair of GPRs for ABI compatibility. */
17caeff2
JM
5319 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
5320 || mode == TFmode || mode == TCmode))
f82f556d 5321 {
b5870bee
AH
5322 int n_words = rs6000_arg_size (mode, type);
5323
f82f556d 5324 /* Doubles go in an odd/even register pair (r5/r6, etc). */
b5870bee
AH
5325 if (mode == DFmode)
5326 gregno += (1 - gregno) & 1;
f82f556d 5327
b5870bee
AH
5328 /* Multi-reg args are not split between registers and stack. */
5329 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5330 return NULL_RTX;
5331
5332 return spe_build_register_parallel (mode, gregno);
5333 }
a6c9bed4
AH
5334 if (cum->stdarg)
5335 {
c53bdcf5 5336 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5337
5338 /* SPE vectors are put in odd registers. */
5339 if (n_words == 2 && (gregno & 1) == 0)
5340 gregno += 1;
5341
5342 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5343 {
5344 rtx r1, r2;
5345 enum machine_mode m = SImode;
5346
5347 r1 = gen_rtx_REG (m, gregno);
5348 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5349 r2 = gen_rtx_REG (m, gregno + 1);
5350 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5351 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5352 }
5353 else
b78d48dd 5354 return NULL_RTX;
a6c9bed4
AH
5355 }
5356 else
5357 {
f82f556d
AH
5358 if (gregno <= GP_ARG_MAX_REG)
5359 return gen_rtx_REG (mode, gregno);
a6c9bed4 5360 else
b78d48dd 5361 return NULL_RTX;
a6c9bed4
AH
5362 }
5363}
5364
0b5383eb
DJ
5365/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5366 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5367
0b5383eb 5368static void
bb8df8a6 5369rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5370 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5371{
0b5383eb
DJ
5372 enum machine_mode mode;
5373 unsigned int regno;
5374 unsigned int startbit, endbit;
5375 int this_regno, intregs, intoffset;
5376 rtx reg;
594a51fe 5377
0b5383eb
DJ
5378 if (cum->intoffset == -1)
5379 return;
5380
5381 intoffset = cum->intoffset;
5382 cum->intoffset = -1;
5383
5384 /* If this is the trailing part of a word, try to only load that
5385 much into the register. Otherwise load the whole register. Note
5386 that in the latter case we may pick up unwanted bits. It's not a
5387 problem at the moment but may wish to revisit. */
5388
5389 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5390 {
0b5383eb
DJ
5391 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5392 MODE_INT, 0);
5393 if (mode == BLKmode)
5394 {
5395 /* We couldn't find an appropriate mode, which happens,
5396 e.g., in packed structs when there are 3 bytes to load.
5397 Back intoffset back to the beginning of the word in this
5398 case. */
5399 intoffset = intoffset & -BITS_PER_WORD;
5400 mode = word_mode;
5401 }
5402 }
5403 else
5404 mode = word_mode;
5405
5406 startbit = intoffset & -BITS_PER_WORD;
5407 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5408 intregs = (endbit - startbit) / BITS_PER_WORD;
5409 this_regno = cum->words + intoffset / BITS_PER_WORD;
5410
5411 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5412 cum->use_stack = 1;
bb8df8a6 5413
0b5383eb
DJ
5414 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5415 if (intregs <= 0)
5416 return;
5417
5418 intoffset /= BITS_PER_UNIT;
5419 do
5420 {
5421 regno = GP_ARG_MIN_REG + this_regno;
5422 reg = gen_rtx_REG (mode, regno);
5423 rvec[(*k)++] =
5424 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5425
5426 this_regno += 1;
5427 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5428 mode = word_mode;
5429 intregs -= 1;
5430 }
5431 while (intregs > 0);
5432}
5433
5434/* Recursive workhorse for the following. */
5435
5436static void
bb8df8a6 5437rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
0b5383eb
DJ
5438 HOST_WIDE_INT startbitpos, rtx rvec[],
5439 int *k)
5440{
5441 tree f;
5442
5443 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5444 if (TREE_CODE (f) == FIELD_DECL)
5445 {
5446 HOST_WIDE_INT bitpos = startbitpos;
5447 tree ftype = TREE_TYPE (f);
70fb00df
AP
5448 enum machine_mode mode;
5449 if (ftype == error_mark_node)
5450 continue;
5451 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5452
5453 if (DECL_SIZE (f) != 0
5454 && host_integerp (bit_position (f), 1))
5455 bitpos += int_bit_position (f);
5456
5457 /* ??? FIXME: else assume zero offset. */
5458
5459 if (TREE_CODE (ftype) == RECORD_TYPE)
5460 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5461 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5462 {
0b5383eb
DJ
5463#if 0
5464 switch (mode)
594a51fe 5465 {
0b5383eb
DJ
5466 case SCmode: mode = SFmode; break;
5467 case DCmode: mode = DFmode; break;
5468 case TCmode: mode = TFmode; break;
5469 default: break;
594a51fe 5470 }
0b5383eb
DJ
5471#endif
5472 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5473 rvec[(*k)++]
bb8df8a6 5474 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5475 gen_rtx_REG (mode, cum->fregno++),
5476 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5477 if (mode == TFmode || mode == TDmode)
0b5383eb 5478 cum->fregno++;
594a51fe 5479 }
0b5383eb
DJ
5480 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5481 {
5482 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5483 rvec[(*k)++]
bb8df8a6
EC
5484 = gen_rtx_EXPR_LIST (VOIDmode,
5485 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5486 GEN_INT (bitpos / BITS_PER_UNIT));
5487 }
5488 else if (cum->intoffset == -1)
5489 cum->intoffset = bitpos;
5490 }
5491}
594a51fe 5492
0b5383eb
DJ
5493/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5494 the register(s) to be used for each field and subfield of a struct
5495 being passed by value, along with the offset of where the
5496 register's value may be found in the block. FP fields go in FP
5498 registers, vector fields go in vector registers, and everything
bb8df8a6 5498 else goes in int registers, packed as in memory.
8ff40a74 5499
0b5383eb
DJ
5500 This code is also used for function return values. RETVAL indicates
5501 whether this is the case.
8ff40a74 5502
a4d05547 5503 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 5504 calling convention. */
594a51fe 5505
0b5383eb
DJ
5506static rtx
5507rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
5508 int named, bool retval)
5509{
5510 rtx rvec[FIRST_PSEUDO_REGISTER];
5511 int k = 1, kbase = 1;
5512 HOST_WIDE_INT typesize = int_size_in_bytes (type);
5513 /* This is a copy; modifications are not visible to our caller. */
5514 CUMULATIVE_ARGS copy_cum = *orig_cum;
5515 CUMULATIVE_ARGS *cum = &copy_cum;
5516
5517 /* Pad to 16 byte boundary if needed. */
5518 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5519 && (cum->words % 2) != 0)
5520 cum->words++;
5521
5522 cum->intoffset = 0;
5523 cum->use_stack = 0;
5524 cum->named = named;
5525
5526 /* Put entries into rvec[] for individual FP and vector fields, and
5527 for the chunks of memory that go in int regs. Note we start at
5528 element 1; 0 is reserved for an indication of using memory, and
5529 may or may not be filled in below. */
5530 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
5531 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
5532
5533 /* If any part of the struct went on the stack put all of it there.
5534 This hack is because the generic code for
5535 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
5536 parts of the struct are not at the beginning. */
5537 if (cum->use_stack)
5538 {
5539 if (retval)
5540 return NULL_RTX; /* doesn't go in registers at all */
5541 kbase = 0;
5542 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5543 }
5544 if (k > 1 || cum->use_stack)
5545 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
5546 else
5547 return NULL_RTX;
5548}
5549
b78d48dd
FJ
5550/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
5551
5552static rtx
ec6376ab 5553rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 5554{
ec6376ab
AM
5555 int n_units;
5556 int i, k;
5557 rtx rvec[GP_ARG_NUM_REG + 1];
5558
5559 if (align_words >= GP_ARG_NUM_REG)
5560 return NULL_RTX;
5561
5562 n_units = rs6000_arg_size (mode, type);
5563
5564 /* Optimize the simple case where the arg fits in one gpr, except in
5565 the case of BLKmode due to assign_parms assuming that registers are
5566 BITS_PER_WORD wide. */
5567 if (n_units == 0
5568 || (n_units == 1 && mode != BLKmode))
5569 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5570
5571 k = 0;
5572 if (align_words + n_units > GP_ARG_NUM_REG)
5573 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5574 using a magic NULL_RTX component.
79773478
AM
5575 This is not strictly correct. Only some of the arg belongs in
5576 memory, not all of it. However, the normal scheme using
5577 function_arg_partial_nregs can result in unusual subregs, e.g.
5578 (subreg:SI (reg:DF) 4), which are not handled well. The code to
5579 store the whole arg to memory is often more efficient than code
5580 to store pieces, and we know that space is available in the right
5581 place for the whole arg. */
ec6376ab
AM
5582 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5583
5584 i = 0;
5585 do
36a454e1 5586 {
ec6376ab
AM
5587 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
5588 rtx off = GEN_INT (i++ * 4);
5589 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 5590 }
ec6376ab
AM
5591 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
5592
5593 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
5594}
5595
4697a36c
MM
5596/* Determine where to put an argument to a function.
5597 Value is zero to push the argument on the stack,
5598 or a hard register in which to store the argument.
5599
5600 MODE is the argument's machine mode.
5601 TYPE is the data type of the argument (as a tree).
5602 This is null for libcalls where that information may
5603 not be available.
5604 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
5605 the preceding args and about the function being called. It is
5606 not modified in this routine.
4697a36c
MM
5607 NAMED is nonzero if this argument is a named parameter
5608 (otherwise it is an extra parameter matching an ellipsis).
5609
5610 On RS/6000 the first eight words of non-FP are normally in registers
5611 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
5612 Under V.4, the first 8 FP args are in registers.
5613
5614 If this is floating-point and no prototype is specified, we use
5615 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 5616 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 5617 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
5618 doesn't support PARALLEL anyway.
5619
5620 Note that for args passed by reference, function_arg will be called
5621 with MODE and TYPE set to that of the pointer to the arg, not the arg
5622 itself. */
4697a36c 5623
9390387d 5624rtx
f676971a 5625function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5626 tree type, int named)
4697a36c 5627{
4cc833b7 5628 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 5629
a4f6c312
SS
5630 /* Return a marker to indicate whether CR1 needs to set or clear the
5631 bit that V.4 uses to say fp args were passed in registers.
5632 Assume that we don't need the marker for software floating point,
5633 or compiler generated library calls. */
4697a36c
MM
5634 if (mode == VOIDmode)
5635 {
f607bc57 5636 if (abi == ABI_V4
b9599e46 5637 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
5638 && (cum->stdarg
5639 || (cum->nargs_prototype < 0
5640 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 5641 {
a3170dc6
AH
5642 /* For the SPE, we need to crxor CR6 always. */
5643 if (TARGET_SPE_ABI)
5644 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
5645 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
5646 return GEN_INT (cum->call_cookie
5647 | ((cum->fregno == FP_ARG_MIN_REG)
5648 ? CALL_V4_SET_FP_ARGS
5649 : CALL_V4_CLEAR_FP_ARGS));
7509c759 5650 }
4697a36c 5651
7509c759 5652 return GEN_INT (cum->call_cookie);
4697a36c
MM
5653 }
5654
0b5383eb
DJ
5655 if (rs6000_darwin64_abi && mode == BLKmode
5656 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 5657 {
0b5383eb 5658 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
5659 if (rslt != NULL_RTX)
5660 return rslt;
5661 /* Else fall through to usual handling. */
5662 }
5663
2858f73a 5664 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
5665 if (TARGET_64BIT && ! cum->prototype)
5666 {
c4ad648e
AM
5667 /* Vector parameters get passed in a vector register
 5668 and also in GPRs or memory, in the absence of a prototype. */
5669 int align_words;
5670 rtx slot;
5671 align_words = (cum->words + 1) & ~1;
5672
5673 if (align_words >= GP_ARG_NUM_REG)
5674 {
5675 slot = NULL_RTX;
5676 }
5677 else
5678 {
5679 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5680 }
5681 return gen_rtx_PARALLEL (mode,
5682 gen_rtvec (2,
5683 gen_rtx_EXPR_LIST (VOIDmode,
5684 slot, const0_rtx),
5685 gen_rtx_EXPR_LIST (VOIDmode,
5686 gen_rtx_REG (mode, cum->vregno),
5687 const0_rtx)));
c72d6c26
HP
5688 }
5689 else
5690 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
5691 else if (TARGET_ALTIVEC_ABI
5692 && (ALTIVEC_VECTOR_MODE (mode)
5693 || (type && TREE_CODE (type) == VECTOR_TYPE
5694 && int_size_in_bytes (type) == 16)))
0ac081f6 5695 {
2858f73a 5696 if (named || abi == ABI_V4)
a594a19c 5697 return NULL_RTX;
0ac081f6 5698 else
a594a19c
GK
5699 {
5700 /* Vector parameters to varargs functions under AIX or Darwin
5701 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
5702 int align, align_words, n_words;
5703 enum machine_mode part_mode;
a594a19c
GK
5704
5705 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
5706 2 mod 4 in terms of words in 32-bit mode, since the parameter
5707 save area starts at offset 24 from the stack. In 64-bit mode,
5708 they just have to start on an even word, since the parameter
5709 save area is 16-byte aligned. */
5710 if (TARGET_32BIT)
4ed78545 5711 align = (2 - cum->words) & 3;
2858f73a
GK
5712 else
5713 align = cum->words & 1;
a594a19c
GK
5714 align_words = cum->words + align;
5715
5716 /* Out of registers? Memory, then. */
5717 if (align_words >= GP_ARG_NUM_REG)
5718 return NULL_RTX;
ec6376ab
AM
5719
5720 if (TARGET_32BIT && TARGET_POWERPC64)
5721 return rs6000_mixed_function_arg (mode, type, align_words);
5722
2858f73a
GK
5723 /* The vector value goes in GPRs. Only the part of the
5724 value in GPRs is reported here. */
ec6376ab
AM
5725 part_mode = mode;
5726 n_words = rs6000_arg_size (mode, type);
5727 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 5728 /* Fortunately, there are only two possibilities: the value
2858f73a
GK
5729 is either wholly in GPRs or half in GPRs and half not. */
5730 part_mode = DImode;
ec6376ab
AM
5731
5732 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 5733 }
0ac081f6 5734 }
f82f556d
AH
5735 else if (TARGET_SPE_ABI && TARGET_SPE
5736 && (SPE_VECTOR_MODE (mode)
18f63bfa 5737 || (TARGET_E500_DOUBLE && (mode == DFmode
7393f7f8 5738 || mode == DDmode
17caeff2
JM
5739 || mode == DCmode
5740 || mode == TFmode
7393f7f8 5741 || mode == TDmode
17caeff2 5742 || mode == TCmode))))
a6c9bed4 5743 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 5744
f607bc57 5745 else if (abi == ABI_V4)
4697a36c 5746 {
a3170dc6 5747 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5748 && (mode == SFmode || mode == DFmode
7393f7f8
BE
5749 || (mode == TFmode && !TARGET_IEEEQUAD)
5750 || mode == DDmode || mode == TDmode))
4cc833b7 5751 {
2d83f070
JJ
5752 /* _Decimal128 must use an even/odd register pair. This assumes
5753 that the register number is odd when fregno is odd. */
5754 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5755 cum->fregno++;
5756
5757 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5758 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
5759 return gen_rtx_REG (mode, cum->fregno);
5760 else
b78d48dd 5761 return NULL_RTX;
4cc833b7
RH
5762 }
5763 else
5764 {
b2d04ecf 5765 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5766 int gregno = cum->sysv_gregno;
5767
4ed78545
AM
5768 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5769 (r7,r8) or (r9,r10). As does any other 2 word item such
5770 as complex int due to a historical mistake. */
5771 if (n_words == 2)
5772 gregno += (1 - gregno) & 1;
4cc833b7 5773
4ed78545 5774 /* Multi-reg args are not split between registers and stack. */
ec6376ab 5775 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 5776 return NULL_RTX;
ec6376ab
AM
5777
5778 if (TARGET_32BIT && TARGET_POWERPC64)
5779 return rs6000_mixed_function_arg (mode, type,
5780 gregno - GP_ARG_MIN_REG);
5781 return gen_rtx_REG (mode, gregno);
4cc833b7 5782 }
4697a36c 5783 }
4cc833b7
RH
5784 else
5785 {
294bd182 5786 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 5787
2d83f070
JJ
5788 /* _Decimal128 must be passed in an even/odd float register pair.
5789 This assumes that the register number is odd when fregno is odd. */
5790 if (mode == TDmode && (cum->fregno % 2) == 1)
5791 cum->fregno++;
5792
2858f73a 5793 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 5794 {
ec6376ab
AM
5795 rtx rvec[GP_ARG_NUM_REG + 1];
5796 rtx r;
5797 int k;
c53bdcf5
AM
5798 bool needs_psave;
5799 enum machine_mode fmode = mode;
c53bdcf5
AM
5800 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5801
5802 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5803 {
c53bdcf5
AM
5804 /* Currently, we only ever need one reg here because complex
5805 doubles are split. */
7393f7f8
BE
5806 gcc_assert (cum->fregno == FP_ARG_MAX_REG
5807 && (fmode == TFmode || fmode == TDmode));
ec6376ab 5808
7393f7f8
BE
5809 /* Long double or _Decimal128 split over regs and memory. */
5810 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 5811 }
c53bdcf5
AM
5812
5813 /* Do we also need to pass this arg in the parameter save
5814 area? */
5815 needs_psave = (type
5816 && (cum->nargs_prototype <= 0
5817 || (DEFAULT_ABI == ABI_AIX
de17c25f 5818 && TARGET_XL_COMPAT
c53bdcf5
AM
5819 && align_words >= GP_ARG_NUM_REG)));
5820
5821 if (!needs_psave && mode == fmode)
ec6376ab 5822 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 5823
ec6376ab 5824 k = 0;
c53bdcf5
AM
5825 if (needs_psave)
5826 {
ec6376ab 5827 /* Describe the part that goes in gprs or the stack.
c53bdcf5 5828 This piece must come first, before the fprs. */
c53bdcf5
AM
5829 if (align_words < GP_ARG_NUM_REG)
5830 {
5831 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
5832
5833 if (align_words + n_words > GP_ARG_NUM_REG
5834 || (TARGET_32BIT && TARGET_POWERPC64))
5835 {
5836 /* If this is partially on the stack, then we only
5837 include the portion actually in registers here. */
5838 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5839 rtx off;
79773478
AM
5840 int i = 0;
5841 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
5842 /* Not all of the arg fits in gprs. Say that it
5843 goes in memory too, using a magic NULL_RTX
5844 component. Also see comment in
5845 rs6000_mixed_function_arg for why the normal
5846 function_arg_partial_nregs scheme doesn't work
5847 in this case. */
5848 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
5849 const0_rtx);
ec6376ab
AM
5850 do
5851 {
5852 r = gen_rtx_REG (rmode,
5853 GP_ARG_MIN_REG + align_words);
2e6c9641 5854 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
5855 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5856 }
5857 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5858 }
5859 else
5860 {
5861 /* The whole arg fits in gprs. */
5862 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5863 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5864 }
c53bdcf5 5865 }
ec6376ab
AM
5866 else
5867 /* It's entirely in memory. */
5868 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
5869 }
5870
ec6376ab
AM
5871 /* Describe where this piece goes in the fprs. */
5872 r = gen_rtx_REG (fmode, cum->fregno);
5873 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5874
5875 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
5876 }
5877 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 5878 {
ec6376ab
AM
5879 if (TARGET_32BIT && TARGET_POWERPC64)
5880 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 5881
4eeca74f
AM
5882 if (mode == BLKmode)
5883 mode = Pmode;
5884
b2d04ecf
AM
5885 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5886 }
4cc833b7
RH
5887 else
5888 return NULL_RTX;
4697a36c 5889 }
4697a36c
MM
5890}
5891\f
ec6376ab 5892/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
5893 the number of bytes passed in registers. For args passed entirely in
5894 registers or entirely in memory, zero. When an arg is described by a
5895 PARALLEL, perhaps using more than one register type, this function
5896 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 5897
78a52f11
RH
5898static int
5899rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5900 tree type, bool named)
4697a36c 5901{
c53bdcf5 5902 int ret = 0;
ec6376ab 5903 int align_words;
c53bdcf5 5904
f607bc57 5905 if (DEFAULT_ABI == ABI_V4)
4697a36c 5906 return 0;
4697a36c 5907
c53bdcf5
AM
5908 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5909 && cum->nargs_prototype >= 0)
5910 return 0;
5911
0b5383eb
DJ
5912 /* In this complicated case we just disable the partial_nregs code. */
5913 if (rs6000_darwin64_abi && mode == BLKmode
5914 && TREE_CODE (type) == RECORD_TYPE
5915 && int_size_in_bytes (type) > 0)
5916 return 0;
5917
294bd182 5918 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 5919
79773478
AM
5920 if (USE_FP_FOR_ARG_P (cum, mode, type))
5921 {
fb63c729
AM
5922 /* If we are passing this arg in the fixed parameter save area
5923 (gprs or memory) as well as fprs, then this function should
79773478
AM
5924 return the number of partial bytes passed in the parameter
5925 save area rather than partial bytes passed in fprs. */
5926 if (type
5927 && (cum->nargs_prototype <= 0
5928 || (DEFAULT_ABI == ABI_AIX
5929 && TARGET_XL_COMPAT
5930 && align_words >= GP_ARG_NUM_REG)))
5931 return 0;
5932 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
5933 > FP_ARG_MAX_REG + 1)
ac7e839c 5934 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 5935 else if (cum->nargs_prototype >= 0)
4697a36c
MM
5936 return 0;
5937 }
5938
ec6376ab
AM
5939 if (align_words < GP_ARG_NUM_REG
5940 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 5941 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 5942
c53bdcf5 5943 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 5944 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 5945
c53bdcf5 5946 return ret;
4697a36c
MM
5947}
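/* A stand-alone model of the GPR branch above for the 32-bit case
   (eight GPR argument slots, 4-byte words). Names are illustrative. */
#include <stdio.h>

#define GP_ARG_REGS 8   /* r3..r10 */

static int
partial_bytes_in_gprs (int align_words, int n_words)
{
  if (align_words < GP_ARG_REGS && GP_ARG_REGS < align_words + n_words)
    return (GP_ARG_REGS - align_words) * 4;
  return 0;
}

int
main (void)
{
  printf ("%d\n", partial_bytes_in_gprs (6, 4));  /* 8: half in r9/r10, half in memory */
  printf ("%d\n", partial_bytes_in_gprs (2, 4));  /* 0: fits entirely in registers     */
  printf ("%d\n", partial_bytes_in_gprs (8, 4));  /* 0: passed entirely in memory      */
  return 0;
}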
5948\f
5949/* A C expression that indicates when an argument must be passed by
5950 reference. If nonzero for an argument, a copy of that argument is
5951 made in memory and a pointer to the argument is passed instead of
5952 the argument itself. The pointer is passed in whatever way is
5953 appropriate for passing a pointer to that type.
5954
b2d04ecf
AM
5955 Under V.4, aggregates and long double are passed by reference.
5956
5957 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5958 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
5959
5960 As an extension to all ABIs, variable sized types are passed by
5961 reference. */
4697a36c 5962
8cd5a4e0 5963static bool
f676971a 5964rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
bada2eb8
DE
5965 enum machine_mode mode, tree type,
5966 bool named ATTRIBUTE_UNUSED)
4697a36c 5967{
602ea4d3 5968 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
5969 {
5970 if (TARGET_DEBUG_ARG)
bada2eb8
DE
5971 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
5972 return 1;
5973 }
5974
5975 if (!type)
5976 return 0;
4697a36c 5977
bada2eb8
DE
5978 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
5979 {
5980 if (TARGET_DEBUG_ARG)
5981 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
5982 return 1;
5983 }
5984
5985 if (int_size_in_bytes (type) < 0)
5986 {
5987 if (TARGET_DEBUG_ARG)
5988 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
5989 return 1;
5990 }
5991
5992 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5993 modes only exist for GCC vector types if -maltivec. */
5994 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5995 {
5996 if (TARGET_DEBUG_ARG)
5997 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
5998 return 1;
5999 }
b693336b
PB
6000
6001 /* Pass synthetic vectors in memory. */
bada2eb8 6002 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6003 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6004 {
6005 static bool warned_for_pass_big_vectors = false;
6006 if (TARGET_DEBUG_ARG)
6007 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6008 if (!warned_for_pass_big_vectors)
6009 {
d4ee4d25 6010 warning (0, "GCC vector passed by reference: "
b693336b
PB
6011 "non-standard ABI extension with no compatibility guarantee");
6012 warned_for_pass_big_vectors = true;
6013 }
6014 return 1;
6015 }
6016
b2d04ecf 6017 return 0;
4697a36c 6018}
5985c7a6
FJ
6019
6020static void
2d9db8eb 6021rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6022{
6023 int i;
6024 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6025
6026 if (nregs == 0)
6027 return;
6028
c4ad648e 6029 for (i = 0; i < nregs; i++)
5985c7a6 6030 {
9390387d 6031 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6032 if (reload_completed)
c4ad648e
AM
6033 {
6034 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6035 tem = NULL_RTX;
6036 else
6037 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6038 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6039 }
5985c7a6
FJ
6040 else
6041 tem = replace_equiv_address (tem, XEXP (tem, 0));
6042
37409796 6043 gcc_assert (tem);
5985c7a6
FJ
6044
6045 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6046 }
6047}
4697a36c
MM
6048\f
6049/* Perform any actions needed for a function that is receiving a
f676971a 6050 variable number of arguments.
4697a36c
MM
6051
6052 CUM is as above.
6053
6054 MODE and TYPE are the mode and type of the current parameter.
6055
6056 PRETEND_SIZE is a variable that should be set to the amount of stack
6057 that must be pushed by the prolog to pretend that our caller pushed
6058 it.
6059
6060 Normally, this macro will push all remaining incoming registers on the
6061 stack and set PRETEND_SIZE to the length of the registers pushed. */
6062
c6e8c921 6063static void
f676971a 6064setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6065 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6066 int no_rtl)
4697a36c 6067{
4cc833b7
RH
6068 CUMULATIVE_ARGS next_cum;
6069 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6070 rtx save_area = NULL_RTX, mem;
dfafc897 6071 int first_reg_offset, set;
4697a36c 6072
f31bf321 6073 /* Skip the last named argument. */
d34c5b80 6074 next_cum = *cum;
594a51fe 6075 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6076
f607bc57 6077 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6078 {
5b667039
JJ
6079 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6080
60e2d0ca 6081 if (! no_rtl)
5b667039
JJ
6082 {
6083 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6084 HOST_WIDE_INT offset = 0;
6085
6086 /* Try to optimize the size of the varargs save area.
6087 The ABI requires that ap.reg_save_area is doubleword
6088 aligned, but we don't need to allocate space for all
6089 the bytes, only those to which we actually will save
6090 anything. */
6091 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6092 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6093 if (TARGET_HARD_FLOAT && TARGET_FPRS
6094 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6095 && cfun->va_list_fpr_size)
6096 {
6097 if (gpr_reg_num)
6098 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6099 * UNITS_PER_FP_WORD;
6100 if (cfun->va_list_fpr_size
6101 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6102 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6103 else
6104 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6105 * UNITS_PER_FP_WORD;
6106 }
6107 if (gpr_reg_num)
6108 {
6109 offset = -((first_reg_offset * reg_size) & ~7);
6110 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6111 {
6112 gpr_reg_num = cfun->va_list_gpr_size;
6113 if (reg_size == 4 && (first_reg_offset & 1))
6114 gpr_reg_num++;
6115 }
6116 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6117 }
6118 else if (fpr_size)
6119 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6120 * UNITS_PER_FP_WORD
6121 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6122
5b667039
JJ
6123 if (gpr_size + fpr_size)
6124 {
6125 rtx reg_save_area
6126 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6127 gcc_assert (GET_CODE (reg_save_area) == MEM);
6128 reg_save_area = XEXP (reg_save_area, 0);
6129 if (GET_CODE (reg_save_area) == PLUS)
6130 {
6131 gcc_assert (XEXP (reg_save_area, 0)
6132 == virtual_stack_vars_rtx);
6133 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6134 offset += INTVAL (XEXP (reg_save_area, 1));
6135 }
6136 else
6137 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6138 }
6139
6140 cfun->machine->varargs_save_offset = offset;
6141 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6142 }
4697a36c 6143 }
60e2d0ca 6144 else
4697a36c 6145 {
d34c5b80 6146 first_reg_offset = next_cum.words;
4cc833b7 6147 save_area = virtual_incoming_args_rtx;
4697a36c 6148
fe984136 6149 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6150 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6151 }
4697a36c 6152
dfafc897 6153 set = get_varargs_alias_set ();
9d30f3c1
JJ
6154 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6155 && cfun->va_list_gpr_size)
4cc833b7 6156 {
9d30f3c1
JJ
6157 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6158
6159 if (va_list_gpr_counter_field)
6160 {
6161 /* V4 va_list_gpr_size counts number of registers needed. */
6162 if (nregs > cfun->va_list_gpr_size)
6163 nregs = cfun->va_list_gpr_size;
6164 }
6165 else
6166 {
6167 /* char * va_list instead counts number of bytes needed. */
6168 if (nregs > cfun->va_list_gpr_size / reg_size)
6169 nregs = cfun->va_list_gpr_size / reg_size;
6170 }
6171
dfafc897 6172 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6173 plus_constant (save_area,
13e2e16e
DE
6174 first_reg_offset * reg_size));
6175 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6176 set_mem_alias_set (mem, set);
8ac61af7 6177 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6178
f676971a 6179 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6180 nregs);
4697a36c
MM
6181 }
6182
4697a36c 6183 /* Save FP registers if needed. */
f607bc57 6184 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6185 && TARGET_HARD_FLOAT && TARGET_FPRS
6186 && ! no_rtl
9d30f3c1
JJ
6187 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6188 && cfun->va_list_fpr_size)
4697a36c 6189 {
9d30f3c1 6190 int fregno = next_cum.fregno, nregs;
9ebbca7d 6191 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6192 rtx lab = gen_label_rtx ();
5b667039
JJ
6193 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6194 * UNITS_PER_FP_WORD);
4697a36c 6195
c4ad648e
AM
6196 emit_jump_insn
6197 (gen_rtx_SET (VOIDmode,
6198 pc_rtx,
6199 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6200 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6201 const0_rtx),
39403d82 6202 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6203 pc_rtx)));
6204
9d30f3c1
JJ
6205 for (nregs = 0;
6206 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6207 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6208 {
5496b36f 6209 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6210 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6211 set_mem_alias_set (mem, set);
94ff898d 6212 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6213 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6214 }
4cc833b7
RH
6215
6216 emit_label (lab);
4697a36c 6217 }
4697a36c 6218}
4697a36c 6219
dfafc897 6220/* Create the va_list data type. */
2c4974b7 6221
c35d187f
RH
6222static tree
6223rs6000_build_builtin_va_list (void)
dfafc897 6224{
64c2816f 6225 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6226
9ebbca7d
GK
6227 /* For AIX, prefer 'char *' because that's what the system
6228 header files like. */
f607bc57 6229 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6230 return build_pointer_type (char_type_node);
dfafc897 6231
f1e639b1 6232 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6233 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6234
f676971a 6235 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6236 unsigned_char_type_node);
f676971a 6237 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6238 unsigned_char_type_node);
64c2816f
DT
6239 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6240 every user file. */
6241 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6242 short_unsigned_type_node);
dfafc897
FS
6243 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6244 ptr_type_node);
6245 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6246 ptr_type_node);
6247
9d30f3c1
JJ
6248 va_list_gpr_counter_field = f_gpr;
6249 va_list_fpr_counter_field = f_fpr;
6250
dfafc897
FS
6251 DECL_FIELD_CONTEXT (f_gpr) = record;
6252 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6253 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6254 DECL_FIELD_CONTEXT (f_ovf) = record;
6255 DECL_FIELD_CONTEXT (f_sav) = record;
6256
bab45a51
FS
6257 TREE_CHAIN (record) = type_decl;
6258 TYPE_NAME (record) = type_decl;
dfafc897
FS
6259 TYPE_FIELDS (record) = f_gpr;
6260 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6261 TREE_CHAIN (f_fpr) = f_res;
6262 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6263 TREE_CHAIN (f_ovf) = f_sav;
6264
6265 layout_type (record);
6266
6267 /* The correct type is an array type of one element. */
6268 return build_array_type (record, build_index_type (size_zero_node));
6269}
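/* The record built above corresponds, for the V.4 case, to roughly the
   following C declaration. Field names and types mirror the build_decl
   calls; the typedef names here are illustrative only. */
typedef struct __illustrative_va_list_tag
{
  unsigned char gpr;          /* GP argument registers used so far   */
  unsigned char fpr;          /* FP argument registers used so far   */
  unsigned short reserved;    /* named padding, keeps -Wpadded quiet */
  void *overflow_arg_area;    /* next argument passed on the stack   */
  void *reg_save_area;        /* base of the register save area      */
} __illustrative_va_list_tag;

/* rs6000_build_builtin_va_list returns an array type of one element. */
typedef __illustrative_va_list_tag illustrative_va_list[1];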
6270
6271/* Implement va_start. */
6272
6273void
a2369ed3 6274rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6275{
dfafc897 6276 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6277 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6278 tree gpr, fpr, ovf, sav, t;
2c4974b7 6279
dfafc897 6280 /* Only SVR4 needs something special. */
f607bc57 6281 if (DEFAULT_ABI != ABI_V4)
dfafc897 6282 {
e5faf155 6283 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6284 return;
6285 }
6286
973a648b 6287 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6288 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6289 f_res = TREE_CHAIN (f_fpr);
6290 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6291 f_sav = TREE_CHAIN (f_ovf);
6292
872a65b5 6293 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6294 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6295 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6296 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6297 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
dfafc897
FS
6298
6299 /* Count number of gp and fp argument registers used. */
4cc833b7 6300 words = current_function_args_info.words;
987732e0
DE
6301 n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
6302 GP_ARG_NUM_REG);
6303 n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
6304 FP_ARG_NUM_REG);
dfafc897
FS
6305
6306 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6307 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6308 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6309 words, n_gpr, n_fpr);
dfafc897 6310
9d30f3c1
JJ
6311 if (cfun->va_list_gpr_size)
6312 {
07beea0d 6313 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
47a25a46 6314 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6315 TREE_SIDE_EFFECTS (t) = 1;
6316 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6317 }
58c8adc1 6318
9d30f3c1
JJ
6319 if (cfun->va_list_fpr_size)
6320 {
07beea0d 6321 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
47a25a46 6322 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6323 TREE_SIDE_EFFECTS (t) = 1;
6324 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6325 }
dfafc897
FS
6326
6327 /* Find the overflow area. */
6328 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6329 if (words != 0)
5be014d5
AP
6330 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6331 size_int (words * UNITS_PER_WORD));
07beea0d 6332 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6333 TREE_SIDE_EFFECTS (t) = 1;
6334 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6335
9d30f3c1
JJ
6336 /* If there were no va_arg invocations, don't set up the register
6337 save area. */
6338 if (!cfun->va_list_gpr_size
6339 && !cfun->va_list_fpr_size
6340 && n_gpr < GP_ARG_NUM_REG
6341 && n_fpr < FP_ARG_V4_MAX_REG)
6342 return;
6343
dfafc897
FS
6344 /* Find the register save area. */
6345 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6346 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6347 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6348 size_int (cfun->machine->varargs_save_offset));
07beea0d 6349 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
dfafc897
FS
6350 TREE_SIDE_EFFECTS (t) = 1;
6351 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6352}
6353
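/* Illustrative sketch, assuming the 32-bit SVR4 ABI (UNITS_PER_WORD == 4):
   the trees built above amount to the plain-C initialization below.  The
   struct is the sketch given after rs6000_build_builtin_va_list; the
   parameter names stand in for the values taken from
   current_function_args_info and cfun->machine.  */
static void
rs6000_va_start_sketch (struct rs6000_va_list_sketch *ap,
			char *incoming_args,	/* virtual_incoming_args_rtx */
			char *frame_base,	/* virtual_stack_vars_rtx */
			int words, int n_gpr, int n_fpr,
			long varargs_save_offset)
{
  ap->gpr = n_gpr;	/* GP argument registers used by the named args */
  ap->fpr = n_fpr;	/* FP argument registers used by the named args */
  ap->overflow_arg_area = incoming_args + words * 4;
  ap->reg_save_area = frame_base + varargs_save_offset;
}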
6354/* Implement va_arg. */
6355
23a60a04
JM
6356tree
6357rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
cd3ce9b4 6358{
cd3ce9b4
JM
6359 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6360 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6361 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6362 tree lab_false, lab_over, addr;
6363 int align;
6364 tree ptrtype = build_pointer_type (type);
7393f7f8 6365 int regalign = 0;
cd3ce9b4 6366
08b0dc1b
RH
6367 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6368 {
6369 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6370 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6371 }
6372
cd3ce9b4
JM
6373 if (DEFAULT_ABI != ABI_V4)
6374 {
08b0dc1b 6375 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6376 {
6377 tree elem_type = TREE_TYPE (type);
6378 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6379 int elem_size = GET_MODE_SIZE (elem_mode);
6380
6381 if (elem_size < UNITS_PER_WORD)
6382 {
23a60a04 6383 tree real_part, imag_part;
cd3ce9b4
JM
6384 tree post = NULL_TREE;
6385
23a60a04
JM
6386 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6387 &post);
6388 /* Copy the value into a temporary, lest the formal temporary
6389 be reused out from under us. */
6390 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
cd3ce9b4
JM
6391 append_to_statement_list (post, pre_p);
6392
23a60a04
JM
6393 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6394 post_p);
cd3ce9b4 6395
47a25a46 6396 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6397 }
6398 }
6399
23a60a04 6400 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6401 }
6402
6403 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6404 f_fpr = TREE_CHAIN (f_gpr);
6405 f_res = TREE_CHAIN (f_fpr);
6406 f_ovf = TREE_CHAIN (f_res);
6407 f_sav = TREE_CHAIN (f_ovf);
6408
872a65b5 6409 valist = build_va_arg_indirect_ref (valist);
47a25a46
RG
6410 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6411 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6412 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6413 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
cd3ce9b4
JM
6414
6415 size = int_size_in_bytes (type);
6416 rsize = (size + 3) / 4;
6417 align = 1;
6418
08b0dc1b 6419 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6420 && (TYPE_MODE (type) == SFmode
6421 || TYPE_MODE (type) == DFmode
7393f7f8
BE
6422 || TYPE_MODE (type) == TFmode
6423 || TYPE_MODE (type) == DDmode
6424 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6425 {
6426 /* FP args go in FP registers, if present. */
cd3ce9b4 6427 reg = fpr;
602ea4d3 6428 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6429 sav_ofs = 8*4;
6430 sav_scale = 8;
602ea4d3 6431 if (TYPE_MODE (type) != SFmode)
cd3ce9b4
JM
6432 align = 8;
6433 }
6434 else
6435 {
6436 /* Otherwise into GP registers. */
cd3ce9b4
JM
6437 reg = gpr;
6438 n_reg = rsize;
6439 sav_ofs = 0;
6440 sav_scale = 4;
6441 if (n_reg == 2)
6442 align = 8;
6443 }
6444
6445 /* Pull the value out of the saved registers.... */
6446
6447 lab_over = NULL;
6448 addr = create_tmp_var (ptr_type_node, "addr");
6449 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6450
6451 /* AltiVec vectors never go in registers when -mabi=altivec. */
6452 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6453 align = 16;
6454 else
6455 {
6456 lab_false = create_artificial_label ();
6457 lab_over = create_artificial_label ();
6458
6459 /* Long long and SPE vectors are aligned in the registers.
6460 As is any other 2-gpr item, such as complex int, due to a
6461 historical mistake. */
6462 u = reg;
602ea4d3 6463 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6464 {
7393f7f8 6465 regalign = 1;
cd3ce9b4 6466 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
8fb632eb 6467 build_int_cst (TREE_TYPE (reg), n_reg - 1));
cd3ce9b4
JM
6468 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
6469 }
7393f7f8
BE
6470 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6471 reg number is 0 for f1, so we want to make it odd. */
6472 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6473 {
6474 regalign = 1;
6475 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
6476 u = build2 (MODIFY_EXPR, void_type_node, reg, t);
6477 }
cd3ce9b4 6478
95674810 6479 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6480 t = build2 (GE_EXPR, boolean_type_node, u, t);
6481 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6482 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6483 gimplify_and_add (t, pre_p);
6484
6485 t = sav;
6486 if (sav_ofs)
5be014d5 6487 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6488
8fb632eb
ZD
6489 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
6490 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6491 u = fold_convert (sizetype, u);
6492 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
6493 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 6494
07beea0d 6495 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6496 gimplify_and_add (t, pre_p);
6497
6498 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6499 gimplify_and_add (t, pre_p);
6500
6501 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6502 append_to_statement_list (t, pre_p);
6503
7393f7f8 6504 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
6505 {
6506 /* Ensure that we don't find any more args in regs.
7393f7f8 6507 Alignment has taken care of the special cases. */
07beea0d 6508 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
cd3ce9b4
JM
6509 gimplify_and_add (t, pre_p);
6510 }
6511 }
6512
6513 /* ... otherwise out of the overflow area. */
6514
6515 /* Care for on-stack alignment if needed. */
6516 t = ovf;
6517 if (align != 1)
6518 {
5be014d5
AP
6519 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
6520 t = fold_convert (sizetype, t);
4a90aeeb 6521 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
6522 size_int (-align));
6523 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
6524 }
6525 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6526
07beea0d 6527 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
cd3ce9b4
JM
6528 gimplify_and_add (u, pre_p);
6529
5be014d5 6530 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
07beea0d 6531 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
cd3ce9b4
JM
6532 gimplify_and_add (t, pre_p);
6533
6534 if (lab_over)
6535 {
6536 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6537 append_to_statement_list (t, pre_p);
6538 }
6539
0cfbc62b
JM
6540 if (STRICT_ALIGNMENT
6541 && (TYPE_ALIGN (type)
6542 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
6543 {
6544 /* The value (of type complex double, for example) may not be
6545 aligned in memory in the saved registers, so copy via a
6546 temporary. (This is the same code as used for SPARC.) */
6547 tree tmp = create_tmp_var (type, "va_arg_tmp");
6548 tree dest_addr = build_fold_addr_expr (tmp);
6549
5039610b
SL
6550 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
6551 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
6552
6553 gimplify_and_add (copy, pre_p);
6554 addr = dest_addr;
6555 }
6556
08b0dc1b 6557 addr = fold_convert (ptrtype, addr);
872a65b5 6558 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
6559}
6560
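/* Illustrative sketch, assuming the 32-bit SVR4 ABI: the GIMPLE generated
   above fetches one argument of SIZE bytes that would occupy N_REG
   registers, roughly as in the plain C below.  The struct is the sketch
   given after rs6000_build_builtin_va_list; the even/odd register fix-ups
   for long long and TDmode and the STRICT_ALIGNMENT copy through a
   temporary are omitted.  */
static void *
rs6000_va_arg_sketch (struct rs6000_va_list_sketch *ap, int size, int n_reg,
		      int use_fprs, int sav_ofs, int sav_scale, int align)
{
  unsigned char *counter = use_fprs ? &ap->fpr : &ap->gpr;
  char *addr;

  if (*counter + n_reg <= 8)
    {
      /* The argument still fits in registers: read it out of the
	 register save area and advance the register counter.  */
      addr = (char *) ap->reg_save_area + sav_ofs + *counter * sav_scale;
      *counter += n_reg;
    }
  else
    {
      /* Out of registers: round the overflow pointer up to ALIGN and
	 read the argument from the stack instead.  */
      char *ovf = ap->overflow_arg_area;
      ovf = (char *) (((unsigned long) ovf + align - 1)
		      & -(unsigned long) align);
      addr = ovf;
      ap->overflow_arg_area = ovf + size;
      *counter = 8;	/* don't look for any more args in registers */
    }
  return addr;
}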
0ac081f6
AH
6561/* Builtins. */
6562
58646b77
PB
6563static void
6564def_builtin (int mask, const char *name, tree type, int code)
6565{
6566 if (mask & target_flags)
6567 {
6568 if (rs6000_builtin_decls[code])
6569 abort ();
6570
6571 rs6000_builtin_decls[code] =
c79efc4d
RÁE
6572 add_builtin_function (name, type, code, BUILT_IN_MD,
6573 NULL, NULL_TREE);
58646b77
PB
6574 }
6575}
0ac081f6 6576
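/* Illustrative sketch: the bdesc_* tables that follow are data for the
   builtin machinery; at initialization time each table is walked and its
   enabled entries are handed to def_builtin, along the lines of the loop
   below.  The function type argument is a stand-in here; the real
   registration code chooses an appropriate type per entry.  */
static void
register_bdesc_table_sketch (const struct builtin_description *d, size_t n,
			     tree fntype)
{
  size_t i;

  for (i = 0; i < n; i++)
    def_builtin (d[i].mask, d[i].name, fntype, d[i].code);
}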
24408032
AH
6577/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
6578
2212663f 6579static const struct builtin_description bdesc_3arg[] =
24408032
AH
6580{
6581 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
6582 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
6583 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
6584 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
6585 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
6586 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
6587 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
6588 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
6589 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
6590 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 6591 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
6592 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
6593 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
6594 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
6595 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
6596 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
6597 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
6598 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
6599 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
6600 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
6601 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
6602 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
6603 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
6604
6605 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
6606 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
6607 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
6608 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
6609 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
6610 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
6611 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
6612 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
6613 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
6614 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
6615 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
6616 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
6617 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
6618 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
6619 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
24408032 6620};
2212663f 6621
95385cbb
AH
6622/* DST operations: void foo (void *, const int, const char). */
6623
6624static const struct builtin_description bdesc_dst[] =
6625{
6626 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
6627 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
6628 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
6629 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
6630
6631 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
6632 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
6633 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
6634 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
6635};
6636
2212663f 6637/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 6638
a3170dc6 6639static struct builtin_description bdesc_2arg[] =
0ac081f6 6640{
f18c054f
DB
6641 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
6642 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
6643 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
6644 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
6645 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
6646 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
6647 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
6648 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
6649 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
6650 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
6651 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 6652 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 6653 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
6654 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
6655 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
6656 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
6657 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
6658 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
6659 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
6660 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
6661 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
6662 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
6663 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
6664 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
6665 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
6666 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
6667 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
6668 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
6669 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
6670 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
6671 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
6672 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
6673 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
6674 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
6675 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
6676 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
6677 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
6678 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
6679 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
6680 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
6681 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
6682 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
6683 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
6684 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
6685 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
6686 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
6687 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
6688 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
6689 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
6690 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
6691 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
6692 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
6693 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
6694 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
6695 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
6696 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
6697 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
6698 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
6699 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
6700 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
6701 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
6702 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
6703 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
6704 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 6705 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 6706 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
6707 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
6708 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
6709 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 6710 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
6711 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
6712 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
6713 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
6714 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
6715 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
6716 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
6717 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
6718 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
6719 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
6720 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
6721 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
6722 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
6723 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
6724 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
6725 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
6726 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3e0de9d1
DP
6727 { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
6728 { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
6729 { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
6730 { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
6731 { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
6732 { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
6733 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
6734 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
6735 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
6736 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
6737 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
6738 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
6739 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
6740 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
6741 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
6742 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
6743 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
6744 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
6745 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
6746 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
6747 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
6748 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
6749 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
6750 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 6751 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 6752
58646b77
PB
6753 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
6754 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
6755 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
6756 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
6757 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
6758 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
6759 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
6760 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
6761 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
6762 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
6763 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
6764 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
6765 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
6766 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
6767 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
6768 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
6769 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
6770 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
6771 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
6772 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
6773 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
6774 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
6775 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
6776 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
6777 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
6778 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
6779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
6780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
6781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
6782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
6783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
6784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
6785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
6786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
6787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
6788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
6789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
6790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
6791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
6792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
6793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
6794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
6795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
6796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
6797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
6798 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
6799 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
6800 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
6801 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
6802 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
6803 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
6804 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
6805 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
6806 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
6807 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
6808 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
6809 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
6810 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
6811 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
6812 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
6813 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
6814 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
6815 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
6816 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
6817 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
6818 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
6819 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
6820 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
6821 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
6822 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
6823 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
6824 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
6825 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
6826 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
6827 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
6828 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
6829 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
6830 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
6831 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
6832 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
6833 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
6834 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
6835 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
6836 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
6837 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
6838 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
6839 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
6840 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
6841 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
6842 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
6843 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
6844 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
6845 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
6846 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
6847 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
6848 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
6849 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
6850 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
6851 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
6852 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
6853 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
6854 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
6855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
6856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
6857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
6858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
6859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
6860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
6861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
6862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
6863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
6864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
6865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
6866 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
6867 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
6868 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
6869 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
6870 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
6871 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
6872 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
6873 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
6874 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
6875 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
6876 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
6877 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
6878 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
6879 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
6880
a3170dc6
AH
6881 /* Place-holder. Leave as first SPE builtin. */
6882 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
6883 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
6884 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
6885 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
6886 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
6887 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
6888 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
6889 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
6890 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
6891 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
6892 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
6893 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
6894 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
6895 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
6896 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
6897 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
6898 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
6899 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
6900 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
6901 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
6902 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
6903 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
6904 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
6905 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
6906 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
6907 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
6908 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
6909 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
6910 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
6911 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
6912 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
6913 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
6914 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
6915 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
6916 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
6917 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
6918 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
6919 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
6920 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
6921 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
6922 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
6923 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
6924 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
6925 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
6926 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
6927 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
6928 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
6929 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
6930 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
6931 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
6932 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
6933 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
6934 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
6935 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
6936 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
6937 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
6938 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
6939 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
6940 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
6941 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
6942 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
6943 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
6944 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
6945 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
6946 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
6947 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
6948 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
6949 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
6950 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
6951 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
6952 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
6953 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
6954 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
6955 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
6956 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
6957 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
6958 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
6959 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
6960 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
6961 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
6962 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
6963 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
6964 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
6965 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
6966 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
6967 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
6968 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
6969 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
6970 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
6971 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
6972 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
6973 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
6974 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
6975 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
6976 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
6977 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
6978 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
6979 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
6980 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
6981 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
6982 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
6983 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
6984 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
6985 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
6986 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
6987 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
6988 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
6989 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
6990 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
6991
6992 /* SPE binary operations expecting a 5-bit unsigned literal. */
6993 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
6994
6995 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
6996 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
6997 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
6998 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
6999 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7000 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7001 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7002 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7003 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7004 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7005 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7006 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7007 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7008 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7009 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7010 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7011 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7012 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7013 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7014 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7015 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7016 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7017 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7018 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7019 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7020 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7021
7022 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7023 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7024};
7025
7026/* AltiVec predicates. */
7027
7028struct builtin_description_predicates
7029{
7030 const unsigned int mask;
7031 const enum insn_code icode;
7032 const char *opcode;
7033 const char *const name;
7034 const enum rs6000_builtins code;
7035};
7036
7037static const struct builtin_description_predicates bdesc_altivec_preds[] =
7038{
7039 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7040 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7041 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7042 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7043 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7044 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7045 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7046 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7047 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7048 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7049 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7050 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7051 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7052
7053 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7054 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7055 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7056};
24408032 7057
a3170dc6
AH
7058/* SPE predicates. */
7059static struct builtin_description bdesc_spe_predicates[] =
7060{
7061 /* Place-holder. Leave as first. */
7062 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7063 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7064 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7065 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7066 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7067 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7068 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7069 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7070 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7071 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7072 /* Place-holder. Leave as last. */
7073 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7074};
7075
7076/* SPE evsel predicates. */
7077static struct builtin_description bdesc_spe_evsel[] =
7078{
7079 /* Place-holder. Leave as first. */
7080 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7081 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7082 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7083 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7084 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7085 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7086 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7087 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7088 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7089 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7090 /* Place-holder. Leave as last. */
7091 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7092};
7093
b6d08ca1 7094/* ABS* operations. */
100c4561
AH
7095
7096static const struct builtin_description bdesc_abs[] =
7097{
7098 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7099 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7100 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7101 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7102 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7104 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7105};
7106
617e0e1d
DB
7107/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7108 foo (VECa). */
24408032 7109
a3170dc6 7110static struct builtin_description bdesc_1arg[] =
2212663f 7111{
617e0e1d
DB
7112 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7113 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7114 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7115 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7116 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7117 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7118 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7119 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7120 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7121 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7122 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7123 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7124 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7125 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7126 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7127 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7128 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7129
58646b77
PB
7130 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7131 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7132 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7133 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7134 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7135 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7136 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7137 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7138 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7139 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7140 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7141 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7142 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7143 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7144 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7145 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7146 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7147 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7148 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7149
a3170dc6
AH
7150 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7151 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7152 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7153 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7154 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7155 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7156 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7157 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7158 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7159 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7160 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7161 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7162 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7163 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7164 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7165 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7166 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7167 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7168 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7169 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7170 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7171 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7172 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7173 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7174 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7175 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7176 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7177 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7178 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7179 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7180
7181 /* Place-holder. Leave as last unary SPE builtin. */
58646b77 7182 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
2212663f
DB
7183};
7184
7185static rtx
5039610b 7186rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7187{
7188 rtx pat;
5039610b 7189 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7190 rtx op0 = expand_normal (arg0);
2212663f
DB
7191 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7192 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7193
0559cc77
DE
7194 if (icode == CODE_FOR_nothing)
7195 /* Builtin not supported on this processor. */
7196 return 0;
7197
20e26713
AH
7198 /* If we got invalid arguments bail out before generating bad rtl. */
7199 if (arg0 == error_mark_node)
9a171fcd 7200 return const0_rtx;
20e26713 7201
0559cc77
DE
7202 if (icode == CODE_FOR_altivec_vspltisb
7203 || icode == CODE_FOR_altivec_vspltish
7204 || icode == CODE_FOR_altivec_vspltisw
7205 || icode == CODE_FOR_spe_evsplatfi
7206 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7207 {
7208 /* Only allow 5-bit *signed* literals. */
b44140e7 7209 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7210 || INTVAL (op0) > 15
7211 || INTVAL (op0) < -16)
b44140e7
AH
7212 {
7213 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7214 return const0_rtx;
b44140e7 7215 }
b44140e7
AH
7216 }
7217
c62f2db5 7218 if (target == 0
2212663f
DB
7219 || GET_MODE (target) != tmode
7220 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7221 target = gen_reg_rtx (tmode);
7222
7223 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7224 op0 = copy_to_mode_reg (mode0, op0);
7225
7226 pat = GEN_FCN (icode) (target, op0);
7227 if (! pat)
7228 return 0;
7229 emit_insn (pat);
0ac081f6 7230
2212663f
DB
7231 return target;
7232}
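/* A small usage sketch of the 5-bit signed-literal check above (user
   code on an AltiVec target compiled with -maltivec; the names are
   illustrative only):

     vector signed char ok  = __builtin_altivec_vspltisb (-7);   accepted: -16..15
     vector signed char bad = __builtin_altivec_vspltisb (99);   rejected with
                              "argument 1 must be a 5-bit signed literal"  */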
ae4b4a02 7233
100c4561 7234static rtx
5039610b 7235altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7236{
7237 rtx pat, scratch1, scratch2;
5039610b 7238 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7239 rtx op0 = expand_normal (arg0);
100c4561
AH
7240 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7241 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7242
7243 /* If we have invalid arguments, bail out before generating bad rtl. */
7244 if (arg0 == error_mark_node)
9a171fcd 7245 return const0_rtx;
100c4561
AH
7246
7247 if (target == 0
7248 || GET_MODE (target) != tmode
7249 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7250 target = gen_reg_rtx (tmode);
7251
7252 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7253 op0 = copy_to_mode_reg (mode0, op0);
7254
7255 scratch1 = gen_reg_rtx (mode0);
7256 scratch2 = gen_reg_rtx (mode0);
7257
7258 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7259 if (! pat)
7260 return 0;
7261 emit_insn (pat);
7262
7263 return target;
7264}
7265
0ac081f6 7266static rtx
5039610b 7267rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7268{
7269 rtx pat;
5039610b
SL
7270 tree arg0 = CALL_EXPR_ARG (exp, 0);
7271 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7272 rtx op0 = expand_normal (arg0);
7273 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7274 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7275 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7276 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7277
0559cc77
DE
7278 if (icode == CODE_FOR_nothing)
7279 /* Builtin not supported on this processor. */
7280 return 0;
7281
20e26713
AH
7282 /* If we got invalid arguments bail out before generating bad rtl. */
7283 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7284 return const0_rtx;
20e26713 7285
0559cc77
DE
7286 if (icode == CODE_FOR_altivec_vcfux
7287 || icode == CODE_FOR_altivec_vcfsx
7288 || icode == CODE_FOR_altivec_vctsxs
7289 || icode == CODE_FOR_altivec_vctuxs
7290 || icode == CODE_FOR_altivec_vspltb
7291 || icode == CODE_FOR_altivec_vsplth
7292 || icode == CODE_FOR_altivec_vspltw
7293 || icode == CODE_FOR_spe_evaddiw
7294 || icode == CODE_FOR_spe_evldd
7295 || icode == CODE_FOR_spe_evldh
7296 || icode == CODE_FOR_spe_evldw
7297 || icode == CODE_FOR_spe_evlhhesplat
7298 || icode == CODE_FOR_spe_evlhhossplat
7299 || icode == CODE_FOR_spe_evlhhousplat
7300 || icode == CODE_FOR_spe_evlwhe
7301 || icode == CODE_FOR_spe_evlwhos
7302 || icode == CODE_FOR_spe_evlwhou
7303 || icode == CODE_FOR_spe_evlwhsplat
7304 || icode == CODE_FOR_spe_evlwwsplat
7305 || icode == CODE_FOR_spe_evrlwi
7306 || icode == CODE_FOR_spe_evslwi
7307 || icode == CODE_FOR_spe_evsrwis
f5119d10 7308 || icode == CODE_FOR_spe_evsubifw
0559cc77 7309 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7310 {
7311 /* Only allow 5-bit unsigned literals. */
8bb418a3 7312 STRIP_NOPS (arg1);
b44140e7
AH
7313 if (TREE_CODE (arg1) != INTEGER_CST
7314 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7315 {
7316 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7317 return const0_rtx;
b44140e7 7318 }
b44140e7
AH
7319 }
7320
c62f2db5 7321 if (target == 0
0ac081f6
AH
7322 || GET_MODE (target) != tmode
7323 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7324 target = gen_reg_rtx (tmode);
7325
7326 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7327 op0 = copy_to_mode_reg (mode0, op0);
7328 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7329 op1 = copy_to_mode_reg (mode1, op1);
7330
7331 pat = GEN_FCN (icode) (target, op0, op1);
7332 if (! pat)
7333 return 0;
7334 emit_insn (pat);
7335
7336 return target;
7337}
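/* Usage sketch of the 5-bit unsigned-literal check above (user code
   compiled with -maltivec; v is a vector signed char, names are
   illustrative only):

     vector signed char r = __builtin_altivec_vspltb (v, 3);    accepted: 0..31
     vector signed char e = __builtin_altivec_vspltb (v, 40);   rejected with
                            "argument 2 must be a 5-bit unsigned literal"  */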
6525c0e7 7338
ae4b4a02 7339static rtx
f676971a 7340altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7341 tree exp, rtx target)
ae4b4a02
AH
7342{
7343 rtx pat, scratch;
5039610b
SL
7344 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7345 tree arg0 = CALL_EXPR_ARG (exp, 1);
7346 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7347 rtx op0 = expand_normal (arg0);
7348 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7349 enum machine_mode tmode = SImode;
7350 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7351 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7352 int cr6_form_int;
7353
7354 if (TREE_CODE (cr6_form) != INTEGER_CST)
7355 {
7356 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7357 return const0_rtx;
ae4b4a02
AH
7358 }
7359 else
7360 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7361
37409796 7362 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7363
7364 /* If we have invalid arguments, bail out before generating bad rtl. */
7365 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7366 return const0_rtx;
ae4b4a02
AH
7367
7368 if (target == 0
7369 || GET_MODE (target) != tmode
7370 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7371 target = gen_reg_rtx (tmode);
7372
7373 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7374 op0 = copy_to_mode_reg (mode0, op0);
7375 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7376 op1 = copy_to_mode_reg (mode1, op1);
7377
7378 scratch = gen_reg_rtx (mode0);
7379
7380 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7381 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7382 if (! pat)
7383 return 0;
7384 emit_insn (pat);
7385
7386 /* The vec_any* and vec_all* predicates use the same opcodes for two
7387 different operations, but the bits in CR6 will be different
7388 depending on what information we want. So we have to play tricks
7389 with CR6 to get the right bits out.
7390
7391 If you think this is disgusting, look at the specs for the
7392 AltiVec predicates. */
7393
c4ad648e
AM
7394 switch (cr6_form_int)
7395 {
7396 case 0:
7397 emit_insn (gen_cr6_test_for_zero (target));
7398 break;
7399 case 1:
7400 emit_insn (gen_cr6_test_for_zero_reverse (target));
7401 break;
7402 case 2:
7403 emit_insn (gen_cr6_test_for_lt (target));
7404 break;
7405 case 3:
7406 emit_insn (gen_cr6_test_for_lt_reverse (target));
7407 break;
7408 default:
7409 error ("argument 1 of __builtin_altivec_predicate is out of range");
7410 break;
7411 }
ae4b4a02
AH
7412
7413 return target;
7414}
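/* A concrete sketch of the cr6_form selector handled above.  The
   record form of vcmpequw sets CR6[LT] when every element compares
   equal and CR6[EQ] when none does, so user code (or altivec.h, whose
   __CR6_LT and __CR6_EQ_REV macros are assumed here to be 2 and 1)
   ends up with:

     all_eq = __builtin_altivec_vcmpequw_p (2, a, b);   case 2: cr6_test_for_lt
     any_eq = __builtin_altivec_vcmpequw_p (1, a, b);   case 1: cr6_test_for_zero_reverse  */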
7415
b4a62fa0 7416static rtx
5039610b 7417altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
7418{
7419 rtx pat, addr;
5039610b
SL
7420 tree arg0 = CALL_EXPR_ARG (exp, 0);
7421 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
7422 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7423 enum machine_mode mode0 = Pmode;
7424 enum machine_mode mode1 = Pmode;
84217346
MD
7425 rtx op0 = expand_normal (arg0);
7426 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
7427
7428 if (icode == CODE_FOR_nothing)
7429 /* Builtin not supported on this processor. */
7430 return 0;
7431
7432 /* If we got invalid arguments bail out before generating bad rtl. */
7433 if (arg0 == error_mark_node || arg1 == error_mark_node)
7434 return const0_rtx;
7435
7436 if (target == 0
7437 || GET_MODE (target) != tmode
7438 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7439 target = gen_reg_rtx (tmode);
7440
f676971a 7441 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
7442
7443 if (op0 == const0_rtx)
7444 {
7445 addr = gen_rtx_MEM (tmode, op1);
7446 }
7447 else
7448 {
7449 op0 = copy_to_mode_reg (mode0, op0);
7450 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7451 }
7452
7453 pat = GEN_FCN (icode) (target, addr);
7454
7455 if (! pat)
7456 return 0;
7457 emit_insn (pat);
7458
7459 return target;
7460}
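/* The two address shapes built above, sketched in terms of a user
   call (-maltivec; arg0 is the byte offset, arg1 the pointer):

     v = __builtin_altivec_lvx (0,   p);    ->  (mem (reg p))
     v = __builtin_altivec_lvx (off, p);    ->  (mem (plus (reg off) (reg p)))  */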
7461
61bea3b0 7462static rtx
5039610b 7463spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 7464{
5039610b
SL
7465 tree arg0 = CALL_EXPR_ARG (exp, 0);
7466 tree arg1 = CALL_EXPR_ARG (exp, 1);
7467 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7468 rtx op0 = expand_normal (arg0);
7469 rtx op1 = expand_normal (arg1);
7470 rtx op2 = expand_normal (arg2);
61bea3b0
AH
7471 rtx pat;
7472 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
7473 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
7474 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
7475
7476 /* Invalid arguments. Bail before doing anything stoopid! */
7477 if (arg0 == error_mark_node
7478 || arg1 == error_mark_node
7479 || arg2 == error_mark_node)
7480 return const0_rtx;
7481
7482 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
7483 op0 = copy_to_mode_reg (mode2, op0);
7484 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
7485 op1 = copy_to_mode_reg (mode0, op1);
7486 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7487 op2 = copy_to_mode_reg (mode1, op2);
7488
7489 pat = GEN_FCN (icode) (op1, op2, op0);
7490 if (pat)
7491 emit_insn (pat);
7492 return NULL_RTX;
7493}
7494
6525c0e7 7495static rtx
5039610b 7496altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 7497{
5039610b
SL
7498 tree arg0 = CALL_EXPR_ARG (exp, 0);
7499 tree arg1 = CALL_EXPR_ARG (exp, 1);
7500 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7501 rtx op0 = expand_normal (arg0);
7502 rtx op1 = expand_normal (arg1);
7503 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
7504 rtx pat, addr;
7505 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7506 enum machine_mode mode1 = Pmode;
7507 enum machine_mode mode2 = Pmode;
6525c0e7
AH
7508
7509 /* Invalid arguments. Bail before doing anything stoopid! */
7510 if (arg0 == error_mark_node
7511 || arg1 == error_mark_node
7512 || arg2 == error_mark_node)
9a171fcd 7513 return const0_rtx;
6525c0e7 7514
b4a62fa0
SB
7515 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
7516 op0 = copy_to_mode_reg (tmode, op0);
7517
f676971a 7518 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
7519
7520 if (op1 == const0_rtx)
7521 {
7522 addr = gen_rtx_MEM (tmode, op2);
7523 }
7524 else
7525 {
7526 op1 = copy_to_mode_reg (mode1, op1);
7527 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
7528 }
6525c0e7 7529
b4a62fa0 7530 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
7531 if (pat)
7532 emit_insn (pat);
7533 return NULL_RTX;
7534}
7535
2212663f 7536static rtx
5039610b 7537rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7538{
7539 rtx pat;
5039610b
SL
7540 tree arg0 = CALL_EXPR_ARG (exp, 0);
7541 tree arg1 = CALL_EXPR_ARG (exp, 1);
7542 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7543 rtx op0 = expand_normal (arg0);
7544 rtx op1 = expand_normal (arg1);
7545 rtx op2 = expand_normal (arg2);
2212663f
DB
7546 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7547 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7548 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7549 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 7550
774b5662
DE
7551 if (icode == CODE_FOR_nothing)
7552 /* Builtin not supported on this processor. */
7553 return 0;
7554
20e26713
AH
7555 /* If we got invalid arguments bail out before generating bad rtl. */
7556 if (arg0 == error_mark_node
7557 || arg1 == error_mark_node
7558 || arg2 == error_mark_node)
9a171fcd 7559 return const0_rtx;
20e26713 7560
aba5fb01
NS
7561 if (icode == CODE_FOR_altivec_vsldoi_v4sf
7562 || icode == CODE_FOR_altivec_vsldoi_v4si
7563 || icode == CODE_FOR_altivec_vsldoi_v8hi
7564 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
7565 {
7566 /* Only allow 4-bit unsigned literals. */
8bb418a3 7567 STRIP_NOPS (arg2);
b44140e7
AH
7568 if (TREE_CODE (arg2) != INTEGER_CST
7569 || TREE_INT_CST_LOW (arg2) & ~0xf)
7570 {
7571 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 7572 return const0_rtx;
b44140e7 7573 }
b44140e7
AH
7574 }
7575
c62f2db5 7576 if (target == 0
2212663f
DB
7577 || GET_MODE (target) != tmode
7578 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7579 target = gen_reg_rtx (tmode);
7580
7581 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7582 op0 = copy_to_mode_reg (mode0, op0);
7583 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7584 op1 = copy_to_mode_reg (mode1, op1);
7585 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
7586 op2 = copy_to_mode_reg (mode2, op2);
7587
7588 pat = GEN_FCN (icode) (target, op0, op1, op2);
7589 if (! pat)
7590 return 0;
7591 emit_insn (pat);
7592
7593 return target;
7594}
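/* Usage sketch of the 4-bit literal check above, via the vec_sld
   interface from altivec.h (-maltivec; a and b are vector int):

     r = vec_sld (a, b, 3);    accepted: shift amount 0..15
     r = vec_sld (a, b, 17);   rejected with
                               "argument 3 must be a 4-bit unsigned literal"  */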
92898235 7595
3a9b8c7e 7596/* Expand the lvx builtins. */
0ac081f6 7597static rtx
a2369ed3 7598altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 7599{
5039610b 7600 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 7601 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
7602 tree arg0;
7603 enum machine_mode tmode, mode0;
7c3abc73 7604 rtx pat, op0;
3a9b8c7e 7605 enum insn_code icode;
92898235 7606
0ac081f6
AH
7607 switch (fcode)
7608 {
f18c054f 7609 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 7610 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 7611 break;
f18c054f 7612 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 7613 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
7614 break;
7615 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 7616 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
7617 break;
7618 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 7619 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
7620 break;
7621 default:
7622 *expandedp = false;
7623 return NULL_RTX;
7624 }
0ac081f6 7625
3a9b8c7e 7626 *expandedp = true;
f18c054f 7627
5039610b 7628 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7629 op0 = expand_normal (arg0);
3a9b8c7e
AH
7630 tmode = insn_data[icode].operand[0].mode;
7631 mode0 = insn_data[icode].operand[1].mode;
f18c054f 7632
3a9b8c7e
AH
7633 if (target == 0
7634 || GET_MODE (target) != tmode
7635 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7636 target = gen_reg_rtx (tmode);
24408032 7637
3a9b8c7e
AH
7638 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7639 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 7640
3a9b8c7e
AH
7641 pat = GEN_FCN (icode) (target, op0);
7642 if (! pat)
7643 return 0;
7644 emit_insn (pat);
7645 return target;
7646}
f18c054f 7647
3a9b8c7e
AH
7648/* Expand the stvx builtins. */
7649static rtx
f676971a 7650altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7651 bool *expandedp)
3a9b8c7e 7652{
5039610b 7653 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7654 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7655 tree arg0, arg1;
7656 enum machine_mode mode0, mode1;
7c3abc73 7657 rtx pat, op0, op1;
3a9b8c7e 7658 enum insn_code icode;
f18c054f 7659
3a9b8c7e
AH
7660 switch (fcode)
7661 {
7662 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 7663 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
7664 break;
7665 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 7666 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
7667 break;
7668 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 7669 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
7670 break;
7671 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 7672 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
7673 break;
7674 default:
7675 *expandedp = false;
7676 return NULL_RTX;
7677 }
24408032 7678
5039610b
SL
7679 arg0 = CALL_EXPR_ARG (exp, 0);
7680 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7681 op0 = expand_normal (arg0);
7682 op1 = expand_normal (arg1);
3a9b8c7e
AH
7683 mode0 = insn_data[icode].operand[0].mode;
7684 mode1 = insn_data[icode].operand[1].mode;
f18c054f 7685
3a9b8c7e
AH
7686 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7687 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
7688 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7689 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 7690
3a9b8c7e
AH
7691 pat = GEN_FCN (icode) (op0, op1);
7692 if (pat)
7693 emit_insn (pat);
f18c054f 7694
3a9b8c7e
AH
7695 *expandedp = true;
7696 return NULL_RTX;
7697}
f18c054f 7698
3a9b8c7e
AH
7699/* Expand the dst builtins. */
7700static rtx
f676971a 7701altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 7702 bool *expandedp)
3a9b8c7e 7703{
5039610b 7704 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
7705 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7706 tree arg0, arg1, arg2;
7707 enum machine_mode mode0, mode1, mode2;
7c3abc73 7708 rtx pat, op0, op1, op2;
3a9b8c7e 7709 struct builtin_description *d;
a3170dc6 7710 size_t i;
f18c054f 7711
3a9b8c7e 7712 *expandedp = false;
f18c054f 7713
3a9b8c7e
AH
7714 /* Handle DST variants. */
7715 d = (struct builtin_description *) bdesc_dst;
7716 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7717 if (d->code == fcode)
7718 {
5039610b
SL
7719 arg0 = CALL_EXPR_ARG (exp, 0);
7720 arg1 = CALL_EXPR_ARG (exp, 1);
7721 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7722 op0 = expand_normal (arg0);
7723 op1 = expand_normal (arg1);
7724 op2 = expand_normal (arg2);
3a9b8c7e
AH
7725 mode0 = insn_data[d->icode].operand[0].mode;
7726 mode1 = insn_data[d->icode].operand[1].mode;
7727 mode2 = insn_data[d->icode].operand[2].mode;
24408032 7728
3a9b8c7e
AH
7729 /* Invalid arguments, bail out before generating bad rtl. */
7730 if (arg0 == error_mark_node
7731 || arg1 == error_mark_node
7732 || arg2 == error_mark_node)
7733 return const0_rtx;
f18c054f 7734
86e7df90 7735 *expandedp = true;
8bb418a3 7736 STRIP_NOPS (arg2);
3a9b8c7e
AH
7737 if (TREE_CODE (arg2) != INTEGER_CST
7738 || TREE_INT_CST_LOW (arg2) & ~0x3)
7739 {
9e637a26 7740 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
7741 return const0_rtx;
7742 }
f18c054f 7743
3a9b8c7e 7744 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 7745 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
7746 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
7747 op1 = copy_to_mode_reg (mode1, op1);
24408032 7748
3a9b8c7e
AH
7749 pat = GEN_FCN (d->icode) (op0, op1, op2);
7750 if (pat != 0)
7751 emit_insn (pat);
f18c054f 7752
3a9b8c7e
AH
7753 return NULL_RTX;
7754 }
f18c054f 7755
3a9b8c7e
AH
7756 return NULL_RTX;
7757}
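/* The dst family expanded above takes a 2-bit stream tag as its last
   operand; at the altivec.h level (a sketch, -maltivec):

     vec_dst (ptr, ctl, 0);    accepted: tag 0..3
     vec_dst (ptr, ctl, 5);    rejected: the tag must be a 2-bit
                               unsigned literal  */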
24408032 7758
7a4eca66
DE
7759/* Expand vec_init builtin. */
7760static rtx
5039610b 7761altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
7762{
7763 enum machine_mode tmode = TYPE_MODE (type);
7764 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
7765 int i, n_elt = GET_MODE_NUNITS (tmode);
7766 rtvec v = rtvec_alloc (n_elt);
7767
7768 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 7769 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 7770
5039610b 7771 for (i = 0; i < n_elt; ++i)
7a4eca66 7772 {
5039610b 7773 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
7774 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
7775 }
7776
7a4eca66
DE
7777 if (!target || !register_operand (target, tmode))
7778 target = gen_reg_rtx (tmode);
7779
7780 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
7781 return target;
7782}
7783
7784/* Return the integer constant in ARG. Constrain it to be in the range
7785 of the subparts of VEC_TYPE; issue an error if not. */
7786
7787static int
7788get_element_number (tree vec_type, tree arg)
7789{
7790 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
7791
7792 if (!host_integerp (arg, 1)
7793 || (elt = tree_low_cst (arg, 1), elt > max))
7794 {
7795 error ("selector must be an integer constant in the range 0..%wi", max);
7796 return 0;
7797 }
7798
7799 return elt;
7800}
7801
7802/* Expand vec_set builtin. */
7803static rtx
5039610b 7804altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
7805{
7806 enum machine_mode tmode, mode1;
7807 tree arg0, arg1, arg2;
7808 int elt;
7809 rtx op0, op1;
7810
5039610b
SL
7811 arg0 = CALL_EXPR_ARG (exp, 0);
7812 arg1 = CALL_EXPR_ARG (exp, 1);
7813 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
7814
7815 tmode = TYPE_MODE (TREE_TYPE (arg0));
7816 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7817 gcc_assert (VECTOR_MODE_P (tmode));
7818
7819 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
7820 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
7821 elt = get_element_number (TREE_TYPE (arg0), arg2);
7822
7823 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
7824 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
7825
7826 op0 = force_reg (tmode, op0);
7827 op1 = force_reg (mode1, op1);
7828
7829 rs6000_expand_vector_set (op0, op1, elt);
7830
7831 return op0;
7832}
7833
7834/* Expand vec_ext builtin. */
7835static rtx
5039610b 7836altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
7837{
7838 enum machine_mode tmode, mode0;
7839 tree arg0, arg1;
7840 int elt;
7841 rtx op0;
7842
5039610b
SL
7843 arg0 = CALL_EXPR_ARG (exp, 0);
7844 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 7845
84217346 7846 op0 = expand_normal (arg0);
7a4eca66
DE
7847 elt = get_element_number (TREE_TYPE (arg0), arg1);
7848
7849 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
7850 mode0 = TYPE_MODE (TREE_TYPE (arg0));
7851 gcc_assert (VECTOR_MODE_P (mode0));
7852
7853 op0 = force_reg (mode0, op0);
7854
7855 if (optimize || !target || !register_operand (target, tmode))
7856 target = gen_reg_rtx (tmode);
7857
7858 rs6000_expand_vector_extract (target, op0, elt);
7859
7860 return target;
7861}
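/* Taken together, the three expanders above reduce to the obvious
   element operations (a sketch; v is a 4-element vector, i a constant
   selector in 0..3 per get_element_number):

     vec_init:  v = { a, b, c, d };     built via rs6000_expand_vector_init
     vec_set:   v[i] = x;               via rs6000_expand_vector_set
     vec_ext:   x = v[i];               via rs6000_expand_vector_extract  */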
7862
3a9b8c7e
AH
7863/* Expand the builtin in EXP and store the result in TARGET. Store
7864 true in *EXPANDEDP if we found a builtin to expand. */
7865static rtx
a2369ed3 7866altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e
AH
7867{
7868 struct builtin_description *d;
7869 struct builtin_description_predicates *dp;
7870 size_t i;
7871 enum insn_code icode;
5039610b 7872 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
7873 tree arg0;
7874 rtx op0, pat;
7875 enum machine_mode tmode, mode0;
3a9b8c7e 7876 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 7877
58646b77
PB
7878 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
7879 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
7880 {
7881 *expandedp = true;
ea40ba9c 7882 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
7883 return const0_rtx;
7884 }
7885
3a9b8c7e
AH
7886 target = altivec_expand_ld_builtin (exp, target, expandedp);
7887 if (*expandedp)
7888 return target;
0ac081f6 7889
3a9b8c7e
AH
7890 target = altivec_expand_st_builtin (exp, target, expandedp);
7891 if (*expandedp)
7892 return target;
7893
7894 target = altivec_expand_dst_builtin (exp, target, expandedp);
7895 if (*expandedp)
7896 return target;
7897
7898 *expandedp = true;
95385cbb 7899
3a9b8c7e
AH
7900 switch (fcode)
7901 {
6525c0e7 7902 case ALTIVEC_BUILTIN_STVX:
5039610b 7903 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 7904 case ALTIVEC_BUILTIN_STVEBX:
5039610b 7905 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 7906 case ALTIVEC_BUILTIN_STVEHX:
5039610b 7907 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 7908 case ALTIVEC_BUILTIN_STVEWX:
5039610b 7909 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 7910 case ALTIVEC_BUILTIN_STVXL:
5039610b 7911 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 7912
95385cbb
AH
7913 case ALTIVEC_BUILTIN_MFVSCR:
7914 icode = CODE_FOR_altivec_mfvscr;
7915 tmode = insn_data[icode].operand[0].mode;
7916
7917 if (target == 0
7918 || GET_MODE (target) != tmode
7919 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7920 target = gen_reg_rtx (tmode);
f676971a 7921
95385cbb 7922 pat = GEN_FCN (icode) (target);
0ac081f6
AH
7923 if (! pat)
7924 return 0;
7925 emit_insn (pat);
95385cbb
AH
7926 return target;
7927
7928 case ALTIVEC_BUILTIN_MTVSCR:
7929 icode = CODE_FOR_altivec_mtvscr;
5039610b 7930 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7931 op0 = expand_normal (arg0);
95385cbb
AH
7932 mode0 = insn_data[icode].operand[0].mode;
7933
7934 /* If we got invalid arguments bail out before generating bad rtl. */
7935 if (arg0 == error_mark_node)
9a171fcd 7936 return const0_rtx;
95385cbb
AH
7937
7938 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7939 op0 = copy_to_mode_reg (mode0, op0);
7940
7941 pat = GEN_FCN (icode) (op0);
7942 if (pat)
7943 emit_insn (pat);
7944 return NULL_RTX;
3a9b8c7e 7945
95385cbb
AH
7946 case ALTIVEC_BUILTIN_DSSALL:
7947 emit_insn (gen_altivec_dssall ());
7948 return NULL_RTX;
7949
7950 case ALTIVEC_BUILTIN_DSS:
7951 icode = CODE_FOR_altivec_dss;
5039610b 7952 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 7953 STRIP_NOPS (arg0);
84217346 7954 op0 = expand_normal (arg0);
95385cbb
AH
7955 mode0 = insn_data[icode].operand[0].mode;
7956
7957 /* If we got invalid arguments bail out before generating bad rtl. */
7958 if (arg0 == error_mark_node)
9a171fcd 7959 return const0_rtx;
95385cbb 7960
b44140e7
AH
7961 if (TREE_CODE (arg0) != INTEGER_CST
7962 || TREE_INT_CST_LOW (arg0) & ~0x3)
7963 {
7964 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 7965 return const0_rtx;
b44140e7
AH
7966 }
7967
95385cbb
AH
7968 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
7969 op0 = copy_to_mode_reg (mode0, op0);
7970
7971 emit_insn (gen_altivec_dss (op0));
0ac081f6 7972 return NULL_RTX;
7a4eca66
DE
7973
7974 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
7975 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
7976 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
7977 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 7978 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
7979
7980 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
7981 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
7982 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
7983 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 7984 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
7985
7986 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
7987 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
7988 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
7989 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 7990 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
7991
7992 default:
7993 break;
7994 /* Fall through. */
0ac081f6 7995 }
24408032 7996
100c4561
AH
7997 /* Expand abs* operations. */
7998 d = (struct builtin_description *) bdesc_abs;
ca7558fc 7999 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8000 if (d->code == fcode)
5039610b 8001 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8002
ae4b4a02
AH
8003 /* Expand the AltiVec predicates. */
8004 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
ca7558fc 8005 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8006 if (dp->code == fcode)
c4ad648e 8007 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8008 exp, target);
ae4b4a02 8009
6525c0e7
AH
8010 /* LV* are funky. We initialized them differently. */
8011 switch (fcode)
8012 {
8013 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8014 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8015 exp, target);
6525c0e7 8016 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8017 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8018 exp, target);
6525c0e7 8019 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8020 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8021 exp, target);
6525c0e7 8022 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8023 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8024 exp, target);
6525c0e7 8025 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8026 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8027 exp, target);
6525c0e7 8028 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8029 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8030 exp, target);
6525c0e7 8031 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8032 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8033 exp, target);
6525c0e7
AH
8034 default:
8035 break;
8036 /* Fall through. */
8037 }
95385cbb 8038
92898235 8039 *expandedp = false;
0ac081f6
AH
8040 return NULL_RTX;
8041}
8042
a3170dc6
AH
8043/* Binops that need to be initialized manually, but can be expanded
8044 automagically by rs6000_expand_binop_builtin. */
8045static struct builtin_description bdesc_2arg_spe[] =
8046{
8047 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8048 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8049 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8050 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8051 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8052 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8053 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8054 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8055 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8056 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8057 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8058 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8059 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8060 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8061 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8062 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8063 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8064 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8065 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8066 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8067 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8068 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8069};
8070
8071/* Expand the builtin in EXP and store the result in TARGET. Store
8072 true in *EXPANDEDP if we found a builtin to expand.
8073
8074 This expands the SPE builtins that are not simple unary and binary
8075 operations. */
8076static rtx
a2369ed3 8077spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8078{
5039610b 8079 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8080 tree arg1, arg0;
8081 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8082 enum insn_code icode;
8083 enum machine_mode tmode, mode0;
8084 rtx pat, op0;
8085 struct builtin_description *d;
8086 size_t i;
8087
8088 *expandedp = true;
8089
8090 /* Syntax check for a 5-bit unsigned immediate. */
8091 switch (fcode)
8092 {
8093 case SPE_BUILTIN_EVSTDD:
8094 case SPE_BUILTIN_EVSTDH:
8095 case SPE_BUILTIN_EVSTDW:
8096 case SPE_BUILTIN_EVSTWHE:
8097 case SPE_BUILTIN_EVSTWHO:
8098 case SPE_BUILTIN_EVSTWWE:
8099 case SPE_BUILTIN_EVSTWWO:
5039610b 8100 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8101 if (TREE_CODE (arg1) != INTEGER_CST
8102 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8103 {
8104 error ("argument 2 must be a 5-bit unsigned literal");
8105 return const0_rtx;
8106 }
8107 break;
8108 default:
8109 break;
8110 }
8111
00332c9f
AH
8112 /* The evsplat*i instructions are not quite generic. */
8113 switch (fcode)
8114 {
8115 case SPE_BUILTIN_EVSPLATFI:
8116 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8117 exp, target);
00332c9f
AH
8118 case SPE_BUILTIN_EVSPLATI:
8119 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8120 exp, target);
00332c9f
AH
8121 default:
8122 break;
8123 }
8124
a3170dc6
AH
8125 d = (struct builtin_description *) bdesc_2arg_spe;
8126 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8127 if (d->code == fcode)
5039610b 8128 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8129
8130 d = (struct builtin_description *) bdesc_spe_predicates;
8131 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8132 if (d->code == fcode)
5039610b 8133 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8134
8135 d = (struct builtin_description *) bdesc_spe_evsel;
8136 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8137 if (d->code == fcode)
5039610b 8138 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8139
8140 switch (fcode)
8141 {
8142 case SPE_BUILTIN_EVSTDDX:
5039610b 8143 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8144 case SPE_BUILTIN_EVSTDHX:
5039610b 8145 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8146 case SPE_BUILTIN_EVSTDWX:
5039610b 8147 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8148 case SPE_BUILTIN_EVSTWHEX:
5039610b 8149 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8150 case SPE_BUILTIN_EVSTWHOX:
5039610b 8151 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8152 case SPE_BUILTIN_EVSTWWEX:
5039610b 8153 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8154 case SPE_BUILTIN_EVSTWWOX:
5039610b 8155 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8156 case SPE_BUILTIN_EVSTDD:
5039610b 8157 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8158 case SPE_BUILTIN_EVSTDH:
5039610b 8159 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8160 case SPE_BUILTIN_EVSTDW:
5039610b 8161 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8162 case SPE_BUILTIN_EVSTWHE:
5039610b 8163 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8164 case SPE_BUILTIN_EVSTWHO:
5039610b 8165 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8166 case SPE_BUILTIN_EVSTWWE:
5039610b 8167 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8168 case SPE_BUILTIN_EVSTWWO:
5039610b 8169 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8170 case SPE_BUILTIN_MFSPEFSCR:
8171 icode = CODE_FOR_spe_mfspefscr;
8172 tmode = insn_data[icode].operand[0].mode;
8173
8174 if (target == 0
8175 || GET_MODE (target) != tmode
8176 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8177 target = gen_reg_rtx (tmode);
f676971a 8178
a3170dc6
AH
8179 pat = GEN_FCN (icode) (target);
8180 if (! pat)
8181 return 0;
8182 emit_insn (pat);
8183 return target;
8184 case SPE_BUILTIN_MTSPEFSCR:
8185 icode = CODE_FOR_spe_mtspefscr;
5039610b 8186 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8187 op0 = expand_normal (arg0);
a3170dc6
AH
8188 mode0 = insn_data[icode].operand[0].mode;
8189
8190 if (arg0 == error_mark_node)
8191 return const0_rtx;
8192
8193 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8194 op0 = copy_to_mode_reg (mode0, op0);
8195
8196 pat = GEN_FCN (icode) (op0);
8197 if (pat)
8198 emit_insn (pat);
8199 return NULL_RTX;
8200 default:
8201 break;
8202 }
8203
8204 *expandedp = false;
8205 return NULL_RTX;
8206}
8207
8208static rtx
5039610b 8209spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8210{
8211 rtx pat, scratch, tmp;
5039610b
SL
8212 tree form = CALL_EXPR_ARG (exp, 0);
8213 tree arg0 = CALL_EXPR_ARG (exp, 1);
8214 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8215 rtx op0 = expand_normal (arg0);
8216 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8217 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8218 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8219 int form_int;
8220 enum rtx_code code;
8221
8222 if (TREE_CODE (form) != INTEGER_CST)
8223 {
8224 error ("argument 1 of __builtin_spe_predicate must be a constant");
8225 return const0_rtx;
8226 }
8227 else
8228 form_int = TREE_INT_CST_LOW (form);
8229
37409796 8230 gcc_assert (mode0 == mode1);
a3170dc6
AH
8231
8232 if (arg0 == error_mark_node || arg1 == error_mark_node)
8233 return const0_rtx;
8234
8235 if (target == 0
8236 || GET_MODE (target) != SImode
8237 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8238 target = gen_reg_rtx (SImode);
8239
8240 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8241 op0 = copy_to_mode_reg (mode0, op0);
8242 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8243 op1 = copy_to_mode_reg (mode1, op1);
8244
8245 scratch = gen_reg_rtx (CCmode);
8246
8247 pat = GEN_FCN (icode) (scratch, op0, op1);
8248 if (! pat)
8249 return const0_rtx;
8250 emit_insn (pat);
8251
8252 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8253 _lower_. We use one compare, but look in different bits of the
8254 CR for each variant.
8255
8256 There are 2 elements in each SPE simd type (upper/lower). The CR
8257 bits are set as follows:
8258
8259 BIT0 | BIT 1 | BIT 2 | BIT 3
8260 U | L | (U | L) | (U & L)
8261
8262 So, for an "all" relationship, BIT 3 would be set.
8263 For an "any" relationship, BIT 2 would be set. Etc.
8264
8265 Following traditional nomenclature, these bits map to:
8266
8267 BIT0 | BIT 1 | BIT 2 | BIT 3
8268 LT | GT | EQ | OV
8269
 8270 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
8271 */
8272
8273 switch (form_int)
8274 {
8275 /* All variant. OV bit. */
8276 case 0:
8277 /* We need to get to the OV bit, which is the ORDERED bit. We
8278 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 8279 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
8280 So let's just use another pattern. */
8281 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8282 return target;
8283 /* Any variant. EQ bit. */
8284 case 1:
8285 code = EQ;
8286 break;
8287 /* Upper variant. LT bit. */
8288 case 2:
8289 code = LT;
8290 break;
8291 /* Lower variant. GT bit. */
8292 case 3:
8293 code = GT;
8294 break;
8295 default:
8296 error ("argument 1 of __builtin_spe_predicate is out of range");
8297 return const0_rtx;
8298 }
8299
8300 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8301 emit_move_insn (target, tmp);
8302
8303 return target;
8304}
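/* Sketch of how the four FORM values select a CR bit, using one of
   the predicates registered in bdesc_spe_predicates (the
   __builtin_spe_evcmpgts spelling and the __ev64_opaque__ operand
   type are assumptions here):

     all   = __builtin_spe_evcmpgts (0, a, b);   OV bit: both elements
     any   = __builtin_spe_evcmpgts (1, a, b);   EQ bit: at least one element
     upper = __builtin_spe_evcmpgts (2, a, b);   LT bit: upper element only
     lower = __builtin_spe_evcmpgts (3, a, b);   GT bit: lower element only  */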
8305
8306/* The evsel builtins look like this:
8307
8308 e = __builtin_spe_evsel_OP (a, b, c, d);
8309
8310 and work like this:
8311
8312 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
8313 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
8314*/
8315
8316static rtx
5039610b 8317spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8318{
8319 rtx pat, scratch;
5039610b
SL
8320 tree arg0 = CALL_EXPR_ARG (exp, 0);
8321 tree arg1 = CALL_EXPR_ARG (exp, 1);
8322 tree arg2 = CALL_EXPR_ARG (exp, 2);
8323 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
8324 rtx op0 = expand_normal (arg0);
8325 rtx op1 = expand_normal (arg1);
8326 rtx op2 = expand_normal (arg2);
8327 rtx op3 = expand_normal (arg3);
a3170dc6
AH
8328 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8329 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8330
37409796 8331 gcc_assert (mode0 == mode1);
a3170dc6
AH
8332
8333 if (arg0 == error_mark_node || arg1 == error_mark_node
8334 || arg2 == error_mark_node || arg3 == error_mark_node)
8335 return const0_rtx;
8336
8337 if (target == 0
8338 || GET_MODE (target) != mode0
8339 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
8340 target = gen_reg_rtx (mode0);
8341
8342 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8343 op0 = copy_to_mode_reg (mode0, op0);
8344 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8345 op1 = copy_to_mode_reg (mode0, op1);
8346 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8347 op2 = copy_to_mode_reg (mode0, op2);
8348 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
8349 op3 = copy_to_mode_reg (mode0, op3);
8350
8351 /* Generate the compare. */
8352 scratch = gen_reg_rtx (CCmode);
8353 pat = GEN_FCN (icode) (scratch, op0, op1);
8354 if (! pat)
8355 return const0_rtx;
8356 emit_insn (pat);
8357
8358 if (mode0 == V2SImode)
8359 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
8360 else
8361 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
8362
8363 return target;
8364}
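/* A concrete instance of the evsel scheme described above, using the
   __builtin_spe_evsel_gts entry from bdesc_spe_evsel (operands are
   opaque V2SI values; a sketch):

     e = __builtin_spe_evsel_gts (a, b, c, d);

     e[upper] = (a[upper] > b[upper]) ? c[upper] : d[upper];
     e[lower] = (a[lower] > b[lower]) ? c[lower] : d[lower];  */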
8365
0ac081f6
AH
8366/* Expand an expression EXP that calls a built-in function,
8367 with result going to TARGET if that's convenient
8368 (and in mode MODE if that's convenient).
8369 SUBTARGET may be used as the target for computing one of EXP's operands.
8370 IGNORE is nonzero if the value is to be ignored. */
8371
8372static rtx
a2369ed3 8373rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
8374 enum machine_mode mode ATTRIBUTE_UNUSED,
8375 int ignore ATTRIBUTE_UNUSED)
0ac081f6 8376{
5039610b 8377 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235
AH
8378 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8379 struct builtin_description *d;
8380 size_t i;
8381 rtx ret;
8382 bool success;
f676971a 8383
7ccf35ed
DN
8384 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
8385 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8386 {
8387 int icode = (int) CODE_FOR_altivec_lvsr;
8388 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8389 enum machine_mode mode = insn_data[icode].operand[1].mode;
8390 tree arg;
8391 rtx op, addr, pat;
8392
37409796 8393 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 8394
5039610b 8395 arg = CALL_EXPR_ARG (exp, 0);
37409796 8396 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
8397 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
8398 addr = memory_address (mode, op);
8399 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
8400 op = addr;
8401 else
8402 {
 8403 /* For the load case we need to negate the address. */
8404 op = gen_reg_rtx (GET_MODE (addr));
8405 emit_insn (gen_rtx_SET (VOIDmode, op,
8406 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 8407 }
7ccf35ed
DN
8408 op = gen_rtx_MEM (mode, op);
8409
8410 if (target == 0
8411 || GET_MODE (target) != tmode
8412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8413 target = gen_reg_rtx (tmode);
8414
8415 /*pat = gen_altivec_lvsr (target, op);*/
8416 pat = GEN_FCN (icode) (target, op);
8417 if (!pat)
8418 return 0;
8419 emit_insn (pat);
8420
8421 return target;
8422 }
5039610b
SL
8423
8424 /* FIXME: There's got to be a nicer way to handle this case than
8425 constructing a new CALL_EXPR. */
f57d17f1
TM
8426 if (fcode == ALTIVEC_BUILTIN_VCFUX
8427 || fcode == ALTIVEC_BUILTIN_VCFSX)
8428 {
5039610b
SL
8429 if (call_expr_nargs (exp) == 1)
8430 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
8431 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 8432 }
7ccf35ed 8433
0ac081f6 8434 if (TARGET_ALTIVEC)
92898235
AH
8435 {
8436 ret = altivec_expand_builtin (exp, target, &success);
8437
a3170dc6
AH
8438 if (success)
8439 return ret;
8440 }
8441 if (TARGET_SPE)
8442 {
8443 ret = spe_expand_builtin (exp, target, &success);
8444
92898235
AH
8445 if (success)
8446 return ret;
8447 }
8448
37409796 8449 gcc_assert (TARGET_ALTIVEC || TARGET_SPE);
bb8df8a6 8450
37409796
NS
8451 /* Handle simple unary operations. */
8452 d = (struct builtin_description *) bdesc_1arg;
8453 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8454 if (d->code == fcode)
5039610b 8455 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 8456
37409796
NS
8457 /* Handle simple binary operations. */
8458 d = (struct builtin_description *) bdesc_2arg;
8459 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8460 if (d->code == fcode)
5039610b 8461 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 8462
37409796
NS
8463 /* Handle simple ternary operations. */
8464 d = (struct builtin_description *) bdesc_3arg;
8465 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
8466 if (d->code == fcode)
5039610b 8467 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 8468
37409796 8469 gcc_unreachable ();
0ac081f6
AH
8470}
8471
7c62e993
PB
8472static tree
8473build_opaque_vector_type (tree node, int nunits)
8474{
8475 node = copy_node (node);
8476 TYPE_MAIN_VARIANT (node) = node;
8477 return build_vector_type (node, nunits);
8478}
8479
0ac081f6 8480static void
863d938c 8481rs6000_init_builtins (void)
0ac081f6 8482{
4a5eab38
PB
8483 V2SI_type_node = build_vector_type (intSI_type_node, 2);
8484 V2SF_type_node = build_vector_type (float_type_node, 2);
8485 V4HI_type_node = build_vector_type (intHI_type_node, 4);
8486 V4SI_type_node = build_vector_type (intSI_type_node, 4);
8487 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 8488 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
8489 V16QI_type_node = build_vector_type (intQI_type_node, 16);
8490
8491 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
8492 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
8493 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
8494
7c62e993
PB
8495 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
8496 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 8497 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 8498 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 8499
8bb418a3
ZL
8500 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8501 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8502 'vector unsigned short'. */
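/* For instance, in C++ with -maltivec the following declare distinct
   overloads rather than clashing redeclarations (a sketch):

     void f (__vector __bool int);
     void f (__vector unsigned int);
     void g (__vector __pixel);
     void g (__vector unsigned short);  */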
8503
8dd16ecc
NS
8504 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
8505 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8506 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
8507 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 8508
58646b77
PB
8509 long_integer_type_internal_node = long_integer_type_node;
8510 long_unsigned_type_internal_node = long_unsigned_type_node;
8511 intQI_type_internal_node = intQI_type_node;
8512 uintQI_type_internal_node = unsigned_intQI_type_node;
8513 intHI_type_internal_node = intHI_type_node;
8514 uintHI_type_internal_node = unsigned_intHI_type_node;
8515 intSI_type_internal_node = intSI_type_node;
8516 uintSI_type_internal_node = unsigned_intSI_type_node;
8517 float_type_internal_node = float_type_node;
8518 void_type_internal_node = void_type_node;
8519
8bb418a3
ZL
8520 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8521 get_identifier ("__bool char"),
8522 bool_char_type_node));
8523 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8524 get_identifier ("__bool short"),
8525 bool_short_type_node));
8526 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8527 get_identifier ("__bool int"),
8528 bool_int_type_node));
8529 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8530 get_identifier ("__pixel"),
8531 pixel_type_node));
8532
4a5eab38
PB
8533 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
8534 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
8535 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
8536 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
8537
8538 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8539 get_identifier ("__vector unsigned char"),
8540 unsigned_V16QI_type_node));
8541 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8542 get_identifier ("__vector signed char"),
8543 V16QI_type_node));
8544 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8545 get_identifier ("__vector __bool char"),
8546 bool_V16QI_type_node));
8547
8548 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8549 get_identifier ("__vector unsigned short"),
8550 unsigned_V8HI_type_node));
8551 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8552 get_identifier ("__vector signed short"),
8553 V8HI_type_node));
8554 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8555 get_identifier ("__vector __bool short"),
8556 bool_V8HI_type_node));
8557
8558 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8559 get_identifier ("__vector unsigned int"),
8560 unsigned_V4SI_type_node));
8561 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8562 get_identifier ("__vector signed int"),
8563 V4SI_type_node));
8564 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8565 get_identifier ("__vector __bool int"),
8566 bool_V4SI_type_node));
8567
8568 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8569 get_identifier ("__vector float"),
8570 V4SF_type_node));
8571 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
8572 get_identifier ("__vector __pixel"),
8573 pixel_V8HI_type_node));
8574
a3170dc6 8575 if (TARGET_SPE)
3fdaa45a 8576 spe_init_builtins ();
0ac081f6
AH
8577 if (TARGET_ALTIVEC)
8578 altivec_init_builtins ();
0559cc77
DE
8579 if (TARGET_ALTIVEC || TARGET_SPE)
8580 rs6000_common_init_builtins ();
69ca3549
DE
8581
8582#if TARGET_XCOFF
8583 /* AIX libm provides clog as __clog. */
8584 if (built_in_decls [BUILT_IN_CLOG])
8585 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
8586#endif
0ac081f6
AH
8587}
8588
a3170dc6
AH
8589/* Search through a set of builtins and enable the mask bits.
8590 DESC is an array of builtins.
b6d08ca1 8591 SIZE is the total number of builtins.
a3170dc6
AH
8592 START is the builtin enum at which to start.
8593 END is the builtin enum at which to end. */
0ac081f6 8594static void
a2369ed3 8595enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 8596 enum rs6000_builtins start,
a2369ed3 8597 enum rs6000_builtins end)
a3170dc6
AH
8598{
8599 int i;
8600
8601 for (i = 0; i < size; ++i)
8602 if (desc[i].code == start)
8603 break;
8604
8605 if (i == size)
8606 return;
8607
8608 for (; i < size; ++i)
8609 {
 8610 /* Enable the builtin by copying the current target_flags into its mask. */
8611 desc[i].mask = target_flags;
8612 if (desc[i].code == end)
8613 break;
8614 }
8615}
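 /* A concrete reading of the loop above, using the first call made from
 spe_init_builtins below as the example:

 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
                           ARRAY_SIZE (bdesc_2arg),
                           SPE_BUILTIN_EVADDW, SPE_BUILTIN_EVXOR);

 scans forward to the entry whose code is SPE_BUILTIN_EVADDW and then
 writes the current target_flags into the mask of every entry up to and
 including the SPE_BUILTIN_EVXOR entry, leaving the rest of the table
 untouched.  This assumes the SPE rows are contiguous in bdesc_2arg.  */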
8616
8617static void
863d938c 8618spe_init_builtins (void)
0ac081f6 8619{
a3170dc6
AH
8620 tree endlink = void_list_node;
8621 tree puint_type_node = build_pointer_type (unsigned_type_node);
8622 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 8623 struct builtin_description *d;
0ac081f6
AH
8624 size_t i;
8625
a3170dc6
AH
8626 tree v2si_ftype_4_v2si
8627 = build_function_type
3fdaa45a
AH
8628 (opaque_V2SI_type_node,
8629 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8630 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8631 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8632 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8633 endlink)))));
8634
8635 tree v2sf_ftype_4_v2sf
8636 = build_function_type
3fdaa45a
AH
8637 (opaque_V2SF_type_node,
8638 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8639 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8640 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8641 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8642 endlink)))));
8643
8644 tree int_ftype_int_v2si_v2si
8645 = build_function_type
8646 (integer_type_node,
8647 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8648 tree_cons (NULL_TREE, opaque_V2SI_type_node,
8649 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8650 endlink))));
8651
8652 tree int_ftype_int_v2sf_v2sf
8653 = build_function_type
8654 (integer_type_node,
8655 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
8656 tree_cons (NULL_TREE, opaque_V2SF_type_node,
8657 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
8658 endlink))));
8659
8660 tree void_ftype_v2si_puint_int
8661 = build_function_type (void_type_node,
3fdaa45a 8662 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8663 tree_cons (NULL_TREE, puint_type_node,
8664 tree_cons (NULL_TREE,
8665 integer_type_node,
8666 endlink))));
8667
8668 tree void_ftype_v2si_puint_char
8669 = build_function_type (void_type_node,
3fdaa45a 8670 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
8671 tree_cons (NULL_TREE, puint_type_node,
8672 tree_cons (NULL_TREE,
8673 char_type_node,
8674 endlink))));
8675
8676 tree void_ftype_v2si_pv2si_int
8677 = build_function_type (void_type_node,
3fdaa45a 8678 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8679 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8680 tree_cons (NULL_TREE,
8681 integer_type_node,
8682 endlink))));
8683
8684 tree void_ftype_v2si_pv2si_char
8685 = build_function_type (void_type_node,
3fdaa45a 8686 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 8687 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8688 tree_cons (NULL_TREE,
8689 char_type_node,
8690 endlink))));
8691
8692 tree void_ftype_int
8693 = build_function_type (void_type_node,
8694 tree_cons (NULL_TREE, integer_type_node, endlink));
8695
8696 tree int_ftype_void
36e8d515 8697 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
8698
8699 tree v2si_ftype_pv2si_int
3fdaa45a 8700 = build_function_type (opaque_V2SI_type_node,
6035d635 8701 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
8702 tree_cons (NULL_TREE, integer_type_node,
8703 endlink)));
8704
8705 tree v2si_ftype_puint_int
3fdaa45a 8706 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8707 tree_cons (NULL_TREE, puint_type_node,
8708 tree_cons (NULL_TREE, integer_type_node,
8709 endlink)));
8710
8711 tree v2si_ftype_pushort_int
3fdaa45a 8712 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
8713 tree_cons (NULL_TREE, pushort_type_node,
8714 tree_cons (NULL_TREE, integer_type_node,
8715 endlink)));
8716
00332c9f
AH
8717 tree v2si_ftype_signed_char
8718 = build_function_type (opaque_V2SI_type_node,
8719 tree_cons (NULL_TREE, signed_char_type_node,
8720 endlink));
8721
a3170dc6
AH
8722 /* The initialization of the simple binary and unary builtins is
8723 done in rs6000_common_init_builtins, but we have to enable the
8724 mask bits here manually because we have run out of `target_flags'
8725 bits. We really need to redesign this mask business. */
8726
8727 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
8728 ARRAY_SIZE (bdesc_2arg),
8729 SPE_BUILTIN_EVADDW,
8730 SPE_BUILTIN_EVXOR);
8731 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
8732 ARRAY_SIZE (bdesc_1arg),
8733 SPE_BUILTIN_EVABS,
8734 SPE_BUILTIN_EVSUBFUSIAAW);
8735 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
8736 ARRAY_SIZE (bdesc_spe_predicates),
8737 SPE_BUILTIN_EVCMPEQ,
8738 SPE_BUILTIN_EVFSTSTLT);
8739 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
8740 ARRAY_SIZE (bdesc_spe_evsel),
8741 SPE_BUILTIN_EVSEL_CMPGTS,
8742 SPE_BUILTIN_EVSEL_FSTSTEQ);
8743
36252949
AH
8744 (*lang_hooks.decls.pushdecl)
8745 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
8746 opaque_V2SI_type_node));
8747
a3170dc6 8748 /* Initialize irregular SPE builtins. */
f676971a 8749
a3170dc6
AH
8750 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
8751 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
8752 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
8753 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
8754 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
8755 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
8756 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
8757 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
8758 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
8759 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
8760 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
8761 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
8762 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
8763 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
8764 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
8765 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
8766 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
8767 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
8768
8769 /* Loads. */
8770 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
8771 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
8772 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
8773 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
8774 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
8775 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
8776 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
8777 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
8778 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
8779 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
8780 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
8781 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
8782 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
8783 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
8784 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
8785 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
8786 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
8787 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
8788 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
8789 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
8790 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
8791 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
8792
8793 /* Predicates. */
8794 d = (struct builtin_description *) bdesc_spe_predicates;
8795 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
8796 {
8797 tree type;
8798
8799 switch (insn_data[d->icode].operand[1].mode)
8800 {
8801 case V2SImode:
8802 type = int_ftype_int_v2si_v2si;
8803 break;
8804 case V2SFmode:
8805 type = int_ftype_int_v2sf_v2sf;
8806 break;
8807 default:
37409796 8808 gcc_unreachable ();
a3170dc6
AH
8809 }
8810
8811 def_builtin (d->mask, d->name, type, d->code);
8812 }
8813
8814 /* Evsel predicates. */
8815 d = (struct builtin_description *) bdesc_spe_evsel;
8816 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
8817 {
8818 tree type;
8819
8820 switch (insn_data[d->icode].operand[1].mode)
8821 {
8822 case V2SImode:
8823 type = v2si_ftype_4_v2si;
8824 break;
8825 case V2SFmode:
8826 type = v2sf_ftype_4_v2sf;
8827 break;
8828 default:
37409796 8829 gcc_unreachable ();
a3170dc6
AH
8830 }
8831
8832 def_builtin (d->mask, d->name, type, d->code);
8833 }
8834}
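 /* A minimal usage sketch of the irregular SPE builtins defined above,
 assuming a compile with -mspe so the definitions are live:

 __ev64_opaque__ buf[2];
 __ev64_opaque__ v = __builtin_spe_evldd (&buf[0], 0);
 __builtin_spe_evstdd (v, &buf[1], 0);

 evldd follows v2si_ftype_pv2si_int (pointer plus literal offset) and
 evstdd follows void_ftype_v2si_pv2si_char (value, pointer, literal
 offset), matching the types built earlier in this function.  */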
8835
8836static void
863d938c 8837altivec_init_builtins (void)
a3170dc6
AH
8838{
8839 struct builtin_description *d;
8840 struct builtin_description_predicates *dp;
8841 size_t i;
7a4eca66
DE
8842 tree ftype;
8843
a3170dc6
AH
8844 tree pfloat_type_node = build_pointer_type (float_type_node);
8845 tree pint_type_node = build_pointer_type (integer_type_node);
8846 tree pshort_type_node = build_pointer_type (short_integer_type_node);
8847 tree pchar_type_node = build_pointer_type (char_type_node);
8848
8849 tree pvoid_type_node = build_pointer_type (void_type_node);
8850
0dbc3651
ZW
8851 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
8852 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
8853 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
8854 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
8855
8856 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
8857
58646b77
PB
8858 tree int_ftype_opaque
8859 = build_function_type_list (integer_type_node,
8860 opaque_V4SI_type_node, NULL_TREE);
8861
8862 tree opaque_ftype_opaque_int
8863 = build_function_type_list (opaque_V4SI_type_node,
8864 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
8865 tree opaque_ftype_opaque_opaque_int
8866 = build_function_type_list (opaque_V4SI_type_node,
8867 opaque_V4SI_type_node, opaque_V4SI_type_node,
8868 integer_type_node, NULL_TREE);
8869 tree int_ftype_int_opaque_opaque
8870 = build_function_type_list (integer_type_node,
8871 integer_type_node, opaque_V4SI_type_node,
8872 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
8873 tree int_ftype_int_v4si_v4si
8874 = build_function_type_list (integer_type_node,
8875 integer_type_node, V4SI_type_node,
8876 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8877 tree v4sf_ftype_pcfloat
8878 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 8879 tree void_ftype_pfloat_v4sf
b4de2f7d 8880 = build_function_type_list (void_type_node,
a3170dc6 8881 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
8882 tree v4si_ftype_pcint
8883 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
8884 tree void_ftype_pint_v4si
b4de2f7d
AH
8885 = build_function_type_list (void_type_node,
8886 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
8887 tree v8hi_ftype_pcshort
8888 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 8889 tree void_ftype_pshort_v8hi
b4de2f7d
AH
8890 = build_function_type_list (void_type_node,
8891 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
8892 tree v16qi_ftype_pcchar
8893 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 8894 tree void_ftype_pchar_v16qi
b4de2f7d
AH
8895 = build_function_type_list (void_type_node,
8896 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 8897 tree void_ftype_v4si
b4de2f7d 8898 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
8899 tree v8hi_ftype_void
8900 = build_function_type (V8HI_type_node, void_list_node);
8901 tree void_ftype_void
8902 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
8903 tree void_ftype_int
8904 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 8905
58646b77
PB
8906 tree opaque_ftype_long_pcvoid
8907 = build_function_type_list (opaque_V4SI_type_node,
8908 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 8909 tree v16qi_ftype_long_pcvoid
a3170dc6 8910 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
8911 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8912 tree v8hi_ftype_long_pcvoid
a3170dc6 8913 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
8914 long_integer_type_node, pcvoid_type_node, NULL_TREE);
8915 tree v4si_ftype_long_pcvoid
a3170dc6 8916 = build_function_type_list (V4SI_type_node,
b4a62fa0 8917 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 8918
58646b77
PB
8919 tree void_ftype_opaque_long_pvoid
8920 = build_function_type_list (void_type_node,
8921 opaque_V4SI_type_node, long_integer_type_node,
8922 pvoid_type_node, NULL_TREE);
b4a62fa0 8923 tree void_ftype_v4si_long_pvoid
b4de2f7d 8924 = build_function_type_list (void_type_node,
b4a62fa0 8925 V4SI_type_node, long_integer_type_node,
b4de2f7d 8926 pvoid_type_node, NULL_TREE);
b4a62fa0 8927 tree void_ftype_v16qi_long_pvoid
b4de2f7d 8928 = build_function_type_list (void_type_node,
b4a62fa0 8929 V16QI_type_node, long_integer_type_node,
b4de2f7d 8930 pvoid_type_node, NULL_TREE);
b4a62fa0 8931 tree void_ftype_v8hi_long_pvoid
b4de2f7d 8932 = build_function_type_list (void_type_node,
b4a62fa0 8933 V8HI_type_node, long_integer_type_node,
b4de2f7d 8934 pvoid_type_node, NULL_TREE);
a3170dc6
AH
8935 tree int_ftype_int_v8hi_v8hi
8936 = build_function_type_list (integer_type_node,
8937 integer_type_node, V8HI_type_node,
8938 V8HI_type_node, NULL_TREE);
8939 tree int_ftype_int_v16qi_v16qi
8940 = build_function_type_list (integer_type_node,
8941 integer_type_node, V16QI_type_node,
8942 V16QI_type_node, NULL_TREE);
8943 tree int_ftype_int_v4sf_v4sf
8944 = build_function_type_list (integer_type_node,
8945 integer_type_node, V4SF_type_node,
8946 V4SF_type_node, NULL_TREE);
8947 tree v4si_ftype_v4si
8948 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
8949 tree v8hi_ftype_v8hi
8950 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
8951 tree v16qi_ftype_v16qi
8952 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
8953 tree v4sf_ftype_v4sf
8954 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 8955 tree void_ftype_pcvoid_int_int
a3170dc6 8956 = build_function_type_list (void_type_node,
0dbc3651 8957 pcvoid_type_node, integer_type_node,
8bb418a3 8958 integer_type_node, NULL_TREE);
8bb418a3 8959
0dbc3651
ZW
8960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
8961 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
8962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
8963 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
8964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
8965 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
8966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
8967 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
8968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
8969 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
8970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
8971 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
8972 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
8973 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
8974 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
8975 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
8976 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
8977 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
8978 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 8979 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
8980 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
8981 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
8982 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
8983 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
8984 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
8985 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
8986 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
8987 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
8988 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
8989 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
8990 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
8991 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
8992 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
8993 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
8994 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
8995 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
8996 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
8997 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
8998 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
8999 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9000 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9001 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9002 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9003 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9004 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9005 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9006
9007 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9008
9009 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9010 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9011 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9012 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9013 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9014 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9015 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9016 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9017 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9018 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9019
a3170dc6
AH
9020 /* Add the DST variants. */
9021 d = (struct builtin_description *) bdesc_dst;
9022 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9023 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9024
9025 /* Initialize the predicates. */
9026 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
9027 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9028 {
9029 enum machine_mode mode1;
9030 tree type;
58646b77
PB
9031 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9032 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9033
58646b77
PB
9034 if (is_overloaded)
9035 mode1 = VOIDmode;
9036 else
9037 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9038
9039 switch (mode1)
9040 {
58646b77
PB
9041 case VOIDmode:
9042 type = int_ftype_int_opaque_opaque;
9043 break;
a3170dc6
AH
9044 case V4SImode:
9045 type = int_ftype_int_v4si_v4si;
9046 break;
9047 case V8HImode:
9048 type = int_ftype_int_v8hi_v8hi;
9049 break;
9050 case V16QImode:
9051 type = int_ftype_int_v16qi_v16qi;
9052 break;
9053 case V4SFmode:
9054 type = int_ftype_int_v4sf_v4sf;
9055 break;
9056 default:
37409796 9057 gcc_unreachable ();
a3170dc6 9058 }
f676971a 9059
a3170dc6
AH
9060 def_builtin (dp->mask, dp->name, type, dp->code);
9061 }
9062
9063 /* Initialize the abs* operators. */
9064 d = (struct builtin_description *) bdesc_abs;
9065 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9066 {
9067 enum machine_mode mode0;
9068 tree type;
9069
9070 mode0 = insn_data[d->icode].operand[0].mode;
9071
9072 switch (mode0)
9073 {
9074 case V4SImode:
9075 type = v4si_ftype_v4si;
9076 break;
9077 case V8HImode:
9078 type = v8hi_ftype_v8hi;
9079 break;
9080 case V16QImode:
9081 type = v16qi_ftype_v16qi;
9082 break;
9083 case V4SFmode:
9084 type = v4sf_ftype_v4sf;
9085 break;
9086 default:
37409796 9087 gcc_unreachable ();
a3170dc6 9088 }
f676971a 9089
a3170dc6
AH
9090 def_builtin (d->mask, d->name, type, d->code);
9091 }
7ccf35ed 9092
13c62176
DN
9093 if (TARGET_ALTIVEC)
9094 {
9095 tree decl;
9096
9097 /* Initialize target builtin that implements
9098 targetm.vectorize.builtin_mask_for_load. */
9099
c79efc4d
RÁE
9100 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9101 v16qi_ftype_long_pcvoid,
9102 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9103 BUILT_IN_MD, NULL, NULL_TREE);
9104 TREE_READONLY (decl) = 1;
13c62176
DN
9105 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9106 altivec_builtin_mask_for_load = decl;
13c62176 9107 }
7a4eca66
DE
9108
9109 /* Access to the vec_init patterns. */
9110 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9111 integer_type_node, integer_type_node,
9112 integer_type_node, NULL_TREE);
9113 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9114 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9115
9116 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9117 short_integer_type_node,
9118 short_integer_type_node,
9119 short_integer_type_node,
9120 short_integer_type_node,
9121 short_integer_type_node,
9122 short_integer_type_node,
9123 short_integer_type_node, NULL_TREE);
9124 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9125 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9126
9127 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9128 char_type_node, char_type_node,
9129 char_type_node, char_type_node,
9130 char_type_node, char_type_node,
9131 char_type_node, char_type_node,
9132 char_type_node, char_type_node,
9133 char_type_node, char_type_node,
9134 char_type_node, char_type_node,
9135 char_type_node, NULL_TREE);
9136 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9137 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9138
9139 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9140 float_type_node, float_type_node,
9141 float_type_node, NULL_TREE);
9142 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9143 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9144
9145 /* Access to the vec_set patterns. */
9146 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9147 intSI_type_node,
9148 integer_type_node, NULL_TREE);
9149 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9150 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9151
9152 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
9153 intHI_type_node,
9154 integer_type_node, NULL_TREE);
9155 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
9156 ALTIVEC_BUILTIN_VEC_SET_V8HI);
9157
9158 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
9159 intQI_type_node,
9160 integer_type_node, NULL_TREE);
9161 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
9162 ALTIVEC_BUILTIN_VEC_SET_V16QI);
9163
9164 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
9165 float_type_node,
9166 integer_type_node, NULL_TREE);
9167 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
9168 ALTIVEC_BUILTIN_VEC_SET_V4SF);
9169
9170 /* Access to the vec_extract patterns. */
9171 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
9172 integer_type_node, NULL_TREE);
9173 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
9174 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
9175
9176 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
9177 integer_type_node, NULL_TREE);
9178 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
9179 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
9180
9181 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
9182 integer_type_node, NULL_TREE);
9183 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
9184 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
9185
9186 ftype = build_function_type_list (float_type_node, V4SF_type_node,
9187 integer_type_node, NULL_TREE);
9188 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
9189 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
a3170dc6
AH
9190}
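 /* A minimal usage sketch of a few of the builtins registered above,
 assuming -maltivec (altivec.h normally wraps these in the vec_*
 overloads):

 int buf[4] __attribute__ ((aligned (16)));
 vector signed int v = __builtin_altivec_lvx (0, buf);
 int first = __builtin_vec_ext_v4si (v, 0);
 __builtin_altivec_stvx (v, 0, buf);

 lvx and stvx use the v4si_ftype_long_pcvoid and
 void_ftype_v4si_long_pvoid prototypes (a long offset plus an untyped
 pointer), and __builtin_vec_ext_v4si uses the vec_extract prototype
 built just above.  */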
9191
9192static void
863d938c 9193rs6000_common_init_builtins (void)
a3170dc6
AH
9194{
9195 struct builtin_description *d;
9196 size_t i;
9197
9198 tree v4sf_ftype_v4sf_v4sf_v16qi
9199 = build_function_type_list (V4SF_type_node,
9200 V4SF_type_node, V4SF_type_node,
9201 V16QI_type_node, NULL_TREE);
9202 tree v4si_ftype_v4si_v4si_v16qi
9203 = build_function_type_list (V4SI_type_node,
9204 V4SI_type_node, V4SI_type_node,
9205 V16QI_type_node, NULL_TREE);
9206 tree v8hi_ftype_v8hi_v8hi_v16qi
9207 = build_function_type_list (V8HI_type_node,
9208 V8HI_type_node, V8HI_type_node,
9209 V16QI_type_node, NULL_TREE);
9210 tree v16qi_ftype_v16qi_v16qi_v16qi
9211 = build_function_type_list (V16QI_type_node,
9212 V16QI_type_node, V16QI_type_node,
9213 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
9214 tree v4si_ftype_int
9215 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
9216 tree v8hi_ftype_int
9217 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
9218 tree v16qi_ftype_int
9219 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
9220 tree v8hi_ftype_v16qi
9221 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
9222 tree v4sf_ftype_v4sf
9223 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9224
9225 tree v2si_ftype_v2si_v2si
2abe3e28
AH
9226 = build_function_type_list (opaque_V2SI_type_node,
9227 opaque_V2SI_type_node,
9228 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9229
9230 tree v2sf_ftype_v2sf_v2sf
2abe3e28
AH
9231 = build_function_type_list (opaque_V2SF_type_node,
9232 opaque_V2SF_type_node,
9233 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9234
9235 tree v2si_ftype_int_int
2abe3e28 9236 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9237 integer_type_node, integer_type_node,
9238 NULL_TREE);
9239
58646b77
PB
9240 tree opaque_ftype_opaque
9241 = build_function_type_list (opaque_V4SI_type_node,
9242 opaque_V4SI_type_node, NULL_TREE);
9243
a3170dc6 9244 tree v2si_ftype_v2si
2abe3e28
AH
9245 = build_function_type_list (opaque_V2SI_type_node,
9246 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9247
9248 tree v2sf_ftype_v2sf
2abe3e28
AH
9249 = build_function_type_list (opaque_V2SF_type_node,
9250 opaque_V2SF_type_node, NULL_TREE);
f676971a 9251
a3170dc6 9252 tree v2sf_ftype_v2si
2abe3e28
AH
9253 = build_function_type_list (opaque_V2SF_type_node,
9254 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
9255
9256 tree v2si_ftype_v2sf
2abe3e28
AH
9257 = build_function_type_list (opaque_V2SI_type_node,
9258 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
9259
9260 tree v2si_ftype_v2si_char
2abe3e28
AH
9261 = build_function_type_list (opaque_V2SI_type_node,
9262 opaque_V2SI_type_node,
9263 char_type_node, NULL_TREE);
a3170dc6
AH
9264
9265 tree v2si_ftype_int_char
2abe3e28 9266 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
9267 integer_type_node, char_type_node, NULL_TREE);
9268
9269 tree v2si_ftype_char
2abe3e28
AH
9270 = build_function_type_list (opaque_V2SI_type_node,
9271 char_type_node, NULL_TREE);
a3170dc6
AH
9272
9273 tree int_ftype_int_int
9274 = build_function_type_list (integer_type_node,
9275 integer_type_node, integer_type_node,
9276 NULL_TREE);
95385cbb 9277
58646b77
PB
9278 tree opaque_ftype_opaque_opaque
9279 = build_function_type_list (opaque_V4SI_type_node,
9280 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 9281 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
9282 = build_function_type_list (V4SI_type_node,
9283 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 9284 tree v4sf_ftype_v4si_int
b4de2f7d 9285 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
9286 V4SI_type_node, integer_type_node, NULL_TREE);
9287 tree v4si_ftype_v4sf_int
b4de2f7d 9288 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9289 V4SF_type_node, integer_type_node, NULL_TREE);
9290 tree v4si_ftype_v4si_int
b4de2f7d 9291 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
9292 V4SI_type_node, integer_type_node, NULL_TREE);
9293 tree v8hi_ftype_v8hi_int
b4de2f7d 9294 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
9295 V8HI_type_node, integer_type_node, NULL_TREE);
9296 tree v16qi_ftype_v16qi_int
b4de2f7d 9297 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
9298 V16QI_type_node, integer_type_node, NULL_TREE);
9299 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
9300 = build_function_type_list (V16QI_type_node,
9301 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
9302 integer_type_node, NULL_TREE);
9303 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
9304 = build_function_type_list (V8HI_type_node,
9305 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
9306 integer_type_node, NULL_TREE);
9307 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
9308 = build_function_type_list (V4SI_type_node,
9309 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
9310 integer_type_node, NULL_TREE);
9311 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
9312 = build_function_type_list (V4SF_type_node,
9313 V4SF_type_node, V4SF_type_node,
b9e4e5d1 9314 integer_type_node, NULL_TREE);
0ac081f6 9315 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
9316 = build_function_type_list (V4SF_type_node,
9317 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
9318 tree opaque_ftype_opaque_opaque_opaque
9319 = build_function_type_list (opaque_V4SI_type_node,
9320 opaque_V4SI_type_node, opaque_V4SI_type_node,
9321 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 9322 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
9323 = build_function_type_list (V4SF_type_node,
9324 V4SF_type_node, V4SF_type_node,
9325 V4SI_type_node, NULL_TREE);
2212663f 9326 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
9327 = build_function_type_list (V4SF_type_node,
9328 V4SF_type_node, V4SF_type_node,
9329 V4SF_type_node, NULL_TREE);
f676971a 9330 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
9331 = build_function_type_list (V4SI_type_node,
9332 V4SI_type_node, V4SI_type_node,
9333 V4SI_type_node, NULL_TREE);
0ac081f6 9334 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
9335 = build_function_type_list (V8HI_type_node,
9336 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 9337 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
9338 = build_function_type_list (V8HI_type_node,
9339 V8HI_type_node, V8HI_type_node,
9340 V8HI_type_node, NULL_TREE);
c4ad648e 9341 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
9342 = build_function_type_list (V4SI_type_node,
9343 V8HI_type_node, V8HI_type_node,
9344 V4SI_type_node, NULL_TREE);
c4ad648e 9345 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
9346 = build_function_type_list (V4SI_type_node,
9347 V16QI_type_node, V16QI_type_node,
9348 V4SI_type_node, NULL_TREE);
0ac081f6 9349 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
9350 = build_function_type_list (V16QI_type_node,
9351 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9352 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
9353 = build_function_type_list (V4SI_type_node,
9354 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 9355 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
9356 = build_function_type_list (V8HI_type_node,
9357 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9358 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
9359 = build_function_type_list (V4SI_type_node,
9360 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9361 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
9362 = build_function_type_list (V8HI_type_node,
9363 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 9364 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
9365 = build_function_type_list (V16QI_type_node,
9366 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9367 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
9368 = build_function_type_list (V4SI_type_node,
9369 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 9370 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
9371 = build_function_type_list (V4SI_type_node,
9372 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9373 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
9374 = build_function_type_list (V4SI_type_node,
9375 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9376 tree v4si_ftype_v8hi
9377 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
9378 tree int_ftype_v4si_v4si
9379 = build_function_type_list (integer_type_node,
9380 V4SI_type_node, V4SI_type_node, NULL_TREE);
9381 tree int_ftype_v4sf_v4sf
9382 = build_function_type_list (integer_type_node,
9383 V4SF_type_node, V4SF_type_node, NULL_TREE);
9384 tree int_ftype_v16qi_v16qi
9385 = build_function_type_list (integer_type_node,
9386 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 9387 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
9388 = build_function_type_list (integer_type_node,
9389 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 9390
6f317ef3 9391 /* Add the simple ternary operators. */
2212663f 9392 d = (struct builtin_description *) bdesc_3arg;
ca7558fc 9393 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 9394 {
2212663f
DB
9395 enum machine_mode mode0, mode1, mode2, mode3;
9396 tree type;
58646b77
PB
9397 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9398 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 9399
58646b77
PB
9400 if (is_overloaded)
9401 {
9402 mode0 = VOIDmode;
9403 mode1 = VOIDmode;
9404 mode2 = VOIDmode;
9405 mode3 = VOIDmode;
9406 }
9407 else
9408 {
9409 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9410 continue;
f676971a 9411
58646b77
PB
9412 mode0 = insn_data[d->icode].operand[0].mode;
9413 mode1 = insn_data[d->icode].operand[1].mode;
9414 mode2 = insn_data[d->icode].operand[2].mode;
9415 mode3 = insn_data[d->icode].operand[3].mode;
9416 }
bb8df8a6 9417
2212663f
DB
9418 /* When all four are of the same mode. */
9419 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
9420 {
9421 switch (mode0)
9422 {
58646b77
PB
9423 case VOIDmode:
9424 type = opaque_ftype_opaque_opaque_opaque;
9425 break;
617e0e1d
DB
9426 case V4SImode:
9427 type = v4si_ftype_v4si_v4si_v4si;
9428 break;
2212663f
DB
9429 case V4SFmode:
9430 type = v4sf_ftype_v4sf_v4sf_v4sf;
9431 break;
9432 case V8HImode:
9433 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 9434 break;
2212663f
DB
9435 case V16QImode:
9436 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9437 break;
2212663f 9438 default:
37409796 9439 gcc_unreachable ();
2212663f
DB
9440 }
9441 }
9442 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 9443 {
2212663f
DB
9444 switch (mode0)
9445 {
9446 case V4SImode:
9447 type = v4si_ftype_v4si_v4si_v16qi;
9448 break;
9449 case V4SFmode:
9450 type = v4sf_ftype_v4sf_v4sf_v16qi;
9451 break;
9452 case V8HImode:
9453 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 9454 break;
2212663f
DB
9455 case V16QImode:
9456 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 9457 break;
2212663f 9458 default:
37409796 9459 gcc_unreachable ();
2212663f
DB
9460 }
9461 }
f676971a 9462 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 9463 && mode3 == V4SImode)
24408032 9464 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 9465 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 9466 && mode3 == V4SImode)
24408032 9467 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 9468 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 9469 && mode3 == V4SImode)
24408032
AH
9470 type = v4sf_ftype_v4sf_v4sf_v4si;
9471
a7b376ee 9472 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
9473 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
9474 && mode3 == QImode)
b9e4e5d1 9475 type = v16qi_ftype_v16qi_v16qi_int;
24408032 9476
a7b376ee 9477 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
9478 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
9479 && mode3 == QImode)
b9e4e5d1 9480 type = v8hi_ftype_v8hi_v8hi_int;
24408032 9481
a7b376ee 9482 /* vint, vint, vint, 4-bit literal. */
24408032
AH
9483 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
9484 && mode3 == QImode)
b9e4e5d1 9485 type = v4si_ftype_v4si_v4si_int;
24408032 9486
a7b376ee 9487 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
9488 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
9489 && mode3 == QImode)
b9e4e5d1 9490 type = v4sf_ftype_v4sf_v4sf_int;
24408032 9491
2212663f 9492 else
37409796 9493 gcc_unreachable ();
2212663f
DB
9494
9495 def_builtin (d->mask, d->name, type, d->code);
9496 }
9497
0ac081f6 9498 /* Add the simple binary operators. */
00b960c7 9499 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 9500 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
9501 {
9502 enum machine_mode mode0, mode1, mode2;
9503 tree type;
58646b77
PB
9504 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9505 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 9506
58646b77
PB
9507 if (is_overloaded)
9508 {
9509 mode0 = VOIDmode;
9510 mode1 = VOIDmode;
9511 mode2 = VOIDmode;
9512 }
9513 else
bb8df8a6 9514 {
58646b77
PB
9515 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9516 continue;
f676971a 9517
58646b77
PB
9518 mode0 = insn_data[d->icode].operand[0].mode;
9519 mode1 = insn_data[d->icode].operand[1].mode;
9520 mode2 = insn_data[d->icode].operand[2].mode;
9521 }
0ac081f6
AH
9522
9523 /* When all three operands are of the same mode. */
9524 if (mode0 == mode1 && mode1 == mode2)
9525 {
9526 switch (mode0)
9527 {
58646b77
PB
9528 case VOIDmode:
9529 type = opaque_ftype_opaque_opaque;
9530 break;
0ac081f6
AH
9531 case V4SFmode:
9532 type = v4sf_ftype_v4sf_v4sf;
9533 break;
9534 case V4SImode:
9535 type = v4si_ftype_v4si_v4si;
9536 break;
9537 case V16QImode:
9538 type = v16qi_ftype_v16qi_v16qi;
9539 break;
9540 case V8HImode:
9541 type = v8hi_ftype_v8hi_v8hi;
9542 break;
a3170dc6
AH
9543 case V2SImode:
9544 type = v2si_ftype_v2si_v2si;
9545 break;
9546 case V2SFmode:
9547 type = v2sf_ftype_v2sf_v2sf;
9548 break;
9549 case SImode:
9550 type = int_ftype_int_int;
9551 break;
0ac081f6 9552 default:
37409796 9553 gcc_unreachable ();
0ac081f6
AH
9554 }
9555 }
9556
9557 /* A few other combos we really don't want to do manually. */
9558
9559 /* vint, vfloat, vfloat. */
9560 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
9561 type = v4si_ftype_v4sf_v4sf;
9562
9563 /* vshort, vchar, vchar. */
9564 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
9565 type = v8hi_ftype_v16qi_v16qi;
9566
9567 /* vint, vshort, vshort. */
9568 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
9569 type = v4si_ftype_v8hi_v8hi;
9570
9571 /* vshort, vint, vint. */
9572 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
9573 type = v8hi_ftype_v4si_v4si;
9574
9575 /* vchar, vshort, vshort. */
9576 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
9577 type = v16qi_ftype_v8hi_v8hi;
9578
9579 /* vint, vchar, vint. */
9580 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
9581 type = v4si_ftype_v16qi_v4si;
9582
fa066a23
AH
9583 /* vint, vchar, vchar. */
9584 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
9585 type = v4si_ftype_v16qi_v16qi;
9586
0ac081f6
AH
9587 /* vint, vshort, vint. */
9588 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
9589 type = v4si_ftype_v8hi_v4si;
f676971a 9590
a7b376ee 9591 /* vint, vint, 5-bit literal. */
2212663f 9592 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9593 type = v4si_ftype_v4si_int;
f676971a 9594
a7b376ee 9595 /* vshort, vshort, 5-bit literal. */
2212663f 9596 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 9597 type = v8hi_ftype_v8hi_int;
f676971a 9598
a7b376ee 9599 /* vchar, vchar, 5-bit literal. */
2212663f 9600 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 9601 type = v16qi_ftype_v16qi_int;
0ac081f6 9602
a7b376ee 9603 /* vfloat, vint, 5-bit literal. */
617e0e1d 9604 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 9605 type = v4sf_ftype_v4si_int;
f676971a 9606
a7b376ee 9607 /* vint, vfloat, 5-bit literal. */
617e0e1d 9608 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 9609 type = v4si_ftype_v4sf_int;
617e0e1d 9610
a3170dc6
AH
9611 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
9612 type = v2si_ftype_int_int;
9613
9614 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
9615 type = v2si_ftype_v2si_char;
9616
9617 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
9618 type = v2si_ftype_int_char;
9619
37409796 9620 else
0ac081f6 9621 {
37409796
NS
9622 /* int, x, x. */
9623 gcc_assert (mode0 == SImode);
0ac081f6
AH
9624 switch (mode1)
9625 {
9626 case V4SImode:
9627 type = int_ftype_v4si_v4si;
9628 break;
9629 case V4SFmode:
9630 type = int_ftype_v4sf_v4sf;
9631 break;
9632 case V16QImode:
9633 type = int_ftype_v16qi_v16qi;
9634 break;
9635 case V8HImode:
9636 type = int_ftype_v8hi_v8hi;
9637 break;
9638 default:
37409796 9639 gcc_unreachable ();
0ac081f6
AH
9640 }
9641 }
9642
2212663f
DB
9643 def_builtin (d->mask, d->name, type, d->code);
9644 }
24408032 9645
2212663f
DB
9646 /* Add the simple unary operators. */
9647 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 9648 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
9649 {
9650 enum machine_mode mode0, mode1;
9651 tree type;
58646b77
PB
9652 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9653 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
9654
9655 if (is_overloaded)
9656 {
9657 mode0 = VOIDmode;
9658 mode1 = VOIDmode;
9659 }
9660 else
9661 {
9662 if (d->name == 0 || d->icode == CODE_FOR_nothing)
9663 continue;
bb8df8a6 9664
58646b77
PB
9665 mode0 = insn_data[d->icode].operand[0].mode;
9666 mode1 = insn_data[d->icode].operand[1].mode;
9667 }
2212663f
DB
9668
9669 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 9670 type = v4si_ftype_int;
2212663f 9671 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 9672 type = v8hi_ftype_int;
2212663f 9673 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 9674 type = v16qi_ftype_int;
58646b77
PB
9675 else if (mode0 == VOIDmode && mode1 == VOIDmode)
9676 type = opaque_ftype_opaque;
617e0e1d
DB
9677 else if (mode0 == V4SFmode && mode1 == V4SFmode)
9678 type = v4sf_ftype_v4sf;
20e26713
AH
9679 else if (mode0 == V8HImode && mode1 == V16QImode)
9680 type = v8hi_ftype_v16qi;
9681 else if (mode0 == V4SImode && mode1 == V8HImode)
9682 type = v4si_ftype_v8hi;
a3170dc6
AH
9683 else if (mode0 == V2SImode && mode1 == V2SImode)
9684 type = v2si_ftype_v2si;
9685 else if (mode0 == V2SFmode && mode1 == V2SFmode)
9686 type = v2sf_ftype_v2sf;
9687 else if (mode0 == V2SFmode && mode1 == V2SImode)
9688 type = v2sf_ftype_v2si;
9689 else if (mode0 == V2SImode && mode1 == V2SFmode)
9690 type = v2si_ftype_v2sf;
9691 else if (mode0 == V2SImode && mode1 == QImode)
9692 type = v2si_ftype_char;
2212663f 9693 else
37409796 9694 gcc_unreachable ();
2212663f 9695
0ac081f6
AH
9696 def_builtin (d->mask, d->name, type, d->code);
9697 }
9698}
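 /* A worked example of the mode dispatch in the binary-operator loop
 above (the modes here are illustrative, not tied to one table entry):
 an insn whose operands are reported as (V8HImode, V16QImode, V16QImode)
 is not all-same-mode, so the "vshort, vchar, vchar" arm selects
 v8hi_ftype_v16qi_v16qi; one reported as (V4SImode, V4SImode, QImode)
 hits the "vint, vint, 5-bit literal" arm and gets v4si_ftype_v4si_int.
 Overloaded entries bypass all of this: their modes are forced to
 VOIDmode and they get the opaque prototypes.  */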
9699
c15c90bb
ZW
9700static void
9701rs6000_init_libfuncs (void)
9702{
602ea4d3
JJ
9703 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
9704 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 9705 {
602ea4d3
JJ
9706 /* AIX library routines for float->int conversion. */
9707 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
9708 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
9709 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
9710 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
9711 }
c15c90bb 9712
602ea4d3 9713 if (!TARGET_IEEEQUAD)
98c41d98 9714 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
9715 if (!TARGET_XL_COMPAT)
9716 {
9717 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
9718 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
9719 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
9720 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 9721
17caeff2 9722 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
9723 {
9724 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
9725 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
9726 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
9727 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
9728 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
9729 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
9730 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
9731
9732 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
9733 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
9734 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
9735 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
9736 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
9737 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
9738 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
9739 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
9740 }
b26941b4
JM
9741
9742 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
9743 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
9744 }
9745 else
9746 {
9747 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
9748 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
9749 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
9750 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
9751 }
c9034561 9752 else
c15c90bb 9753 {
c9034561 9754 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
9755
9756 set_optab_libfunc (add_optab, TFmode, "_q_add");
9757 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
9758 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
9759 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
9760 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
9761 if (TARGET_PPC_GPOPT || TARGET_POWER2)
9762 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
9763
c9034561
ZW
9764 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
9765 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
9766 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
9767 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
9768 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
9769 set_optab_libfunc (le_optab, TFmode, "_q_fle");
9770
85363ca0
ZW
9771 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
9772 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
9773 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
9774 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
9775 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
9776 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
9777 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 9778 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
9779 }
9780}
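 /* A sketch of the effect of the tables above on one operation: a TFmode
 (long double) addition is lowered to a libcall whose name depends on
 the configuration chosen here -- "__gcc_qadd" for the default IBM
 extended-double routines, "_xlqadd" when TARGET_XL_COMPAT is set, and
 "_q_add" for the 32-bit SVR4 quad-float routines.  */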
fba73eb1
DE
9781
9782\f
9783/* Expand a block clear operation, and return 1 if successful. Return 0
9784 if we should let the compiler generate normal code.
9785
9786 operands[0] is the destination
9787 operands[1] is the length
57e84f18 9788 operands[3] is the alignment */
fba73eb1
DE
9789
9790int
9791expand_block_clear (rtx operands[])
9792{
9793 rtx orig_dest = operands[0];
9794 rtx bytes_rtx = operands[1];
57e84f18 9795 rtx align_rtx = operands[3];
5514620a
GK
9796 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
9797 HOST_WIDE_INT align;
9798 HOST_WIDE_INT bytes;
fba73eb1
DE
9799 int offset;
9800 int clear_bytes;
5514620a 9801 int clear_step;
fba73eb1
DE
9802
 9803 /* If this is not a fixed size clear, just call memset */
9804 if (! constp)
9805 return 0;
9806
37409796
NS
9807 /* This must be a fixed size alignment */
9808 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
9809 align = INTVAL (align_rtx) * BITS_PER_UNIT;
9810
9811 /* Anything to clear? */
9812 bytes = INTVAL (bytes_rtx);
9813 if (bytes <= 0)
9814 return 1;
9815
5514620a
GK
9816 /* Use the builtin memset after a point, to avoid huge code bloat.
9817 When optimize_size, avoid any significant code bloat; calling
9818 memset is about 4 instructions, so allow for one instruction to
9819 load zero and three to do clearing. */
9820 if (TARGET_ALTIVEC && align >= 128)
9821 clear_step = 16;
9822 else if (TARGET_POWERPC64 && align >= 32)
9823 clear_step = 8;
9824 else
9825 clear_step = 4;
fba73eb1 9826
5514620a
GK
9827 if (optimize_size && bytes > 3 * clear_step)
9828 return 0;
9829 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
9830 return 0;
9831
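 /* Worked numbers for the cut-offs above: with clear_step == 4 (neither
 the AltiVec nor the 64-bit store case applies) an optimize_size build
 only clears inline up to 3*4 = 12 bytes and other builds up to
 8*4 = 32 bytes; with clear_step == 16 the limits grow to 48 and 128
 bytes respectively.  */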
9832 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
9833 {
fba73eb1
DE
9834 enum machine_mode mode = BLKmode;
9835 rtx dest;
f676971a 9836
5514620a
GK
9837 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
9838 {
9839 clear_bytes = 16;
9840 mode = V4SImode;
9841 }
9842 else if (bytes >= 8 && TARGET_POWERPC64
9843 /* 64-bit loads and stores require word-aligned
9844 displacements. */
9845 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
9846 {
9847 clear_bytes = 8;
9848 mode = DImode;
fba73eb1 9849 }
5514620a 9850 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
 9851 { /* clear 4 bytes */
9852 clear_bytes = 4;
9853 mode = SImode;
fba73eb1 9854 }
ec53fc93 9855 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
 9856 { /* clear 2 bytes */
9857 clear_bytes = 2;
9858 mode = HImode;
fba73eb1
DE
9859 }
 9860 else /* clear 1 byte at a time */
9861 {
9862 clear_bytes = 1;
9863 mode = QImode;
fba73eb1 9864 }
f676971a 9865
fba73eb1 9866 dest = adjust_address (orig_dest, mode, offset);
f676971a 9867
5514620a 9868 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
9869 }
9870
9871 return 1;
9872}
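 /* Example of the decomposition performed by the loop above, assuming
 TARGET_ALTIVEC and TARGET_POWERPC64, a 16-byte-aligned destination and
 no optimize_size: a fixed 40-byte clear is emitted as two V4SImode
 stores of zero (16 + 16 bytes) followed by one DImode store (8 bytes),
 rather than a call to memset.  */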
9873
35aff10b 9874\f
7e69e155
MM
9875/* Expand a block move operation, and return 1 if successful. Return 0
9876 if we should let the compiler generate normal code.
9877
9878 operands[0] is the destination
9879 operands[1] is the source
9880 operands[2] is the length
9881 operands[3] is the alignment */
9882
3933e0e1
MM
9883#define MAX_MOVE_REG 4
9884
7e69e155 9885int
a2369ed3 9886expand_block_move (rtx operands[])
7e69e155 9887{
b6c9286a
MM
9888 rtx orig_dest = operands[0];
9889 rtx orig_src = operands[1];
7e69e155 9890 rtx bytes_rtx = operands[2];
7e69e155 9891 rtx align_rtx = operands[3];
3933e0e1 9892 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 9893 int align;
3933e0e1
MM
9894 int bytes;
9895 int offset;
7e69e155 9896 int move_bytes;
cabfd258
GK
9897 rtx stores[MAX_MOVE_REG];
9898 int num_reg = 0;
7e69e155 9899
3933e0e1 9900 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 9901 if (! constp)
3933e0e1
MM
9902 return 0;
9903
37409796
NS
9904 /* This must be a fixed size alignment */
9905 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 9906 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 9907
7e69e155 9908 /* Anything to move? */
3933e0e1
MM
9909 bytes = INTVAL (bytes_rtx);
9910 if (bytes <= 0)
7e69e155
MM
9911 return 1;
9912
ea9982a8 9913 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 9914 reg_parm_stack_space. */
ea9982a8 9915 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
9916 return 0;
9917
cabfd258 9918 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 9919 {
cabfd258 9920 union {
70128ad9 9921 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 9922 rtx (*mov) (rtx, rtx);
cabfd258
GK
9923 } gen_func;
9924 enum machine_mode mode = BLKmode;
9925 rtx src, dest;
f676971a 9926
5514620a
GK
9927 /* Altivec first, since it will be faster than a string move
9928 when it applies, and usually not significantly larger. */
9929 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
9930 {
9931 move_bytes = 16;
9932 mode = V4SImode;
9933 gen_func.mov = gen_movv4si;
9934 }
9935 else if (TARGET_STRING
cabfd258
GK
9936 && bytes > 24 /* move up to 32 bytes at a time */
9937 && ! fixed_regs[5]
9938 && ! fixed_regs[6]
9939 && ! fixed_regs[7]
9940 && ! fixed_regs[8]
9941 && ! fixed_regs[9]
9942 && ! fixed_regs[10]
9943 && ! fixed_regs[11]
9944 && ! fixed_regs[12])
7e69e155 9945 {
cabfd258 9946 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 9947 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
9948 }
9949 else if (TARGET_STRING
9950 && bytes > 16 /* move up to 24 bytes at a time */
9951 && ! fixed_regs[5]
9952 && ! fixed_regs[6]
9953 && ! fixed_regs[7]
9954 && ! fixed_regs[8]
9955 && ! fixed_regs[9]
9956 && ! fixed_regs[10])
9957 {
9958 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 9959 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
9960 }
9961 else if (TARGET_STRING
9962 && bytes > 8 /* move up to 16 bytes at a time */
9963 && ! fixed_regs[5]
9964 && ! fixed_regs[6]
9965 && ! fixed_regs[7]
9966 && ! fixed_regs[8])
9967 {
9968 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 9969 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
9970 }
9971 else if (bytes >= 8 && TARGET_POWERPC64
9972 /* 64-bit loads and stores require word-aligned
9973 displacements. */
fba73eb1 9974 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
9975 {
9976 move_bytes = 8;
9977 mode = DImode;
9978 gen_func.mov = gen_movdi;
9979 }
9980 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
9981 { /* move up to 8 bytes at a time */
9982 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 9983 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 9984 }
cd7d9ca4 9985 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
9986 { /* move 4 bytes */
9987 move_bytes = 4;
9988 mode = SImode;
9989 gen_func.mov = gen_movsi;
9990 }
ec53fc93 9991 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
9992 { /* move 2 bytes */
9993 move_bytes = 2;
9994 mode = HImode;
9995 gen_func.mov = gen_movhi;
9996 }
9997 else if (TARGET_STRING && bytes > 1)
9998 { /* move up to 4 bytes at a time */
9999 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10000 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10001 }
10002 else /* move 1 byte at a time */
10003 {
10004 move_bytes = 1;
10005 mode = QImode;
10006 gen_func.mov = gen_movqi;
10007 }
f676971a 10008
cabfd258
GK
10009 src = adjust_address (orig_src, mode, offset);
10010 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10011
10012 if (mode != BLKmode)
cabfd258
GK
10013 {
10014 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10015
cabfd258
GK
10016 emit_insn ((*gen_func.mov) (tmp_reg, src));
10017 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10018 }
3933e0e1 10019
cabfd258
GK
10020 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10021 {
10022 int i;
10023 for (i = 0; i < num_reg; i++)
10024 emit_insn (stores[i]);
10025 num_reg = 0;
10026 }
35aff10b 10027
cabfd258 10028 if (mode == BLKmode)
7e69e155 10029 {
70128ad9 10030 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10031 patterns require zero offset. */
10032 if (!REG_P (XEXP (src, 0)))
b6c9286a 10033 {
cabfd258
GK
10034 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10035 src = replace_equiv_address (src, src_reg);
b6c9286a 10036 }
cabfd258 10037 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10038
cabfd258 10039 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10040 {
cabfd258
GK
10041 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10042 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10043 }
cabfd258 10044 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10045
70128ad9 10046 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10047 GEN_INT (move_bytes & 31),
10048 align_rtx));
7e69e155 10049 }
7e69e155
MM
10050 }
10051
10052 return 1;
10053}
10054
d62294f5 10055\f
9caa3eb2
DE
10056/* Return a string to perform a load_multiple operation.
10057 operands[0] is the vector.
10058 operands[1] is the source address.
10059 operands[2] is the first destination register. */
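/* For example, a four-word load into r5..r8 whose address register is r8
   (the last destination) is emitted, using the new mnemonics, roughly as
   "lswi r5,r8,12" followed by "lwz r8,12(r8)", so the register holding
   the address is overwritten only by the final load.  */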
10060
10061const char *
a2369ed3 10062rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10063{
10064 /* We have to handle the case where the pseudo used to contain the address
10065 is assigned to one of the output registers. */
10066 int i, j;
10067 int words = XVECLEN (operands[0], 0);
10068 rtx xop[10];
10069
10070 if (XVECLEN (operands[0], 0) == 1)
10071 return "{l|lwz} %2,0(%1)";
10072
10073 for (i = 0; i < words; i++)
10074 if (refers_to_regno_p (REGNO (operands[2]) + i,
10075 REGNO (operands[2]) + i + 1, operands[1], 0))
10076 {
10077 if (i == words-1)
10078 {
10079 xop[0] = GEN_INT (4 * (words-1));
10080 xop[1] = operands[1];
10081 xop[2] = operands[2];
10082 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10083 return "";
10084 }
10085 else if (i == 0)
10086 {
10087 xop[0] = GEN_INT (4 * (words-1));
10088 xop[1] = operands[1];
10089 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10090 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10091 return "";
10092 }
10093 else
10094 {
10095 for (j = 0; j < words; j++)
10096 if (j != i)
10097 {
10098 xop[0] = GEN_INT (j * 4);
10099 xop[1] = operands[1];
10100 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10101 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10102 }
10103 xop[0] = GEN_INT (i * 4);
10104 xop[1] = operands[1];
10105 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10106 return "";
10107 }
10108 }
10109
10110 return "{lsi|lswi} %2,%1,%N0";
10111}
10112
9878760c 10113\f
a4f6c312
SS
10114/* A validation routine: say whether CODE, a condition code, and MODE
10115 match. The other alternatives either don't make sense or should
10116 never be generated. */
39a10a29 10117
48d72335 10118void
a2369ed3 10119validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 10120{
37409796
NS
10121 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
10122 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
10123 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
10124
10125 /* These don't make sense. */
37409796
NS
10126 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
10127 || mode != CCUNSmode);
39a10a29 10128
37409796
NS
10129 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
10130 || mode == CCUNSmode);
39a10a29 10131
37409796
NS
10132 gcc_assert (mode == CCFPmode
10133 || (code != ORDERED && code != UNORDERED
10134 && code != UNEQ && code != LTGT
10135 && code != UNGT && code != UNLT
10136 && code != UNGE && code != UNLE));
f676971a
EC
10137
10138 /* These should never be generated except for
bc9ec0e0 10139 flag_finite_math_only. */
37409796
NS
10140 gcc_assert (mode != CCFPmode
10141 || flag_finite_math_only
10142 || (code != LE && code != GE
10143 && code != UNEQ && code != LTGT
10144 && code != UNGT && code != UNLT));
39a10a29
GK
10145
10146 /* These are invalid; the information is not there. */
37409796 10147 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
10148}
10149
9878760c
RK
10150\f
10151/* Return 1 if ANDOP is a mask that has no bits set that are not in the
10152 mask required to convert the result of a rotate insn into a shift
b1765bde 10153 left insn of SHIFTOP bits. Both are known to be SImode CONST_INTs. */
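/* For example, with SHIFTOP == 3 the usable mask is 0xfffffff8, so an
   ANDOP of 0x000000f8 is accepted while 0x000000fc is rejected because it
   has a bit set in the low three positions.  */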
9878760c
RK
10154
10155int
a2369ed3 10156includes_lshift_p (rtx shiftop, rtx andop)
9878760c 10157{
e2c953b6
DE
10158 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
10159
10160 shift_mask <<= INTVAL (shiftop);
9878760c 10161
b1765bde 10162 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
10163}
10164
10165/* Similar, but for right shift. */
10166
10167int
a2369ed3 10168includes_rshift_p (rtx shiftop, rtx andop)
9878760c 10169{
a7653a2c 10170 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
10171
10172 shift_mask >>= INTVAL (shiftop);
10173
b1765bde 10174 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
10175}
10176
c5059423
AM
10177/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
10178 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 10179 significant 0's, then one or more 1's, then zero or more 0's. */
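/* For example, with SHIFTOP == 8 the mask 0x0000ff00 qualifies (exactly
   eight low zeros, then a block of ones), while 0x0001fe00 does not,
   because it has nine low zeros.  */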
e2c953b6
DE
10180
10181int
a2369ed3 10182includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 10183{
c5059423
AM
10184 if (GET_CODE (andop) == CONST_INT)
10185 {
02071907 10186 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 10187
c5059423 10188 c = INTVAL (andop);
02071907 10189 if (c == 0 || c == ~0)
c5059423 10190 return 0;
e2c953b6 10191
02071907 10192 shift_mask = ~0;
c5059423
AM
10193 shift_mask <<= INTVAL (shiftop);
10194
b6d08ca1 10195 /* Find the least significant one bit. */
c5059423
AM
10196 lsb = c & -c;
10197
10198 /* It must coincide with the LSB of the shift mask. */
10199 if (-lsb != shift_mask)
10200 return 0;
e2c953b6 10201
c5059423
AM
10202 /* Invert to look for the next transition (if any). */
10203 c = ~c;
10204
10205 /* Remove the low group of ones (originally low group of zeros). */
10206 c &= -lsb;
10207
10208 /* Again find the lsb, and check we have all 1's above. */
10209 lsb = c & -c;
10210 return c == -lsb;
10211 }
10212 else if (GET_CODE (andop) == CONST_DOUBLE
10213 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10214 {
02071907
AM
10215 HOST_WIDE_INT low, high, lsb;
10216 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
10217
10218 low = CONST_DOUBLE_LOW (andop);
10219 if (HOST_BITS_PER_WIDE_INT < 64)
10220 high = CONST_DOUBLE_HIGH (andop);
10221
10222 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 10223 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
10224 return 0;
10225
10226 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10227 {
02071907 10228 shift_mask_high = ~0;
c5059423
AM
10229 if (INTVAL (shiftop) > 32)
10230 shift_mask_high <<= INTVAL (shiftop) - 32;
10231
10232 lsb = high & -high;
10233
10234 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
10235 return 0;
10236
10237 high = ~high;
10238 high &= -lsb;
10239
10240 lsb = high & -high;
10241 return high == -lsb;
10242 }
10243
02071907 10244 shift_mask_low = ~0;
c5059423
AM
10245 shift_mask_low <<= INTVAL (shiftop);
10246
10247 lsb = low & -low;
10248
10249 if (-lsb != shift_mask_low)
10250 return 0;
10251
10252 if (HOST_BITS_PER_WIDE_INT < 64)
10253 high = ~high;
10254 low = ~low;
10255 low &= -lsb;
10256
10257 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
10258 {
10259 lsb = high & -high;
10260 return high == -lsb;
10261 }
10262
10263 lsb = low & -low;
10264 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
10265 }
10266 else
10267 return 0;
10268}
e2c953b6 10269
c5059423
AM
10270/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
10271 to perform a left shift. It must have SHIFTOP or more least
c1207243 10272 significant 0's, with the remainder of the word 1's. */
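/* For example, with SHIFTOP == 16 both 0xffffffffffff0000 (sixteen low
   zeros) and 0xffffffffff000000 (twenty-four low zeros) qualify, while a
   mask with any zero bit above its low run of zeros does not.  */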
e2c953b6 10273
c5059423 10274int
a2369ed3 10275includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 10276{
e2c953b6 10277 if (GET_CODE (andop) == CONST_INT)
c5059423 10278 {
02071907 10279 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 10280
02071907 10281 shift_mask = ~0;
c5059423
AM
10282 shift_mask <<= INTVAL (shiftop);
10283 c = INTVAL (andop);
10284
c1207243 10285 /* Find the least significant one bit. */
c5059423
AM
10286 lsb = c & -c;
10287
10288 /* It must be covered by the shift mask.
a4f6c312 10289 This test also rejects c == 0. */
c5059423
AM
10290 if ((lsb & shift_mask) == 0)
10291 return 0;
10292
10293 /* Check we have all 1's above the transition, and reject all 1's. */
10294 return c == -lsb && lsb != 1;
10295 }
10296 else if (GET_CODE (andop) == CONST_DOUBLE
10297 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
10298 {
02071907 10299 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
10300
10301 low = CONST_DOUBLE_LOW (andop);
10302
10303 if (HOST_BITS_PER_WIDE_INT < 64)
10304 {
02071907 10305 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
10306
10307 high = CONST_DOUBLE_HIGH (andop);
10308
10309 if (low == 0)
10310 {
02071907 10311 shift_mask_high = ~0;
c5059423
AM
10312 if (INTVAL (shiftop) > 32)
10313 shift_mask_high <<= INTVAL (shiftop) - 32;
10314
10315 lsb = high & -high;
10316
10317 if ((lsb & shift_mask_high) == 0)
10318 return 0;
10319
10320 return high == -lsb;
10321 }
10322 if (high != ~0)
10323 return 0;
10324 }
10325
02071907 10326 shift_mask_low = ~0;
c5059423
AM
10327 shift_mask_low <<= INTVAL (shiftop);
10328
10329 lsb = low & -low;
10330
10331 if ((lsb & shift_mask_low) == 0)
10332 return 0;
10333
10334 return low == -lsb && lsb != 1;
10335 }
e2c953b6 10336 else
c5059423 10337 return 0;
9878760c 10338}
35068b43 10339
11ac38b2
DE
10340/* Return 1 if the operands will generate valid arguments to an rlwimi
10341 instruction for an insert with right shift in 64-bit mode. The mask may
10342 not start on the first bit or stop on the last bit because the wrap-around
10343 effects of the instruction do not correspond to the semantics of the RTL insn. */
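/* For example, SIZEOP == 8, STARTOP == 48 and SHIFTOP == 8 pass every test
   below; masks that would touch either end of the doubleword fail the
   STARTOP and SIZEOP + STARTOP checks.  */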
10344
10345int
10346insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
10347{
429ec7dc
DE
10348 if (INTVAL (startop) > 32
10349 && INTVAL (startop) < 64
10350 && INTVAL (sizeop) > 1
10351 && INTVAL (sizeop) + INTVAL (startop) < 64
10352 && INTVAL (shiftop) > 0
10353 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
10354 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
10355 return 1;
10356
10357 return 0;
10358}
10359
35068b43 10360/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 10361 for lfq and stfq insns iff the registers are hard registers. */
35068b43
RK
10362
10363int
a2369ed3 10364registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
10365{
10366 /* We might have been passed a SUBREG. */
f676971a 10367 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 10368 return 0;
f676971a 10369
90f81f99
AP
 10370 /* We might have been passed non-floating-point registers. */
10371 if (!FP_REGNO_P (REGNO (reg1))
10372 || !FP_REGNO_P (REGNO (reg2)))
10373 return 0;
35068b43
RK
10374
10375 return (REGNO (reg1) == REGNO (reg2) - 1);
10376}
10377
a4f6c312
SS
10378/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
10379 addr1 and addr2 must be in consecutive memory locations
10380 (addr2 == addr1 + 8). */
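/* For example, mem1 at address r3+16 and mem2 at address r3+24 pass all
   of the checks below, assuming neither is volatile: both use base
   register r3 and the offsets differ by exactly 8.  */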
35068b43
RK
10381
10382int
90f81f99 10383mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 10384{
90f81f99 10385 rtx addr1, addr2;
bb8df8a6
EC
10386 unsigned int reg1, reg2;
10387 int offset1, offset2;
35068b43 10388
90f81f99
AP
10389 /* The mems cannot be volatile. */
10390 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
10391 return 0;
f676971a 10392
90f81f99
AP
10393 addr1 = XEXP (mem1, 0);
10394 addr2 = XEXP (mem2, 0);
10395
35068b43
RK
10396 /* Extract an offset (if used) from the first addr. */
10397 if (GET_CODE (addr1) == PLUS)
10398 {
10399 /* If not a REG, return zero. */
10400 if (GET_CODE (XEXP (addr1, 0)) != REG)
10401 return 0;
10402 else
10403 {
c4ad648e 10404 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
10405 /* The offset must be constant! */
10406 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
10407 return 0;
10408 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
10409 }
10410 }
10411 else if (GET_CODE (addr1) != REG)
10412 return 0;
10413 else
10414 {
10415 reg1 = REGNO (addr1);
10416 /* This was a simple (mem (reg)) expression. Offset is 0. */
10417 offset1 = 0;
10418 }
10419
bb8df8a6
EC
10420 /* And now for the second addr. */
10421 if (GET_CODE (addr2) == PLUS)
10422 {
10423 /* If not a REG, return zero. */
10424 if (GET_CODE (XEXP (addr2, 0)) != REG)
10425 return 0;
10426 else
10427 {
10428 reg2 = REGNO (XEXP (addr2, 0));
10429 /* The offset must be constant. */
10430 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
10431 return 0;
10432 offset2 = INTVAL (XEXP (addr2, 1));
10433 }
10434 }
10435 else if (GET_CODE (addr2) != REG)
35068b43 10436 return 0;
bb8df8a6
EC
10437 else
10438 {
10439 reg2 = REGNO (addr2);
10440 /* This was a simple (mem (reg)) expression. Offset is 0. */
10441 offset2 = 0;
10442 }
35068b43 10443
bb8df8a6
EC
10444 /* Both of these must have the same base register. */
10445 if (reg1 != reg2)
35068b43
RK
10446 return 0;
10447
10448 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 10449 if (offset2 != offset1 + 8)
35068b43
RK
10450 return 0;
10451
10452 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
10453 instructions. */
10454 return 1;
10455}
9878760c
RK
10456\f
10457/* Return the register class of a scratch register needed to copy IN into
10458 or out of a register in CLASS in MODE. If it can be done directly,
10459 NO_REGS is returned. */
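/* For example, on TARGET_ELF a SYMBOL_REF destined for FLOAT_REGS needs a
   BASE_REGS intermediate (so BASE_REGS is returned below), while a copy
   between two general registers needs no scratch and returns NO_REGS.  */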
10460
10461enum reg_class
3c4774e0
R
10462rs6000_secondary_reload_class (enum reg_class class,
10463 enum machine_mode mode ATTRIBUTE_UNUSED,
10464 rtx in)
9878760c 10465{
5accd822 10466 int regno;
9878760c 10467
ab82a49f
AP
10468 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
10469#if TARGET_MACHO
c4ad648e 10470 && MACHOPIC_INDIRECT
ab82a49f 10471#endif
c4ad648e 10472 ))
46fad5b7
DJ
10473 {
10474 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
10475 other than BASE_REGS for TARGET_ELF. So indicate that a
10476 register from BASE_REGS is needed as an intermediate
10477 register.
f676971a 10478
46fad5b7
DJ
10479 On Darwin, pic addresses require a load from memory, which
10480 needs a base register. */
10481 if (class != BASE_REGS
c4ad648e
AM
10482 && (GET_CODE (in) == SYMBOL_REF
10483 || GET_CODE (in) == HIGH
10484 || GET_CODE (in) == LABEL_REF
10485 || GET_CODE (in) == CONST))
10486 return BASE_REGS;
46fad5b7 10487 }
e7b7998a 10488
5accd822
DE
10489 if (GET_CODE (in) == REG)
10490 {
10491 regno = REGNO (in);
10492 if (regno >= FIRST_PSEUDO_REGISTER)
10493 {
10494 regno = true_regnum (in);
10495 if (regno >= FIRST_PSEUDO_REGISTER)
10496 regno = -1;
10497 }
10498 }
10499 else if (GET_CODE (in) == SUBREG)
10500 {
10501 regno = true_regnum (in);
10502 if (regno >= FIRST_PSEUDO_REGISTER)
10503 regno = -1;
10504 }
10505 else
10506 regno = -1;
10507
9878760c
RK
10508 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
10509 into anything. */
10510 if (class == GENERAL_REGS || class == BASE_REGS
10511 || (regno >= 0 && INT_REGNO_P (regno)))
10512 return NO_REGS;
10513
10514 /* Constants, memory, and FP registers can go into FP registers. */
10515 if ((regno == -1 || FP_REGNO_P (regno))
10516 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
10517 return NO_REGS;
10518
0ac081f6
AH
 10519 /* Memory and AltiVec registers can go into AltiVec registers. */
10520 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
10521 && class == ALTIVEC_REGS)
10522 return NO_REGS;
10523
9878760c
RK
10524 /* We can copy among the CR registers. */
10525 if ((class == CR_REGS || class == CR0_REGS)
10526 && regno >= 0 && CR_REGNO_P (regno))
10527 return NO_REGS;
10528
10529 /* Otherwise, we need GENERAL_REGS. */
10530 return GENERAL_REGS;
10531}
10532\f
10533/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 10534 know this is a valid comparison.
9878760c
RK
10535
10536 SCC_P is 1 if this is for an scc. That means that %D will have been
10537 used instead of %C, so the bits will be in different places.
10538
b4ac57ab 10539 Return -1 if OP isn't a valid comparison for some reason. */
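/* For example, an EQ comparison against CR field 2 yields 4*2 + 2 == 10,
   the EQ bit of that field.  */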
9878760c
RK
10540
10541int
a2369ed3 10542ccr_bit (rtx op, int scc_p)
9878760c
RK
10543{
10544 enum rtx_code code = GET_CODE (op);
10545 enum machine_mode cc_mode;
10546 int cc_regnum;
10547 int base_bit;
9ebbca7d 10548 rtx reg;
9878760c 10549
ec8e098d 10550 if (!COMPARISON_P (op))
9878760c
RK
10551 return -1;
10552
9ebbca7d
GK
10553 reg = XEXP (op, 0);
10554
37409796 10555 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
10556
10557 cc_mode = GET_MODE (reg);
10558 cc_regnum = REGNO (reg);
10559 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 10560
39a10a29 10561 validate_condition_mode (code, cc_mode);
c5defebb 10562
b7053a3f
GK
10563 /* When generating a sCOND operation, only positive conditions are
10564 allowed. */
37409796
NS
10565 gcc_assert (!scc_p
10566 || code == EQ || code == GT || code == LT || code == UNORDERED
10567 || code == GTU || code == LTU);
f676971a 10568
9878760c
RK
10569 switch (code)
10570 {
10571 case NE:
10572 return scc_p ? base_bit + 3 : base_bit + 2;
10573 case EQ:
10574 return base_bit + 2;
1c882ea4 10575 case GT: case GTU: case UNLE:
9878760c 10576 return base_bit + 1;
1c882ea4 10577 case LT: case LTU: case UNGE:
9878760c 10578 return base_bit;
1c882ea4
GK
10579 case ORDERED: case UNORDERED:
10580 return base_bit + 3;
9878760c
RK
10581
10582 case GE: case GEU:
39a10a29 10583 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
10584 unordered position. So test that bit. For integer, this is ! LT
10585 unless this is an scc insn. */
39a10a29 10586 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
10587
10588 case LE: case LEU:
39a10a29 10589 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 10590
9878760c 10591 default:
37409796 10592 gcc_unreachable ();
9878760c
RK
10593 }
10594}
1ff7789b 10595\f
8d30c4ee 10596/* Return the GOT register. */
1ff7789b 10597
9390387d 10598rtx
a2369ed3 10599rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 10600{
a4f6c312
SS
10601 /* The second flow pass currently (June 1999) can't update
10602 regs_ever_live without disturbing other parts of the compiler, so
10603 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
10604 if (!can_create_pseudo_p ()
10605 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 10606 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 10607
8d30c4ee 10608 current_function_uses_pic_offset_table = 1;
3cb999d8 10609
1ff7789b
MM
10610 return pic_offset_table_rtx;
10611}
a7df97e6 10612\f
e2500fed
GK
10613/* Function to init struct machine_function.
10614 This will be called, via a pointer variable,
10615 from push_function_context. */
a7df97e6 10616
e2500fed 10617static struct machine_function *
863d938c 10618rs6000_init_machine_status (void)
a7df97e6 10619{
e2500fed 10620 return ggc_alloc_cleared (sizeof (machine_function));
a7df97e6 10621}
9878760c 10622\f
0ba1b2ff
AM
10623/* These macros test for integers and extract the low-order bits. */
10624#define INT_P(X) \
10625((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
10626 && GET_MODE (X) == VOIDmode)
10627
10628#define INT_LOWPART(X) \
10629 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
10630
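/* extract_MB and extract_ME below compute the MB and ME fields (mask begin
   and mask end, numbering bits from the most significant end) of an
   rlwinm-style mask.  For example, the mask 0x00000ff0 yields MB == 20 and
   ME == 27.  */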
10631int
a2369ed3 10632extract_MB (rtx op)
0ba1b2ff
AM
10633{
10634 int i;
10635 unsigned long val = INT_LOWPART (op);
10636
10637 /* If the high bit is zero, the value is the first 1 bit we find
10638 from the left. */
10639 if ((val & 0x80000000) == 0)
10640 {
37409796 10641 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10642
10643 i = 1;
10644 while (((val <<= 1) & 0x80000000) == 0)
10645 ++i;
10646 return i;
10647 }
10648
10649 /* If the high bit is set and the low bit is not, or the mask is all
10650 1's, the value is zero. */
10651 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
10652 return 0;
10653
10654 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10655 from the right. */
10656 i = 31;
10657 while (((val >>= 1) & 1) != 0)
10658 --i;
10659
10660 return i;
10661}
10662
10663int
a2369ed3 10664extract_ME (rtx op)
0ba1b2ff
AM
10665{
10666 int i;
10667 unsigned long val = INT_LOWPART (op);
10668
10669 /* If the low bit is zero, the value is the first 1 bit we find from
10670 the right. */
10671 if ((val & 1) == 0)
10672 {
37409796 10673 gcc_assert (val & 0xffffffff);
0ba1b2ff
AM
10674
10675 i = 30;
10676 while (((val >>= 1) & 1) == 0)
10677 --i;
10678
10679 return i;
10680 }
10681
10682 /* If the low bit is set and the high bit is not, or the mask is all
10683 1's, the value is 31. */
10684 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
10685 return 31;
10686
10687 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
10688 from the left. */
10689 i = 0;
10690 while (((val <<= 1) & 0x80000000) != 0)
10691 ++i;
10692
10693 return i;
10694}
10695
c4501e62
JJ
10696/* Locate some local-dynamic symbol still in use by this function
10697 so that we can print its name in some tls_ld pattern. */
10698
10699static const char *
863d938c 10700rs6000_get_some_local_dynamic_name (void)
c4501e62
JJ
10701{
10702 rtx insn;
10703
10704 if (cfun->machine->some_ld_name)
10705 return cfun->machine->some_ld_name;
10706
10707 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
10708 if (INSN_P (insn)
10709 && for_each_rtx (&PATTERN (insn),
10710 rs6000_get_some_local_dynamic_name_1, 0))
10711 return cfun->machine->some_ld_name;
10712
37409796 10713 gcc_unreachable ();
c4501e62
JJ
10714}
10715
10716/* Helper function for rs6000_get_some_local_dynamic_name. */
10717
10718static int
a2369ed3 10719rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
10720{
10721 rtx x = *px;
10722
10723 if (GET_CODE (x) == SYMBOL_REF)
10724 {
10725 const char *str = XSTR (x, 0);
10726 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
10727 {
10728 cfun->machine->some_ld_name = str;
10729 return 1;
10730 }
10731 }
10732
10733 return 0;
10734}
10735
85b776df
AM
10736/* Write out a function code label. */
10737
10738void
10739rs6000_output_function_entry (FILE *file, const char *fname)
10740{
10741 if (fname[0] != '.')
10742 {
10743 switch (DEFAULT_ABI)
10744 {
10745 default:
37409796 10746 gcc_unreachable ();
85b776df
AM
10747
10748 case ABI_AIX:
10749 if (DOT_SYMBOLS)
10750 putc ('.', file);
10751 else
10752 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
10753 break;
10754
10755 case ABI_V4:
10756 case ABI_DARWIN:
10757 break;
10758 }
10759 }
10760 if (TARGET_AIX)
10761 RS6000_OUTPUT_BASENAME (file, fname);
10762 else
10763 assemble_name (file, fname);
10764}
10765
9878760c
RK
10766/* Print an operand. Recognize special options, documented below. */
10767
38c1f2d7 10768#if TARGET_ELF
d9407988 10769#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 10770#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
ba5e43aa
MM
10771#else
10772#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 10773#define SMALL_DATA_REG 0
ba5e43aa
MM
10774#endif
10775
9878760c 10776void
a2369ed3 10777print_operand (FILE *file, rtx x, int code)
9878760c
RK
10778{
10779 int i;
a260abc9 10780 HOST_WIDE_INT val;
0ba1b2ff 10781 unsigned HOST_WIDE_INT uval;
9878760c
RK
10782
10783 switch (code)
10784 {
a8b3aeda 10785 case '.':
a85d226b
RK
10786 /* Write out an instruction after the call which may be replaced
10787 with glue code by the loader. This depends on the AIX version. */
10788 asm_fprintf (file, RS6000_CALL_GLUE);
a8b3aeda
RK
10789 return;
10790
81eace42
GK
10791 /* %a is output_address. */
10792
9854d9ed
RK
10793 case 'A':
10794 /* If X is a constant integer whose low-order 5 bits are zero,
10795 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 10796 in the AIX assembler where "sri" with a zero shift count
20e26713 10797 writes a trash instruction. */
9854d9ed 10798 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 10799 putc ('l', file);
9854d9ed 10800 else
76229ac8 10801 putc ('r', file);
9854d9ed
RK
10802 return;
10803
10804 case 'b':
e2c953b6
DE
10805 /* If constant, low-order 16 bits of constant, unsigned.
10806 Otherwise, write normally. */
10807 if (INT_P (x))
10808 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
10809 else
10810 print_operand (file, x, 0);
cad12a8d
RK
10811 return;
10812
a260abc9
DE
10813 case 'B':
10814 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10815 for 64-bit mask direction. */
9390387d 10816 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 10817 return;
a260abc9 10818
81eace42
GK
10819 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10820 output_operand. */
10821
423c1189
AH
10822 case 'c':
10823 /* X is a CR register. Print the number of the GT bit of the CR. */
10824 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10825 output_operand_lossage ("invalid %%E value");
10826 else
10827 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
10828 return;
10829
10830 case 'D':
cef6b86c 10831 /* Like 'J' but get to the GT bit only. */
37409796 10832 gcc_assert (GET_CODE (x) == REG);
423c1189 10833
cef6b86c
EB
10834 /* Bit 1 is GT bit. */
10835 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 10836
cef6b86c
EB
10837 /* Add one for shift count in rlinm for scc. */
10838 fprintf (file, "%d", i + 1);
423c1189
AH
10839 return;
10840
9854d9ed 10841 case 'E':
39a10a29 10842 /* X is a CR register. Print the number of the EQ bit of the CR. */
9854d9ed
RK
10843 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10844 output_operand_lossage ("invalid %%E value");
78fbdbf7 10845 else
39a10a29 10846 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 10847 return;
9854d9ed
RK
10848
10849 case 'f':
10850 /* X is a CR register. Print the shift count needed to move it
10851 to the high-order four bits. */
10852 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10853 output_operand_lossage ("invalid %%f value");
10854 else
9ebbca7d 10855 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10856 return;
10857
10858 case 'F':
10859 /* Similar, but print the count for the rotate in the opposite
10860 direction. */
10861 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
10862 output_operand_lossage ("invalid %%F value");
10863 else
9ebbca7d 10864 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9854d9ed
RK
10865 return;
10866
10867 case 'G':
10868 /* X is a constant integer. If it is negative, print "m",
43aa4e05 10869 otherwise print "z". This is to make an aze or ame insn. */
9854d9ed
RK
10870 if (GET_CODE (x) != CONST_INT)
10871 output_operand_lossage ("invalid %%G value");
10872 else if (INTVAL (x) >= 0)
76229ac8 10873 putc ('z', file);
9854d9ed 10874 else
76229ac8 10875 putc ('m', file);
9854d9ed 10876 return;
e2c953b6 10877
9878760c 10878 case 'h':
a4f6c312
SS
10879 /* If constant, output low-order five bits. Otherwise, write
10880 normally. */
9878760c 10881 if (INT_P (x))
5f59ecb7 10882 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9878760c
RK
10883 else
10884 print_operand (file, x, 0);
10885 return;
10886
64305719 10887 case 'H':
a4f6c312
SS
10888 /* If constant, output low-order six bits. Otherwise, write
10889 normally. */
64305719 10890 if (INT_P (x))
5f59ecb7 10891 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
64305719
DE
10892 else
10893 print_operand (file, x, 0);
10894 return;
10895
9854d9ed
RK
10896 case 'I':
10897 /* Print `i' if this is a constant, else nothing. */
9878760c 10898 if (INT_P (x))
76229ac8 10899 putc ('i', file);
9878760c
RK
10900 return;
10901
9854d9ed
RK
10902 case 'j':
10903 /* Write the bit number in CCR for jump. */
10904 i = ccr_bit (x, 0);
10905 if (i == -1)
10906 output_operand_lossage ("invalid %%j code");
9878760c 10907 else
9854d9ed 10908 fprintf (file, "%d", i);
9878760c
RK
10909 return;
10910
9854d9ed
RK
10911 case 'J':
10912 /* Similar, but add one for shift count in rlinm for scc and pass
10913 scc flag to `ccr_bit'. */
10914 i = ccr_bit (x, 1);
10915 if (i == -1)
10916 output_operand_lossage ("invalid %%J code");
10917 else
a0466a68
RK
10918 /* If we want bit 31, write a shift count of zero, not 32. */
10919 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9878760c
RK
10920 return;
10921
9854d9ed
RK
10922 case 'k':
10923 /* X must be a constant. Write the 1's complement of the
10924 constant. */
9878760c 10925 if (! INT_P (x))
9854d9ed 10926 output_operand_lossage ("invalid %%k value");
e2c953b6
DE
10927 else
10928 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9878760c
RK
10929 return;
10930
81eace42 10931 case 'K':
9ebbca7d
GK
10932 /* X must be a symbolic constant on ELF. Write an
10933 expression suitable for an 'addi' that adds in the low 16
10934 bits of the MEM. */
10935 if (GET_CODE (x) != CONST)
10936 {
10937 print_operand_address (file, x);
10938 fputs ("@l", file);
10939 }
10940 else
10941 {
10942 if (GET_CODE (XEXP (x, 0)) != PLUS
10943 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
10944 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
10945 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 10946 output_operand_lossage ("invalid %%K value");
9ebbca7d
GK
10947 print_operand_address (file, XEXP (XEXP (x, 0), 0));
10948 fputs ("@l", file);
ed8d2920
MM
10949 /* For GNU as, there must be a non-alphanumeric character
10950 between 'l' and the number. The '-' is added by
10951 print_operand() already. */
10952 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
10953 fputs ("+", file);
9ebbca7d
GK
10954 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
10955 }
81eace42
GK
10956 return;
10957
10958 /* %l is output_asm_label. */
9ebbca7d 10959
9854d9ed
RK
10960 case 'L':
10961 /* Write second word of DImode or DFmode reference. Works on register
10962 or non-indexed memory only. */
10963 if (GET_CODE (x) == REG)
fb5c67a7 10964 fputs (reg_names[REGNO (x) + 1], file);
9854d9ed
RK
10965 else if (GET_CODE (x) == MEM)
10966 {
10967 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 10968 we have already done it, we can just use an offset of word. */
9854d9ed
RK
10969 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10970 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
ed8908e7
RK
10971 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10972 UNITS_PER_WORD));
6fb5fa3c
DB
10973 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
10974 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
10975 UNITS_PER_WORD));
9854d9ed 10976 else
d7624dc0
RK
10977 output_address (XEXP (adjust_address_nv (x, SImode,
10978 UNITS_PER_WORD),
10979 0));
ed8908e7 10980
ba5e43aa 10981 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
10982 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10983 reg_names[SMALL_DATA_REG]);
9854d9ed 10984 }
9878760c 10985 return;
f676971a 10986
9878760c
RK
10987 case 'm':
10988 /* MB value for a mask operand. */
b1765bde 10989 if (! mask_operand (x, SImode))
9878760c
RK
10990 output_operand_lossage ("invalid %%m value");
10991
0ba1b2ff 10992 fprintf (file, "%d", extract_MB (x));
9878760c
RK
10993 return;
10994
10995 case 'M':
10996 /* ME value for a mask operand. */
b1765bde 10997 if (! mask_operand (x, SImode))
a260abc9 10998 output_operand_lossage ("invalid %%M value");
9878760c 10999
0ba1b2ff 11000 fprintf (file, "%d", extract_ME (x));
9878760c
RK
11001 return;
11002
81eace42
GK
11003 /* %n outputs the negative of its operand. */
11004
9878760c
RK
11005 case 'N':
11006 /* Write the number of elements in the vector times 4. */
11007 if (GET_CODE (x) != PARALLEL)
11008 output_operand_lossage ("invalid %%N value");
e2c953b6
DE
11009 else
11010 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9878760c
RK
11011 return;
11012
11013 case 'O':
11014 /* Similar, but subtract 1 first. */
11015 if (GET_CODE (x) != PARALLEL)
1427100a 11016 output_operand_lossage ("invalid %%O value");
e2c953b6
DE
11017 else
11018 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9878760c
RK
11019 return;
11020
9854d9ed
RK
11021 case 'p':
11022 /* X is a CONST_INT that is a power of two. Output the logarithm. */
11023 if (! INT_P (x)
2bfcf297 11024 || INT_LOWPART (x) < 0
9854d9ed
RK
11025 || (i = exact_log2 (INT_LOWPART (x))) < 0)
11026 output_operand_lossage ("invalid %%p value");
e2c953b6
DE
11027 else
11028 fprintf (file, "%d", i);
9854d9ed
RK
11029 return;
11030
9878760c
RK
11031 case 'P':
11032 /* The operand must be an indirect memory reference. The result
8bb418a3 11033 is the register name. */
9878760c
RK
11034 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
11035 || REGNO (XEXP (x, 0)) >= 32)
11036 output_operand_lossage ("invalid %%P value");
e2c953b6 11037 else
fb5c67a7 11038 fputs (reg_names[REGNO (XEXP (x, 0))], file);
9878760c
RK
11039 return;
11040
dfbdccdb
GK
11041 case 'q':
11042 /* This outputs the logical code corresponding to a boolean
11043 expression. The expression may have one or both operands
39a10a29 11044 negated (if one, only the first one). For condition register
c4ad648e
AM
11045 logical operations, it will also treat the negated
11046 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 11047 {
63bc1d05 11048 const char *const *t = 0;
dfbdccdb
GK
11049 const char *s;
11050 enum rtx_code code = GET_CODE (x);
11051 static const char * const tbl[3][3] = {
11052 { "and", "andc", "nor" },
11053 { "or", "orc", "nand" },
11054 { "xor", "eqv", "xor" } };
11055
11056 if (code == AND)
11057 t = tbl[0];
11058 else if (code == IOR)
11059 t = tbl[1];
11060 else if (code == XOR)
11061 t = tbl[2];
11062 else
11063 output_operand_lossage ("invalid %%q value");
11064
11065 if (GET_CODE (XEXP (x, 0)) != NOT)
11066 s = t[0];
11067 else
11068 {
11069 if (GET_CODE (XEXP (x, 1)) == NOT)
11070 s = t[2];
11071 else
11072 s = t[1];
11073 }
f676971a 11074
dfbdccdb
GK
11075 fputs (s, file);
11076 }
11077 return;
11078
2c4a9cff
DE
11079 case 'Q':
11080 if (TARGET_MFCRF)
3b6ce0af 11081 fputc (',', file);
5efb1046 11082 /* FALLTHRU */
2c4a9cff
DE
11083 else
11084 return;
11085
9854d9ed
RK
11086 case 'R':
11087 /* X is a CR register. Print the mask for `mtcrf'. */
11088 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11089 output_operand_lossage ("invalid %%R value");
11090 else
9ebbca7d 11091 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 11092 return;
9854d9ed
RK
11093
11094 case 's':
11095 /* Low 5 bits of 32 - value */
11096 if (! INT_P (x))
11097 output_operand_lossage ("invalid %%s value");
e2c953b6
DE
11098 else
11099 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 11100 return;
9854d9ed 11101
a260abc9 11102 case 'S':
0ba1b2ff 11103 /* PowerPC64 mask position. All 0's is excluded.
a260abc9
DE
11104 CONST_INT 32-bit mask is considered sign-extended so any
11105 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 11106 if (! mask64_operand (x, DImode))
a260abc9
DE
11107 output_operand_lossage ("invalid %%S value");
11108
0ba1b2ff 11109 uval = INT_LOWPART (x);
a260abc9 11110
0ba1b2ff 11111 if (uval & 1) /* Clear Left */
a260abc9 11112 {
f099d360
GK
11113#if HOST_BITS_PER_WIDE_INT > 64
11114 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11115#endif
0ba1b2ff 11116 i = 64;
a260abc9 11117 }
0ba1b2ff 11118 else /* Clear Right */
a260abc9 11119 {
0ba1b2ff 11120 uval = ~uval;
f099d360
GK
11121#if HOST_BITS_PER_WIDE_INT > 64
11122 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
11123#endif
0ba1b2ff 11124 i = 63;
a260abc9 11125 }
0ba1b2ff
AM
11126 while (uval != 0)
11127 --i, uval >>= 1;
37409796 11128 gcc_assert (i >= 0);
0ba1b2ff
AM
11129 fprintf (file, "%d", i);
11130 return;
a260abc9 11131
a3170dc6
AH
11132 case 't':
11133 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 11134 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
a3170dc6
AH
11135
11136 /* Bit 3 is OV bit. */
11137 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
11138
11139 /* If we want bit 31, write a shift count of zero, not 32. */
11140 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11141 return;
11142
cccf3bdc
DE
11143 case 'T':
11144 /* Print the symbolic name of a branch target register. */
1de43f85
DE
11145 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
11146 && REGNO (x) != CTR_REGNO))
cccf3bdc 11147 output_operand_lossage ("invalid %%T value");
1de43f85 11148 else if (REGNO (x) == LR_REGNO)
cccf3bdc
DE
11149 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
11150 else
11151 fputs ("ctr", file);
11152 return;
11153
9854d9ed 11154 case 'u':
802a0058 11155 /* High-order 16 bits of constant for use in unsigned operand. */
9854d9ed
RK
11156 if (! INT_P (x))
11157 output_operand_lossage ("invalid %%u value");
e2c953b6 11158 else
f676971a 11159 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 11160 (INT_LOWPART (x) >> 16) & 0xffff);
9878760c
RK
11161 return;
11162
802a0058
MM
11163 case 'v':
11164 /* High-order 16 bits of constant for use in signed operand. */
11165 if (! INT_P (x))
11166 output_operand_lossage ("invalid %%v value");
e2c953b6 11167 else
134c32f6
DE
11168 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
11169 (INT_LOWPART (x) >> 16) & 0xffff);
11170 return;
802a0058 11171
9854d9ed
RK
11172 case 'U':
11173 /* Print `u' if this has an auto-increment or auto-decrement. */
11174 if (GET_CODE (x) == MEM
11175 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6fb5fa3c
DB
11176 || GET_CODE (XEXP (x, 0)) == PRE_DEC
11177 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 11178 putc ('u', file);
9854d9ed 11179 return;
9878760c 11180
e0cd0770
JC
11181 case 'V':
11182 /* Print the trap code for this operand. */
11183 switch (GET_CODE (x))
11184 {
11185 case EQ:
11186 fputs ("eq", file); /* 4 */
11187 break;
11188 case NE:
11189 fputs ("ne", file); /* 24 */
11190 break;
11191 case LT:
11192 fputs ("lt", file); /* 16 */
11193 break;
11194 case LE:
11195 fputs ("le", file); /* 20 */
11196 break;
11197 case GT:
11198 fputs ("gt", file); /* 8 */
11199 break;
11200 case GE:
11201 fputs ("ge", file); /* 12 */
11202 break;
11203 case LTU:
11204 fputs ("llt", file); /* 2 */
11205 break;
11206 case LEU:
11207 fputs ("lle", file); /* 6 */
11208 break;
11209 case GTU:
11210 fputs ("lgt", file); /* 1 */
11211 break;
11212 case GEU:
11213 fputs ("lge", file); /* 5 */
11214 break;
11215 default:
37409796 11216 gcc_unreachable ();
e0cd0770
JC
11217 }
11218 break;
11219
9854d9ed
RK
11220 case 'w':
11221 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
11222 normally. */
11223 if (INT_P (x))
f676971a 11224 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 11225 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9854d9ed
RK
11226 else
11227 print_operand (file, x, 0);
9878760c
RK
11228 return;
11229
9854d9ed 11230 case 'W':
e2c953b6 11231 /* MB value for a PowerPC64 rldic operand. */
e2c953b6
DE
11232 val = (GET_CODE (x) == CONST_INT
11233 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
11234
11235 if (val < 0)
11236 i = -1;
9854d9ed 11237 else
e2c953b6
DE
11238 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
11239 if ((val <<= 1) < 0)
11240 break;
11241
11242#if HOST_BITS_PER_WIDE_INT == 32
11243 if (GET_CODE (x) == CONST_INT && i >= 0)
11244 i += 32; /* zero-extend high-part was all 0's */
11245 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
11246 {
11247 val = CONST_DOUBLE_LOW (x);
11248
37409796
NS
11249 gcc_assert (val);
11250 if (val < 0)
e2c953b6
DE
11251 --i;
11252 else
11253 for ( ; i < 64; i++)
11254 if ((val <<= 1) < 0)
11255 break;
11256 }
11257#endif
11258
11259 fprintf (file, "%d", i + 1);
9854d9ed 11260 return;
9878760c 11261
9854d9ed
RK
11262 case 'X':
11263 if (GET_CODE (x) == MEM
6fb5fa3c
DB
11264 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
11265 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
11266 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 11267 putc ('x', file);
9854d9ed 11268 return;
9878760c 11269
9854d9ed
RK
11270 case 'Y':
11271 /* Like 'L', for third word of TImode */
11272 if (GET_CODE (x) == REG)
fb5c67a7 11273 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 11274 else if (GET_CODE (x) == MEM)
9878760c 11275 {
9854d9ed
RK
11276 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11277 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11278 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6fb5fa3c
DB
11279 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11280 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 11281 else
d7624dc0 11282 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 11283 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11284 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11285 reg_names[SMALL_DATA_REG]);
9878760c
RK
11286 }
11287 return;
f676971a 11288
9878760c 11289 case 'z':
b4ac57ab
RS
11290 /* X is a SYMBOL_REF. Write out the name preceded by a
11291 period and without any trailing data in brackets. Used for function
4d30c363
MM
11292 names. If we are configured for System V (or the embedded ABI) on
11293 the PowerPC, do not emit the period, since those systems do not use
11294 TOCs and the like. */
37409796 11295 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 11296
c4ad648e
AM
11297 /* Mark the decl as referenced so that cgraph will output the
11298 function. */
9bf6462a 11299 if (SYMBOL_REF_DECL (x))
c4ad648e 11300 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 11301
85b776df 11302 /* For macho, check to see if we need a stub. */
f9da97f0
AP
11303 if (TARGET_MACHO)
11304 {
11305 const char *name = XSTR (x, 0);
a031e781 11306#if TARGET_MACHO
3b48085e 11307 if (MACHOPIC_INDIRECT
11abc112
MM
11308 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
11309 name = machopic_indirection_name (x, /*stub_p=*/true);
f9da97f0
AP
11310#endif
11311 assemble_name (file, name);
11312 }
85b776df 11313 else if (!DOT_SYMBOLS)
9739c90c 11314 assemble_name (file, XSTR (x, 0));
85b776df
AM
11315 else
11316 rs6000_output_function_entry (file, XSTR (x, 0));
9878760c
RK
11317 return;
11318
9854d9ed
RK
11319 case 'Z':
11320 /* Like 'L', for last word of TImode. */
11321 if (GET_CODE (x) == REG)
fb5c67a7 11322 fputs (reg_names[REGNO (x) + 3], file);
9854d9ed
RK
11323 else if (GET_CODE (x) == MEM)
11324 {
11325 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11326 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 11327 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6fb5fa3c
DB
11328 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11329 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 11330 else
d7624dc0 11331 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 11332 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11333 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11334 reg_names[SMALL_DATA_REG]);
9854d9ed 11335 }
5c23c401 11336 return;
0ac081f6 11337
a3170dc6 11338 /* Print AltiVec or SPE memory operand. */
0ac081f6
AH
11339 case 'y':
11340 {
11341 rtx tmp;
11342
37409796 11343 gcc_assert (GET_CODE (x) == MEM);
0ac081f6
AH
11344
11345 tmp = XEXP (x, 0);
11346
90d3ff1c 11347 /* Ugly hack because %y is overloaded. */
8ef65e3d 11348 if ((TARGET_SPE || TARGET_E500_DOUBLE)
17caeff2
JM
11349 && (GET_MODE_SIZE (GET_MODE (x)) == 8
11350 || GET_MODE (x) == TFmode
11351 || GET_MODE (x) == TImode))
a3170dc6
AH
11352 {
11353 /* Handle [reg]. */
11354 if (GET_CODE (tmp) == REG)
11355 {
11356 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
11357 break;
11358 }
11359 /* Handle [reg+UIMM]. */
11360 else if (GET_CODE (tmp) == PLUS &&
11361 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
11362 {
11363 int x;
11364
37409796 11365 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
a3170dc6
AH
11366
11367 x = INTVAL (XEXP (tmp, 1));
11368 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
11369 break;
11370 }
11371
11372 /* Fall through. Must be [reg+reg]. */
11373 }
850e8d3d
DN
11374 if (TARGET_ALTIVEC
11375 && GET_CODE (tmp) == AND
11376 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
11377 && INTVAL (XEXP (tmp, 1)) == -16)
11378 tmp = XEXP (tmp, 0);
0ac081f6 11379 if (GET_CODE (tmp) == REG)
c62f2db5 11380 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 11381 else
0ac081f6 11382 {
37409796 11383 gcc_assert (GET_CODE (tmp) == PLUS
9024f4b8
AM
11384 && REG_P (XEXP (tmp, 0))
11385 && REG_P (XEXP (tmp, 1)));
bb8df8a6 11386
0ac081f6
AH
11387 if (REGNO (XEXP (tmp, 0)) == 0)
11388 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
11389 reg_names[ REGNO (XEXP (tmp, 0)) ]);
11390 else
11391 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
11392 reg_names[ REGNO (XEXP (tmp, 1)) ]);
11393 }
0ac081f6
AH
11394 break;
11395 }
f676971a 11396
9878760c
RK
11397 case 0:
11398 if (GET_CODE (x) == REG)
11399 fprintf (file, "%s", reg_names[REGNO (x)]);
11400 else if (GET_CODE (x) == MEM)
11401 {
11402 /* We need to handle PRE_INC and PRE_DEC here, since we need to
11403 know the width from the mode. */
11404 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
79ba6d34
MM
11405 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
11406 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 11407 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
79ba6d34
MM
11408 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
11409 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6fb5fa3c
DB
11410 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11411 output_address (XEXP (XEXP (x, 0), 1));
9878760c 11412 else
a54d04b7 11413 output_address (XEXP (x, 0));
9878760c
RK
11414 }
11415 else
a54d04b7 11416 output_addr_const (file, x);
a85d226b 11417 return;
9878760c 11418
c4501e62
JJ
11419 case '&':
11420 assemble_name (file, rs6000_get_some_local_dynamic_name ());
11421 return;
11422
9878760c
RK
11423 default:
11424 output_operand_lossage ("invalid %%xn code");
11425 }
11426}
11427\f
11428/* Print the address of an operand. */
11429
11430void
a2369ed3 11431print_operand_address (FILE *file, rtx x)
9878760c
RK
11432{
11433 if (GET_CODE (x) == REG)
4697a36c 11434 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9ebbca7d
GK
11435 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
11436 || GET_CODE (x) == LABEL_REF)
9878760c
RK
11437 {
11438 output_addr_const (file, x);
ba5e43aa 11439 if (small_data_operand (x, GET_MODE (x)))
8fbd2dc7
MM
11440 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11441 reg_names[SMALL_DATA_REG]);
37409796
NS
11442 else
11443 gcc_assert (!TARGET_TOC);
9878760c
RK
11444 }
11445 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
11446 {
9024f4b8 11447 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 11448 if (REGNO (XEXP (x, 0)) == 0)
4697a36c
MM
11449 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
11450 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 11451 else
4697a36c
MM
11452 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
11453 reg_names[ REGNO (XEXP (x, 1)) ]);
9878760c
RK
11454 }
11455 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
4a0a75dd
KG
11456 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
11457 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
3cb999d8
DE
11458#if TARGET_ELF
11459 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11460 && CONSTANT_P (XEXP (x, 1)))
4697a36c
MM
11461 {
11462 output_addr_const (file, XEXP (x, 1));
11463 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11464 }
c859cda6
DJ
11465#endif
11466#if TARGET_MACHO
11467 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 11468 && CONSTANT_P (XEXP (x, 1)))
c859cda6
DJ
11469 {
11470 fprintf (file, "lo16(");
11471 output_addr_const (file, XEXP (x, 1));
11472 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
11473 }
3cb999d8 11474#endif
4d588c14 11475 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 11476 {
2bfcf297 11477 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 11478 {
2bfcf297
DB
11479 rtx contains_minus = XEXP (x, 1);
11480 rtx minus, symref;
11481 const char *name;
f676971a 11482
9ebbca7d 11483 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 11484 turn it into (sym) for output_addr_const. */
9ebbca7d
GK
11485 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
11486 contains_minus = XEXP (contains_minus, 0);
11487
2bfcf297
DB
11488 minus = XEXP (contains_minus, 0);
11489 symref = XEXP (minus, 0);
11490 XEXP (contains_minus, 0) = symref;
11491 if (TARGET_ELF)
11492 {
11493 char *newname;
11494
11495 name = XSTR (symref, 0);
11496 newname = alloca (strlen (name) + sizeof ("@toc"));
11497 strcpy (newname, name);
11498 strcat (newname, "@toc");
11499 XSTR (symref, 0) = newname;
11500 }
11501 output_addr_const (file, XEXP (x, 1));
11502 if (TARGET_ELF)
11503 XSTR (symref, 0) = name;
9ebbca7d
GK
11504 XEXP (contains_minus, 0) = minus;
11505 }
11506 else
11507 output_addr_const (file, XEXP (x, 1));
11508
11509 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
11510 }
9878760c 11511 else
37409796 11512 gcc_unreachable ();
9878760c
RK
11513}
11514\f
88cad84b 11515/* Target hook for assembling integer objects. The PowerPC version has
301d03af
RS
11516 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
11517 is defined. It also needs to handle DI-mode objects on 64-bit
11518 targets. */
11519
11520static bool
a2369ed3 11521rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 11522{
f4f4921e 11523#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 11524 /* Special handling for SI values. */
84dcde01 11525 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 11526 {
301d03af 11527 static int recurse = 0;
f676971a 11528
301d03af
RS
11529 /* For -mrelocatable, we mark all addresses that need to be fixed up
11530 in the .fixup section. */
11531 if (TARGET_RELOCATABLE
d6b5193b
RS
11532 && in_section != toc_section
11533 && in_section != text_section
4325ca90 11534 && !unlikely_text_section_p (in_section)
301d03af
RS
11535 && !recurse
11536 && GET_CODE (x) != CONST_INT
11537 && GET_CODE (x) != CONST_DOUBLE
11538 && CONSTANT_P (x))
11539 {
11540 char buf[256];
11541
11542 recurse = 1;
11543 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
11544 fixuplabelno++;
11545 ASM_OUTPUT_LABEL (asm_out_file, buf);
11546 fprintf (asm_out_file, "\t.long\t(");
11547 output_addr_const (asm_out_file, x);
11548 fprintf (asm_out_file, ")@fixup\n");
11549 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
11550 ASM_OUTPUT_ALIGN (asm_out_file, 2);
11551 fprintf (asm_out_file, "\t.long\t");
11552 assemble_name (asm_out_file, buf);
11553 fprintf (asm_out_file, "\n\t.previous\n");
11554 recurse = 0;
11555 return true;
11556 }
11557 /* Remove initial .'s to turn a -mcall-aixdesc function
11558 address into the address of the descriptor, not the function
11559 itself. */
11560 else if (GET_CODE (x) == SYMBOL_REF
11561 && XSTR (x, 0)[0] == '.'
11562 && DEFAULT_ABI == ABI_AIX)
11563 {
11564 const char *name = XSTR (x, 0);
11565 while (*name == '.')
11566 name++;
11567
11568 fprintf (asm_out_file, "\t.long\t%s\n", name);
11569 return true;
11570 }
11571 }
f4f4921e 11572#endif /* RELOCATABLE_NEEDS_FIXUP */
301d03af
RS
11573 return default_assemble_integer (x, size, aligned_p);
11574}
93638d7a
AM
11575
11576#ifdef HAVE_GAS_HIDDEN
11577/* Emit an assembler directive to set symbol visibility for DECL to
11578 VISIBILITY_TYPE. */
11579
5add3202 11580static void
a2369ed3 11581rs6000_assemble_visibility (tree decl, int vis)
93638d7a 11582{
93638d7a
AM
11583 /* Functions need to have their entry point symbol visibility set as
11584 well as their descriptor symbol visibility. */
85b776df
AM
11585 if (DEFAULT_ABI == ABI_AIX
11586 && DOT_SYMBOLS
11587 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 11588 {
25fdb4dc 11589 static const char * const visibility_types[] = {
c4ad648e 11590 NULL, "internal", "hidden", "protected"
25fdb4dc
RH
11591 };
11592
11593 const char *name, *type;
93638d7a
AM
11594
11595 name = ((* targetm.strip_name_encoding)
11596 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 11597 type = visibility_types[vis];
93638d7a 11598
25fdb4dc
RH
11599 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
11600 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 11601 }
25fdb4dc
RH
11602 else
11603 default_assemble_visibility (decl, vis);
93638d7a
AM
11604}
11605#endif
301d03af 11606\f
39a10a29 11607enum rtx_code
a2369ed3 11608rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
39a10a29
GK
11609{
11610 /* Reversal of FP compares takes care -- an ordered compare
11611 becomes an unordered compare and vice versa. */
f676971a 11612 if (mode == CCFPmode
bc9ec0e0
GK
11613 && (!flag_finite_math_only
11614 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
11615 || code == UNEQ || code == LTGT))
bab6226b 11616 return reverse_condition_maybe_unordered (code);
39a10a29 11617 else
bab6226b 11618 return reverse_condition (code);
39a10a29
GK
11619}
11620
39a10a29
GK
11621/* Generate a compare for CODE. Return a brand-new rtx that
11622 represents the result of the compare. */
a4f6c312 11623
39a10a29 11624static rtx
a2369ed3 11625rs6000_generate_compare (enum rtx_code code)
39a10a29
GK
11626{
11627 enum machine_mode comp_mode;
11628 rtx compare_result;
11629
11630 if (rs6000_compare_fp_p)
11631 comp_mode = CCFPmode;
11632 else if (code == GTU || code == LTU
c4ad648e 11633 || code == GEU || code == LEU)
39a10a29 11634 comp_mode = CCUNSmode;
60934f9c
NS
11635 else if ((code == EQ || code == NE)
11636 && GET_CODE (rs6000_compare_op0) == SUBREG
11637 && GET_CODE (rs6000_compare_op1) == SUBREG
11638 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
11639 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
11640 /* These are unsigned values, perhaps there will be a later
11641 ordering compare that can be shared with this one.
11642 Unfortunately we cannot detect the signedness of the operands
11643 for non-subregs. */
11644 comp_mode = CCUNSmode;
39a10a29
GK
11645 else
11646 comp_mode = CCmode;
11647
11648 /* First, the compare. */
11649 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 11650
cef6b86c 11651 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 11652 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 11653 && rs6000_compare_fp_p)
a3170dc6 11654 {
64022b5d 11655 rtx cmp, or_result, compare_result2;
4d4cbc0e
AH
11656 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
11657
11658 if (op_mode == VOIDmode)
11659 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 11660
cef6b86c
EB
11661 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
11662 This explains the following mess. */
423c1189 11663
a3170dc6
AH
11664 switch (code)
11665 {
423c1189 11666 case EQ: case UNEQ: case NE: case LTGT:
37409796
NS
11667 switch (op_mode)
11668 {
11669 case SFmode:
11670 cmp = flag_unsafe_math_optimizations
11671 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
11672 rs6000_compare_op1)
11673 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
11674 rs6000_compare_op1);
11675 break;
11676
11677 case DFmode:
11678 cmp = flag_unsafe_math_optimizations
11679 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
11680 rs6000_compare_op1)
11681 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
11682 rs6000_compare_op1);
11683 break;
11684
17caeff2
JM
11685 case TFmode:
11686 cmp = flag_unsafe_math_optimizations
11687 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
11688 rs6000_compare_op1)
11689 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
11690 rs6000_compare_op1);
11691 break;
11692
37409796
NS
11693 default:
11694 gcc_unreachable ();
11695 }
a3170dc6 11696 break;
bb8df8a6 11697
423c1189 11698 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
37409796
NS
11699 switch (op_mode)
11700 {
11701 case SFmode:
11702 cmp = flag_unsafe_math_optimizations
11703 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
11704 rs6000_compare_op1)
11705 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
11706 rs6000_compare_op1);
11707 break;
bb8df8a6 11708
37409796
NS
11709 case DFmode:
11710 cmp = flag_unsafe_math_optimizations
11711 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
11712 rs6000_compare_op1)
11713 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
11714 rs6000_compare_op1);
11715 break;
11716
17caeff2
JM
11717 case TFmode:
11718 cmp = flag_unsafe_math_optimizations
11719 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
11720 rs6000_compare_op1)
11721 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
11722 rs6000_compare_op1);
11723 break;
11724
37409796
NS
11725 default:
11726 gcc_unreachable ();
11727 }
a3170dc6 11728 break;
bb8df8a6 11729
423c1189 11730 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
37409796
NS
11731 switch (op_mode)
11732 {
11733 case SFmode:
11734 cmp = flag_unsafe_math_optimizations
11735 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
11736 rs6000_compare_op1)
11737 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
11738 rs6000_compare_op1);
11739 break;
bb8df8a6 11740
37409796
NS
11741 case DFmode:
11742 cmp = flag_unsafe_math_optimizations
11743 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
11744 rs6000_compare_op1)
11745 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
11746 rs6000_compare_op1);
11747 break;
11748
17caeff2
JM
11749 case TFmode:
11750 cmp = flag_unsafe_math_optimizations
11751 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
11752 rs6000_compare_op1)
11753 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
11754 rs6000_compare_op1);
11755 break;
11756
37409796
NS
11757 default:
11758 gcc_unreachable ();
11759 }
a3170dc6 11760 break;
4d4cbc0e 11761 default:
37409796 11762 gcc_unreachable ();
a3170dc6
AH
11763 }
11764
11765 /* Synthesize LE and GE from LT/GT || EQ. */
11766 if (code == LE || code == GE || code == LEU || code == GEU)
11767 {
a3170dc6
AH
11768 emit_insn (cmp);
11769
11770 switch (code)
11771 {
11772 case LE: code = LT; break;
11773 case GE: code = GT; break;
11774 case LEU: code = LT; break;
11775 case GEU: code = GT; break;
37409796 11776 default: gcc_unreachable ();
a3170dc6
AH
11777 }
11778
a3170dc6
AH
11779 compare_result2 = gen_reg_rtx (CCFPmode);
11780
11781 /* Do the EQ. */
37409796
NS
11782 switch (op_mode)
11783 {
11784 case SFmode:
11785 cmp = flag_unsafe_math_optimizations
11786 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
11787 rs6000_compare_op1)
11788 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
11789 rs6000_compare_op1);
11790 break;
11791
11792 case DFmode:
11793 cmp = flag_unsafe_math_optimizations
11794 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
11795 rs6000_compare_op1)
11796 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
11797 rs6000_compare_op1);
11798 break;
11799
17caeff2
JM
11800 case TFmode:
11801 cmp = flag_unsafe_math_optimizations
11802 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
11803 rs6000_compare_op1)
11804 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
11805 rs6000_compare_op1);
11806 break;
11807
37409796
NS
11808 default:
11809 gcc_unreachable ();
11810 }
a3170dc6
AH
11811 emit_insn (cmp);
11812
a3170dc6 11813 /* OR them together. */
64022b5d
AH
11814 or_result = gen_reg_rtx (CCFPmode);
11815 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
11816 compare_result2);
a3170dc6
AH
11817 compare_result = or_result;
11818 code = EQ;
11819 }
11820 else
11821 {
a3170dc6 11822 if (code == NE || code == LTGT)
a3170dc6 11823 code = NE;
423c1189
AH
11824 else
11825 code = EQ;
a3170dc6
AH
11826 }
11827
11828 emit_insn (cmp);
11829 }
11830 else
de17c25f
DE
11831 {
11832 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
11833 CLOBBERs to match cmptf_internal2 pattern. */
11834 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
11835 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 11836 && !TARGET_IEEEQUAD
de17c25f
DE
11837 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
11838 emit_insn (gen_rtx_PARALLEL (VOIDmode,
11839 gen_rtvec (9,
11840 gen_rtx_SET (VOIDmode,
11841 compare_result,
11842 gen_rtx_COMPARE (comp_mode,
11843 rs6000_compare_op0,
11844 rs6000_compare_op1)),
11845 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11846 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11847 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11848 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11849 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11850 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11851 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
11852 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
3aebbe5f
JJ
11853 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
11854 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
11855 {
11856 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
11857 comp_mode = CCEQmode;
11858 compare_result = gen_reg_rtx (CCEQmode);
11859 if (TARGET_64BIT)
11860 emit_insn (gen_stack_protect_testdi (compare_result,
11861 rs6000_compare_op0, op1));
11862 else
11863 emit_insn (gen_stack_protect_testsi (compare_result,
11864 rs6000_compare_op0, op1));
11865 }
de17c25f
DE
11866 else
11867 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
11868 gen_rtx_COMPARE (comp_mode,
11869 rs6000_compare_op0,
11870 rs6000_compare_op1)));
11871 }
f676971a 11872
ca5adc63 11873 /* Some kinds of FP comparisons need an OR operation;
e7108df9 11874 under flag_finite_math_only we don't bother. */
39a10a29 11875 if (rs6000_compare_fp_p
e7108df9 11876 && !flag_finite_math_only
8ef65e3d 11877 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
39a10a29
GK
11878 && (code == LE || code == GE
11879 || code == UNEQ || code == LTGT
11880 || code == UNGT || code == UNLT))
11881 {
11882 enum rtx_code or1, or2;
11883 rtx or1_rtx, or2_rtx, compare2_rtx;
11884 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 11885
39a10a29
GK
11886 switch (code)
11887 {
11888 case LE: or1 = LT; or2 = EQ; break;
11889 case GE: or1 = GT; or2 = EQ; break;
11890 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
11891 case LTGT: or1 = LT; or2 = GT; break;
11892 case UNGT: or1 = UNORDERED; or2 = GT; break;
11893 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 11894 default: gcc_unreachable ();
39a10a29
GK
11895 }
11896 validate_condition_mode (or1, comp_mode);
11897 validate_condition_mode (or2, comp_mode);
1c563bed
KH
11898 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
11899 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
39a10a29
GK
11900 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
11901 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
11902 const_true_rtx);
11903 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
11904
11905 compare_result = or_result;
11906 code = EQ;
11907 }
11908
11909 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 11910
1c563bed 11911 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
39a10a29
GK
11912}
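/* Worked example (illustrative, not part of rs6000.c): a floating-point
   "a <= b" compiled without -ffinite-math-only reaches the OR block above
   with code == LE, so or1 = LT and or2 = EQ.  The emitted RTL sets a
   CCEQmode register from (compare (ior (lt cc 0) (eq cc 0)) true) -- in
   effect a cror of the LT and EQ bits -- and the function returns a plain
   EQ test of that CCEQ result.  */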
11913
11914
11915/* Emit the RTL for an sCOND pattern. */
11916
11917void
a2369ed3 11918rs6000_emit_sCOND (enum rtx_code code, rtx result)
39a10a29
GK
11919{
11920 rtx condition_rtx;
11921 enum machine_mode op_mode;
b7053a3f 11922 enum rtx_code cond_code;
39a10a29
GK
11923
11924 condition_rtx = rs6000_generate_compare (code);
b7053a3f
GK
11925 cond_code = GET_CODE (condition_rtx);
11926
8ef65e3d 11927 if (rs6000_compare_fp_p
423c1189
AH
11928 && !TARGET_FPRS && TARGET_HARD_FLOAT)
11929 {
11930 rtx t;
11931
11932 PUT_MODE (condition_rtx, SImode);
11933 t = XEXP (condition_rtx, 0);
11934
37409796 11935 gcc_assert (cond_code == NE || cond_code == EQ);
423c1189
AH
11936
11937 if (cond_code == NE)
64022b5d 11938 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 11939
64022b5d 11940 emit_insn (gen_move_from_CR_gt_bit (result, t));
423c1189
AH
11941 return;
11942 }
11943
b7053a3f
GK
11944 if (cond_code == NE
11945 || cond_code == GE || cond_code == LE
11946 || cond_code == GEU || cond_code == LEU
11947 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
11948 {
11949 rtx not_result = gen_reg_rtx (CCEQmode);
11950 rtx not_op, rev_cond_rtx;
11951 enum machine_mode cc_mode;
f676971a 11952
b7053a3f
GK
11953 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
11954
1c563bed 11955 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 11956 SImode, XEXP (condition_rtx, 0), const0_rtx);
b7053a3f
GK
11957 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
11958 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
11959 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
11960 }
39a10a29
GK
11961
11962 op_mode = GET_MODE (rs6000_compare_op0);
11963 if (op_mode == VOIDmode)
11964 op_mode = GET_MODE (rs6000_compare_op1);
11965
11966 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
11967 {
11968 PUT_MODE (condition_rtx, DImode);
11969 convert_move (result, condition_rtx, 0);
11970 }
11971 else
11972 {
11973 PUT_MODE (condition_rtx, SImode);
11974 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
11975 }
11976}
11977
39a10a29
GK
11978/* Emit a branch of kind CODE to location LOC. */
11979
11980void
a2369ed3 11981rs6000_emit_cbranch (enum rtx_code code, rtx loc)
39a10a29
GK
11982{
11983 rtx condition_rtx, loc_ref;
11984
11985 condition_rtx = rs6000_generate_compare (code);
11986 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
11987 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
11988 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
11989 loc_ref, pc_rtx)));
11990}
11991
12a4e8c5
GK
11992/* Return the string to output a conditional branch to LABEL, which is
11993 the operand number of the label, or -1 if the branch is really a
f676971a 11994 conditional return.
12a4e8c5
GK
11995
11996 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11997 condition code register and its mode specifies what kind of
11998 comparison we made.
11999
a0ab749a 12000 REVERSED is nonzero if we should reverse the sense of the comparison.
12a4e8c5
GK
12001
12002 INSN is the insn. */
12003
12004char *
a2369ed3 12005output_cbranch (rtx op, const char *label, int reversed, rtx insn)
12a4e8c5
GK
12006{
12007 static char string[64];
12008 enum rtx_code code = GET_CODE (op);
12009 rtx cc_reg = XEXP (op, 0);
12010 enum machine_mode mode = GET_MODE (cc_reg);
12011 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 12012 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
12a4e8c5
GK
12013 int really_reversed = reversed ^ need_longbranch;
12014 char *s = string;
12015 const char *ccode;
12016 const char *pred;
12017 rtx note;
12018
39a10a29
GK
12019 validate_condition_mode (code, mode);
12020
12021 /* Work out which way this really branches. We could use
12022 reverse_condition_maybe_unordered here always but this
12023 makes the resulting assembler clearer. */
12a4e8c5 12024 if (really_reversed)
de40e1df
DJ
12025 {
 12026	      /* Reversing an FP compare requires care -- an ordered compare
 12027	         becomes an unordered compare and vice versa.  */
12028 if (mode == CCFPmode)
12029 code = reverse_condition_maybe_unordered (code);
12030 else
12031 code = reverse_condition (code);
12032 }
12a4e8c5 12033
8ef65e3d 12034 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
a3170dc6
AH
12035 {
12036 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
12037 to the GT bit. */
37409796
NS
12038 switch (code)
12039 {
12040 case EQ:
12041 /* Opposite of GT. */
12042 code = GT;
12043 break;
12044
12045 case NE:
12046 code = UNLE;
12047 break;
12048
12049 default:
12050 gcc_unreachable ();
12051 }
a3170dc6
AH
12052 }
12053
39a10a29 12054 switch (code)
12a4e8c5
GK
12055 {
12056 /* Not all of these are actually distinct opcodes, but
12057 we distinguish them for clarity of the resulting assembler. */
50a0b056
GK
12058 case NE: case LTGT:
12059 ccode = "ne"; break;
12060 case EQ: case UNEQ:
12061 ccode = "eq"; break;
f676971a 12062 case GE: case GEU:
50a0b056 12063 ccode = "ge"; break;
f676971a 12064 case GT: case GTU: case UNGT:
50a0b056 12065 ccode = "gt"; break;
f676971a 12066 case LE: case LEU:
50a0b056 12067 ccode = "le"; break;
f676971a 12068 case LT: case LTU: case UNLT:
50a0b056 12069 ccode = "lt"; break;
12a4e8c5
GK
12070 case UNORDERED: ccode = "un"; break;
12071 case ORDERED: ccode = "nu"; break;
12072 case UNGE: ccode = "nl"; break;
12073 case UNLE: ccode = "ng"; break;
12074 default:
37409796 12075 gcc_unreachable ();
12a4e8c5 12076 }
f676971a
EC
12077
12078 /* Maybe we have a guess as to how likely the branch is.
94a54f47 12079 The old mnemonics don't have a way to specify this information. */
f4857b9b 12080 pred = "";
12a4e8c5
GK
12081 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
12082 if (note != NULL_RTX)
12083 {
12084 /* PROB is the difference from 50%. */
12085 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
f4857b9b
AM
12086
 12087	     /* Only hint for highly probable/improbable branches on newer
 12088	        CPUs, since static prediction overrides the processor's dynamic
 12089	        prediction.  For older CPUs we may as well always hint, but
 12090	        assume not-taken for branches that are very close to 50%, as a
 12091	        mispredicted taken branch is more expensive than a
f676971a 12092	        mispredicted not-taken branch.  */
ec507f2d 12093 if (rs6000_always_hint
2c9e13f3
JH
12094 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
12095 && br_prob_note_reliable_p (note)))
f4857b9b
AM
12096 {
12097 if (abs (prob) > REG_BR_PROB_BASE / 20
12098 && ((prob > 0) ^ need_longbranch))
c4ad648e 12099 pred = "+";
f4857b9b
AM
12100 else
12101 pred = "-";
12102 }
12a4e8c5 12103 }
12a4e8c5
GK
12104
12105 if (label == NULL)
94a54f47 12106 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 12107 else
94a54f47 12108 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 12109
37c67319 12110 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 12111 Assume they'd only be the first character.... */
37c67319
GK
12112 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
12113 *s++ = '%';
94a54f47 12114 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
12a4e8c5
GK
12115
12116 if (label != NULL)
12117 {
12118 /* If the branch distance was too far, we may have to use an
12119 unconditional branch to go the distance. */
12120 if (need_longbranch)
44518ddd 12121 s += sprintf (s, ",$+8\n\tb %s", label);
12a4e8c5
GK
12122 else
12123 s += sprintf (s, ",%s", label);
12124 }
12125
12126 return string;
12127}
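/* Worked example of the branch-hint thresholds above (illustrative,
   assuming REG_BR_PROB_BASE == 10000): PROB is the note value minus 5000,
   so "abs (prob) > REG_BR_PROB_BASE / 100 * 48" only passes for branches
   whose predicted probability is above 98% or below 2% (unless
   rs6000_always_hint).  Within that, "abs (prob) > REG_BR_PROB_BASE / 20"
   picks "+" or "-", so e.g. a short branch predicted taken 99% of the
   time is printed with a "+" suffix such as "bne+".  */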
50a0b056 12128
64022b5d 12129/* Return the string to flip the GT bit on a CR. */
423c1189 12130char *
64022b5d 12131output_e500_flip_gt_bit (rtx dst, rtx src)
423c1189
AH
12132{
12133 static char string[64];
12134 int a, b;
12135
37409796
NS
12136 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
12137 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 12138
64022b5d
AH
12139 /* GT bit. */
12140 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
12141 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
423c1189
AH
12142
12143 sprintf (string, "crnot %d,%d", a, b);
12144 return string;
12145}
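/* Illustrative example (not part of rs6000.c): with DST in cr2 and SRC in
   cr3 the GT bit numbers are 4*2 + 1 = 9 and 4*3 + 1 = 13, so the routine
   returns "crnot 9,13".  */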
12146
21213b4c
DP
 12147/* Return the insn index for the vector compare instruction for the given
 12148   CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid
 12149   insn is available.  */
12150
12151static int
94ff898d 12152get_vec_cmp_insn (enum rtx_code code,
21213b4c
DP
12153 enum machine_mode dest_mode,
12154 enum machine_mode op_mode)
12155{
12156 if (!TARGET_ALTIVEC)
12157 return INSN_NOT_AVAILABLE;
12158
12159 switch (code)
12160 {
12161 case EQ:
12162 if (dest_mode == V16QImode && op_mode == V16QImode)
12163 return UNSPEC_VCMPEQUB;
12164 if (dest_mode == V8HImode && op_mode == V8HImode)
12165 return UNSPEC_VCMPEQUH;
12166 if (dest_mode == V4SImode && op_mode == V4SImode)
12167 return UNSPEC_VCMPEQUW;
12168 if (dest_mode == V4SImode && op_mode == V4SFmode)
12169 return UNSPEC_VCMPEQFP;
12170 break;
12171 case GE:
12172 if (dest_mode == V4SImode && op_mode == V4SFmode)
12173 return UNSPEC_VCMPGEFP;
12174 case GT:
12175 if (dest_mode == V16QImode && op_mode == V16QImode)
12176 return UNSPEC_VCMPGTSB;
12177 if (dest_mode == V8HImode && op_mode == V8HImode)
12178 return UNSPEC_VCMPGTSH;
12179 if (dest_mode == V4SImode && op_mode == V4SImode)
12180 return UNSPEC_VCMPGTSW;
12181 if (dest_mode == V4SImode && op_mode == V4SFmode)
12182 return UNSPEC_VCMPGTFP;
12183 break;
12184 case GTU:
12185 if (dest_mode == V16QImode && op_mode == V16QImode)
12186 return UNSPEC_VCMPGTUB;
12187 if (dest_mode == V8HImode && op_mode == V8HImode)
12188 return UNSPEC_VCMPGTUH;
12189 if (dest_mode == V4SImode && op_mode == V4SImode)
12190 return UNSPEC_VCMPGTUW;
12191 break;
12192 default:
12193 break;
12194 }
12195 return INSN_NOT_AVAILABLE;
12196}
12197
12198/* Emit vector compare for operands OP0 and OP1 using code RCODE.
 12199   DMODE is the expected destination mode.  This is a recursive function.  */
12200
12201static rtx
12202rs6000_emit_vector_compare (enum rtx_code rcode,
12203 rtx op0, rtx op1,
12204 enum machine_mode dmode)
12205{
12206 int vec_cmp_insn;
12207 rtx mask;
12208 enum machine_mode dest_mode;
12209 enum machine_mode op_mode = GET_MODE (op1);
12210
37409796
NS
12211 gcc_assert (TARGET_ALTIVEC);
12212 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
21213b4c
DP
12213
 12214  /* Floating-point vector compare instructions use a V4SImode destination.
 12215     Move the result to the appropriate mode later.  */
12216 if (dmode == V4SFmode)
12217 dest_mode = V4SImode;
12218 else
12219 dest_mode = dmode;
12220
12221 mask = gen_reg_rtx (dest_mode);
12222 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
12223
12224 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
12225 {
12226 bool swap_operands = false;
12227 bool try_again = false;
12228 switch (rcode)
12229 {
12230 case LT:
12231 rcode = GT;
12232 swap_operands = true;
12233 try_again = true;
12234 break;
12235 case LTU:
12236 rcode = GTU;
12237 swap_operands = true;
12238 try_again = true;
12239 break;
12240 case NE:
370df7db
JC
12241 case UNLE:
12242 case UNLT:
12243 case UNGE:
12244 case UNGT:
12245 /* Invert condition and try again.
12246 e.g., A != B becomes ~(A==B). */
21213b4c 12247 {
370df7db 12248 enum rtx_code rev_code;
21213b4c 12249 enum insn_code nor_code;
d1123cde 12250 rtx eq_rtx;
370df7db
JC
12251
12252 rev_code = reverse_condition_maybe_unordered (rcode);
d1123cde
MS
12253 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
12254 dest_mode);
94ff898d 12255
21213b4c 12256 nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
37409796 12257 gcc_assert (nor_code != CODE_FOR_nothing);
21213b4c
DP
12258 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
12259
12260 if (dmode != dest_mode)
12261 {
12262 rtx temp = gen_reg_rtx (dest_mode);
12263 convert_move (temp, mask, 0);
12264 return temp;
12265 }
12266 return mask;
12267 }
12268 break;
12269 case GE:
12270 case GEU:
12271 case LE:
12272 case LEU:
12273 /* Try GT/GTU/LT/LTU OR EQ */
12274 {
12275 rtx c_rtx, eq_rtx;
12276 enum insn_code ior_code;
12277 enum rtx_code new_code;
12278
37409796
NS
12279 switch (rcode)
12280 {
12281 case GE:
12282 new_code = GT;
12283 break;
12284
12285 case GEU:
12286 new_code = GTU;
12287 break;
12288
12289 case LE:
12290 new_code = LT;
12291 break;
12292
12293 case LEU:
12294 new_code = LTU;
12295 break;
12296
12297 default:
12298 gcc_unreachable ();
12299 }
21213b4c
DP
12300
12301 c_rtx = rs6000_emit_vector_compare (new_code,
12302 op0, op1, dest_mode);
12303 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
12304 dest_mode);
12305
12306 ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
37409796 12307 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
12308 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
12309 if (dmode != dest_mode)
12310 {
12311 rtx temp = gen_reg_rtx (dest_mode);
12312 convert_move (temp, mask, 0);
12313 return temp;
12314 }
12315 return mask;
12316 }
12317 break;
12318 default:
37409796 12319 gcc_unreachable ();
21213b4c
DP
12320 }
12321
12322 if (try_again)
12323 {
12324 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
12325 /* You only get two chances. */
12326 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
12327 }
12328
12329 if (swap_operands)
12330 {
12331 rtx tmp;
12332 tmp = op0;
12333 op0 = op1;
12334 op1 = tmp;
12335 }
12336 }
12337
915167f5
GK
12338 emit_insn (gen_rtx_SET (VOIDmode, mask,
12339 gen_rtx_UNSPEC (dest_mode,
12340 gen_rtvec (2, op0, op1),
12341 vec_cmp_insn)));
21213b4c
DP
12342 if (dmode != dest_mode)
12343 {
12344 rtx temp = gen_reg_rtx (dest_mode);
12345 convert_move (temp, mask, 0);
12346 return temp;
12347 }
12348 return mask;
12349}
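/* Illustrative summary (not part of rs6000.c): the AltiVec compares used
   here are vcmpequ*, vcmpgt[su]* and vcmpgefp, so the recursion above
   rewrites the other codes before a single UNSPEC compare is emitted, e.g.

     a != b  ->  ~(a == b)              (one_cmpl of the EQ mask)
     a <= b  ->  (a < b) | (a == b)     (IOR of two recursive compares)
     a <  b  ->  b > a                  (operands swapped, code GT)
*/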
12350
 12351/* Return the vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
 12352   if no valid insn exists for the given mode.  */
12353
12354static int
12355get_vsel_insn (enum machine_mode mode)
12356{
12357 switch (mode)
12358 {
12359 case V4SImode:
12360 return UNSPEC_VSEL4SI;
12361 break;
12362 case V4SFmode:
12363 return UNSPEC_VSEL4SF;
12364 break;
12365 case V8HImode:
12366 return UNSPEC_VSEL8HI;
12367 break;
12368 case V16QImode:
12369 return UNSPEC_VSEL16QI;
12370 break;
12371 default:
12372 return INSN_NOT_AVAILABLE;
12373 break;
12374 }
12375 return INSN_NOT_AVAILABLE;
12376}
12377
 12378/* Emit a vector select insn that sets DEST from operands
 12379   OP1, OP2 and MASK.  */
12380
12381static void
12382rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
12383{
12384 rtx t, temp;
12385 enum machine_mode dest_mode = GET_MODE (dest);
12386 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
12387
12388 temp = gen_reg_rtx (dest_mode);
94ff898d 12389
bb8df8a6 12390  /* For each vector element, select op1 when the mask is 1, otherwise
19f1ebc7 12391     select op2.  */
915167f5
GK
12392 t = gen_rtx_SET (VOIDmode, temp,
12393 gen_rtx_UNSPEC (dest_mode,
12394 gen_rtvec (3, op2, op1, mask),
12395 vsel_insn_index));
21213b4c
DP
12396 emit_insn (t);
12397 emit_move_insn (dest, temp);
12398 return;
12399}
12400
94ff898d 12401/* Emit vector conditional expression.
21213b4c
DP
12402 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
 12403   CC_OP0 and CC_OP1 are the two operands of the relational operation COND.  */
12404
12405int
12406rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
12407 rtx cond, rtx cc_op0, rtx cc_op1)
12408{
12409 enum machine_mode dest_mode = GET_MODE (dest);
12410 enum rtx_code rcode = GET_CODE (cond);
12411 rtx mask;
12412
12413 if (!TARGET_ALTIVEC)
12414 return 0;
12415
12416 /* Get the vector mask for the given relational operations. */
12417 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
12418
12419 rs6000_emit_vector_select (dest, op1, op2, mask);
12420
12421 return 1;
12422}
12423
50a0b056
GK
 12424/* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
 12425   operands of the last comparison is nonzero/true, FALSE_COND if it
 12426   is zero/false.  Return 0 if the hardware has no such operation.  */
a4f6c312 12427
50a0b056 12428int
a2369ed3 12429rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
12430{
12431 enum rtx_code code = GET_CODE (op);
12432 rtx op0 = rs6000_compare_op0;
12433 rtx op1 = rs6000_compare_op1;
12434 REAL_VALUE_TYPE c1;
3148ad6d
DJ
12435 enum machine_mode compare_mode = GET_MODE (op0);
12436 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 12437 rtx temp;
add2402e 12438 bool is_against_zero;
50a0b056 12439
a3c9585f 12440 /* These modes should always match. */
a3170dc6
AH
12441 if (GET_MODE (op1) != compare_mode
12442 /* In the isel case however, we can use a compare immediate, so
12443 op1 may be a small constant. */
12444 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 12445 return 0;
178c3eff 12446 if (GET_MODE (true_cond) != result_mode)
3148ad6d 12447 return 0;
178c3eff 12448 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
12449 return 0;
12450
50a0b056 12451 /* First, work out if the hardware can do this at all, or
a3c9585f 12452 if it's too slow.... */
50a0b056 12453 if (! rs6000_compare_fp_p)
a3170dc6
AH
12454 {
12455 if (TARGET_ISEL)
12456 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
12457 return 0;
12458 }
8ef65e3d 12459 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 12460 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 12461 return 0;
50a0b056 12462
add2402e 12463 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 12464
add2402e
GK
12465 /* A floating-point subtract might overflow, underflow, or produce
12466 an inexact result, thus changing the floating-point flags, so it
12467 can't be generated if we care about that. It's safe if one side
12468 of the construct is zero, since then no subtract will be
12469 generated. */
ebb109ad 12470 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
12471 && flag_trapping_math && ! is_against_zero)
12472 return 0;
12473
50a0b056
GK
12474 /* Eliminate half of the comparisons by switching operands, this
12475 makes the remaining code simpler. */
12476 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 12477 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
12478 {
12479 code = reverse_condition_maybe_unordered (code);
12480 temp = true_cond;
12481 true_cond = false_cond;
12482 false_cond = temp;
12483 }
12484
 12485  /* UNEQ and LTGT take four instructions for a comparison with zero;
 12486     it'll probably be faster to use a branch here too.  */
bc9ec0e0 12487 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 12488 return 0;
f676971a 12489
50a0b056
GK
12490 if (GET_CODE (op1) == CONST_DOUBLE)
12491 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 12492
b6d08ca1 12493 /* We're going to try to implement comparisons by performing
50a0b056
GK
12494 a subtract, then comparing against zero. Unfortunately,
 12495     Inf - Inf is NaN, which is not zero, and so if we don't
27d30956 12496     know that the operand is finite and the comparison
50a0b056 12497     would treat EQ differently from UNORDERED, we can't do it.  */
bc9ec0e0 12498 if (HONOR_INFINITIES (compare_mode)
50a0b056 12499 && code != GT && code != UNGE
045572c7 12500 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
12501 /* Constructs of the form (a OP b ? a : b) are safe. */
12502 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 12503 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
12504 && ! rtx_equal_p (op1, true_cond))))
12505 return 0;
add2402e 12506
50a0b056
GK
12507 /* At this point we know we can use fsel. */
12508
12509 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
12510 if (! is_against_zero)
12511 {
12512 temp = gen_reg_rtx (compare_mode);
12513 emit_insn (gen_rtx_SET (VOIDmode, temp,
12514 gen_rtx_MINUS (compare_mode, op0, op1)));
12515 op0 = temp;
12516 op1 = CONST0_RTX (compare_mode);
12517 }
50a0b056
GK
12518
12519 /* If we don't care about NaNs we can reduce some of the comparisons
12520 down to faster ones. */
bc9ec0e0 12521 if (! HONOR_NANS (compare_mode))
50a0b056
GK
12522 switch (code)
12523 {
12524 case GT:
12525 code = LE;
12526 temp = true_cond;
12527 true_cond = false_cond;
12528 false_cond = temp;
12529 break;
12530 case UNGE:
12531 code = GE;
12532 break;
12533 case UNEQ:
12534 code = EQ;
12535 break;
12536 default:
12537 break;
12538 }
12539
12540 /* Now, reduce everything down to a GE. */
12541 switch (code)
12542 {
12543 case GE:
12544 break;
12545
12546 case LE:
3148ad6d
DJ
12547 temp = gen_reg_rtx (compare_mode);
12548 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12549 op0 = temp;
12550 break;
12551
12552 case ORDERED:
3148ad6d
DJ
12553 temp = gen_reg_rtx (compare_mode);
12554 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
12555 op0 = temp;
12556 break;
12557
12558 case EQ:
3148ad6d 12559 temp = gen_reg_rtx (compare_mode);
f676971a 12560 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
12561 gen_rtx_NEG (compare_mode,
12562 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
12563 op0 = temp;
12564 break;
12565
12566 case UNGE:
bc9ec0e0 12567 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 12568 temp = gen_reg_rtx (result_mode);
50a0b056 12569 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 12570 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12571 gen_rtx_GE (VOIDmode,
12572 op0, op1),
12573 true_cond, false_cond)));
bc9ec0e0
GK
12574 false_cond = true_cond;
12575 true_cond = temp;
50a0b056 12576
3148ad6d
DJ
12577 temp = gen_reg_rtx (compare_mode);
12578 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12579 op0 = temp;
12580 break;
12581
12582 case GT:
bc9ec0e0 12583 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 12584 temp = gen_reg_rtx (result_mode);
50a0b056 12585 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 12586 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12587 gen_rtx_GE (VOIDmode,
12588 op0, op1),
12589 true_cond, false_cond)));
bc9ec0e0
GK
12590 true_cond = false_cond;
12591 false_cond = temp;
50a0b056 12592
3148ad6d
DJ
12593 temp = gen_reg_rtx (compare_mode);
12594 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
12595 op0 = temp;
12596 break;
12597
12598 default:
37409796 12599 gcc_unreachable ();
50a0b056
GK
12600 }
12601
12602 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 12603 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
12604 gen_rtx_GE (VOIDmode,
12605 op0, op1),
12606 true_cond, false_cond)));
12607 return 1;
12608}
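/* Worked example (illustrative, not part of rs6000.c): for
   "x = (a >= b) ? p : q" in DFmode under -ffast-math the code above first
   rewrites the test as "tmp = a - b; tmp >= 0" and then emits

     (set x (if_then_else (ge tmp 0) p q))

   which matches the fsel pattern; LE is handled the same way after
   negating tmp, and ORDERED via its absolute value.  */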
12609
a3170dc6
AH
12610/* Same as above, but for ints (isel). */
12611
12612static int
a2369ed3 12613rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
12614{
12615 rtx condition_rtx, cr;
12616
12617 /* All isel implementations thus far are 32-bits. */
12618 if (GET_MODE (rs6000_compare_op0) != SImode)
12619 return 0;
12620
12621 /* We still have to do the compare, because isel doesn't do a
12622 compare, it just looks at the CRx bits set by a previous compare
12623 instruction. */
12624 condition_rtx = rs6000_generate_compare (GET_CODE (op));
12625 cr = XEXP (condition_rtx, 0);
12626
12627 if (GET_MODE (cr) == CCmode)
12628 emit_insn (gen_isel_signed (dest, condition_rtx,
12629 true_cond, false_cond, cr));
12630 else
12631 emit_insn (gen_isel_unsigned (dest, condition_rtx,
12632 true_cond, false_cond, cr));
12633
12634 return 1;
12635}
12636
12637const char *
a2369ed3 12638output_isel (rtx *operands)
a3170dc6
AH
12639{
12640 enum rtx_code code;
12641
12642 code = GET_CODE (operands[1]);
12643 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
12644 {
12645 PUT_CODE (operands[1], reverse_condition (code));
12646 return "isel %0,%3,%2,%j1";
12647 }
12648 else
12649 return "isel %0,%2,%3,%j1";
12650}
12651
50a0b056 12652void
a2369ed3 12653rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
12654{
12655 enum machine_mode mode = GET_MODE (op0);
5dc8d536 12656 enum rtx_code c;
50a0b056 12657 rtx target;
5dc8d536
AH
12658
12659 if (code == SMAX || code == SMIN)
12660 c = GE;
12661 else
12662 c = GEU;
12663
50a0b056 12664 if (code == SMAX || code == UMAX)
f676971a 12665 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
12666 op0, op1, mode, 0);
12667 else
f676971a 12668 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 12669 op1, op0, mode, 0);
37409796 12670 gcc_assert (target);
50a0b056
GK
12671 if (target != dest)
12672 emit_move_insn (dest, target);
12673}
46c07df8 12674
915167f5
GK
12675/* Emit instructions to perform a load-reserved/store-conditional operation.
12676 The operation performed is an atomic
12677 (set M (CODE:MODE M OP))
12678 If not NULL, BEFORE is atomically set to M before the operation, and
12679 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 12680 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
12681 Either OP or M may be wrapped in a NOT operation. */
12682
12683void
12684rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
12685 rtx m, rtx op, rtx before_param, rtx after_param,
12686 bool sync_p)
12687{
12688 enum machine_mode used_mode;
12689 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
12690 rtx used_m;
12691 rtvec vec;
12692 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
12693 rtx shift = NULL_RTX;
bb8df8a6 12694
915167f5
GK
12695 if (sync_p)
12696 emit_insn (gen_memory_barrier ());
bb8df8a6 12697
915167f5
GK
12698 if (GET_CODE (m) == NOT)
12699 used_m = XEXP (m, 0);
12700 else
12701 used_m = m;
12702
12703 /* If this is smaller than SImode, we'll have to use SImode with
12704 adjustments. */
12705 if (mode == QImode || mode == HImode)
12706 {
12707 rtx newop, oldop;
12708
12709 if (MEM_ALIGN (used_m) >= 32)
12710 {
12711 int ishift = 0;
12712 if (BYTES_BIG_ENDIAN)
12713 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 12714
915167f5
GK
12715 shift = GEN_INT (ishift);
12716 }
12717 else
12718 {
12719 rtx addrSI, aligned_addr;
a9c9d3fa 12720 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 12721
915167f5
GK
12722 addrSI = force_reg (SImode, gen_lowpart_common (SImode,
12723 XEXP (used_m, 0)));
12724 shift = gen_reg_rtx (SImode);
12725
12726 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
12727 GEN_INT (shift_mask)));
12728 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
12729
12730 aligned_addr = expand_binop (Pmode, and_optab,
12731 XEXP (used_m, 0),
12732 GEN_INT (-4), NULL_RTX,
12733 1, OPTAB_LIB_WIDEN);
12734 used_m = change_address (used_m, SImode, aligned_addr);
12735 set_mem_align (used_m, 32);
12736 /* It's safe to keep the old alias set of USED_M, because
12737 the operation is atomic and only affects the original
12738 USED_M. */
12739 if (GET_CODE (m) == NOT)
12740 m = gen_rtx_NOT (SImode, used_m);
12741 else
12742 m = used_m;
12743 }
12744
12745 if (GET_CODE (op) == NOT)
12746 {
12747 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
12748 oldop = gen_rtx_NOT (SImode, oldop);
12749 }
12750 else
12751 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 12752
915167f5
GK
12753 switch (code)
12754 {
12755 case IOR:
12756 case XOR:
12757 newop = expand_binop (SImode, and_optab,
12758 oldop, GEN_INT (imask), NULL_RTX,
12759 1, OPTAB_LIB_WIDEN);
12760 emit_insn (gen_ashlsi3 (newop, newop, shift));
12761 break;
12762
12763 case AND:
12764 newop = expand_binop (SImode, ior_optab,
12765 oldop, GEN_INT (~imask), NULL_RTX,
12766 1, OPTAB_LIB_WIDEN);
a9c9d3fa 12767 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
12768 break;
12769
12770 case PLUS:
9f0076e5 12771 case MINUS:
915167f5
GK
12772 {
12773 rtx mask;
bb8df8a6 12774
915167f5
GK
12775 newop = expand_binop (SImode, and_optab,
12776 oldop, GEN_INT (imask), NULL_RTX,
12777 1, OPTAB_LIB_WIDEN);
12778 emit_insn (gen_ashlsi3 (newop, newop, shift));
12779
12780 mask = gen_reg_rtx (SImode);
12781 emit_move_insn (mask, GEN_INT (imask));
12782 emit_insn (gen_ashlsi3 (mask, mask, shift));
12783
9f0076e5
DE
12784 if (code == PLUS)
12785 newop = gen_rtx_PLUS (SImode, m, newop);
12786 else
12787 newop = gen_rtx_MINUS (SImode, m, newop);
12788 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
12789 newop = gen_rtx_IOR (SImode, newop,
12790 gen_rtx_AND (SImode,
12791 gen_rtx_NOT (SImode, mask),
12792 m));
12793 break;
12794 }
12795
12796 default:
12797 gcc_unreachable ();
12798 }
12799
a9c9d3fa
GK
12800 if (GET_CODE (m) == NOT)
12801 {
12802 rtx mask, xorm;
12803
12804 mask = gen_reg_rtx (SImode);
12805 emit_move_insn (mask, GEN_INT (imask));
12806 emit_insn (gen_ashlsi3 (mask, mask, shift));
12807
12808 xorm = gen_rtx_XOR (SImode, used_m, mask);
12809 /* Depending on the value of 'op', the XOR or the operation might
12810 be able to be simplified away. */
12811 newop = simplify_gen_binary (code, SImode, xorm, newop);
12812 }
915167f5
GK
12813 op = newop;
12814 used_mode = SImode;
12815 before = gen_reg_rtx (used_mode);
12816 after = gen_reg_rtx (used_mode);
12817 }
12818 else
12819 {
12820 used_mode = mode;
12821 before = before_param;
12822 after = after_param;
12823
12824 if (before == NULL_RTX)
12825 before = gen_reg_rtx (used_mode);
12826 if (after == NULL_RTX)
12827 after = gen_reg_rtx (used_mode);
12828 }
bb8df8a6 12829
9f0076e5
DE
12830 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
12831 && used_mode != mode)
915167f5
GK
12832 the_op = op; /* Computed above. */
12833 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
12834 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
12835 else
12836 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
12837
12838 set_after = gen_rtx_SET (VOIDmode, after, the_op);
12839 set_before = gen_rtx_SET (VOIDmode, before, used_m);
12840 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
12841 gen_rtx_UNSPEC (used_mode,
12842 gen_rtvec (1, the_op),
12843 UNSPEC_SYNC_OP));
915167f5
GK
12844 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
12845
9f0076e5 12846 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
12847 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
12848 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
12849 else
12850 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
12851 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
12852
12853 /* Shift and mask the return values properly. */
12854 if (used_mode != mode && before_param)
12855 {
12856 emit_insn (gen_lshrsi3 (before, before, shift));
12857 convert_move (before_param, before, 1);
12858 }
12859
12860 if (used_mode != mode && after_param)
12861 {
12862 emit_insn (gen_lshrsi3 (after, after, shift));
12863 convert_move (after_param, after, 1);
12864 }
12865
12866 /* The previous sequence will end with a branch that's dependent on
12867 the conditional store, so placing an isync will ensure that no
12868 other instructions (especially, no load or store instructions)
12869 can start before the atomic operation completes. */
12870 if (sync_p)
12871 emit_insn (gen_isync ());
12872}
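/* Worked example of the subword path above (illustrative, not part of
   rs6000.c): for a HImode operation on a big-endian 32-bit target whose
   address is not known to be word-aligned, shift_mask is 0x10, so the
   rlwinm/xor pair turns a byte offset of 0 within the word into a shift
   of 16 (the high halfword) and a byte offset of 2 into a shift of 0
   (the low halfword); the operand and the 0xffff mask are then shifted
   into place and the containing word is operated on atomically.  */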
12873
b52110d4
DE
12874/* A subroutine of the atomic operation splitters. Jump to LABEL if
12875 COND is true. Mark the jump as unlikely to be taken. */
12876
12877static void
12878emit_unlikely_jump (rtx cond, rtx label)
12879{
12880 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
12881 rtx x;
12882
12883 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
12884 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
12885 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
12886}
12887
12888/* A subroutine of the atomic operation splitters. Emit a load-locked
12889 instruction in MODE. */
12890
12891static void
12892emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
12893{
12894 rtx (*fn) (rtx, rtx) = NULL;
12895 if (mode == SImode)
12896 fn = gen_load_locked_si;
12897 else if (mode == DImode)
12898 fn = gen_load_locked_di;
12899 emit_insn (fn (reg, mem));
12900}
12901
12902/* A subroutine of the atomic operation splitters. Emit a store-conditional
12903 instruction in MODE. */
12904
12905static void
12906emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
12907{
12908 rtx (*fn) (rtx, rtx, rtx) = NULL;
12909 if (mode == SImode)
12910 fn = gen_store_conditional_si;
12911 else if (mode == DImode)
12912 fn = gen_store_conditional_di;
12913
9f0076e5 12914 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
12915 if (PPC405_ERRATUM77)
12916 emit_insn (gen_memory_barrier ());
12917
12918 emit_insn (fn (res, mem, val));
12919}
12920
ea2c620c 12921/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 12922 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
12923 operand of the binary operator. BEFORE and AFTER are optional locations to
 12924   return the value of MEM either before or after the operation.  SCRATCH is
12925 a scratch register. */
12926
12927void
12928rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
12929 rtx before, rtx after, rtx scratch)
12930{
12931 enum machine_mode mode = GET_MODE (mem);
12932 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12933
12934 emit_insn (gen_memory_barrier ());
12935
12936 label = gen_label_rtx ();
12937 emit_label (label);
12938 label = gen_rtx_LABEL_REF (VOIDmode, label);
12939
12940 if (before == NULL_RTX)
12941 before = scratch;
12942 emit_load_locked (mode, before, mem);
12943
12944 if (code == NOT)
12945 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
12946 else if (code == AND)
12947 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
12948 else
12949 x = gen_rtx_fmt_ee (code, mode, before, val);
12950
12951 if (after != NULL_RTX)
12952 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
12953 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
12954
12955 emit_store_conditional (mode, cond, mem, scratch);
12956
12957 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12958 emit_unlikely_jump (x, label);
12959
12960 emit_insn (gen_isync ());
12961}
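/* Illustrative expansion (not part of rs6000.c): for an SImode atomic add
   the splitter above produces, in essence, the classic PowerPC sequence

	sync
     1:	lwarx   rT,0,rM
	add     rS,rT,rV
	stwcx.  rS,0,rM
	bne-    1b
	isync

   with BEFORE (if requested) taken from the lwarx result and AFTER from
   the add.  */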
12962
b52110d4
DE
12963/* Expand an atomic compare and swap operation. MEM is the memory on which
12964 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
12965 value to be stored. SCRATCH is a scratch GPR. */
12966
12967void
12968rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
12969 rtx scratch)
12970{
12971 enum machine_mode mode = GET_MODE (mem);
12972 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
12973
12974 emit_insn (gen_memory_barrier ());
12975
12976 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12977 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
12978 emit_label (XEXP (label1, 0));
12979
12980 emit_load_locked (mode, retval, mem);
12981
12982 x = gen_rtx_COMPARE (CCmode, retval, oldval);
12983 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
12984
12985 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12986 emit_unlikely_jump (x, label2);
12987
12988 emit_move_insn (scratch, newval);
12989 emit_store_conditional (mode, cond, mem, scratch);
12990
12991 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
12992 emit_unlikely_jump (x, label1);
12993
12994 emit_insn (gen_isync ());
12995 emit_label (XEXP (label2, 0));
12996}
12997
12998/* Expand an atomic test and set operation. MEM is the memory on which
12999 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13000
13001void
13002rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13003{
13004 enum machine_mode mode = GET_MODE (mem);
13005 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13006
13007 emit_insn (gen_memory_barrier ());
13008
13009 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13010 emit_label (XEXP (label, 0));
13011
13012 emit_load_locked (mode, retval, mem);
13013 emit_move_insn (scratch, val);
13014 emit_store_conditional (mode, cond, mem, scratch);
13015
13016 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13017 emit_unlikely_jump (x, label);
13018
13019 emit_insn (gen_isync ());
13020}
13021
9fc75b97
DE
13022void
13023rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
13024{
13025 enum machine_mode mode = GET_MODE (mem);
13026 rtx addrSI, align, wdst, shift, mask;
13027 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
13028 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13029
13030 /* Shift amount for subword relative to aligned word. */
13031 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
13032 shift = gen_reg_rtx (SImode);
13033 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13034 GEN_INT (shift_mask)));
13035 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13036
13037 /* Shift and mask old value into position within word. */
13038 oldval = convert_modes (SImode, mode, oldval, 1);
13039 oldval = expand_binop (SImode, and_optab,
13040 oldval, GEN_INT (imask), NULL_RTX,
13041 1, OPTAB_LIB_WIDEN);
13042 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
13043
13044 /* Shift and mask new value into position within word. */
13045 newval = convert_modes (SImode, mode, newval, 1);
13046 newval = expand_binop (SImode, and_optab,
13047 newval, GEN_INT (imask), NULL_RTX,
13048 1, OPTAB_LIB_WIDEN);
13049 emit_insn (gen_ashlsi3 (newval, newval, shift));
13050
13051 /* Mask for insertion. */
13052 mask = gen_reg_rtx (SImode);
13053 emit_move_insn (mask, GEN_INT (imask));
13054 emit_insn (gen_ashlsi3 (mask, mask, shift));
13055
13056 /* Address of aligned word containing subword. */
13057 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
13058 NULL_RTX, 1, OPTAB_LIB_WIDEN);
13059 mem = change_address (mem, SImode, align);
13060 set_mem_align (mem, 32);
13061 MEM_VOLATILE_P (mem) = 1;
13062
13063 wdst = gen_reg_rtx (SImode);
13064 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
13065 oldval, newval, mem));
13066
13067 emit_move_insn (dst, gen_lowpart (mode, wdst));
13068}
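/* Worked example (illustrative, not part of rs6000.c): for a QImode
   compare-and-swap on a big-endian target, shift_mask is 0x18 and imask
   is 0xff; a byte at offset 3 within its word gets shift 0 and a byte at
   offset 0 gets shift 24, and OLDVAL, NEWVAL and the 0xff insertion mask
   are all moved to that position before the word-sized
   sync_compare_and_swapqhi_internal pattern runs the ll/sc loop.  */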
13069
13070void
13071rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
13072 rtx oldval, rtx newval, rtx mem,
13073 rtx scratch)
13074{
13075 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13076
13077 emit_insn (gen_memory_barrier ());
13078 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13079 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13080 emit_label (XEXP (label1, 0));
13081
13082 emit_load_locked (SImode, scratch, mem);
13083
13084 /* Mask subword within loaded value for comparison with oldval.
 13085     Use UNSPEC_AND to avoid clobber.  */
13086 emit_insn (gen_rtx_SET (SImode, dest,
13087 gen_rtx_UNSPEC (SImode,
13088 gen_rtvec (2, scratch, mask),
13089 UNSPEC_AND)));
13090
13091 x = gen_rtx_COMPARE (CCmode, dest, oldval);
13092 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13093
13094 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13095 emit_unlikely_jump (x, label2);
13096
13097 /* Clear subword within loaded value for insertion of new value. */
13098 emit_insn (gen_rtx_SET (SImode, scratch,
13099 gen_rtx_AND (SImode,
13100 gen_rtx_NOT (SImode, mask), scratch)));
13101 emit_insn (gen_iorsi3 (scratch, scratch, newval));
13102 emit_store_conditional (SImode, cond, mem, scratch);
13103
13104 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13105 emit_unlikely_jump (x, label1);
13106
13107 emit_insn (gen_isync ());
13108 emit_label (XEXP (label2, 0));
13109}
13110
13111
b52110d4 13112 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
13113 multi-register moves. It will emit at most one instruction for
13114 each register that is accessed; that is, it won't emit li/lis pairs
13115 (or equivalent for 64-bit code). One of SRC or DST must be a hard
13116 register. */
46c07df8 13117
46c07df8 13118void
a9baceb1 13119rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 13120{
a9baceb1
GK
13121 /* The register number of the first register being moved. */
13122 int reg;
13123 /* The mode that is to be moved. */
13124 enum machine_mode mode;
13125 /* The mode that the move is being done in, and its size. */
13126 enum machine_mode reg_mode;
13127 int reg_mode_size;
13128 /* The number of registers that will be moved. */
13129 int nregs;
13130
13131 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
13132 mode = GET_MODE (dst);
c8b622ff 13133 nregs = hard_regno_nregs[reg][mode];
a9baceb1 13134 if (FP_REGNO_P (reg))
7393f7f8 13135 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
13136 else if (ALTIVEC_REGNO_P (reg))
13137 reg_mode = V16QImode;
8521c414
JM
13138 else if (TARGET_E500_DOUBLE && mode == TFmode)
13139 reg_mode = DFmode;
a9baceb1
GK
13140 else
13141 reg_mode = word_mode;
13142 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 13143
37409796 13144 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 13145
a9baceb1
GK
13146 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
13147 {
13148 /* Move register range backwards, if we might have destructive
13149 overlap. */
13150 int i;
13151 for (i = nregs - 1; i >= 0; i--)
f676971a 13152 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
13153 simplify_gen_subreg (reg_mode, dst, mode,
13154 i * reg_mode_size),
13155 simplify_gen_subreg (reg_mode, src, mode,
13156 i * reg_mode_size)));
13157 }
46c07df8
HP
13158 else
13159 {
a9baceb1
GK
13160 int i;
13161 int j = -1;
13162 bool used_update = false;
46c07df8 13163
c1e55850 13164 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
13165 {
13166 rtx breg;
3a1f863f 13167
a9baceb1
GK
13168 if (GET_CODE (XEXP (src, 0)) == PRE_INC
13169 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
13170 {
13171 rtx delta_rtx;
a9baceb1 13172 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
13173 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
13174 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
13175 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
13176 emit_insn (TARGET_32BIT
13177 ? gen_addsi3 (breg, breg, delta_rtx)
13178 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13179 src = replace_equiv_address (src, breg);
3a1f863f 13180 }
d04b6e6e 13181 else if (! rs6000_offsettable_memref_p (src))
c1e55850 13182 {
13e2e16e 13183 rtx basereg;
c1e55850
GK
13184 basereg = gen_rtx_REG (Pmode, reg);
13185 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 13186 src = replace_equiv_address (src, basereg);
c1e55850 13187 }
3a1f863f 13188
0423421f
AM
13189 breg = XEXP (src, 0);
13190 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
13191 breg = XEXP (breg, 0);
13192
13193 /* If the base register we are using to address memory is
13194 also a destination reg, then change that register last. */
13195 if (REG_P (breg)
13196 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
13197 && REGNO (breg) < REGNO (dst) + nregs)
13198 j = REGNO (breg) - REGNO (dst);
c4ad648e 13199 }
46c07df8 13200
a9baceb1 13201 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
13202 {
13203 rtx breg;
13204
a9baceb1
GK
13205 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
13206 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
13207 {
13208 rtx delta_rtx;
a9baceb1 13209 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
13210 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
13211 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
13212 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
13213
13214 /* We have to update the breg before doing the store.
13215 Use store with update, if available. */
13216
13217 if (TARGET_UPDATE)
13218 {
a9baceb1 13219 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
13220 emit_insn (TARGET_32BIT
13221 ? (TARGET_POWERPC64
13222 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
13223 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
13224 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 13225 used_update = true;
3a1f863f
DE
13226 }
13227 else
a9baceb1
GK
13228 emit_insn (TARGET_32BIT
13229 ? gen_addsi3 (breg, breg, delta_rtx)
13230 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 13231 dst = replace_equiv_address (dst, breg);
3a1f863f 13232 }
37409796 13233 else
d04b6e6e 13234 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
13235 }
13236
46c07df8 13237 for (i = 0; i < nregs; i++)
f676971a 13238 {
3a1f863f
DE
13239 /* Calculate index to next subword. */
13240 ++j;
f676971a 13241 if (j == nregs)
3a1f863f 13242 j = 0;
46c07df8 13243
112cdef5 13244	  /* If the compiler already emitted the move of the first word via
a9baceb1 13245	     store with update, there is no need to do anything.  */
3a1f863f 13246 if (j == 0 && used_update)
a9baceb1 13247 continue;
f676971a 13248
a9baceb1
GK
13249 emit_insn (gen_rtx_SET (VOIDmode,
13250 simplify_gen_subreg (reg_mode, dst, mode,
13251 j * reg_mode_size),
13252 simplify_gen_subreg (reg_mode, src, mode,
13253 j * reg_mode_size)));
3a1f863f 13254 }
46c07df8
HP
13255 }
13256}
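/* Worked example (illustrative, not part of rs6000.c): moving a TImode
   value from r8..r11 into r9..r12 on a 32-bit target would clobber r9
   before it is read if done in ascending order, so the
   REGNO (src) < REGNO (dst) case above copies backwards:
   r12 <- r11, r11 <- r10, r10 <- r9, r9 <- r8.  */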
13257
12a4e8c5 13258\f
a4f6c312
SS
13259/* This page contains routines that are used to determine what the
13260 function prologue and epilogue code will do and write them out. */
9878760c 13261
a4f6c312
SS
13262/* Return the first fixed-point register that is required to be
13263 saved. 32 if none. */
9878760c
RK
13264
13265int
863d938c 13266first_reg_to_save (void)
9878760c
RK
13267{
13268 int first_reg;
13269
13270 /* Find lowest numbered live register. */
13271 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 13272 if (df_regs_ever_live_p (first_reg)
a38d360d 13273 && (! call_used_regs[first_reg]
1db02437 13274 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 13275 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
13276 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
13277 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
13278 break;
13279
ee890fe2 13280#if TARGET_MACHO
93638d7a
AM
13281 if (flag_pic
13282 && current_function_uses_pic_offset_table
13283 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 13284 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
13285#endif
13286
9878760c
RK
13287 return first_reg;
13288}
13289
13290/* Similar, for FP regs. */
13291
13292int
863d938c 13293first_fp_reg_to_save (void)
9878760c
RK
13294{
13295 int first_reg;
13296
13297 /* Find lowest numbered live register. */
13298 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 13299 if (df_regs_ever_live_p (first_reg))
9878760c
RK
13300 break;
13301
13302 return first_reg;
13303}
00b960c7
AH
13304
13305/* Similar, for AltiVec regs. */
13306
13307static int
863d938c 13308first_altivec_reg_to_save (void)
00b960c7
AH
13309{
13310 int i;
13311
13312 /* Stack frame remains as is unless we are in AltiVec ABI. */
13313 if (! TARGET_ALTIVEC_ABI)
13314 return LAST_ALTIVEC_REGNO + 1;
13315
22fa69da 13316 /* On Darwin, the unwind routines are compiled without
982afe02 13317 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da
GK
13318 altivec registers when necessary. */
13319 if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
13320 && ! TARGET_ALTIVEC)
13321 return FIRST_ALTIVEC_REGNO + 20;
13322
00b960c7
AH
13323 /* Find lowest numbered live register. */
13324 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 13325 if (df_regs_ever_live_p (i))
00b960c7
AH
13326 break;
13327
13328 return i;
13329}

/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live; bit 0
   corresponds to the most-significant bit of the 32-bit word.  */

static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     call-saved altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    mask |= 0xFFF;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (df_regs_ever_live_p (i))
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
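
/* Illustrative note, not part of the original source: because bit 0 of
   the mask is the most-significant bit of the word (V0), a function
   whose only live vector registers are the callee-saved V20..V31 gets

       mask = ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 20) | ...
            = 0x00000FFF;

   the same value forced above for the Darwin unwind-routine case.  */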

/* For a very restricted set of circumstances, we can cut down the
   size of prologues/epilogues by calling our own save/restore-the-world
   routines.  */

13378static void
f57fe068
AM
13379compute_save_world_info (rs6000_stack_t *info_ptr)
13380{
13381 info_ptr->world_save_p = 1;
13382 info_ptr->world_save_p
13383 = (WORLD_SAVE_P (info_ptr)
13384 && DEFAULT_ABI == ABI_DARWIN
13385 && ! (current_function_calls_setjmp && flag_exceptions)
13386 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
13387 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
13388 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
13389 && info_ptr->cr_save_p);
f676971a 13390
d62294f5
FJ
13391 /* This will not work in conjunction with sibcalls. Make sure there
13392 are none. (This check is expensive, but seldom executed.) */
f57fe068 13393 if (WORLD_SAVE_P (info_ptr))
f676971a 13394 {
d62294f5
FJ
13395 rtx insn;
13396 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
13397 if ( GET_CODE (insn) == CALL_INSN
13398 && SIBLING_CALL_P (insn))
13399 {
13400 info_ptr->world_save_p = 0;
13401 break;
13402 }
d62294f5 13403 }
f676971a 13404
f57fe068 13405 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
13406 {
13407 /* Even if we're not touching VRsave, make sure there's room on the
13408 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 13409 will attempt to save it. */
d62294f5
FJ
13410 info_ptr->vrsave_size = 4;
13411
13412 /* "Save" the VRsave register too if we're saving the world. */
13413 if (info_ptr->vrsave_mask == 0)
c4ad648e 13414 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
13415
13416 /* Because the Darwin register save/restore routines only handle
c4ad648e 13417 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 13418 check. */
37409796
NS
13419 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
13420 && (info_ptr->first_altivec_reg_save
13421 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 13422 }
f676971a 13423 return;
d62294f5
FJ
13424}
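
/* Illustrative summary, not in the original source: WORLD_SAVE_P stays
   set only for a Darwin function that saves the full callee-saved set
   (FPRs from FIRST_SAVED_FP_REGNO, GPRs from FIRST_SAVED_GP_REGNO,
   vectors from FIRST_SAVED_ALTIVEC_REGNO) plus CR, makes no sibcalls,
   and does not combine setjmp with -fexceptions; only then is the
   prologue shrunk to a single save_world call.  */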
13425
13426
00b960c7 13427static void
a2369ed3 13428is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
13429{
13430 bool *yes = (bool *) xyes;
13431 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
13432 *yes = true;
13433}
13434
4697a36c
MM
\f
/* Calculate the stack information for the current function.  This is
   complicated by having two separate calling sequences, the AIX calling
   sequence and the V.4 calling sequence.

   AIX (and Darwin/Mac OS X) stack frames look like:
                                                          32-bit  64-bit
        SP----> +---------------------------------------+
                | back chain to caller                  | 0       0
                +---------------------------------------+
                | saved CR                              | 4       8 (8-11)
                +---------------------------------------+
                | saved LR                              | 8       16
                +---------------------------------------+
                | reserved for compilers                | 12      24
                +---------------------------------------+
                | reserved for binders                  | 16      32
                +---------------------------------------+
                | saved TOC pointer                     | 20      40
                +---------------------------------------+
                | Parameter save area (P)               | 24      48
                +---------------------------------------+
                | Alloca space (A)                      | 24+P    etc.
                +---------------------------------------+
                | Local variable space (L)              | 24+P+A
                +---------------------------------------+
                | Float/int conversion temporary (X)    | 24+P+A+L
                +---------------------------------------+
                | Save area for AltiVec registers (W)   | 24+P+A+L+X
                +---------------------------------------+
                | AltiVec alignment padding (Y)         | 24+P+A+L+X+W
                +---------------------------------------+
                | Save area for VRSAVE register (Z)     | 24+P+A+L+X+W+Y
                +---------------------------------------+
                | Save area for GP registers (G)        | 24+P+A+L+X+W+Y+Z
                +---------------------------------------+
                | Save area for FP registers (F)        | 24+P+A+L+X+W+Y+Z+G
                +---------------------------------------+
        old SP->| back chain to caller's caller         |
                +---------------------------------------+

   The required alignment for AIX configurations is two words (i.e., 8
   or 16 bytes).


   V.4 stack frames look like:

        SP----> +---------------------------------------+
                | back chain to caller                  | 0
                +---------------------------------------+
                | caller's saved LR                     | 4
                +---------------------------------------+
                | Parameter save area (P)               | 8
                +---------------------------------------+
                | Alloca space (A)                      | 8+P
                +---------------------------------------+
                | Varargs save area (V)                 | 8+P+A
                +---------------------------------------+
                | Local variable space (L)              | 8+P+A+V
                +---------------------------------------+
                | Float/int conversion temporary (X)    | 8+P+A+V+L
                +---------------------------------------+
                | Save area for AltiVec registers (W)   | 8+P+A+V+L+X
                +---------------------------------------+
                | AltiVec alignment padding (Y)         | 8+P+A+V+L+X+W
                +---------------------------------------+
                | Save area for VRSAVE register (Z)     | 8+P+A+V+L+X+W+Y
                +---------------------------------------+
                | SPE: area for 64-bit GP registers     |
                +---------------------------------------+
                | SPE alignment padding                 |
                +---------------------------------------+
                | saved CR (C)                          | 8+P+A+V+L+X+W+Y+Z
                +---------------------------------------+
                | Save area for GP registers (G)        | 8+P+A+V+L+X+W+Y+Z+C
                +---------------------------------------+
                | Save area for FP registers (F)        | 8+P+A+V+L+X+W+Y+Z+C+G
                +---------------------------------------+
        old SP->| back chain to caller's caller         |
                +---------------------------------------+

   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
   given.  (But note below and in sysv4.h that we require only 8 and
   may round up the size of our stack frame anyway.  The historical
   reason is early versions of powerpc-linux which didn't properly
   align the stack at program startup.  A happy side-effect is that
   -mno-eabi libraries can be used with -meabi programs.)

   The EABI configuration defaults to the V.4 layout.  However,
   the stack alignment requirements may differ.  If -mno-eabi is not
   given, the required stack alignment is 8 bytes; if -mno-eabi is
   given, the required alignment is 16 bytes.  (But see V.4 comment
   above.)  */

#ifndef ABI_STACK_BOUNDARY
#define ABI_STACK_BOUNDARY STACK_BOUNDARY
#endif
13532
d1d0c603 13533static rs6000_stack_t *
863d938c 13534rs6000_stack_info (void)
4697a36c 13535{
022123e6 13536 static rs6000_stack_t info;
4697a36c 13537 rs6000_stack_t *info_ptr = &info;
327e5343 13538 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 13539 int ehrd_size;
64045029 13540 int save_align;
44688022 13541 HOST_WIDE_INT non_fixed_size;
4697a36c 13542
022123e6 13543 memset (&info, 0, sizeof (info));
4697a36c 13544
c19de7aa
AH
13545 if (TARGET_SPE)
13546 {
13547 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 13548 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
13549 cfun->machine->insn_chain_scanned_p
13550 = spe_func_has_64bit_regs_p () + 1;
13551 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
13552 }
13553
a4f6c312 13554 /* Select which calling sequence. */
178274da 13555 info_ptr->abi = DEFAULT_ABI;
9878760c 13556
a4f6c312 13557 /* Calculate which registers need to be saved & save area size. */
4697a36c 13558 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 13559 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
906fb125 13560 even if it currently looks like we won't. */
2bfcf297 13561 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
13562 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
13563 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
1db02437
FS
13564 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
13565 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
906fb125
GK
13566 else
13567 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
4697a36c 13568
a3170dc6
AH
  /* For the SPE, we have an additional upper 32 bits on each GPR.
     Ideally we should save the entire 64 bits only when the upper
     half is used in SIMD instructions.  Since we only record which
     registers are live (not the width at which they are used), this
     proves difficult because we'd have to traverse the instruction
     chain at the right time, taking reload into account.  This is a
     real pain, so we opt to save all GPRs in 64 bits whenever even one
     register is used in 64 bits.  Otherwise, all the registers in the
     frame get saved in 32 bits.

     So, when we save all GPRs (except the SP) in 64 bits, the
     traditional GP save area will be empty.  */
c19de7aa 13581 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13582 info_ptr->gp_size = 0;
13583
4697a36c
MM
13584 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
13585 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
13586
00b960c7
AH
13587 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
13588 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
13589 - info_ptr->first_altivec_reg_save);
13590
592696dd 13591 /* Does this function call anything? */
71f123ca
FS
13592 info_ptr->calls_p = (! current_function_is_leaf
13593 || cfun->machine->ra_needs_full_frame);
b6c9286a 13594
a4f6c312 13595 /* Determine if we need to save the link register. */
022123e6
AM
13596 if ((DEFAULT_ABI == ABI_AIX
13597 && current_function_profile
13598 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
13599#ifdef TARGET_RELOCATABLE
13600 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
13601#endif
13602 || (info_ptr->first_fp_reg_save != 64
13603 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
00b960c7 13604 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
178274da 13605 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
022123e6
AM
13606 || info_ptr->calls_p
13607 || rs6000_ra_ever_killed ())
4697a36c
MM
13608 {
13609 info_ptr->lr_save_p = 1;
1de43f85 13610 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
13611 }
13612
9ebbca7d 13613 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
13614 if (df_regs_ever_live_p (CR2_REGNO)
13615 || df_regs_ever_live_p (CR3_REGNO)
13616 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
13617 {
13618 info_ptr->cr_save_p = 1;
178274da 13619 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
13620 info_ptr->cr_size = reg_size;
13621 }
13622
83720594
RH
13623 /* If the current function calls __builtin_eh_return, then we need
13624 to allocate stack space for registers that will hold data for
13625 the exception handler. */
13626 if (current_function_calls_eh_return)
13627 {
13628 unsigned int i;
13629 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
13630 continue;
a3170dc6
AH
13631
13632 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
13633 ehrd_size = i * (TARGET_SPE_ABI
13634 && info_ptr->spe_64bit_regs_used != 0
13635 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
13636 }
13637 else
13638 ehrd_size = 0;
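
  /* Worked example, not in the original source: assuming the target
     exposes four EH data registers (a hypothetical but typical count),
     a function calling __builtin_eh_return gets ehrd_size
     = 4 * UNITS_PER_WORD (16 bytes in 32-bit mode, 32 in 64-bit mode),
     or 4 * UNITS_PER_SPE_WORD when the SPE saves its GPRs in 64 bits.  */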
13639
592696dd 13640 /* Determine various sizes. */
4697a36c
MM
13641 info_ptr->reg_size = reg_size;
13642 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 13643 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
a4f6c312 13644 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
03e007d7 13645 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
13646 if (FRAME_GROWS_DOWNWARD)
13647 info_ptr->vars_size
5b667039
JJ
13648 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
13649 + info_ptr->parm_size,
7d5175e1 13650 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
13651 - (info_ptr->fixed_size + info_ptr->vars_size
13652 + info_ptr->parm_size);
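
  /* Worked example with hypothetical numbers, not from the original
     source: with fixed_size = 64, vars_size = 40 and parm_size = 32,
     the sum is 136; rounding up to a 16-byte ABI_STACK_BOUNDARY gives
     144, so vars_size absorbs the 8 bytes of padding and becomes 48.  */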
00b960c7 13653
c19de7aa 13654 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
13655 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
13656 else
13657 info_ptr->spe_gp_size = 0;
13658
4d774ff8
HP
13659 if (TARGET_ALTIVEC_ABI)
13660 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 13661 else
4d774ff8
HP
13662 info_ptr->vrsave_mask = 0;
13663
13664 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
13665 info_ptr->vrsave_size = 4;
13666 else
13667 info_ptr->vrsave_size = 0;
b6c9286a 13668
d62294f5
FJ
13669 compute_save_world_info (info_ptr);
13670
592696dd 13671 /* Calculate the offsets. */
178274da 13672 switch (DEFAULT_ABI)
4697a36c 13673 {
b6c9286a 13674 case ABI_NONE:
24d304eb 13675 default:
37409796 13676 gcc_unreachable ();
b6c9286a
MM
13677
13678 case ABI_AIX:
ee890fe2 13679 case ABI_DARWIN:
b6c9286a
MM
13680 info_ptr->fp_save_offset = - info_ptr->fp_size;
13681 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
13682
13683 if (TARGET_ALTIVEC_ABI)
13684 {
13685 info_ptr->vrsave_save_offset
13686 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
13687
982afe02 13688 /* Align stack so vector save area is on a quadword boundary.
9278121c 13689 The padding goes above the vectors. */
00b960c7
AH
13690 if (info_ptr->altivec_size != 0)
13691 info_ptr->altivec_padding_size
9278121c 13692 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
13693 else
13694 info_ptr->altivec_padding_size = 0;
13695
13696 info_ptr->altivec_save_offset
13697 = info_ptr->vrsave_save_offset
13698 - info_ptr->altivec_padding_size
13699 - info_ptr->altivec_size;
9278121c
GK
13700 gcc_assert (info_ptr->altivec_size == 0
13701 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
13702
13703 /* Adjust for AltiVec case. */
13704 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
13705 }
13706 else
13707 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
13708 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
13709 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
13710 break;
13711
13712 case ABI_V4:
b6c9286a
MM
13713 info_ptr->fp_save_offset = - info_ptr->fp_size;
13714 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 13715 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 13716
c19de7aa 13717 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
13718 {
13719 /* Align stack so SPE GPR save area is aligned on a
13720 double-word boundary. */
13721 if (info_ptr->spe_gp_size != 0)
13722 info_ptr->spe_padding_size
13723 = 8 - (-info_ptr->cr_save_offset % 8);
13724 else
13725 info_ptr->spe_padding_size = 0;
13726
13727 info_ptr->spe_gp_save_offset
13728 = info_ptr->cr_save_offset
13729 - info_ptr->spe_padding_size
13730 - info_ptr->spe_gp_size;
13731
13732 /* Adjust for SPE case. */
022123e6 13733 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 13734 }
a3170dc6 13735 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
13736 {
13737 info_ptr->vrsave_save_offset
13738 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
13739
13740 /* Align stack so vector save area is on a quadword boundary. */
13741 if (info_ptr->altivec_size != 0)
13742 info_ptr->altivec_padding_size
13743 = 16 - (-info_ptr->vrsave_save_offset % 16);
13744 else
13745 info_ptr->altivec_padding_size = 0;
13746
13747 info_ptr->altivec_save_offset
13748 = info_ptr->vrsave_save_offset
13749 - info_ptr->altivec_padding_size
13750 - info_ptr->altivec_size;
13751
13752 /* Adjust for AltiVec case. */
022123e6 13753 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
13754 }
13755 else
022123e6
AM
13756 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
13757 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
13758 info_ptr->lr_save_offset = reg_size;
13759 break;
4697a36c
MM
13760 }
13761
64045029 13762 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
13763 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
13764 + info_ptr->gp_size
13765 + info_ptr->altivec_size
13766 + info_ptr->altivec_padding_size
a3170dc6
AH
13767 + info_ptr->spe_gp_size
13768 + info_ptr->spe_padding_size
00b960c7
AH
13769 + ehrd_size
13770 + info_ptr->cr_size
022123e6 13771 + info_ptr->vrsave_size,
64045029 13772 save_align);
00b960c7 13773
44688022 13774 non_fixed_size = (info_ptr->vars_size
ff381587 13775 + info_ptr->parm_size
5b667039 13776 + info_ptr->save_size);
ff381587 13777
44688022
AM
13778 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
13779 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
13780
13781 /* Determine if we need to allocate any stack frame:
13782
a4f6c312
SS
13783 For AIX we need to push the stack if a frame pointer is needed
13784 (because the stack might be dynamically adjusted), if we are
13785 debugging, if we make calls, or if the sum of fp_save, gp_save,
13786 and local variables are more than the space needed to save all
13787 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
13788 + 18*8 = 288 (GPR13 reserved).
ff381587 13789
a4f6c312
SS
13790 For V.4 we don't have the stack cushion that AIX uses, but assume
13791 that the debugger can handle stackless frames. */
ff381587
MM
13792
13793 if (info_ptr->calls_p)
13794 info_ptr->push_p = 1;
13795
178274da 13796 else if (DEFAULT_ABI == ABI_V4)
44688022 13797 info_ptr->push_p = non_fixed_size != 0;
ff381587 13798
178274da
AM
13799 else if (frame_pointer_needed)
13800 info_ptr->push_p = 1;
13801
13802 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
13803 info_ptr->push_p = 1;
13804
ff381587 13805 else
44688022 13806 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 13807
a4f6c312 13808 /* Zero offsets if we're not saving those registers. */
8dda1a21 13809 if (info_ptr->fp_size == 0)
4697a36c
MM
13810 info_ptr->fp_save_offset = 0;
13811
8dda1a21 13812 if (info_ptr->gp_size == 0)
4697a36c
MM
13813 info_ptr->gp_save_offset = 0;
13814
00b960c7
AH
13815 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
13816 info_ptr->altivec_save_offset = 0;
13817
13818 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
13819 info_ptr->vrsave_save_offset = 0;
13820
c19de7aa
AH
13821 if (! TARGET_SPE_ABI
13822 || info_ptr->spe_64bit_regs_used == 0
13823 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
13824 info_ptr->spe_gp_save_offset = 0;
13825
c81fc13e 13826 if (! info_ptr->lr_save_p)
4697a36c
MM
13827 info_ptr->lr_save_offset = 0;
13828
c81fc13e 13829 if (! info_ptr->cr_save_p)
4697a36c
MM
13830 info_ptr->cr_save_offset = 0;
13831
13832 return info_ptr;
13833}
13834
c19de7aa
AH
13835/* Return true if the current function uses any GPRs in 64-bit SIMD
13836 mode. */
13837
13838static bool
863d938c 13839spe_func_has_64bit_regs_p (void)
c19de7aa
AH
13840{
13841 rtx insns, insn;
13842
13843 /* Functions that save and restore all the call-saved registers will
13844 need to save/restore the registers in 64-bits. */
13845 if (current_function_calls_eh_return
13846 || current_function_calls_setjmp
13847 || current_function_has_nonlocal_goto)
13848 return true;
13849
13850 insns = get_insns ();
13851
13852 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
13853 {
13854 if (INSN_P (insn))
13855 {
13856 rtx i;
13857
b5a5beb9
AH
          /* FIXME: This should be implemented with attributes, e.g.

               (set_attr "spe64" "true")  ...and then
               if (get_spe64 (insn)) return true;

             That would be the only reliable way to do the checks below.  */
13864
c19de7aa 13865 i = PATTERN (insn);
f82f556d
AH
13866 if (GET_CODE (i) == SET)
13867 {
13868 enum machine_mode mode = GET_MODE (SET_SRC (i));
13869
13870 if (SPE_VECTOR_MODE (mode))
13871 return true;
17caeff2 13872 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
13873 return true;
13874 }
c19de7aa
AH
13875 }
13876 }
13877
13878 return false;
13879}
13880
d1d0c603 13881static void
a2369ed3 13882debug_stack_info (rs6000_stack_t *info)
9878760c 13883{
d330fd93 13884 const char *abi_string;
24d304eb 13885
c81fc13e 13886 if (! info)
4697a36c
MM
13887 info = rs6000_stack_info ();
13888
13889 fprintf (stderr, "\nStack information for function %s:\n",
13890 ((current_function_decl && DECL_NAME (current_function_decl))
13891 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
13892 : "<unknown>"));
13893
24d304eb
RK
13894 switch (info->abi)
13895 {
b6c9286a
MM
13896 default: abi_string = "Unknown"; break;
13897 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 13898 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 13899 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 13900 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
13901 }
13902
13903 fprintf (stderr, "\tABI = %5s\n", abi_string);
13904
00b960c7
AH
13905 if (TARGET_ALTIVEC_ABI)
13906 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
13907
a3170dc6
AH
13908 if (TARGET_SPE_ABI)
13909 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
13910
4697a36c
MM
13911 if (info->first_gp_reg_save != 32)
13912 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
13913
13914 if (info->first_fp_reg_save != 64)
13915 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 13916
00b960c7
AH
13917 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
13918 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
13919 info->first_altivec_reg_save);
13920
4697a36c
MM
13921 if (info->lr_save_p)
13922 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 13923
4697a36c
MM
13924 if (info->cr_save_p)
13925 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
13926
00b960c7
AH
13927 if (info->vrsave_mask)
13928 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
13929
4697a36c
MM
13930 if (info->push_p)
13931 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
13932
13933 if (info->calls_p)
13934 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
13935
4697a36c
MM
13936 if (info->gp_save_offset)
13937 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
13938
13939 if (info->fp_save_offset)
13940 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
13941
00b960c7
AH
13942 if (info->altivec_save_offset)
13943 fprintf (stderr, "\taltivec_save_offset = %5d\n",
13944 info->altivec_save_offset);
13945
a3170dc6
AH
13946 if (info->spe_gp_save_offset)
13947 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
13948 info->spe_gp_save_offset);
13949
00b960c7
AH
13950 if (info->vrsave_save_offset)
13951 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
13952 info->vrsave_save_offset);
13953
4697a36c
MM
13954 if (info->lr_save_offset)
13955 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
13956
13957 if (info->cr_save_offset)
13958 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
13959
13960 if (info->varargs_save_offset)
13961 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
13962
13963 if (info->total_size)
d1d0c603
JJ
13964 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13965 info->total_size);
4697a36c 13966
4697a36c 13967 if (info->vars_size)
d1d0c603
JJ
13968 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
13969 info->vars_size);
4697a36c
MM
13970
13971 if (info->parm_size)
13972 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
13973
13974 if (info->fixed_size)
13975 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
13976
13977 if (info->gp_size)
13978 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
13979
a3170dc6
AH
13980 if (info->spe_gp_size)
13981 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
13982
4697a36c
MM
13983 if (info->fp_size)
13984 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
13985
00b960c7
AH
13986 if (info->altivec_size)
13987 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
13988
13989 if (info->vrsave_size)
13990 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
13991
13992 if (info->altivec_padding_size)
13993 fprintf (stderr, "\taltivec_padding_size= %5d\n",
13994 info->altivec_padding_size);
13995
a3170dc6
AH
13996 if (info->spe_padding_size)
13997 fprintf (stderr, "\tspe_padding_size = %5d\n",
13998 info->spe_padding_size);
13999
4697a36c
MM
14000 if (info->cr_size)
14001 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
14002
14003 if (info->save_size)
14004 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
14005
14006 if (info->reg_size != 4)
14007 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
14008
14009 fprintf (stderr, "\n");
9878760c 14010}
71f123ca
FS
14011
14012rtx
a2369ed3 14013rs6000_return_addr (int count, rtx frame)
71f123ca 14014{
a4f6c312
SS
14015 /* Currently we don't optimize very well between prolog and body
14016 code and for PIC code the code can be actually quite bad, so
14017 don't try to be too clever here. */
f1384257 14018 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
14019 {
14020 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
14021
14022 return
14023 gen_rtx_MEM
14024 (Pmode,
14025 memory_address
14026 (Pmode,
14027 plus_constant (copy_to_reg
14028 (gen_rtx_MEM (Pmode,
14029 memory_address (Pmode, frame))),
14030 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
14031 }
14032
8c29550d 14033 cfun->machine->ra_need_lr = 1;
1de43f85 14034 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
14035}
14036
5e1bf043
DJ
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so that it works; note that functions
   with vector parameters are required to have a prototype, so the
   argument type info must be available here.  (The tail recursion case
   can work with vector parameters, but there's no way to distinguish
   here.)  */
4977bab6 14044static bool
a2369ed3 14045rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
14046{
14047 tree type;
4977bab6 14048 if (decl)
5e1bf043
DJ
14049 {
14050 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 14051 {
4977bab6 14052 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
14053 type; type = TREE_CHAIN (type))
14054 {
c15b529f 14055 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 14056 return false;
5e1bf043 14057 }
c4ad648e 14058 }
5e1bf043 14059 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
14060 || ((*targetm.binds_local_p) (decl)
14061 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 14062 {
4977bab6 14063 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
14064
14065 if (!lookup_attribute ("longcall", attr_list)
14066 || lookup_attribute ("shortcall", attr_list))
4977bab6 14067 return true;
2bcc50d0 14068 }
5e1bf043 14069 }
4977bab6 14070 return false;
5e1bf043
DJ
14071}
14072
/* Return NULL if INSN is valid within a low-overhead loop.
   Otherwise return a string describing why the doloop transformation
   cannot be applied.  PowerPC uses the COUNT register for branch on
   table instructions.  */
14076
e7e64a25
AS
14077static const char *
14078rs6000_invalid_within_doloop (rtx insn)
9419649c
DE
14079{
14080 if (CALL_P (insn))
e7e64a25 14081 return "Function call in the loop.";
9419649c
DE
14082
14083 if (JUMP_P (insn)
14084 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
14085 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 14086 return "Computed branch in the loop.";
9419649c 14087
e7e64a25 14088 return NULL;
9419649c
DE
14089}
14090
71f123ca 14091static int
863d938c 14092rs6000_ra_ever_killed (void)
71f123ca
FS
14093{
14094 rtx top;
5e1bf043
DJ
14095 rtx reg;
14096 rtx insn;
71f123ca 14097
dd292d0a 14098 if (current_function_is_thunk)
71f123ca 14099 return 0;
eb0424da 14100
36f7e964
AH
14101 /* regs_ever_live has LR marked as used if any sibcalls are present,
14102 but this should not force saving and restoring in the
14103 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 14104 clobbers LR, so that is inappropriate. */
36f7e964 14105
5e1bf043
DJ
14106 /* Also, the prologue can generate a store into LR that
14107 doesn't really count, like this:
36f7e964 14108
5e1bf043
DJ
14109 move LR->R0
14110 bcl to set PIC register
14111 move LR->R31
14112 move R0->LR
36f7e964
AH
14113
14114 When we're called from the epilogue, we need to avoid counting
14115 this as a store. */
f676971a 14116
71f123ca
FS
14117 push_topmost_sequence ();
14118 top = get_insns ();
14119 pop_topmost_sequence ();
1de43f85 14120 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 14121
5e1bf043
DJ
14122 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
14123 {
14124 if (INSN_P (insn))
14125 {
022123e6
AM
14126 if (CALL_P (insn))
14127 {
14128 if (!SIBLING_CALL_P (insn))
14129 return 1;
14130 }
1de43f85 14131 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 14132 return 1;
36f7e964
AH
14133 else if (set_of (reg, insn) != NULL_RTX
14134 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
14135 return 1;
14136 }
14137 }
14138 return 0;
71f123ca 14139}
4697a36c 14140\f
9ebbca7d 14141/* Emit instructions needed to load the TOC register.
c7ca610e 14142 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 14143 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
14144
14145void
a2369ed3 14146rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 14147{
6fb5fa3c 14148 rtx dest;
1db02437 14149 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 14150
7f970b70 14151 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 14152 {
7f970b70 14153 char buf[30];
e65a3857 14154 rtx lab, tmp1, tmp2, got;
7f970b70
AM
14155
14156 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14157 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14158 if (flag_pic == 2)
14159 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
14160 else
14161 got = rs6000_got_sym ();
14162 tmp1 = tmp2 = dest;
14163 if (!fromprolog)
14164 {
14165 tmp1 = gen_reg_rtx (Pmode);
14166 tmp2 = gen_reg_rtx (Pmode);
14167 }
6fb5fa3c
DB
14168 emit_insn (gen_load_toc_v4_PIC_1 (lab));
14169 emit_move_insn (tmp1,
1de43f85 14170 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
14171 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
14172 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
14173 }
14174 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
14175 {
6fb5fa3c 14176 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 14177 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
14178 }
14179 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
14180 {
14181 char buf[30];
20b71b17
AM
14182 rtx temp0 = (fromprolog
14183 ? gen_rtx_REG (Pmode, 0)
14184 : gen_reg_rtx (Pmode));
20b71b17 14185
20b71b17
AM
14186 if (fromprolog)
14187 {
ccbca5e4 14188 rtx symF, symL;
38c1f2d7 14189
20b71b17
AM
14190 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14191 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 14192
20b71b17
AM
14193 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
14194 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14195
6fb5fa3c
DB
14196 emit_insn (gen_load_toc_v4_PIC_1 (symF));
14197 emit_move_insn (dest,
1de43f85 14198 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 14199 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
14200 }
14201 else
20b71b17
AM
14202 {
14203 rtx tocsym;
20b71b17
AM
14204
14205 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
14206 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
14207 emit_move_insn (dest,
1de43f85 14208 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 14209 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 14210 }
6fb5fa3c 14211 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 14212 }
20b71b17
AM
14213 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
14214 {
14215 /* This is for AIX code running in non-PIC ELF32. */
14216 char buf[30];
14217 rtx realsym;
14218 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14219 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
14220
6fb5fa3c
DB
14221 emit_insn (gen_elf_high (dest, realsym));
14222 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 14223 }
37409796 14224 else
9ebbca7d 14225 {
37409796 14226 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 14227
9ebbca7d 14228 if (TARGET_32BIT)
6fb5fa3c 14229 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 14230 else
6fb5fa3c 14231 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
14232 }
14233}
14234
d1d0c603
JJ
14235/* Emit instructions to restore the link register after determining where
14236 its value has been stored. */
14237
14238void
14239rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
14240{
14241 rs6000_stack_t *info = rs6000_stack_info ();
14242 rtx operands[2];
14243
14244 operands[0] = source;
14245 operands[1] = scratch;
14246
14247 if (info->lr_save_p)
14248 {
14249 rtx frame_rtx = stack_pointer_rtx;
14250 HOST_WIDE_INT sp_offset = 0;
14251 rtx tmp;
14252
14253 if (frame_pointer_needed
14254 || current_function_calls_alloca
14255 || info->total_size > 32767)
14256 {
0be76840 14257 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 14258 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
14259 frame_rtx = operands[1];
14260 }
14261 else if (info->push_p)
14262 sp_offset = info->total_size;
14263
14264 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 14265 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
14266 emit_move_insn (tmp, operands[0]);
14267 }
14268 else
1de43f85 14269 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
14270}
14271
f103e34d
GK
14272static GTY(()) int set = -1;
14273
f676971a 14274int
863d938c 14275get_TOC_alias_set (void)
9ebbca7d 14276{
f103e34d
GK
14277 if (set == -1)
14278 set = new_alias_set ();
14279 return set;
f676971a 14280}
9ebbca7d 14281
c1207243 14282/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
14283 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
14284 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 14285#if TARGET_ELF
3c9eb5f4 14286static int
f676971a 14287uses_TOC (void)
9ebbca7d 14288{
c4501e62 14289 rtx insn;
38c1f2d7 14290
c4501e62
JJ
14291 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
14292 if (INSN_P (insn))
14293 {
14294 rtx pat = PATTERN (insn);
14295 int i;
9ebbca7d 14296
f676971a 14297 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
14298 for (i = 0; i < XVECLEN (pat, 0); i++)
14299 {
14300 rtx sub = XVECEXP (pat, 0, i);
14301 if (GET_CODE (sub) == USE)
14302 {
14303 sub = XEXP (sub, 0);
14304 if (GET_CODE (sub) == UNSPEC
14305 && XINT (sub, 1) == UNSPEC_TOC)
14306 return 1;
14307 }
14308 }
14309 }
14310 return 0;
9ebbca7d 14311}
c954844a 14312#endif
38c1f2d7 14313
9ebbca7d 14314rtx
f676971a 14315create_TOC_reference (rtx symbol)
9ebbca7d 14316{
b3a13419 14317 if (!can_create_pseudo_p ())
6fb5fa3c 14318 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 14319 return gen_rtx_PLUS (Pmode,
a8a05998 14320 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
14321 gen_rtx_CONST (Pmode,
14322 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 14323 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 14324}
38c1f2d7 14325
fc4767bb
JJ
/* If _Unwind_* has been called from within the same module,
   the TOC register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.  */
c7ca610e 14329
9ebbca7d 14330void
863d938c 14331rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
14332{
14333 rtx mem;
14334 rtx stack_top = gen_reg_rtx (Pmode);
14335 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
14336 rtx opcode = gen_reg_rtx (SImode);
14337 rtx tocompare = gen_reg_rtx (SImode);
14338 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 14339
8308679f 14340 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
14341 emit_move_insn (stack_top, mem);
14342
8308679f
DE
14343 mem = gen_frame_mem (Pmode,
14344 gen_rtx_PLUS (Pmode, stack_top,
14345 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 14346 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
14347 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
14348 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 14349 : 0xE8410028, SImode));
9ebbca7d 14350
fc4767bb 14351 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 14352 SImode, NULL_RTX, NULL_RTX,
fc4767bb 14353 no_toc_save_needed);
9ebbca7d 14354
8308679f
DE
14355 mem = gen_frame_mem (Pmode,
14356 gen_rtx_PLUS (Pmode, stack_top,
14357 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
14358 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
14359 emit_label (no_toc_save_needed);
9ebbca7d 14360}
38c1f2d7 14361\f
0be76840
DE
14362/* This ties together stack memory (MEM with an alias set of frame_alias_set)
14363 and the change to the stack pointer. */
ba4828e0 14364
9ebbca7d 14365static void
863d938c 14366rs6000_emit_stack_tie (void)
9ebbca7d 14367{
0be76840
DE
14368 rtx mem = gen_frame_mem (BLKmode,
14369 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 14370
9ebbca7d
GK
14371 emit_insn (gen_stack_tie (mem));
14372}
38c1f2d7 14373
9ebbca7d
GK
14374/* Emit the correct code for allocating stack space, as insns.
14375 If COPY_R12, make sure a copy of the old frame is left in r12.
14376 The generated code may use hard register 0 as a temporary. */
14377
14378static void
a2369ed3 14379rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
38c1f2d7 14380{
9ebbca7d
GK
14381 rtx insn;
14382 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14383 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
14384 rtx todec = gen_int_mode (-size, Pmode);
14385
14386 if (INTVAL (todec) != -size)
14387 {
d4ee4d25 14388 warning (0, "stack frame too large");
61168ff1
RS
14389 emit_insn (gen_trap ());
14390 return;
14391 }
a157febd
GK
14392
14393 if (current_function_limit_stack)
14394 {
14395 if (REG_P (stack_limit_rtx)
f676971a 14396 && REGNO (stack_limit_rtx) > 1
a157febd
GK
14397 && REGNO (stack_limit_rtx) <= 31)
14398 {
5b71a4e7 14399 emit_insn (TARGET_32BIT
9ebbca7d
GK
14400 ? gen_addsi3 (tmp_reg,
14401 stack_limit_rtx,
14402 GEN_INT (size))
14403 : gen_adddi3 (tmp_reg,
14404 stack_limit_rtx,
14405 GEN_INT (size)));
5b71a4e7 14406
9ebbca7d
GK
14407 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14408 const0_rtx));
a157febd
GK
14409 }
14410 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 14411 && TARGET_32BIT
f607bc57 14412 && DEFAULT_ABI == ABI_V4)
a157febd 14413 {
9ebbca7d 14414 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
14415 gen_rtx_PLUS (Pmode,
14416 stack_limit_rtx,
9ebbca7d 14417 GEN_INT (size)));
5b71a4e7 14418
9ebbca7d
GK
14419 emit_insn (gen_elf_high (tmp_reg, toload));
14420 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
14421 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
14422 const0_rtx));
a157febd
GK
14423 }
14424 else
d4ee4d25 14425 warning (0, "stack limit expression is not supported");
a157febd
GK
14426 }
14427
9ebbca7d
GK
14428 if (copy_r12 || ! TARGET_UPDATE)
14429 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
14430
38c1f2d7
MM
14431 if (TARGET_UPDATE)
14432 {
9ebbca7d 14433 if (size > 32767)
38c1f2d7 14434 {
9ebbca7d 14435 /* Need a note here so that try_split doesn't get confused. */
9390387d 14436 if (get_last_insn () == NULL_RTX)
2e040219 14437 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
14438 insn = emit_move_insn (tmp_reg, todec);
14439 try_split (PATTERN (insn), insn, 0);
14440 todec = tmp_reg;
38c1f2d7 14441 }
5b71a4e7
DE
14442
14443 insn = emit_insn (TARGET_32BIT
14444 ? gen_movsi_update (stack_reg, stack_reg,
14445 todec, stack_reg)
c4ad648e 14446 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 14447 todec, stack_reg));
38c1f2d7
MM
14448 }
14449 else
14450 {
5b71a4e7
DE
14451 insn = emit_insn (TARGET_32BIT
14452 ? gen_addsi3 (stack_reg, stack_reg, todec)
14453 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d
GK
14454 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
14455 gen_rtx_REG (Pmode, 12));
14456 }
f676971a 14457
9ebbca7d 14458 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 14459 REG_NOTES (insn) =
9ebbca7d 14460 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 14461 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
14462 gen_rtx_PLUS (Pmode, stack_reg,
14463 GEN_INT (-size))),
14464 REG_NOTES (insn));
14465}
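
/* Illustrative note, not part of the original source: for a small
   32-bit frame with -mupdate in effect, the sequence above typically
   reduces to a single "stwu r1,-size(r1)"; a frame larger than 32767
   bytes first materializes -size in r0 and uses the register-indexed
   update form, and without -mupdate the back chain is stored
   explicitly from the copy of the old stack pointer kept in r12.  */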
14466
a4f6c312
SS
14467/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
14468 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
14469 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
14470 deduce these equivalences by itself so it wasn't necessary to hold
14471 its hand so much. */
9ebbca7d
GK
14472
14473static void
f676971a 14474rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 14475 rtx reg2, rtx rreg)
9ebbca7d
GK
14476{
14477 rtx real, temp;
14478
e56c4463
JL
14479 /* copy_rtx will not make unique copies of registers, so we need to
14480 ensure we don't have unwanted sharing here. */
14481 if (reg == reg2)
14482 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14483
14484 if (reg == rreg)
14485 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
14486
9ebbca7d
GK
14487 real = copy_rtx (PATTERN (insn));
14488
89e7058f
AH
14489 if (reg2 != NULL_RTX)
14490 real = replace_rtx (real, reg2, rreg);
f676971a
EC
14491
14492 real = replace_rtx (real, reg,
9ebbca7d
GK
14493 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
14494 STACK_POINTER_REGNUM),
14495 GEN_INT (val)));
f676971a 14496
9ebbca7d
GK
14497 /* We expect that 'real' is either a SET or a PARALLEL containing
14498 SETs (and possibly other stuff). In a PARALLEL, all the SETs
14499 are important so they all have to be marked RTX_FRAME_RELATED_P. */
14500
14501 if (GET_CODE (real) == SET)
14502 {
14503 rtx set = real;
f676971a 14504
9ebbca7d
GK
14505 temp = simplify_rtx (SET_SRC (set));
14506 if (temp)
14507 SET_SRC (set) = temp;
14508 temp = simplify_rtx (SET_DEST (set));
14509 if (temp)
14510 SET_DEST (set) = temp;
14511 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 14512 {
9ebbca7d
GK
14513 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14514 if (temp)
14515 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 14516 }
38c1f2d7 14517 }
37409796 14518 else
9ebbca7d
GK
14519 {
14520 int i;
37409796
NS
14521
14522 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
14523 for (i = 0; i < XVECLEN (real, 0); i++)
14524 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
14525 {
14526 rtx set = XVECEXP (real, 0, i);
f676971a 14527
9ebbca7d
GK
14528 temp = simplify_rtx (SET_SRC (set));
14529 if (temp)
14530 SET_SRC (set) = temp;
14531 temp = simplify_rtx (SET_DEST (set));
14532 if (temp)
14533 SET_DEST (set) = temp;
14534 if (GET_CODE (SET_DEST (set)) == MEM)
14535 {
14536 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
14537 if (temp)
14538 XEXP (SET_DEST (set), 0) = temp;
14539 }
14540 RTX_FRAME_RELATED_P (set) = 1;
14541 }
14542 }
c19de7aa
AH
14543
14544 if (TARGET_SPE)
14545 real = spe_synthesize_frame_save (real);
14546
9ebbca7d
GK
14547 RTX_FRAME_RELATED_P (insn) = 1;
14548 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14549 real,
14550 REG_NOTES (insn));
38c1f2d7
MM
14551}
14552
c19de7aa
AH
14553/* Given an SPE frame note, return a PARALLEL of SETs with the
14554 original note, plus a synthetic register save. */
14555
14556static rtx
a2369ed3 14557spe_synthesize_frame_save (rtx real)
c19de7aa
AH
14558{
14559 rtx synth, offset, reg, real2;
14560
14561 if (GET_CODE (real) != SET
14562 || GET_MODE (SET_SRC (real)) != V2SImode)
14563 return real;
14564
14565 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
14566 frame related note. The parallel contains a set of the register
41f3a930 14567 being saved, and another set to a synthetic register (n+1200).
c19de7aa
AH
14568 This is so we can differentiate between 64-bit and 32-bit saves.
14569 Words cannot describe this nastiness. */
14570
37409796
NS
14571 gcc_assert (GET_CODE (SET_DEST (real)) == MEM
14572 && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
14573 && GET_CODE (SET_SRC (real)) == REG);
c19de7aa
AH
14574
14575 /* Transform:
14576 (set (mem (plus (reg x) (const y)))
14577 (reg z))
14578 into:
14579 (set (mem (plus (reg x) (const y+4)))
41f3a930 14580 (reg z+1200))
c19de7aa
AH
14581 */
14582
14583 real2 = copy_rtx (real);
14584 PUT_MODE (SET_DEST (real2), SImode);
14585 reg = SET_SRC (real2);
14586 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
14587 synth = copy_rtx (real2);
14588
14589 if (BYTES_BIG_ENDIAN)
14590 {
14591 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
14592 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
14593 }
14594
14595 reg = SET_SRC (synth);
41f3a930 14596
c19de7aa 14597 synth = replace_rtx (synth, reg,
41f3a930 14598 gen_rtx_REG (SImode, REGNO (reg) + 1200));
c19de7aa
AH
14599
14600 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
14601 synth = replace_rtx (synth, offset,
14602 GEN_INT (INTVAL (offset)
14603 + (BYTES_BIG_ENDIAN ? 0 : 4)));
14604
14605 RTX_FRAME_RELATED_P (synth) = 1;
14606 RTX_FRAME_RELATED_P (real2) = 1;
14607 if (BYTES_BIG_ENDIAN)
14608 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
14609 else
14610 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
14611
14612 return real;
14613}
14614
00b960c7
AH
14615/* Returns an insn that has a vrsave set operation with the
14616 appropriate CLOBBERs. */
14617
14618static rtx
a2369ed3 14619generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
14620{
14621 int nclobs, i;
14622 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 14623 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 14624
a004eb82
AH
14625 clobs[0]
14626 = gen_rtx_SET (VOIDmode,
14627 vrsave,
14628 gen_rtx_UNSPEC_VOLATILE (SImode,
14629 gen_rtvec (2, reg, vrsave),
3aca4bff 14630 UNSPECV_SET_VRSAVE));
00b960c7
AH
14631
14632 nclobs = 1;
14633
9aa86737
AH
14634 /* We need to clobber the registers in the mask so the scheduler
14635 does not move sets to VRSAVE before sets of AltiVec registers.
14636
14637 However, if the function receives nonlocal gotos, reload will set
14638 all call saved registers live. We will end up with:
14639
14640 (set (reg 999) (mem))
14641 (parallel [ (set (reg vrsave) (unspec blah))
14642 (clobber (reg 999))])
14643
14644 The clobber will cause the store into reg 999 to be dead, and
14645 flow will attempt to delete an epilogue insn. In this case, we
14646 need an unspec use/set of the register. */
00b960c7
AH
14647
14648 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 14649 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
14650 {
14651 if (!epiloguep || call_used_regs [i])
14652 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
14653 gen_rtx_REG (V4SImode, i));
14654 else
14655 {
14656 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
14657
14658 clobs[nclobs++]
a004eb82
AH
14659 = gen_rtx_SET (VOIDmode,
14660 reg,
14661 gen_rtx_UNSPEC (V4SImode,
14662 gen_rtvec (1, reg), 27));
9aa86737
AH
14663 }
14664 }
00b960c7
AH
14665
14666 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
14667
14668 for (i = 0; i < nclobs; ++i)
14669 XVECEXP (insn, 0, i) = clobs[i];
14670
14671 return insn;
14672}
14673
89e7058f
AH
14674/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
14675 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
14676
14677static void
f676971a 14678emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 14679 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
14680{
14681 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
14682 rtx replacea, replaceb;
14683
14684 int_rtx = GEN_INT (offset);
14685
14686 /* Some cases that need register indexed addressing. */
14687 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4d4cbc0e 14688 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
14689 || (TARGET_SPE_ABI
14690 && SPE_VECTOR_MODE (mode)
14691 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
14692 {
14693 /* Whomever calls us must make sure r11 is available in the
c4ad648e 14694 flow path of instructions in the prologue. */
89e7058f
AH
14695 offset_rtx = gen_rtx_REG (Pmode, 11);
14696 emit_move_insn (offset_rtx, int_rtx);
14697
14698 replacea = offset_rtx;
14699 replaceb = int_rtx;
14700 }
14701 else
14702 {
14703 offset_rtx = int_rtx;
14704 replacea = NULL_RTX;
14705 replaceb = NULL_RTX;
14706 }
14707
14708 reg = gen_rtx_REG (mode, regno);
14709 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 14710 mem = gen_frame_mem (mode, addr);
89e7058f
AH
14711
14712 insn = emit_move_insn (mem, reg);
14713
14714 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
14715}
14716
a3170dc6
AH
14717/* Emit an offset memory reference suitable for a frame store, while
14718 converting to a valid addressing mode. */
14719
14720static rtx
a2369ed3 14721gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
14722{
14723 rtx int_rtx, offset_rtx;
14724
14725 int_rtx = GEN_INT (offset);
14726
4d4cbc0e
AH
14727 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
14728 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
14729 {
14730 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
14731 emit_move_insn (offset_rtx, int_rtx);
14732 }
14733 else
14734 offset_rtx = int_rtx;
14735
0be76840 14736 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
14737}
14738
/* Look for user-defined global regs.  We should not save and restore these,
   and cannot use stmw/lmw if there are any in their range.  */
14741
14742static bool
14743no_global_regs_above (int first_greg)
14744{
14745 int i;
14746 for (i = 0; i < 32 - first_greg; i++)
14747 if (global_regs[first_greg + i])
14748 return false;
14749 return true;
14750}
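
/* Illustrative example, not in the original source: a global register
   variable such as "register int counter asm ("r30");" sets
   global_regs[30], so no_global_regs_above (28) returns false and the
   prologue below falls back to individual saves instead of a single
   stmw/lmw covering r28..r31.  */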
14751
699c914a
MS
14752#ifndef TARGET_FIX_AND_CONTINUE
14753#define TARGET_FIX_AND_CONTINUE 0
14754#endif
14755
52ff33d0
NF
14756/* Determine whether the gp REG is really used. */
14757
14758static bool
14759rs6000_reg_live_or_pic_offset_p (int reg)
14760{
6fb5fa3c 14761 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
14762 && (!call_used_regs[reg]
14763 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14764 && TARGET_TOC && TARGET_MINIMAL_TOC)))
14765 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14766 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14767 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
14768}
14769
9ebbca7d
GK
14770/* Emit function prologue as insns. */
14771
9878760c 14772void
863d938c 14773rs6000_emit_prologue (void)
9878760c 14774{
4697a36c 14775 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 14776 enum machine_mode reg_mode = Pmode;
327e5343 14777 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
14778 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
14779 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
14780 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 14781 rtx cr_save_rtx = NULL_RTX;
9ebbca7d
GK
14782 rtx insn;
14783 int saving_FPRs_inline;
14784 int using_store_multiple;
14785 HOST_WIDE_INT sp_offset = 0;
f676971a 14786
699c914a
MS
14787 if (TARGET_FIX_AND_CONTINUE)
14788 {
14789 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 14790 address by modifying the first 5 instructions of the function
699c914a
MS
14791 to branch to the overriding function. This is necessary to
14792 permit function pointers that point to the old function to
14793 actually forward to the new function. */
14794 emit_insn (gen_nop ());
14795 emit_insn (gen_nop ());
de2ab0ca 14796 emit_insn (gen_nop ());
699c914a
MS
14797 emit_insn (gen_nop ());
14798 emit_insn (gen_nop ());
14799 }
14800
14801 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
14802 {
14803 reg_mode = V2SImode;
14804 reg_size = 8;
14805 }
a3170dc6 14806
9ebbca7d 14807 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
14808 && (!TARGET_SPE_ABI
14809 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
14810 && info->first_gp_reg_save < 31
14811 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 14812 saving_FPRs_inline = (info->first_fp_reg_save == 64
8c29550d 14813 || FP_SAVE_INLINE (info->first_fp_reg_save)
acd0b319 14814 || current_function_calls_eh_return
8c29550d 14815 || cfun->machine->ra_need_lr);
9ebbca7d
GK
14816
14817 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
14818 if (! WORLD_SAVE_P (info)
14819 && info->push_p
acd0b319
AM
14820 && (DEFAULT_ABI == ABI_V4
14821 || current_function_calls_eh_return))
9ebbca7d
GK
14822 {
14823 if (info->total_size < 32767)
14824 sp_offset = info->total_size;
14825 else
14826 frame_reg_rtx = frame_ptr_rtx;
f676971a 14827 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
14828 (frame_reg_rtx != sp_reg_rtx
14829 && (info->cr_save_p
14830 || info->lr_save_p
14831 || info->first_fp_reg_save < 64
14832 || info->first_gp_reg_save < 32
14833 )));
14834 if (frame_reg_rtx != sp_reg_rtx)
14835 rs6000_emit_stack_tie ();
14836 }
14837
d62294f5 14838 /* Handle world saves specially here. */
f57fe068 14839 if (WORLD_SAVE_P (info))
d62294f5
FJ
14840 {
14841 int i, j, sz;
14842 rtx treg;
14843 rtvec p;
22fa69da 14844 rtx reg0;
d62294f5
FJ
14845
14846 /* save_world expects lr in r0. */
22fa69da 14847 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 14848 if (info->lr_save_p)
c4ad648e 14849 {
22fa69da 14850 insn = emit_move_insn (reg0,
1de43f85 14851 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
14852 RTX_FRAME_RELATED_P (insn) = 1;
14853 }
d62294f5
FJ
14854
14855 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 14856 assumptions about the offsets of various bits of the stack
992d08b1 14857 frame. */
37409796
NS
14858 gcc_assert (info->gp_save_offset == -220
14859 && info->fp_save_offset == -144
14860 && info->lr_save_offset == 8
14861 && info->cr_save_offset == 4
14862 && info->push_p
14863 && info->lr_save_p
14864 && (!current_function_calls_eh_return
14865 || info->ehrd_offset == -432)
14866 && info->vrsave_save_offset == -224
22fa69da 14867 && info->altivec_save_offset == -416);
d62294f5
FJ
14868
14869 treg = gen_rtx_REG (SImode, 11);
14870 emit_move_insn (treg, GEN_INT (-info->total_size));
14871
14872 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 14873 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
14874
14875 /* Preserve CR2 for save_world prologues */
22fa69da 14876 sz = 5;
d62294f5
FJ
14877 sz += 32 - info->first_gp_reg_save;
14878 sz += 64 - info->first_fp_reg_save;
14879 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
14880 p = rtvec_alloc (sz);
14881 j = 0;
14882 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 14883 gen_rtx_REG (SImode,
1de43f85 14884 LR_REGNO));
d62294f5 14885 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
14886 gen_rtx_SYMBOL_REF (Pmode,
14887 "*save_world"));
d62294f5 14888 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
14889 properly. */
14890 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
14891 {
14892 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
14893 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14894 GEN_INT (info->fp_save_offset
14895 + sp_offset + 8 * i));
0be76840 14896 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
14897
14898 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14899 }
d62294f5 14900 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
14901 {
14902 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
14903 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14904 GEN_INT (info->altivec_save_offset
14905 + sp_offset + 16 * i));
0be76840 14906 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
14907
14908 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14909 }
d62294f5 14910 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
14911 {
14912 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
14913 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14914 GEN_INT (info->gp_save_offset
14915 + sp_offset + reg_size * i));
0be76840 14916 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14917
14918 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14919 }
14920
14921 {
14922 /* CR register traditionally saved as CR2. */
14923 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
14924 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14925 GEN_INT (info->cr_save_offset
14926 + sp_offset));
0be76840 14927 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
14928
14929 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
14930 }
22fa69da
GK
14931 /* Explain the use of R0. */
14932 if (info->lr_save_p)
14933 {
14934 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14935 GEN_INT (info->lr_save_offset
14936 + sp_offset));
14937 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 14938
22fa69da
GK
14939 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
14940 }
14941 /* Explain what happens to the stack pointer. */
14942 {
14943 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
14944 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
14945 }
d62294f5
FJ
14946
14947 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
14948 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
14949 treg, GEN_INT (-info->total_size));
14950 sp_offset = info->total_size;
d62294f5
FJ
14951 }
14952
9ebbca7d 14953 /* If we use the link register, get it into r0. */
f57fe068 14954 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 14955 {
52ff33d0
NF
14956 rtx addr, reg, mem;
14957
f8a57be8 14958 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 14959 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 14960 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
14961
14962 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
14963 GEN_INT (info->lr_save_offset + sp_offset));
14964 reg = gen_rtx_REG (Pmode, 0);
14965 mem = gen_rtx_MEM (Pmode, addr);
14966 /* This should not be in rs6000_sr_alias_set, because of
14967 __builtin_return_address. */
14968
14969 insn = emit_move_insn (mem, reg);
14970 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
14971 NULL_RTX, NULL_RTX);
f8a57be8 14972 }
9ebbca7d
GK
14973
14974 /* If we need to save CR, put it into r12. */
f57fe068 14975 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 14976 {
f8a57be8 14977 rtx set;
f676971a 14978
9ebbca7d 14979 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
14980 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
14981 RTX_FRAME_RELATED_P (insn) = 1;
14982 /* Now, there's no way that dwarf2out_frame_debug_expr is going
14983 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
14984 But that's OK. All we have to do is specify that _one_ condition
14985 code register is saved in this stack slot. The thrower's epilogue
14986 will then restore all the call-saved registers.
14987 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
14988 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
14989 gen_rtx_REG (SImode, CR2_REGNO));
14990 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
14991 set,
14992 REG_NOTES (insn));
9ebbca7d
GK
14993 }
14994
a4f6c312
SS
14995 /* Do any required saving of FPRs. If only one or two to save, do
 14996 it ourselves. Otherwise, call a function. */
f57fe068 14997 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
14998 {
14999 int i;
15000 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15001 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 15002 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
15003 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
15004 info->first_fp_reg_save + i,
15005 info->fp_save_offset + sp_offset + 8 * i,
15006 info->total_size);
9ebbca7d 15007 }
f57fe068 15008 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
9ebbca7d
GK
15009 {
15010 int i;
15011 char rname[30];
520a57c8 15012 const char *alloc_rname;
9ebbca7d
GK
15013 rtvec p;
15014 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
f676971a
EC
15015
15016 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
15017 gen_rtx_REG (Pmode,
1de43f85 15018 LR_REGNO));
9ebbca7d
GK
15019 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
15020 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
a8a05998 15021 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15022 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15023 gen_rtx_SYMBOL_REF (Pmode,
15024 alloc_rname));
15025 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15026 {
15027 rtx addr, reg, mem;
15028 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15029 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a 15030 GEN_INT (info->fp_save_offset
9ebbca7d 15031 + sp_offset + 8*i));
0be76840 15032 mem = gen_frame_mem (DFmode, addr);
9ebbca7d
GK
15033
15034 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
15035 }
15036 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15037 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
15038 NULL_RTX, NULL_RTX);
15039 }
b6c9286a 15040
9ebbca7d
GK
15041 /* Save GPRs. This is done as a PARALLEL if we are using
15042 the store-multiple instructions. */
f57fe068 15043 if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 15044 {
308c142a 15045 rtvec p;
9ebbca7d
GK
15046 int i;
15047 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
15048 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15049 {
15050 rtx addr, reg, mem;
15051 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
15052 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15053 GEN_INT (info->gp_save_offset
15054 + sp_offset
9ebbca7d 15055 + reg_size * i));
0be76840 15056 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
15057
15058 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
15059 }
15060 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 15061 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 15062 NULL_RTX, NULL_RTX);
b6c9286a 15063 }
52ff33d0
NF
15064 else if (!WORLD_SAVE_P (info)
15065 && TARGET_SPE_ABI
15066 && info->spe_64bit_regs_used != 0
15067 && info->first_gp_reg_save != 32)
15068 {
15069 int i;
15070 rtx spe_save_area_ptr;
15071 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
6fb5fa3c 15072 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
52ff33d0
NF
15073 && !call_used_regs[STATIC_CHAIN_REGNUM]);
15074
15075 /* Determine whether we can address all of the registers that need
15076 to be saved with an offset from the stack pointer that fits in
15077 the small const field for SPE memory instructions. */
15078 int spe_regs_addressable_via_sp
15079 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15080 + (32 - info->first_gp_reg_save - 1) * reg_size);
15081 int spe_offset;
15082
15083 if (spe_regs_addressable_via_sp)
15084 {
15085 spe_save_area_ptr = sp_reg_rtx;
15086 spe_offset = info->spe_gp_save_offset + sp_offset;
15087 }
15088 else
15089 {
15090 /* Make r11 point to the start of the SPE save area. We need
15091 to be careful here if r11 is holding the static chain. If
15092 it is, then temporarily save it in r0. We would use r0 as
15093 our base register here, but when r0 is used as the base register in
 15094 loads and stores the hardware treats it as a literal zero rather than
 15095 the register's contents, which is not what we want. */
15096 if (using_static_chain_p)
15097 {
15098 rtx r0 = gen_rtx_REG (Pmode, 0);
15099
15100 gcc_assert (info->first_gp_reg_save > 11);
15101
15102 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
15103 }
15104
15105 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15106 emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
15107 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15108
15109 spe_offset = 0;
15110 }
15111
15112 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15113 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15114 {
15115 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15116 rtx offset, addr, mem;
15117
15118 /* We're doing all this to ensure that the offset fits into
15119 the immediate offset field of 'evstdd'. */
15120 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
15121
15122 offset = GEN_INT (reg_size * i + spe_offset);
15123 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15124 mem = gen_rtx_MEM (V2SImode, addr);
15125
15126 insn = emit_move_insn (mem, reg);
15127
15128 rs6000_frame_related (insn, spe_save_area_ptr,
15129 info->spe_gp_save_offset
15130 + sp_offset + reg_size * i,
15131 offset, const0_rtx);
15132 }
15133
15134 /* Move the static chain pointer back. */
15135 if (using_static_chain_p && !spe_regs_addressable_via_sp)
15136 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
15137 }
f57fe068 15138 else if (!WORLD_SAVE_P (info))
b6c9286a 15139 {
9ebbca7d
GK
15140 int i;
15141 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
15142 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15143 {
15144 rtx addr, reg, mem;
15145 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 15146
52ff33d0
NF
15147 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15148 GEN_INT (info->gp_save_offset
15149 + sp_offset
15150 + reg_size * i));
15151 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 15152
52ff33d0
NF
15153 insn = emit_move_insn (mem, reg);
15154 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15155 NULL_RTX, NULL_RTX);
15156 }
9ebbca7d
GK
15157 }
15158
83720594
RH
15159 /* ??? There's no need to emit actual instructions here, but it's the
15160 easiest way to get the frame unwind information emitted. */
22fa69da 15161 if (current_function_calls_eh_return)
83720594 15162 {
78e1b90d
DE
15163 unsigned int i, regno;
15164
fc4767bb
JJ
15165 /* In the AIX ABI we need to pretend we save r2 here. */
15166 if (TARGET_AIX)
15167 {
15168 rtx addr, reg, mem;
15169
15170 reg = gen_rtx_REG (reg_mode, 2);
15171 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15172 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15173 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15174
15175 insn = emit_move_insn (mem, reg);
f676971a 15176 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
15177 NULL_RTX, NULL_RTX);
15178 PATTERN (insn) = gen_blockage ();
15179 }
15180
83720594
RH
15181 for (i = 0; ; ++i)
15182 {
83720594
RH
15183 regno = EH_RETURN_DATA_REGNO (i);
15184 if (regno == INVALID_REGNUM)
15185 break;
15186
89e7058f
AH
15187 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
15188 info->ehrd_offset + sp_offset
15189 + reg_size * (int) i,
15190 info->total_size);
83720594
RH
15191 }
15192 }
15193
9ebbca7d 15194 /* Save CR if we use any that must be preserved. */
f57fe068 15195 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
15196 {
15197 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15198 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15199 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
15200 /* See the large comment above about why CR2_REGNO is used. */
15201 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 15202
9ebbca7d
GK
15203 /* If r12 was used to hold the original sp, copy cr into r0 now
15204 that it's free. */
15205 if (REGNO (frame_reg_rtx) == 12)
15206 {
f8a57be8
GK
15207 rtx set;
15208
9ebbca7d 15209 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
15210 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
15211 RTX_FRAME_RELATED_P (insn) = 1;
15212 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
15213 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15214 set,
15215 REG_NOTES (insn));
f676971a 15216
9ebbca7d
GK
15217 }
15218 insn = emit_move_insn (mem, cr_save_rtx);
15219
f676971a 15220 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 15221 NULL_RTX, NULL_RTX);
9ebbca7d
GK
15222 }
15223
f676971a 15224 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 15225 for which it was done previously. */
f57fe068 15226 if (!WORLD_SAVE_P (info) && info->push_p
fc4767bb 15227 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
2b2c2fe5 15228 {
bcb2d701 15229 if (info->total_size < 32767)
2b2c2fe5 15230 sp_offset = info->total_size;
bcb2d701
EC
15231 else
15232 frame_reg_rtx = frame_ptr_rtx;
15233 rs6000_emit_allocate_stack (info->total_size,
15234 (frame_reg_rtx != sp_reg_rtx
15235 && ((info->altivec_size != 0)
15236 || (info->vrsave_mask != 0)
15237 )));
15238 if (frame_reg_rtx != sp_reg_rtx)
15239 rs6000_emit_stack_tie ();
2b2c2fe5 15240 }
9ebbca7d
GK
15241
15242 /* Set frame pointer, if needed. */
15243 if (frame_pointer_needed)
15244 {
7d5175e1 15245 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
15246 sp_reg_rtx);
15247 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 15248 }
9878760c 15249
2b2c2fe5
EC
15250 /* Save AltiVec registers if needed. Save here because the red zone does
15251 not include AltiVec registers. */
15252 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15253 {
15254 int i;
15255
15256 /* There should be a non-inline version of this, for when we
15257 are saving lots of vector registers. */
15258 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15259 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15260 {
15261 rtx areg, savereg, mem;
15262 int offset;
15263
15264 offset = info->altivec_save_offset + sp_offset
15265 + 16 * (i - info->first_altivec_reg_save);
15266
15267 savereg = gen_rtx_REG (V4SImode, i);
15268
15269 areg = gen_rtx_REG (Pmode, 0);
15270 emit_move_insn (areg, GEN_INT (offset));
15271
15272 /* AltiVec addressing mode is [reg+reg]. */
15273 mem = gen_frame_mem (V4SImode,
15274 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
15275
15276 insn = emit_move_insn (mem, savereg);
15277
15278 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
15279 areg, GEN_INT (offset));
15280 }
15281 }
15282
15283 /* VRSAVE is a bit vector representing which AltiVec registers
15284 are used. The OS uses this to determine which vector
15285 registers to save on a context switch. We need to save
15286 VRSAVE on the stack frame, add whatever AltiVec registers we
15287 used in this function, and do the corresponding magic in the
15288 epilogue. */
15289
15290 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
15291 && info->vrsave_mask != 0)
15292 {
15293 rtx reg, mem, vrsave;
15294 int offset;
15295
15296 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
15297 as frame_reg_rtx and r11 as the static chain pointer for
15298 nested functions. */
15299 reg = gen_rtx_REG (SImode, 0);
15300 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15301 if (TARGET_MACHO)
15302 emit_insn (gen_get_vrsave_internal (reg));
15303 else
15304 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
15305
15306 if (!WORLD_SAVE_P (info))
15307 {
15308 /* Save VRSAVE. */
15309 offset = info->vrsave_save_offset + sp_offset;
15310 mem = gen_frame_mem (SImode,
15311 gen_rtx_PLUS (Pmode, frame_reg_rtx,
15312 GEN_INT (offset)));
15313 insn = emit_move_insn (mem, reg);
15314 }
15315
15316 /* Include the registers in the mask. */
15317 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
15318
15319 insn = emit_insn (generate_set_vrsave (reg, info, 0));
15320 }
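  /* Editor's note, not original source: as a concrete illustration of the
     VRSAVE comment above, if this function used only v20 and v31 then
     info->vrsave_mask has just the two bits for those registers set.  The
     code above copies VRSAVE into a GPR (r0), stores the old value at
     vrsave_save_offset (in the non-world-save case) so the epilogue can
     restore it, ORs in the mask so the OS will preserve v20 and v31 across
     context switches, and writes the result back with generate_set_vrsave.  */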
15321
1db02437 15322 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 15323 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
15324 || (DEFAULT_ABI == ABI_V4
15325 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 15326 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
15327 {
15328 /* If emit_load_toc_table will use the link register, we need to save
15329 it. We use R12 for this purpose because emit_load_toc_table
15330 can use register 0. This allows us to use a plain 'blr' to return
15331 from the procedure more often. */
15332 int save_LR_around_toc_setup = (TARGET_ELF
15333 && DEFAULT_ABI != ABI_AIX
15334 && flag_pic
15335 && ! info->lr_save_p
15336 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
15337 if (save_LR_around_toc_setup)
15338 {
1de43f85 15339 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 15340
c4ad648e 15341 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 15342 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 15343
c4ad648e 15344 rs6000_emit_load_toc_table (TRUE);
f8a57be8 15345
c4ad648e 15346 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
15347 RTX_FRAME_RELATED_P (insn) = 1;
15348 }
15349 else
15350 rs6000_emit_load_toc_table (TRUE);
15351 }
ee890fe2 15352
fcce224d 15353#if TARGET_MACHO
ee890fe2
SS
15354 if (DEFAULT_ABI == ABI_DARWIN
15355 && flag_pic && current_function_uses_pic_offset_table)
15356 {
1de43f85 15357 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 15358 rtx src = machopic_function_base_sym ();
ee890fe2 15359
6d0a8091
DJ
15360 /* Save and restore LR locally around this call (in R0). */
15361 if (!info->lr_save_p)
6fb5fa3c 15362 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 15363
6fb5fa3c 15364 emit_insn (gen_load_macho_picbase (src));
ee890fe2 15365
6fb5fa3c
DB
15366 emit_move_insn (gen_rtx_REG (Pmode,
15367 RS6000_PIC_OFFSET_TABLE_REGNUM),
15368 lr);
6d0a8091
DJ
15369
15370 if (!info->lr_save_p)
6fb5fa3c 15371 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 15372 }
fcce224d 15373#endif
9ebbca7d
GK
15374}
15375
9ebbca7d 15376/* Write function prologue. */
a4f6c312 15377
08c148a8 15378static void
f676971a 15379rs6000_output_function_prologue (FILE *file,
a2369ed3 15380 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
15381{
15382 rs6000_stack_t *info = rs6000_stack_info ();
15383
4697a36c
MM
15384 if (TARGET_DEBUG_STACK)
15385 debug_stack_info (info);
9878760c 15386
a4f6c312
SS
15387 /* Write .extern for any function we will call to save and restore
15388 fp values. */
15389 if (info->first_fp_reg_save < 64
15390 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 15391 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 15392 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
a4f6c312
SS
15393 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
15394 RESTORE_FP_SUFFIX);
9878760c 15395
c764f757
RK
15396 /* Write .extern for AIX common mode routines, if needed. */
15397 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
15398 {
f6709c70
JW
15399 fputs ("\t.extern __mulh\n", file);
15400 fputs ("\t.extern __mull\n", file);
15401 fputs ("\t.extern __divss\n", file);
15402 fputs ("\t.extern __divus\n", file);
15403 fputs ("\t.extern __quoss\n", file);
15404 fputs ("\t.extern __quous\n", file);
c764f757
RK
15405 common_mode_defined = 1;
15406 }
9878760c 15407
9ebbca7d 15408 if (! HAVE_prologue)
979721f8 15409 {
9ebbca7d 15410 start_sequence ();
9dda4cc8 15411
a4f6c312
SS
15412 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
15413 the "toplevel" insn chain. */
2e040219 15414 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15415 rs6000_emit_prologue ();
2e040219 15416 emit_note (NOTE_INSN_DELETED);
178c3eff 15417
a3c9585f 15418 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15419 {
15420 rtx insn;
15421 unsigned addr = 0;
15422 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15423 {
15424 INSN_ADDRESSES_NEW (insn, addr);
15425 addr += 4;
15426 }
15427 }
9dda4cc8 15428
9ebbca7d 15429 if (TARGET_DEBUG_STACK)
a4f6c312 15430 debug_rtx_list (get_insns (), 100);
c9d691e9 15431 final (get_insns (), file, FALSE);
9ebbca7d 15432 end_sequence ();
979721f8
MM
15433 }
15434
9ebbca7d
GK
15435 rs6000_pic_labelno++;
15436}
f676971a 15437
9ebbca7d 15438/* Emit function epilogue as insns.
9878760c 15439
9ebbca7d
GK
15440 At present, dwarf2out_frame_debug_expr doesn't understand
15441 register restores, so we don't bother setting RTX_FRAME_RELATED_P
15442 anywhere in the epilogue. Most of the insns below would in any case
15443 need special notes to explain where r11 is in relation to the stack. */
9878760c 15444
9ebbca7d 15445void
a2369ed3 15446rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
15447{
15448 rs6000_stack_t *info;
15449 int restoring_FPRs_inline;
15450 int using_load_multiple;
d296e02e 15451 int using_mtcr_multiple;
9ebbca7d
GK
15452 int use_backchain_to_restore_sp;
15453 int sp_offset = 0;
15454 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
15455 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 15456 enum machine_mode reg_mode = Pmode;
327e5343 15457 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15458 int i;
15459
c19de7aa
AH
15460 info = rs6000_stack_info ();
15461
15462 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
15463 {
15464 reg_mode = V2SImode;
15465 reg_size = 8;
15466 }
15467
9ebbca7d 15468 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
c19de7aa
AH
15469 && (!TARGET_SPE_ABI
15470 || info->spe_64bit_regs_used == 0)
6d0a8091
DJ
15471 && info->first_gp_reg_save < 31
15472 && no_global_regs_above (info->first_gp_reg_save));
9ebbca7d 15473 restoring_FPRs_inline = (sibcall
83720594 15474 || current_function_calls_eh_return
9ebbca7d
GK
15475 || info->first_fp_reg_save == 64
15476 || FP_SAVE_INLINE (info->first_fp_reg_save));
f676971a 15477 use_backchain_to_restore_sp = (frame_pointer_needed
9ebbca7d
GK
15478 || current_function_calls_alloca
15479 || info->total_size > 32767);
d296e02e 15480 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
15481 || rs6000_cpu == PROCESSOR_PPC603
15482 || rs6000_cpu == PROCESSOR_PPC750
15483 || optimize_size);
15484
f57fe068 15485 if (WORLD_SAVE_P (info))
d62294f5
FJ
15486 {
15487 int i, j;
15488 char rname[30];
15489 const char *alloc_rname;
15490 rtvec p;
15491
15492 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
15493 stack slot (which is not likely to be our caller).
15494 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
15495 rest_world is similar, except any R10 parameter is ignored.
15496 The exception-handling stuff that was here in 2.95 is no
15497 longer necessary. */
d62294f5
FJ
15498
15499 p = rtvec_alloc (9
15500 + 1
f676971a 15501 + 32 - info->first_gp_reg_save
c4ad648e
AM
15502 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
15503 + 63 + 1 - info->first_fp_reg_save);
d62294f5 15504
c4ad648e
AM
15505 strcpy (rname, ((current_function_calls_eh_return) ?
15506 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
15507 alloc_rname = ggc_strdup (rname);
15508
15509 j = 0;
15510 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
15511 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 15512 gen_rtx_REG (Pmode,
1de43f85 15513 LR_REGNO));
d62294f5 15514 RTVEC_ELT (p, j++)
c4ad648e 15515 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 15516 /* The instruction pattern requires a clobber here;
c4ad648e 15517 it is shared with the restVEC helper. */
d62294f5 15518 RTVEC_ELT (p, j++)
c4ad648e 15519 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
15520
15521 {
c4ad648e
AM
15522 /* CR register traditionally saved as CR2. */
15523 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
15524 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15525 GEN_INT (info->cr_save_offset));
0be76840 15526 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15527
15528 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
15529 }
15530
15531 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
15532 {
15533 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
15534 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15535 GEN_INT (info->gp_save_offset
15536 + reg_size * i));
0be76840 15537 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
15538
15539 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15540 }
d62294f5 15541 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
15542 {
15543 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
15544 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15545 GEN_INT (info->altivec_save_offset
15546 + 16 * i));
0be76840 15547 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
15548
15549 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15550 }
d62294f5 15551 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
15552 {
15553 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
15554 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15555 GEN_INT (info->fp_save_offset
15556 + 8 * i));
0be76840 15557 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
15558
15559 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
15560 }
d62294f5 15561 RTVEC_ELT (p, j++)
c4ad648e 15562 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 15563 RTVEC_ELT (p, j++)
c4ad648e 15564 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 15565 RTVEC_ELT (p, j++)
c4ad648e 15566 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 15567 RTVEC_ELT (p, j++)
c4ad648e 15568 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 15569 RTVEC_ELT (p, j++)
c4ad648e 15570 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
15571 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
15572
15573 return;
15574 }
15575
2b2c2fe5 15576 /* Set sp_offset based on the stack push from the prologue. */
bcb2d701 15577 if (info->total_size < 32767)
2b2c2fe5 15578 sp_offset = info->total_size;
f676971a 15579
9aa86737
AH
15580 /* Restore AltiVec registers if needed. */
15581 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
15582 {
15583 int i;
15584
15585 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
15586 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15587 {
15588 rtx addr, areg, mem;
15589
15590 areg = gen_rtx_REG (Pmode, 0);
15591 emit_move_insn
15592 (areg, GEN_INT (info->altivec_save_offset
15593 + sp_offset
15594 + 16 * (i - info->first_altivec_reg_save)));
15595
15596 /* AltiVec addressing mode is [reg+reg]. */
15597 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 15598 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
15599
15600 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
15601 }
15602 }
15603
15604 /* Restore VRSAVE if needed. */
44688022 15605 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
4d774ff8 15606 && info->vrsave_mask != 0)
9aa86737
AH
15607 {
15608 rtx addr, mem, reg;
15609
15610 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15611 GEN_INT (info->vrsave_save_offset + sp_offset));
0be76840 15612 mem = gen_frame_mem (SImode, addr);
9aa86737
AH
15613 reg = gen_rtx_REG (SImode, 12);
15614 emit_move_insn (reg, mem);
15615
15616 emit_insn (generate_set_vrsave (reg, info, 1));
15617 }
15618
2b2c2fe5
EC
15619 sp_offset = 0;
15620
15621 /* If we have a frame pointer, a call to alloca, or a large stack
15622 frame, restore the old stack pointer using the backchain. Otherwise,
15623 we know what size to update it with. */
15624 if (use_backchain_to_restore_sp)
15625 {
15626 /* Under V.4, don't reset the stack pointer until after we're done
15627 loading the saved registers. */
15628 if (DEFAULT_ABI == ABI_V4)
15629 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
15630
15631 emit_move_insn (frame_reg_rtx,
15632 gen_rtx_MEM (Pmode, sp_reg_rtx));
15633 }
15634 else if (info->push_p)
15635 {
15636 if (DEFAULT_ABI == ABI_V4
15637 || current_function_calls_eh_return)
15638 sp_offset = info->total_size;
15639 else
15640 {
15641 emit_insn (TARGET_32BIT
15642 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15643 GEN_INT (info->total_size))
15644 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15645 GEN_INT (info->total_size)));
15646 }
15647 }
15648
9ebbca7d
GK
15649 /* Get the old lr if we saved it. */
15650 if (info->lr_save_p)
b6c9286a 15651 {
a3170dc6
AH
15652 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
15653 info->lr_save_offset + sp_offset);
ba4828e0 15654
9ebbca7d 15655 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 15656 }
f676971a 15657
9ebbca7d
GK
15658 /* Get the old cr if we saved it. */
15659 if (info->cr_save_p)
15660 {
15661 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15662 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 15663 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 15664
9ebbca7d
GK
15665 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
15666 }
f676971a 15667
9ebbca7d 15668 /* Set LR here to try to overlap restores below. */
4697a36c 15669 if (info->lr_save_p)
1de43f85 15670 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 15671 gen_rtx_REG (Pmode, 0));
f676971a 15672
83720594
RH
15673 /* Load exception handler data registers, if needed. */
15674 if (current_function_calls_eh_return)
15675 {
78e1b90d
DE
15676 unsigned int i, regno;
15677
fc4767bb
JJ
15678 if (TARGET_AIX)
15679 {
15680 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15681 GEN_INT (sp_offset + 5 * reg_size));
0be76840 15682 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
15683
15684 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
15685 }
15686
83720594
RH
15687 for (i = 0; ; ++i)
15688 {
a3170dc6 15689 rtx mem;
83720594
RH
15690
15691 regno = EH_RETURN_DATA_REGNO (i);
15692 if (regno == INVALID_REGNUM)
15693 break;
15694
a3170dc6
AH
15695 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
15696 info->ehrd_offset + sp_offset
15697 + reg_size * (int) i);
83720594
RH
15698
15699 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
15700 }
15701 }
f676971a 15702
9ebbca7d
GK
15703 /* Restore GPRs. This is done as a PARALLEL if we are using
15704 the load-multiple instructions. */
15705 if (using_load_multiple)
979721f8 15706 {
9ebbca7d
GK
15707 rtvec p;
15708 p = rtvec_alloc (32 - info->first_gp_reg_save);
15709 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
979721f8 15710 {
f676971a
EC
15711 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15712 GEN_INT (info->gp_save_offset
15713 + sp_offset
9ebbca7d 15714 + reg_size * i));
0be76840 15715 rtx mem = gen_frame_mem (reg_mode, addr);
9ebbca7d 15716
f676971a 15717 RTVEC_ELT (p, i) =
9ebbca7d
GK
15718 gen_rtx_SET (VOIDmode,
15719 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15720 mem);
979721f8 15721 }
9ebbca7d 15722 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
979721f8 15723 }
52ff33d0
NF
15724 else if (TARGET_SPE_ABI
15725 && info->spe_64bit_regs_used != 0
15726 && info->first_gp_reg_save != 32)
15727 {
15728 rtx spe_save_area_ptr;
15729 /* Determine whether we can address all of the registers that need
15730 to be saved with an offset from the stack pointer that fits in
15731 the small const field for SPE memory instructions. */
15732 int spe_regs_addressable_via_sp
15733 = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
15734 + (32 - info->first_gp_reg_save - 1) * reg_size);
15735 int spe_offset;
15736
15737 if (spe_regs_addressable_via_sp)
15738 {
15739 spe_save_area_ptr = frame_reg_rtx;
15740 spe_offset = info->spe_gp_save_offset + sp_offset;
15741 }
15742 else
15743 {
15744 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 15745 not clobbering the static chain when saving registers in the prologue.
52ff33d0
NF
15746 There's no need to worry here because the static chain is passed
15747 anew to every function. */
15748 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
15749
15750 emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
15751 GEN_INT (info->spe_gp_save_offset + sp_offset)));
15752
15753 spe_offset = 0;
15754 }
15755
15756 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
15757 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
15758 {
15759 rtx offset, addr, mem;
15760
15761 /* We're doing all this to ensure that the immediate offset
15762 fits into the immediate field of 'evldd'. */
15763 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
15764
15765 offset = GEN_INT (spe_offset + reg_size * i);
15766 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
15767 mem = gen_rtx_MEM (V2SImode, addr);
15768
15769 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
15770 mem);
15771 }
15772 }
9ebbca7d
GK
15773 else
15774 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0 15775 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
9ebbca7d 15776 {
f676971a
EC
15777 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15778 GEN_INT (info->gp_save_offset
15779 + sp_offset
9ebbca7d 15780 + reg_size * i));
0be76840 15781 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 15782
f676971a 15783 emit_move_insn (gen_rtx_REG (reg_mode,
a3170dc6 15784 info->first_gp_reg_save + i), mem);
9ebbca7d 15785 }
9878760c 15786
9ebbca7d
GK
15787 /* Restore fpr's if we need to do it without calling a function. */
15788 if (restoring_FPRs_inline)
15789 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 15790 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
15791 && ! call_used_regs[info->first_fp_reg_save+i]))
15792 {
15793 rtx addr, mem;
15794 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
15795 GEN_INT (info->fp_save_offset
15796 + sp_offset
a4f6c312 15797 + 8 * i));
0be76840 15798 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15799
f676971a 15800 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
15801 info->first_fp_reg_save + i),
15802 mem);
15803 }
8d30c4ee 15804
9ebbca7d
GK
15805 /* If we saved cr, restore it here. Just those that were used. */
15806 if (info->cr_save_p)
979721f8 15807 {
9ebbca7d 15808 rtx r12_rtx = gen_rtx_REG (SImode, 12);
e35b9579 15809 int count = 0;
f676971a 15810
d296e02e 15811 if (using_mtcr_multiple)
979721f8 15812 {
9ebbca7d 15813 for (i = 0; i < 8; i++)
6fb5fa3c 15814 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
e35b9579 15815 count++;
37409796 15816 gcc_assert (count);
e35b9579
GK
15817 }
15818
d296e02e 15819 if (using_mtcr_multiple && count > 1)
e35b9579
GK
15820 {
15821 rtvec p;
15822 int ndx;
f676971a 15823
e35b9579 15824 p = rtvec_alloc (count);
9ebbca7d 15825
e35b9579 15826 ndx = 0;
9ebbca7d 15827 for (i = 0; i < 8; i++)
6fb5fa3c 15828 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
9ebbca7d
GK
15829 {
15830 rtvec r = rtvec_alloc (2);
15831 RTVEC_ELT (r, 0) = r12_rtx;
15832 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
e35b9579 15833 RTVEC_ELT (p, ndx) =
f676971a 15834 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
615158e2 15835 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
e35b9579 15836 ndx++;
9ebbca7d
GK
15837 }
15838 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
37409796 15839 gcc_assert (ndx == count);
979721f8
MM
15840 }
15841 else
9ebbca7d 15842 for (i = 0; i < 8; i++)
6fb5fa3c 15843 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
979721f8 15844 {
f676971a 15845 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9ebbca7d
GK
15846 CR0_REGNO+i),
15847 r12_rtx));
979721f8 15848 }
979721f8
MM
15849 }
15850
9ebbca7d 15851 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6
AM
15852 have been done. */
15853 if (frame_reg_rtx != sp_reg_rtx)
15854 {
15855 /* This blockage is needed so that sched doesn't decide to move
15856 the sp change before the register restores. */
15857 rs6000_emit_stack_tie ();
52ff33d0
NF
15858 if (TARGET_SPE_ABI
15859 && info->spe_64bit_regs_used != 0
15860 && info->first_gp_reg_save != 32)
15861 emit_insn (gen_addsi3 (sp_reg_rtx, gen_rtx_REG (Pmode, 11),
15862 GEN_INT (-(info->spe_gp_save_offset + sp_offset))));
15863 else
15864 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
022123e6
AM
15865 }
15866 else if (sp_offset != 0)
15867 emit_insn (TARGET_32BIT
15868 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
15869 GEN_INT (sp_offset))
15870 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
15871 GEN_INT (sp_offset)));
b6c9286a 15872
83720594
RH
15873 if (current_function_calls_eh_return)
15874 {
15875 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 15876 emit_insn (TARGET_32BIT
83720594
RH
15877 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
15878 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
15879 }
15880
9ebbca7d
GK
15881 if (!sibcall)
15882 {
15883 rtvec p;
15884 if (! restoring_FPRs_inline)
15885 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
15886 else
15887 p = rtvec_alloc (2);
b6c9286a 15888
e35b9579 15889 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f676971a
EC
15890 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
15891 gen_rtx_REG (Pmode,
1de43f85 15892 LR_REGNO));
9ebbca7d
GK
15893
15894 /* If we have to restore more than two FP registers, branch to the
15895 restore function. It will return to our caller. */
15896 if (! restoring_FPRs_inline)
15897 {
15898 int i;
15899 char rname[30];
520a57c8 15900 const char *alloc_rname;
979721f8 15901
f676971a 15902 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9ebbca7d 15903 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
a8a05998 15904 alloc_rname = ggc_strdup (rname);
9ebbca7d
GK
15905 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
15906 gen_rtx_SYMBOL_REF (Pmode,
15907 alloc_rname));
b6c9286a 15908
9ebbca7d
GK
15909 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
15910 {
15911 rtx addr, mem;
15912 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
15913 GEN_INT (info->fp_save_offset + 8*i));
0be76840 15914 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 15915
f676971a 15916 RTVEC_ELT (p, i+3) =
9ebbca7d
GK
15917 gen_rtx_SET (VOIDmode,
15918 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
15919 mem);
b6c9286a
MM
15920 }
15921 }
f676971a 15922
9ebbca7d 15923 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 15924 }
9878760c
RK
15925}
15926
15927/* Write function epilogue. */
15928
08c148a8 15929static void
f676971a 15930rs6000_output_function_epilogue (FILE *file,
a2369ed3 15931 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 15932{
9ebbca7d 15933 if (! HAVE_epilogue)
9878760c 15934 {
9ebbca7d
GK
15935 rtx insn = get_last_insn ();
15936 /* If the last insn was a BARRIER, we don't have to write anything except
15937 the traceback table. */
15938 if (GET_CODE (insn) == NOTE)
15939 insn = prev_nonnote_insn (insn);
15940 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 15941 {
9ebbca7d
GK
15942 /* This is slightly ugly, but at least we don't have two
15943 copies of the epilogue-emitting code. */
15944 start_sequence ();
15945
15946 /* A NOTE_INSN_DELETED is supposed to be at the start
15947 and end of the "toplevel" insn chain. */
2e040219 15948 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15949 rs6000_emit_epilogue (FALSE);
2e040219 15950 emit_note (NOTE_INSN_DELETED);
9ebbca7d 15951
a3c9585f 15952 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
15953 {
15954 rtx insn;
15955 unsigned addr = 0;
15956 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
15957 {
15958 INSN_ADDRESSES_NEW (insn, addr);
15959 addr += 4;
15960 }
15961 }
15962
9ebbca7d 15963 if (TARGET_DEBUG_STACK)
a4f6c312 15964 debug_rtx_list (get_insns (), 100);
c9d691e9 15965 final (get_insns (), file, FALSE);
9ebbca7d 15966 end_sequence ();
4697a36c 15967 }
9878760c 15968 }
b4ac57ab 15969
efdba735
SH
15970#if TARGET_MACHO
15971 macho_branch_islands ();
0e5da0be
GK
15972 /* Mach-O doesn't support labels at the end of objects, so if
15973 it looks like we might want one, insert a NOP. */
15974 {
15975 rtx insn = get_last_insn ();
15976 while (insn
15977 && NOTE_P (insn)
a38e7aa5 15978 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 15979 insn = PREV_INSN (insn);
f676971a
EC
15980 if (insn
15981 && (LABEL_P (insn)
0e5da0be 15982 || (NOTE_P (insn)
a38e7aa5 15983 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
15984 fputs ("\tnop\n", file);
15985 }
15986#endif
15987
9b30bae2 15988 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
15989 on its format.
15990
15991 We don't output a traceback table if -finhibit-size-directive was
15992 used. The documentation for -finhibit-size-directive reads
15993 ``don't output a @code{.size} assembler directive, or anything
15994 else that would cause trouble if the function is split in the
15995 middle, and the two halves are placed at locations far apart in
15996 memory.'' The traceback table has this property, since it
15997 includes the offset from the start of the function to the
4d30c363
MM
15998 traceback table itself.
15999
16000 System V.4 PowerPC targets (and the embedded ABI derived from it) use a
b6c9286a 16001 different traceback table. */
57ac7be9 16002 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
8097c268 16003 && rs6000_traceback != traceback_none && !current_function_is_thunk)
9b30bae2 16004 {
69c75916 16005 const char *fname = NULL;
3ac88239 16006 const char *language_string = lang_hooks.name;
6041bf2f 16007 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 16008 int i;
57ac7be9 16009 int optional_tbtab;
8097c268 16010 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
16011
16012 if (rs6000_traceback == traceback_full)
16013 optional_tbtab = 1;
16014 else if (rs6000_traceback == traceback_part)
16015 optional_tbtab = 0;
16016 else
16017 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 16018
69c75916
AM
16019 if (optional_tbtab)
16020 {
16021 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
16022 while (*fname == '.') /* V.4 encodes . in the name */
16023 fname++;
16024
16025 /* Need label immediately before tbtab, so we can compute
16026 its offset from the function start. */
16027 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
16028 ASM_OUTPUT_LABEL (file, fname);
16029 }
314fc5a9
ILT
16030
16031 /* The .tbtab pseudo-op can only be used for the first eight
16032 expressions, since it can't handle the possibly variable
16033 length fields that follow. However, if you omit the optional
16034 fields, the assembler outputs zeros for all optional fields
16035 anyways, giving each variable length field is minimum length
16036 (as defined in sys/debug.h). Thus we can not use the .tbtab
16037 pseudo-op at all. */
16038
16039 /* An all-zero word flags the start of the tbtab, for debuggers
16040 that have to find it by searching forward from the entry
16041 point or from the current pc. */
19d2d16f 16042 fputs ("\t.long 0\n", file);
314fc5a9
ILT
16043
16044 /* Tbtab format type. Use format type 0. */
19d2d16f 16045 fputs ("\t.byte 0,", file);
314fc5a9 16046
5fc921c1
DE
16047 /* Language type. Unfortunately, there does not seem to be any
16048 official way to discover the language being compiled, so we
16049 use language_string.
16050 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
16051 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
16052 a number, so for now use 9. */
5fc921c1 16053 if (! strcmp (language_string, "GNU C"))
314fc5a9 16054 i = 0;
6de9cd9a
DN
16055 else if (! strcmp (language_string, "GNU F77")
16056 || ! strcmp (language_string, "GNU F95"))
314fc5a9 16057 i = 1;
8b83775b 16058 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 16059 i = 2;
5fc921c1
DE
16060 else if (! strcmp (language_string, "GNU Ada"))
16061 i = 3;
56438901
AM
16062 else if (! strcmp (language_string, "GNU C++")
16063 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 16064 i = 9;
9517ead8
AG
16065 else if (! strcmp (language_string, "GNU Java"))
16066 i = 13;
5fc921c1
DE
16067 else if (! strcmp (language_string, "GNU Objective-C"))
16068 i = 14;
314fc5a9 16069 else
37409796 16070 gcc_unreachable ();
314fc5a9
ILT
16071 fprintf (file, "%d,", i);
16072
16073 /* 8 single bit fields: global linkage (not set for C extern linkage,
16074 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
16075 from start of procedure stored in tbtab, internal function, function
16076 has controlled storage, function has no toc, function uses fp,
16077 function logs/aborts fp operations. */
16078 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
16079 fprintf (file, "%d,",
16080 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
314fc5a9
ILT
16081
16082 /* 6 bitfields: function is interrupt handler, name present in
16083 proc table, function calls alloca, on condition directives
16084 (controls stack walks, 3 bits), saves condition reg, saves
16085 link reg. */
16086 /* The `function calls alloca' bit seems to be set whenever reg 31 is
16087 set up as a frame pointer, even when there is no alloca call. */
16088 fprintf (file, "%d,",
6041bf2f
DE
16089 ((optional_tbtab << 6)
16090 | ((optional_tbtab & frame_pointer_needed) << 5)
16091 | (info->cr_save_p << 1)
16092 | (info->lr_save_p)));
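	     /* Editor's note, not original source: as a worked example, for a
		function with a full traceback table (optional_tbtab == 1), a
		frame pointer, and both CR and LR saved, the expression above
		evaluates to (1 << 6) | (1 << 5) | (1 << 1) | 1 = 99; the three
		"on condition" bits in between stay zero.  */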
314fc5a9 16093
6041bf2f 16094 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
16095 (6 bits). */
16096 fprintf (file, "%d,",
4697a36c 16097 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
16098
16099 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
16100 fprintf (file, "%d,", (32 - first_reg_to_save ()));
16101
6041bf2f
DE
16102 if (optional_tbtab)
16103 {
16104 /* Compute the parameter info from the function decl argument
16105 list. */
16106 tree decl;
16107 int next_parm_info_bit = 31;
314fc5a9 16108
6041bf2f
DE
16109 for (decl = DECL_ARGUMENTS (current_function_decl);
16110 decl; decl = TREE_CHAIN (decl))
16111 {
16112 rtx parameter = DECL_INCOMING_RTL (decl);
16113 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 16114
6041bf2f
DE
16115 if (GET_CODE (parameter) == REG)
16116 {
ebb109ad 16117 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
16118 {
16119 int bits;
16120
16121 float_parms++;
16122
37409796
NS
16123 switch (mode)
16124 {
16125 case SFmode:
16126 bits = 0x2;
16127 break;
16128
16129 case DFmode:
7393f7f8 16130 case DDmode:
37409796 16131 case TFmode:
7393f7f8 16132 case TDmode:
37409796
NS
16133 bits = 0x3;
16134 break;
16135
16136 default:
16137 gcc_unreachable ();
16138 }
6041bf2f
DE
16139
16140 /* If only one bit will fit, don't or in this entry. */
16141 if (next_parm_info_bit > 0)
16142 parm_info |= (bits << (next_parm_info_bit - 1));
16143 next_parm_info_bit -= 2;
16144 }
16145 else
16146 {
16147 fixed_parms += ((GET_MODE_SIZE (mode)
16148 + (UNITS_PER_WORD - 1))
16149 / UNITS_PER_WORD);
16150 next_parm_info_bit -= 1;
16151 }
16152 }
16153 }
16154 }
314fc5a9
ILT
16155
16156 /* Number of fixed point parameters. */
16157 /* This is actually the number of words of fixed point parameters; thus
16158 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16159 fprintf (file, "%d,", fixed_parms);
16160
16161 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16162 all on stack. */
16163 /* This is actually the number of fp registers that hold parameters;
16164 and thus the maximum value is 13. */
16165 /* Set parameters on stack bit if parameters are not in their original
16166 registers, regardless of whether they are on the stack? Xlc
16167 seems to set the bit when not optimizing. */
16168 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
16169
6041bf2f
DE
16170 if (! optional_tbtab)
16171 return;
16172
314fc5a9
ILT
16173 /* Optional fields follow. Some are variable length. */
16174
16175 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
16176 11 double float. */
16177 /* There is an entry for each parameter in a register, in the order that
16178 they occur in the parameter list. Any intervening arguments on the
16179 stack are ignored. If the list overflows a long (max possible length
16180 34 bits) then completely leave off all elements that don't fit. */
16181 /* Only emit this long if there was at least one parameter. */
16182 if (fixed_parms || float_parms)
16183 fprintf (file, "\t.long %d\n", parm_info);
16184
16185 /* Offset from start of code to tb table. */
19d2d16f 16186 fputs ("\t.long ", file);
314fc5a9 16187 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
16188 if (TARGET_AIX)
16189 RS6000_OUTPUT_BASENAME (file, fname);
16190 else
16191 assemble_name (file, fname);
16192 putc ('-', file);
16193 rs6000_output_function_entry (file, fname);
19d2d16f 16194 putc ('\n', file);
314fc5a9
ILT
16195
16196 /* Interrupt handler mask. */
16197 /* Omit this long, since we never set the interrupt handler bit
16198 above. */
16199
16200 /* Number of CTL (controlled storage) anchors. */
16201 /* Omit this long, since the has_ctl bit is never set above. */
16202
16203 /* Displacement into stack of each CTL anchor. */
16204 /* Omit this list of longs, because there are no CTL anchors. */
16205
16206 /* Length of function name. */
69c75916
AM
16207 if (*fname == '*')
16208 ++fname;
296b8152 16209 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
16210
16211 /* Function name. */
16212 assemble_string (fname, strlen (fname));
16213
16214 /* Register for alloca automatic storage; this is always reg 31.
16215 Only emit this if the alloca bit was set above. */
16216 if (frame_pointer_needed)
19d2d16f 16217 fputs ("\t.byte 31\n", file);
b1765bde
DE
16218
16219 fputs ("\t.align 2\n", file);
9b30bae2 16220 }
9878760c 16221}
17167fd8 16222\f
a4f6c312
SS
16223/* A C compound statement that outputs the assembler code for a thunk
16224 function, used to implement C++ virtual function calls with
16225 multiple inheritance. The thunk acts as a wrapper around a virtual
16226 function, adjusting the implicit object parameter before handing
16227 control off to the real function.
16228
16229 First, emit code to add the integer DELTA to the location that
16230 contains the incoming first argument. Assume that this argument
16231 contains a pointer, and is the one used to pass the `this' pointer
16232 in C++. This is the incoming argument *before* the function
16233 prologue, e.g. `%o0' on a sparc. The addition must preserve the
16234 values of all other incoming arguments.
17167fd8
MM
16235
16236 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
16237 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
16238 not touch the return address. Hence returning from FUNCTION will
16239 return to whoever called the current `thunk'.
17167fd8 16240
a4f6c312
SS
16241 The effect must be as if FUNCTION had been called directly with the
16242 adjusted first argument. This macro is responsible for emitting
16243 all of the code for a thunk function; output_function_prologue()
16244 and output_function_epilogue() are not invoked.
17167fd8 16245
a4f6c312
SS
16246 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
16247 been extracted from it.) It might possibly be useful on some
16248 targets, but probably not.
17167fd8 16249
a4f6c312
SS
16250 If you do not define this macro, the target-independent code in the
16251 C++ frontend will generate a less efficient heavyweight thunk that
16252 calls FUNCTION instead of jumping to it. The generic approach does
16253 not support varargs. */
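/* Editor's note: what follows is a minimal C sketch, not part of rs6000.c
   and never compiled here, of the pointer adjustment the comment above
   describes.  The helper name adjust_this and its types are hypothetical;
   the order of operations (constant DELTA first, then the VCALL_OFFSET
   value read from the vtable of the already adjusted object) mirrors the
   RTL emitted by rs6000_output_mi_thunk below.  */

#include <stdint.h>

static void *
adjust_this (void *this_ptr, intptr_t delta, intptr_t vcall_offset)
{
  char *p = (char *) this_ptr;

  /* Apply the constant part of the adjustment.  */
  p += delta;

  if (vcall_offset != 0)
    {
      /* The vtable pointer sits at the start of the adjusted object; the
         extra adjustment is the value the vtable holds at VCALL_OFFSET.  */
      char *vtable = *(char **) p;
      p += *(intptr_t *) (vtable + vcall_offset);
    }
  return p;
}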
17167fd8 16254
3961e8fe 16255static void
f676971a
EC
16256rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
16257 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 16258 tree function)
17167fd8 16259{
5b71a4e7 16260 rtx this, insn, funexp;
17167fd8 16261
5b71a4e7 16262 reload_completed = 1;
fe3ad572 16263 epilogue_completed = 1;
56a7189a 16264
5b71a4e7 16265 /* Mark the end of the (empty) prologue. */
2e040219 16266 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 16267
5b71a4e7
DE
16268 /* Find the "this" pointer. If the function returns a structure,
16269 the structure return pointer is in r3. */
61f71b34 16270 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
5b71a4e7 16271 this = gen_rtx_REG (Pmode, 4);
56a7189a 16272 else
5b71a4e7 16273 this = gen_rtx_REG (Pmode, 3);
17167fd8 16274
5b71a4e7
DE
16275 /* Apply the constant offset, if required. */
16276 if (delta)
16277 {
16278 rtx delta_rtx = GEN_INT (delta);
16279 emit_insn (TARGET_32BIT
16280 ? gen_addsi3 (this, this, delta_rtx)
16281 : gen_adddi3 (this, this, delta_rtx));
17167fd8
MM
16282 }
16283
5b71a4e7
DE
16284 /* Apply the offset from the vtable, if required. */
16285 if (vcall_offset)
17167fd8 16286 {
5b71a4e7
DE
16287 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
16288 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 16289
5b71a4e7 16290 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
eeff9307
JJ
16291 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
16292 {
16293 emit_insn (TARGET_32BIT
16294 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
16295 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
16296 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
16297 }
16298 else
16299 {
16300 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
16301
16302 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
16303 }
5b71a4e7
DE
16304 emit_insn (TARGET_32BIT
16305 ? gen_addsi3 (this, this, tmp)
16306 : gen_adddi3 (this, this, tmp));
17167fd8
MM
16307 }
16308
5b71a4e7
DE
16309 /* Generate a tail call to the target function. */
16310 if (!TREE_USED (function))
16311 {
16312 assemble_external (function);
16313 TREE_USED (function) = 1;
16314 }
16315 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 16316 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
16317
16318#if TARGET_MACHO
ab82a49f 16319 if (MACHOPIC_INDIRECT)
5b71a4e7 16320 funexp = machopic_indirect_call_target (funexp);
ee890fe2 16321#endif
5b71a4e7
DE
16322
16323 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
992d08b1 16324 generate sibcall RTL explicitly. */
5b71a4e7
DE
16325 insn = emit_call_insn (
16326 gen_rtx_PARALLEL (VOIDmode,
16327 gen_rtvec (4,
16328 gen_rtx_CALL (VOIDmode,
16329 funexp, const0_rtx),
16330 gen_rtx_USE (VOIDmode, const0_rtx),
16331 gen_rtx_USE (VOIDmode,
16332 gen_rtx_REG (SImode,
1de43f85 16333 LR_REGNO)),
5b71a4e7
DE
16334 gen_rtx_RETURN (VOIDmode))));
16335 SIBLING_CALL_P (insn) = 1;
16336 emit_barrier ();
16337
16338 /* Run just enough of rest_of_compilation to get the insns emitted.
16339 There's not really enough bulk here to make other passes such as
16340 instruction scheduling worth while. Note that use_thunk calls
16341 assemble_start_function and assemble_end_function. */
16342 insn = get_insns ();
55e092c4 16343 insn_locators_alloc ();
5b71a4e7
DE
16344 shorten_branches (insn);
16345 final_start_function (insn, file, 1);
c9d691e9 16346 final (insn, file, 1);
5b71a4e7
DE
16347 final_end_function ();
16348
16349 reload_completed = 0;
fe3ad572 16350 epilogue_completed = 0;
9ebbca7d 16351}
9ebbca7d
GK
16352\f
16353/* A quick summary of the various types of 'constant-pool tables'
16354 under PowerPC:
16355
f676971a 16356    Target       Flags                Name              One table per
9ebbca7d
GK
 16357    AIX          (none)               AIX TOC           object file
 16358    AIX          -mfull-toc           AIX TOC           object file
 16359    AIX          -mminimal-toc        AIX minimal TOC   translation unit
 16360    SVR4/EABI    (none)               SVR4 SDATA        object file
 16361    SVR4/EABI    -fpic                SVR4 pic          object file
 16362    SVR4/EABI    -fPIC                SVR4 PIC          translation unit
 16363    SVR4/EABI    -mrelocatable        EABI TOC          function
 16364    SVR4/EABI    -maix                AIX TOC           object file
f676971a 16365    SVR4/EABI    -maix -mminimal-toc
9ebbca7d
GK
 16366                                      AIX minimal TOC   translation unit
 16367
 16368    Name              Reg.   Set by    entries   contains:
 16369                             made by   addrs?    fp?       sum?
 16370
 16371    AIX TOC           2      crt0      as        Y         option    option
 16372    AIX minimal TOC   30     prolog    gcc       Y         Y         option
 16373    SVR4 SDATA        13     crt0      gcc       N         Y         N
 16374    SVR4 pic          30     prolog    ld        Y         not yet   N
 16375    SVR4 PIC          30     prolog    gcc       Y         option    option
 16376    EABI TOC          30     prolog    gcc       Y         option    option
16377
16378*/
16379
9ebbca7d
GK
16380/* Hash functions for the hash table. */
16381
16382static unsigned
a2369ed3 16383rs6000_hash_constant (rtx k)
9ebbca7d 16384{
46b33600
RH
16385 enum rtx_code code = GET_CODE (k);
16386 enum machine_mode mode = GET_MODE (k);
16387 unsigned result = (code << 3) ^ mode;
16388 const char *format;
16389 int flen, fidx;
f676971a 16390
46b33600
RH
16391 format = GET_RTX_FORMAT (code);
16392 flen = strlen (format);
16393 fidx = 0;
9ebbca7d 16394
46b33600
RH
16395 switch (code)
16396 {
16397 case LABEL_REF:
16398 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
16399
16400 case CONST_DOUBLE:
16401 if (mode != VOIDmode)
16402 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
16403 flen = 2;
16404 break;
16405
16406 case CODE_LABEL:
16407 fidx = 3;
16408 break;
16409
16410 default:
16411 break;
16412 }
9ebbca7d
GK
16413
16414 for (; fidx < flen; fidx++)
16415 switch (format[fidx])
16416 {
16417 case 's':
16418 {
16419 unsigned i, len;
16420 const char *str = XSTR (k, fidx);
16421 len = strlen (str);
16422 result = result * 613 + len;
16423 for (i = 0; i < len; i++)
16424 result = result * 613 + (unsigned) str[i];
17167fd8
MM
16425 break;
16426 }
9ebbca7d
GK
16427 case 'u':
16428 case 'e':
16429 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
16430 break;
16431 case 'i':
16432 case 'n':
16433 result = result * 613 + (unsigned) XINT (k, fidx);
16434 break;
16435 case 'w':
16436 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
16437 result = result * 613 + (unsigned) XWINT (k, fidx);
16438 else
16439 {
16440 size_t i;
9390387d 16441 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
16442 result = result * 613 + (unsigned) (XWINT (k, fidx)
16443 >> CHAR_BIT * i);
16444 }
16445 break;
09501938
DE
16446 case '0':
16447 break;
9ebbca7d 16448 default:
37409796 16449 gcc_unreachable ();
9ebbca7d 16450 }
46b33600 16451
9ebbca7d
GK
16452 return result;
16453}
16454
16455static unsigned
a2369ed3 16456toc_hash_function (const void *hash_entry)
9ebbca7d 16457{
f676971a 16458 const struct toc_hash_struct *thc =
a9098fd0
GK
16459 (const struct toc_hash_struct *) hash_entry;
16460 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
16461}
16462
16463/* Compare H1 and H2 for equivalence. */
16464
16465static int
a2369ed3 16466toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
16467{
16468 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
16469 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
16470
a9098fd0
GK
16471 if (((const struct toc_hash_struct *) h1)->key_mode
16472 != ((const struct toc_hash_struct *) h2)->key_mode)
16473 return 0;
16474
5692c7bc 16475 return rtx_equal_p (r1, r2);
9ebbca7d
GK
16476}
16477
28e510bd
MM
16478/* These are the names given by the C++ front-end to vtables, and
16479 vtable-like objects. Ideally, this logic should not be here;
16480 instead, there should be some programmatic way of inquiring as
16481 to whether or not an object is a vtable. */
16482
16483#define VTABLE_NAME_P(NAME) \
9390387d 16484 (strncmp ("_vt.", NAME, strlen ("_vt.")) == 0 \
28e510bd
MM
16485 || strncmp ("_ZTV", NAME, strlen ("_ZTV")) == 0 \
16486 || strncmp ("_ZTT", NAME, strlen ("_ZTT")) == 0 \
26be75db 16487 || strncmp ("_ZTI", NAME, strlen ("_ZTI")) == 0 \
f676971a 16488 || strncmp ("_ZTC", NAME, strlen ("_ZTC")) == 0)
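/* For example, the Itanium C++ ABI mangles the vtable for class Foo as
   "_ZTV3Foo", its VTT as "_ZTT3Foo", its typeinfo as "_ZTI3Foo", and
   construction vtables with a "_ZTC" prefix; "_vt." is the old g++ 2.x
   vtable prefix.  (Illustrative examples only.)  */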
28e510bd
MM
16489
16490void
a2369ed3 16491rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
16492{
16493 /* Currently C++ toc references to vtables can be emitted before it
16494 is decided whether the vtable is public or private. If this is
16495 the case, then the linker will eventually complain that there is
f676971a 16496 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
16497 we emit the TOC reference to reference the symbol and not the
16498 section. */
16499 const char *name = XSTR (x, 0);
54ee9799 16500
f676971a 16501 if (VTABLE_NAME_P (name))
54ee9799
DE
16502 {
16503 RS6000_OUTPUT_BASENAME (file, name);
16504 }
16505 else
16506 assemble_name (file, name);
28e510bd
MM
16507}
16508
a4f6c312
SS
16509/* Output a TOC entry. We derive the entry name from what is being
16510 written. */
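/* For instance, a DFmode constant on a 64-bit target normally produces
   an entry along the lines of

	.tc FD_<hi>_<lo>[TC],0x<hi><lo>

   where <hi> and <lo> are the hex words of the value, while under
   -mminimal-toc only the raw value is emitted (via DOUBLE_INT_ASM_OP,
   or as a pair of .long values on 32-bit targets).  Illustrative
   sketch; the exact directives are generated by the code below.  */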
9878760c
RK
16511
16512void
a2369ed3 16513output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
16514{
16515 char buf[256];
3cce094d 16516 const char *name = buf;
ec940faa 16517 const char *real_name;
9878760c 16518 rtx base = x;
16fdeb48 16519 HOST_WIDE_INT offset = 0;
9878760c 16520
37409796 16521 gcc_assert (!TARGET_NO_TOC);
4697a36c 16522
9ebbca7d
GK
16523 /* When the linker won't eliminate them, don't output duplicate
16524 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
16525 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
16526 CODE_LABELs. */
16527 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
16528 {
16529 struct toc_hash_struct *h;
16530 void **found;
f676971a 16531
17211ab5 16532 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 16533 time because GGC is not initialized at that point. */
17211ab5 16534 if (toc_hash_table == NULL)
f676971a 16535 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
16536 toc_hash_eq, NULL);
16537
9ebbca7d
GK
16538 h = ggc_alloc (sizeof (*h));
16539 h->key = x;
a9098fd0 16540 h->key_mode = mode;
9ebbca7d 16541 h->labelno = labelno;
f676971a 16542
9ebbca7d
GK
16543 found = htab_find_slot (toc_hash_table, h, 1);
16544 if (*found == NULL)
16545 *found = h;
f676971a 16546 else /* This is indeed a duplicate.
9ebbca7d
GK
16547 Set this label equal to that label. */
16548 {
16549 fputs ("\t.set ", file);
16550 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
16551 fprintf (file, "%d,", labelno);
16552 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 16553 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
16554 found)->labelno));
16555 return;
16556 }
16557 }
16558
16559 /* If we're going to put a double constant in the TOC, make sure it's
16560 aligned properly when strict alignment is on. */
ff1720ed
RK
16561 if (GET_CODE (x) == CONST_DOUBLE
16562 && STRICT_ALIGNMENT
a9098fd0 16563 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
16564 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
16565 ASM_OUTPUT_ALIGN (file, 3);
16566 }
16567
4977bab6 16568 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 16569
37c37a57
RK
16570 /* Handle FP constants specially. Note that if we have a minimal
16571 TOC, things we put here aren't actually in the TOC, so we can allow
16572 FP constants. */
00b79d54
BE
16573 if (GET_CODE (x) == CONST_DOUBLE
16574 && (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
16575 {
16576 REAL_VALUE_TYPE rv;
16577 long k[4];
16578
16579 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16580 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16581 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
16582 else
16583 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
16584
16585 if (TARGET_64BIT)
16586 {
16587 if (TARGET_MINIMAL_TOC)
16588 fputs (DOUBLE_INT_ASM_OP, file);
16589 else
16590 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16591 k[0] & 0xffffffff, k[1] & 0xffffffff,
16592 k[2] & 0xffffffff, k[3] & 0xffffffff);
16593 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
16594 k[0] & 0xffffffff, k[1] & 0xffffffff,
16595 k[2] & 0xffffffff, k[3] & 0xffffffff);
16596 return;
16597 }
16598 else
16599 {
16600 if (TARGET_MINIMAL_TOC)
16601 fputs ("\t.long ", file);
16602 else
16603 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
16604 k[0] & 0xffffffff, k[1] & 0xffffffff,
16605 k[2] & 0xffffffff, k[3] & 0xffffffff);
16606 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
16607 k[0] & 0xffffffff, k[1] & 0xffffffff,
16608 k[2] & 0xffffffff, k[3] & 0xffffffff);
16609 return;
16610 }
16611 }
00b79d54
BE
16612 else if (GET_CODE (x) == CONST_DOUBLE
16613 && (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 16614 {
042259f2
DE
16615 REAL_VALUE_TYPE rv;
16616 long k[2];
0adc764e 16617
042259f2 16618 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16619
16620 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16621 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
16622 else
16623 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 16624
13ded975
DE
16625 if (TARGET_64BIT)
16626 {
16627 if (TARGET_MINIMAL_TOC)
2bfcf297 16628 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16629 else
2f0552b6
AM
16630 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16631 k[0] & 0xffffffff, k[1] & 0xffffffff);
16632 fprintf (file, "0x%lx%08lx\n",
16633 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16634 return;
16635 }
1875cc88 16636 else
13ded975
DE
16637 {
16638 if (TARGET_MINIMAL_TOC)
2bfcf297 16639 fputs ("\t.long ", file);
13ded975 16640 else
2f0552b6
AM
16641 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
16642 k[0] & 0xffffffff, k[1] & 0xffffffff);
16643 fprintf (file, "0x%lx,0x%lx\n",
16644 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
16645 return;
16646 }
9878760c 16647 }
00b79d54
BE
16648 else if (GET_CODE (x) == CONST_DOUBLE
16649 && (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 16650 {
042259f2
DE
16651 REAL_VALUE_TYPE rv;
16652 long l;
9878760c 16653
042259f2 16654 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
16655 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
16656 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
16657 else
16658 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 16659
31bfaa0b
DE
16660 if (TARGET_64BIT)
16661 {
16662 if (TARGET_MINIMAL_TOC)
2bfcf297 16663 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 16664 else
2f0552b6
AM
16665 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16666 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
16667 return;
16668 }
042259f2 16669 else
31bfaa0b
DE
16670 {
16671 if (TARGET_MINIMAL_TOC)
2bfcf297 16672 fputs ("\t.long ", file);
31bfaa0b 16673 else
2f0552b6
AM
16674 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
16675 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
16676 return;
16677 }
042259f2 16678 }
f176e826 16679 else if (GET_MODE (x) == VOIDmode
a9098fd0 16680 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 16681 {
e2c953b6 16682 unsigned HOST_WIDE_INT low;
042259f2
DE
16683 HOST_WIDE_INT high;
16684
16685 if (GET_CODE (x) == CONST_DOUBLE)
16686 {
16687 low = CONST_DOUBLE_LOW (x);
16688 high = CONST_DOUBLE_HIGH (x);
16689 }
16690 else
16691#if HOST_BITS_PER_WIDE_INT == 32
16692 {
16693 low = INTVAL (x);
0858c623 16694 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
16695 }
16696#else
16697 {
c4ad648e
AM
16698 low = INTVAL (x) & 0xffffffff;
16699 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
16700 }
16701#endif
9878760c 16702
a9098fd0
GK
16703 /* TOC entries are always Pmode-sized, but since this
16704 is a big-endian machine, if we're putting smaller
16705 integer constants in the TOC we have to pad them.
16706 (This is still a win over putting the constants in
16707 a separate constant pool, because then we'd have
02a4ec28
FS
16708 to have both a TOC entry _and_ the actual constant.)
16709
16710 For a 32-bit target, CONST_INT values are loaded and shifted
16711 entirely within `low' and can be stored in one TOC entry. */
16712
37409796
NS
16713 /* Supporting constants wider than a pointer on a 64-bit target would be easy, but it isn't done now. */
16714 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
16715
16716 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
16717 {
16718#if HOST_BITS_PER_WIDE_INT == 32
16719 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
16720 POINTER_SIZE, &low, &high, 0);
16721#else
16722 low |= high << 32;
16723 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
16724 high = (HOST_WIDE_INT) low >> 32;
16725 low &= 0xffffffff;
16726#endif
16727 }
a9098fd0 16728
13ded975
DE
16729 if (TARGET_64BIT)
16730 {
16731 if (TARGET_MINIMAL_TOC)
2bfcf297 16732 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 16733 else
2f0552b6
AM
16734 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
16735 (long) high & 0xffffffff, (long) low & 0xffffffff);
16736 fprintf (file, "0x%lx%08lx\n",
16737 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
16738 return;
16739 }
1875cc88 16740 else
13ded975 16741 {
02a4ec28
FS
16742 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
16743 {
16744 if (TARGET_MINIMAL_TOC)
2bfcf297 16745 fputs ("\t.long ", file);
02a4ec28 16746 else
2bfcf297 16747 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
16748 (long) high & 0xffffffff, (long) low & 0xffffffff);
16749 fprintf (file, "0x%lx,0x%lx\n",
16750 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 16751 }
13ded975 16752 else
02a4ec28
FS
16753 {
16754 if (TARGET_MINIMAL_TOC)
2bfcf297 16755 fputs ("\t.long ", file);
02a4ec28 16756 else
2f0552b6
AM
16757 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
16758 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 16759 }
13ded975
DE
16760 return;
16761 }
9878760c
RK
16762 }
16763
16764 if (GET_CODE (x) == CONST)
16765 {
37409796 16766 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 16767
9878760c
RK
16768 base = XEXP (XEXP (x, 0), 0);
16769 offset = INTVAL (XEXP (XEXP (x, 0), 1));
16770 }
f676971a 16771
37409796
NS
16772 switch (GET_CODE (base))
16773 {
16774 case SYMBOL_REF:
16775 name = XSTR (base, 0);
16776 break;
16777
16778 case LABEL_REF:
16779 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
16780 CODE_LABEL_NUMBER (XEXP (base, 0)));
16781 break;
16782
16783 case CODE_LABEL:
16784 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
16785 break;
16786
16787 default:
16788 gcc_unreachable ();
16789 }
9878760c 16790
772c5265 16791 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 16792 if (TARGET_MINIMAL_TOC)
2bfcf297 16793 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
16794 else
16795 {
b6c9286a 16796 fprintf (file, "\t.tc %s", real_name);
9878760c 16797
1875cc88 16798 if (offset < 0)
16fdeb48 16799 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 16800 else if (offset)
16fdeb48 16801 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 16802
19d2d16f 16803 fputs ("[TC],", file);
1875cc88 16804 }
581bc4de
MM
16805
16806 /* Currently C++ toc references to vtables can be emitted before it
16807 is decided whether the vtable is public or private. If this is
16808 the case, then the linker will eventually complain that there is
16809 a TOC reference to an unknown section. Thus, for vtables only,
16810 we emit the TOC reference to reference the symbol and not the
16811 section. */
28e510bd 16812 if (VTABLE_NAME_P (name))
581bc4de 16813 {
54ee9799 16814 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 16815 if (offset < 0)
16fdeb48 16816 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 16817 else if (offset > 0)
16fdeb48 16818 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
16819 }
16820 else
16821 output_addr_const (file, x);
19d2d16f 16822 putc ('\n', file);
9878760c
RK
16823}
16824\f
16825/* Output an assembler pseudo-op to write an ASCII string of N characters
16826 starting at P to FILE.
16827
16828 On the RS/6000, we have to do this using the .byte operation and
16829 write out special characters outside the quoted string.
16830 Also, the assembler is broken; very long strings are truncated,
a4f6c312 16831 so we must artificially break them up early. */
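/* For instance, the three input characters 'H', 'i', '\n' come out
   roughly as

	.byte "Hi"
	.byte 10

   since printable characters are grouped into quoted runs and anything
   else is written as a decimal .byte value.  (Illustrative example.)  */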
9878760c
RK
16832
16833void
a2369ed3 16834output_ascii (FILE *file, const char *p, int n)
9878760c
RK
16835{
16836 char c;
16837 int i, count_string;
d330fd93
KG
16838 const char *for_string = "\t.byte \"";
16839 const char *for_decimal = "\t.byte ";
16840 const char *to_close = NULL;
9878760c
RK
16841
16842 count_string = 0;
16843 for (i = 0; i < n; i++)
16844 {
16845 c = *p++;
16846 if (c >= ' ' && c < 0177)
16847 {
16848 if (for_string)
16849 fputs (for_string, file);
16850 putc (c, file);
16851
16852 /* Write two quotes to get one. */
16853 if (c == '"')
16854 {
16855 putc (c, file);
16856 ++count_string;
16857 }
16858
16859 for_string = NULL;
16860 for_decimal = "\"\n\t.byte ";
16861 to_close = "\"\n";
16862 ++count_string;
16863
16864 if (count_string >= 512)
16865 {
16866 fputs (to_close, file);
16867
16868 for_string = "\t.byte \"";
16869 for_decimal = "\t.byte ";
16870 to_close = NULL;
16871 count_string = 0;
16872 }
16873 }
16874 else
16875 {
16876 if (for_decimal)
16877 fputs (for_decimal, file);
16878 fprintf (file, "%d", c);
16879
16880 for_string = "\n\t.byte \"";
16881 for_decimal = ", ";
16882 to_close = "\n";
16883 count_string = 0;
16884 }
16885 }
16886
16887 /* Now close the string if we have written one. Then end the line. */
16888 if (to_close)
9ebbca7d 16889 fputs (to_close, file);
9878760c
RK
16890}
16891\f
16892/* Generate a unique section name for FILENAME for a section type
16893 represented by SECTION_DESC. Output goes into BUF.
16894
16895 SECTION_DESC can be any string, as long as it is different for each
16896 possible section type.
16897
16898 We name the section in the same manner as xlc. The name begins with an
16899 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
16900 names) with the last period replaced by the string SECTION_DESC. If
16901 FILENAME does not contain a period, SECTION_DESC is appended to the end of
16902 the name. */
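/* For instance (illustrative): FILENAME "subdir/my-file.c" with
   SECTION_DESC "bss_" yields "_myfilebss_"; characters that are not
   alphanumeric are dropped along the way.  */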
9878760c
RK
16903
16904void
f676971a 16905rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 16906 const char *section_desc)
9878760c 16907{
9ebbca7d 16908 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
16909 char *p;
16910 int len;
9878760c
RK
16911
16912 after_last_slash = filename;
16913 for (q = filename; *q; q++)
11e5fe42
RK
16914 {
16915 if (*q == '/')
16916 after_last_slash = q + 1;
16917 else if (*q == '.')
16918 last_period = q;
16919 }
9878760c 16920
11e5fe42 16921 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 16922 *buf = (char *) xmalloc (len);
9878760c
RK
16923
16924 p = *buf;
16925 *p++ = '_';
16926
16927 for (q = after_last_slash; *q; q++)
16928 {
11e5fe42 16929 if (q == last_period)
c4ad648e 16930 {
9878760c
RK
16931 strcpy (p, section_desc);
16932 p += strlen (section_desc);
e3981aab 16933 break;
c4ad648e 16934 }
9878760c 16935
e9a780ec 16936 else if (ISALNUM (*q))
c4ad648e 16937 *p++ = *q;
9878760c
RK
16938 }
16939
11e5fe42 16940 if (last_period == 0)
9878760c
RK
16941 strcpy (p, section_desc);
16942 else
16943 *p = '\0';
16944}
e165f3f0 16945\f
a4f6c312 16946/* Emit profile function. */
411707f4 16947
411707f4 16948void
a2369ed3 16949output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 16950{
858081ad
AH
16951 /* Non-standard profiling for kernels, which just saves LR then calls
16952 _mcount without worrying about arg saves. The idea is to change
16953 the function prologue as little as possible as it isn't easy to
16954 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
16955 if (TARGET_PROFILE_KERNEL)
16956 return;
16957
8480e480
CC
16958 if (DEFAULT_ABI == ABI_AIX)
16959 {
9739c90c
JJ
16960#ifndef NO_PROFILE_COUNTERS
16961# define NO_PROFILE_COUNTERS 0
16962#endif
f676971a 16963 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
16964 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
16965 else
16966 {
16967 char buf[30];
16968 const char *label_name;
16969 rtx fun;
411707f4 16970
9739c90c
JJ
16971 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
16972 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
16973 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 16974
9739c90c
JJ
16975 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
16976 fun, Pmode);
16977 }
8480e480 16978 }
ee890fe2
SS
16979 else if (DEFAULT_ABI == ABI_DARWIN)
16980 {
d5fa86ba 16981 const char *mcount_name = RS6000_MCOUNT;
1de43f85 16982 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
16983
16984 /* Be conservative and always set this, at least for now. */
16985 current_function_uses_pic_offset_table = 1;
16986
16987#if TARGET_MACHO
16988 /* For PIC code, set up a stub and collect the caller's address
16989 from r0, which is where the prologue puts it. */
11abc112
MM
16990 if (MACHOPIC_INDIRECT
16991 && current_function_uses_pic_offset_table)
16992 caller_addr_regno = 0;
ee890fe2
SS
16993#endif
16994 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
16995 0, VOIDmode, 1,
16996 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
16997 }
411707f4
CC
16998}
16999
a4f6c312 17000/* Write function profiler code. */
e165f3f0
RK
17001
17002void
a2369ed3 17003output_function_profiler (FILE *file, int labelno)
e165f3f0 17004{
3daf36a4 17005 char buf[100];
e165f3f0 17006
38c1f2d7 17007 switch (DEFAULT_ABI)
3daf36a4 17008 {
38c1f2d7 17009 default:
37409796 17010 gcc_unreachable ();
38c1f2d7
MM
17011
17012 case ABI_V4:
09eeeacb
AM
17013 if (!TARGET_32BIT)
17014 {
d4ee4d25 17015 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
17016 return;
17017 }
ffcfcb5f 17018 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 17019 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
17020 if (NO_PROFILE_COUNTERS)
17021 {
17022 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17023 reg_names[0], reg_names[1]);
17024 }
17025 else if (TARGET_SECURE_PLT && flag_pic)
17026 {
17027 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
17028 reg_names[0], reg_names[1]);
17029 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
17030 asm_fprintf (file, "\t{cau|addis} %s,%s,",
17031 reg_names[12], reg_names[12]);
17032 assemble_name (file, buf);
17033 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
17034 assemble_name (file, buf);
17035 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
17036 }
17037 else if (flag_pic == 1)
38c1f2d7 17038 {
dfdfa60f 17039 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
17040 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17041 reg_names[0], reg_names[1]);
17167fd8 17042 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 17043 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 17044 assemble_name (file, buf);
17167fd8 17045 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 17046 }
9ebbca7d 17047 else if (flag_pic > 1)
38c1f2d7 17048 {
71625f3d
AM
17049 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17050 reg_names[0], reg_names[1]);
9ebbca7d 17051 /* Now, we need to get the address of the label. */
71625f3d 17052 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 17053 assemble_name (file, buf);
9ebbca7d
GK
17054 fputs ("-.\n1:", file);
17055 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 17056 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
17057 reg_names[0], reg_names[11]);
17058 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
17059 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 17060 }
38c1f2d7
MM
17061 else
17062 {
17167fd8 17063 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 17064 assemble_name (file, buf);
dfdfa60f 17065 fputs ("@ha\n", file);
71625f3d
AM
17066 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
17067 reg_names[0], reg_names[1]);
a260abc9 17068 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 17069 assemble_name (file, buf);
17167fd8 17070 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
17071 }
17072
50d440bc 17073 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
17074 fprintf (file, "\tbl %s%s\n",
17075 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
17076 break;
17077
17078 case ABI_AIX:
ee890fe2 17079 case ABI_DARWIN:
ffcfcb5f
AM
17080 if (!TARGET_PROFILE_KERNEL)
17081 {
a3c9585f 17082 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
17083 }
17084 else
17085 {
37409796 17086 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
17087
17088 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
17089 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
17090
6de9cd9a 17091 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
17092 {
17093 asm_fprintf (file, "\tstd %s,24(%s)\n",
17094 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17095 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17096 asm_fprintf (file, "\tld %s,24(%s)\n",
17097 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
17098 }
17099 else
17100 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
17101 }
38c1f2d7
MM
17102 break;
17103 }
e165f3f0 17104}
a251ffd0 17105
b54cf83a 17106\f
44cd321e
PS
17107
17108/* The following variable holds the last issued insn. */
17109
17110static rtx last_scheduled_insn;
17111
17112/* The following variable helps to balance issuing of load and
17113 store instructions. */
17114
17115static int load_store_pendulum;
17116
b54cf83a
DE
17117/* Power4 load update and store update instructions are cracked into a
17118 load or store and an integer insn which are executed in the same cycle.
17119 Branches have their own dispatch slot which does not count against the
17120 GCC issue rate, but it changes the program flow so there are no other
17121 instructions to issue in this cycle. */
17122
17123static int
f676971a
EC
17124rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
17125 int verbose ATTRIBUTE_UNUSED,
a2369ed3 17126 rtx insn, int more)
b54cf83a 17127{
44cd321e 17128 last_scheduled_insn = insn;
b54cf83a
DE
17129 if (GET_CODE (PATTERN (insn)) == USE
17130 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
17131 {
17132 cached_can_issue_more = more;
17133 return cached_can_issue_more;
17134 }
17135
17136 if (insn_terminates_group_p (insn, current_group))
17137 {
17138 cached_can_issue_more = 0;
17139 return cached_can_issue_more;
17140 }
b54cf83a 17141
d296e02e
AP
17142 /* If the insn is not recognized it has no reservation; leave the issue count unchanged. */
17143 if (recog_memoized (insn) < 0)
17144 return more;
17145
ec507f2d 17146 if (rs6000_sched_groups)
b54cf83a 17147 {
cbe26ab8 17148 if (is_microcoded_insn (insn))
44cd321e 17149 cached_can_issue_more = 0;
cbe26ab8 17150 else if (is_cracked_insn (insn))
44cd321e
PS
17151 cached_can_issue_more = more > 2 ? more - 2 : 0;
17152 else
17153 cached_can_issue_more = more - 1;
17154
17155 return cached_can_issue_more;
b54cf83a 17156 }
165b263e 17157
d296e02e
AP
17158 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
17159 return 0;
17160
44cd321e
PS
17161 cached_can_issue_more = more - 1;
17162 return cached_can_issue_more;
b54cf83a
DE
17163}
17164
a251ffd0
TG
17165/* Adjust the cost of a scheduling dependency. Return the new cost of
17166 the dependency LINK of INSN on DEP_INSN. COST is the current cost. */
17167
c237e94a 17168static int
0a4f0294 17169rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 17170{
44cd321e 17171 enum attr_type attr_type;
a251ffd0 17172
44cd321e 17173 if (! recog_memoized (insn))
a251ffd0
TG
17174 return 0;
17175
44cd321e 17176 switch (REG_NOTE_KIND (link))
a251ffd0 17177 {
44cd321e
PS
17178 case REG_DEP_TRUE:
17179 {
17180 /* Data dependency; DEP_INSN writes a register that INSN reads
17181 some cycles later. */
17182
17183 /* Separate a load from a narrower, dependent store. */
17184 if (rs6000_sched_groups
17185 && GET_CODE (PATTERN (insn)) == SET
17186 && GET_CODE (PATTERN (dep_insn)) == SET
17187 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
17188 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
17189 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
17190 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
17191 return cost + 14;
17192
17193 attr_type = get_attr_type (insn);
17194
17195 switch (attr_type)
17196 {
17197 case TYPE_JMPREG:
17198 /* Tell the first scheduling pass about the latency between
17199 a mtctr and bctr (and mtlr and br/blr). The first
17200 scheduling pass will not know about this latency since
17201 the mtctr instruction, which has the latency associated
17202 to it, will be generated by reload. */
17203 return TARGET_POWER ? 5 : 4;
17204 case TYPE_BRANCH:
17205 /* Leave some extra cycles between a compare and its
17206 dependent branch, to inhibit expensive mispredicts. */
17207 if ((rs6000_cpu_attr == CPU_PPC603
17208 || rs6000_cpu_attr == CPU_PPC604
17209 || rs6000_cpu_attr == CPU_PPC604E
17210 || rs6000_cpu_attr == CPU_PPC620
17211 || rs6000_cpu_attr == CPU_PPC630
17212 || rs6000_cpu_attr == CPU_PPC750
17213 || rs6000_cpu_attr == CPU_PPC7400
17214 || rs6000_cpu_attr == CPU_PPC7450
17215 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
17216 || rs6000_cpu_attr == CPU_POWER5
17217 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
17218 && recog_memoized (dep_insn)
17219 && (INSN_CODE (dep_insn) >= 0))
982afe02 17220
44cd321e
PS
17221 switch (get_attr_type (dep_insn))
17222 {
17223 case TYPE_CMP:
17224 case TYPE_COMPARE:
17225 case TYPE_DELAYED_COMPARE:
17226 case TYPE_IMUL_COMPARE:
17227 case TYPE_LMUL_COMPARE:
17228 case TYPE_FPCOMPARE:
17229 case TYPE_CR_LOGICAL:
17230 case TYPE_DELAYED_CR:
17231 return cost + 2;
17232 default:
17233 break;
17234 }
17235 break;
17236
17237 case TYPE_STORE:
17238 case TYPE_STORE_U:
17239 case TYPE_STORE_UX:
17240 case TYPE_FPSTORE:
17241 case TYPE_FPSTORE_U:
17242 case TYPE_FPSTORE_UX:
17243 if ((rs6000_cpu == PROCESSOR_POWER6)
17244 && recog_memoized (dep_insn)
17245 && (INSN_CODE (dep_insn) >= 0))
17246 {
17247
17248 if (GET_CODE (PATTERN (insn)) != SET)
17249 /* If this happens, we have to extend this to schedule
17250 optimally. Return default for now. */
17251 return cost;
17252
17253 /* Adjust the cost for the case where the value written
17254 by a fixed point operation is used as the address
17255 gen value on a store. */
17256 switch (get_attr_type (dep_insn))
17257 {
17258 case TYPE_LOAD:
17259 case TYPE_LOAD_U:
17260 case TYPE_LOAD_UX:
17261 case TYPE_CNTLZ:
17262 {
17263 if (! store_data_bypass_p (dep_insn, insn))
17264 return 4;
17265 break;
17266 }
17267 case TYPE_LOAD_EXT:
17268 case TYPE_LOAD_EXT_U:
17269 case TYPE_LOAD_EXT_UX:
17270 case TYPE_VAR_SHIFT_ROTATE:
17271 case TYPE_VAR_DELAYED_COMPARE:
17272 {
17273 if (! store_data_bypass_p (dep_insn, insn))
17274 return 6;
17275 break;
17276 }
17277 case TYPE_INTEGER:
17278 case TYPE_COMPARE:
17279 case TYPE_FAST_COMPARE:
17280 case TYPE_EXTS:
17281 case TYPE_SHIFT:
17282 case TYPE_INSERT_WORD:
17283 case TYPE_INSERT_DWORD:
17284 case TYPE_FPLOAD_U:
17285 case TYPE_FPLOAD_UX:
17286 case TYPE_STORE_U:
17287 case TYPE_STORE_UX:
17288 case TYPE_FPSTORE_U:
17289 case TYPE_FPSTORE_UX:
17290 {
17291 if (! store_data_bypass_p (dep_insn, insn))
17292 return 3;
17293 break;
17294 }
17295 case TYPE_IMUL:
17296 case TYPE_IMUL2:
17297 case TYPE_IMUL3:
17298 case TYPE_LMUL:
17299 case TYPE_IMUL_COMPARE:
17300 case TYPE_LMUL_COMPARE:
17301 {
17302 if (! store_data_bypass_p (dep_insn, insn))
17303 return 17;
17304 break;
17305 }
17306 case TYPE_IDIV:
17307 {
17308 if (! store_data_bypass_p (dep_insn, insn))
17309 return 45;
17310 break;
17311 }
17312 case TYPE_LDIV:
17313 {
17314 if (! store_data_bypass_p (dep_insn, insn))
17315 return 57;
17316 break;
17317 }
17318 default:
17319 break;
17320 }
17321 }
17322 break;
17323
17324 case TYPE_LOAD:
17325 case TYPE_LOAD_U:
17326 case TYPE_LOAD_UX:
17327 case TYPE_LOAD_EXT:
17328 case TYPE_LOAD_EXT_U:
17329 case TYPE_LOAD_EXT_UX:
17330 if ((rs6000_cpu == PROCESSOR_POWER6)
17331 && recog_memoized (dep_insn)
17332 && (INSN_CODE (dep_insn) >= 0))
17333 {
17334
17335 /* Adjust the cost for the case where the value written
17336 by a fixed point instruction is used within the address
17337 gen portion of a subsequent load(u)(x). */
17338 switch (get_attr_type (dep_insn))
17339 {
17340 case TYPE_LOAD:
17341 case TYPE_LOAD_U:
17342 case TYPE_LOAD_UX:
17343 case TYPE_CNTLZ:
17344 {
17345 if (set_to_load_agen (dep_insn, insn))
17346 return 4;
17347 break;
17348 }
17349 case TYPE_LOAD_EXT:
17350 case TYPE_LOAD_EXT_U:
17351 case TYPE_LOAD_EXT_UX:
17352 case TYPE_VAR_SHIFT_ROTATE:
17353 case TYPE_VAR_DELAYED_COMPARE:
17354 {
17355 if (set_to_load_agen (dep_insn, insn))
17356 return 6;
17357 break;
17358 }
17359 case TYPE_INTEGER:
17360 case TYPE_COMPARE:
17361 case TYPE_FAST_COMPARE:
17362 case TYPE_EXTS:
17363 case TYPE_SHIFT:
17364 case TYPE_INSERT_WORD:
17365 case TYPE_INSERT_DWORD:
17366 case TYPE_FPLOAD_U:
17367 case TYPE_FPLOAD_UX:
17368 case TYPE_STORE_U:
17369 case TYPE_STORE_UX:
17370 case TYPE_FPSTORE_U:
17371 case TYPE_FPSTORE_UX:
17372 {
17373 if (set_to_load_agen (dep_insn, insn))
17374 return 3;
17375 break;
17376 }
17377 case TYPE_IMUL:
17378 case TYPE_IMUL2:
17379 case TYPE_IMUL3:
17380 case TYPE_LMUL:
17381 case TYPE_IMUL_COMPARE:
17382 case TYPE_LMUL_COMPARE:
17383 {
17384 if (set_to_load_agen (dep_insn, insn))
17385 return 17;
17386 break;
17387 }
17388 case TYPE_IDIV:
17389 {
17390 if (set_to_load_agen (dep_insn, insn))
17391 return 45;
17392 break;
17393 }
17394 case TYPE_LDIV:
17395 {
17396 if (set_to_load_agen (dep_insn, insn))
17397 return 57;
17398 break;
17399 }
17400 default:
17401 break;
17402 }
17403 }
17404 break;
17405
17406 case TYPE_FPLOAD:
17407 if ((rs6000_cpu == PROCESSOR_POWER6)
17408 && recog_memoized (dep_insn)
17409 && (INSN_CODE (dep_insn) >= 0)
17410 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
17411 return 2;
17412
17413 default:
17414 break;
17415 }
c9dbf840 17416
a251ffd0 17417 /* Fall out to return default cost. */
44cd321e
PS
17418 }
17419 break;
17420
17421 case REG_DEP_OUTPUT:
17422 /* Output dependency; DEP_INSN writes a register that INSN writes some
17423 cycles later. */
17424 if ((rs6000_cpu == PROCESSOR_POWER6)
17425 && recog_memoized (dep_insn)
17426 && (INSN_CODE (dep_insn) >= 0))
17427 {
17428 attr_type = get_attr_type (insn);
17429
17430 switch (attr_type)
17431 {
17432 case TYPE_FP:
17433 if (get_attr_type (dep_insn) == TYPE_FP)
17434 return 1;
17435 break;
17436 case TYPE_FPLOAD:
17437 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
17438 return 2;
17439 break;
17440 default:
17441 break;
17442 }
17443 }
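      /* No cost adjustment was returned above, so control falls through
	 and the output dependency is treated like an anti dependency.  */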
17444 case REG_DEP_ANTI:
17445 /* Anti dependency; DEP_INSN reads a register that INSN writes some
17446 cycles later. */
17447 return 0;
17448
17449 default:
17450 gcc_unreachable ();
a251ffd0
TG
17451 }
17452
17453 return cost;
17454}
b6c9286a 17455
cbe26ab8 17456/* Return true if INSN is microcoded.
839a4992 17457 Return false otherwise. */
cbe26ab8
DN
17458
17459static bool
17460is_microcoded_insn (rtx insn)
17461{
17462 if (!insn || !INSN_P (insn)
17463 || GET_CODE (PATTERN (insn)) == USE
17464 || GET_CODE (PATTERN (insn)) == CLOBBER)
17465 return false;
17466
d296e02e
AP
17467 if (rs6000_cpu_attr == CPU_CELL)
17468 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
17469
ec507f2d 17470 if (rs6000_sched_groups)
cbe26ab8
DN
17471 {
17472 enum attr_type type = get_attr_type (insn);
17473 if (type == TYPE_LOAD_EXT_U
17474 || type == TYPE_LOAD_EXT_UX
17475 || type == TYPE_LOAD_UX
17476 || type == TYPE_STORE_UX
17477 || type == TYPE_MFCR)
c4ad648e 17478 return true;
cbe26ab8
DN
17479 }
17480
17481 return false;
17482}
17483
cbe26ab8
DN
17484/* The function returns true if INSN is cracked into 2 instructions
17485 by the processor (and therefore occupies 2 issue slots). */
17486
17487static bool
17488is_cracked_insn (rtx insn)
17489{
17490 if (!insn || !INSN_P (insn)
17491 || GET_CODE (PATTERN (insn)) == USE
17492 || GET_CODE (PATTERN (insn)) == CLOBBER)
17493 return false;
17494
ec507f2d 17495 if (rs6000_sched_groups)
cbe26ab8
DN
17496 {
17497 enum attr_type type = get_attr_type (insn);
17498 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
17499 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
17500 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
17501 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
17502 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
17503 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
17504 || type == TYPE_IDIV || type == TYPE_LDIV
17505 || type == TYPE_INSERT_WORD)
17506 return true;
cbe26ab8
DN
17507 }
17508
17509 return false;
17510}
17511
17512/* The function returns true if INSN can be issued only from
a3c9585f 17513 the branch slot. */
cbe26ab8
DN
17514
17515static bool
17516is_branch_slot_insn (rtx insn)
17517{
17518 if (!insn || !INSN_P (insn)
17519 || GET_CODE (PATTERN (insn)) == USE
17520 || GET_CODE (PATTERN (insn)) == CLOBBER)
17521 return false;
17522
ec507f2d 17523 if (rs6000_sched_groups)
cbe26ab8
DN
17524 {
17525 enum attr_type type = get_attr_type (insn);
17526 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 17527 return true;
cbe26ab8
DN
17528 return false;
17529 }
17530
17531 return false;
17532}
79ae11c4 17533
44cd321e
PS
17534/* Return true if OUT_INSN sets a value that is
17535 used in the address generation computation of IN_INSN. */
17536static bool
17537set_to_load_agen (rtx out_insn, rtx in_insn)
17538{
17539 rtx out_set, in_set;
17540
17541 /* For performance reasons, only handle the simple case where
17542 both loads are a single_set. */
17543 out_set = single_set (out_insn);
17544 if (out_set)
17545 {
17546 in_set = single_set (in_insn);
17547 if (in_set)
17548 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
17549 }
17550
17551 return false;
17552}
17553
17554/* Return true if the memory location stored to by INSN1 is adjacent
17555 to the memory location stored to by INSN2. */
17557
17558static bool
17559adjacent_mem_locations (rtx insn1, rtx insn2)
17560{
17561
e3a0e200
PB
17562 rtx a = get_store_dest (PATTERN (insn1));
17563 rtx b = get_store_dest (PATTERN (insn2));
17564
44cd321e
PS
17565 if ((GET_CODE (XEXP (a, 0)) == REG
17566 || (GET_CODE (XEXP (a, 0)) == PLUS
17567 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
17568 && (GET_CODE (XEXP (b, 0)) == REG
17569 || (GET_CODE (XEXP (b, 0)) == PLUS
17570 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
17571 {
17572 HOST_WIDE_INT val0 = 0, val1 = 0;
17573 rtx reg0, reg1;
17574 int val_diff;
17575
17576 if (GET_CODE (XEXP (a, 0)) == PLUS)
17577 {
17578 reg0 = XEXP (XEXP (a, 0), 0);
17579 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
17580 }
17581 else
17582 reg0 = XEXP (a, 0);
17583
17584 if (GET_CODE (XEXP (b, 0)) == PLUS)
17585 {
17586 reg1 = XEXP (XEXP (b, 0), 0);
17587 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
17588 }
17589 else
17590 reg1 = XEXP (b, 0);
17591
17592 val_diff = val1 - val0;
17593
17594 return ((REGNO (reg0) == REGNO (reg1))
17595 && (val_diff == INTVAL (MEM_SIZE (a))
17596 || val_diff == -INTVAL (MEM_SIZE (b))));
17597 }
17598
17599 return false;
17600}
17601
a4f6c312 17602/* Update the integer scheduling priority INSN_PRIORITY (INSN).
79ae11c4
DN
17603 Increase the priority to execute INSN earlier, reduce the priority
17604 to execute INSN later. This hook only needs to adjust priorities
a4f6c312
SS
17605 when doing so helps the schedule; by default the incoming
17606 priority is returned unchanged. */
bef84347 17607
c237e94a 17608static int
a2369ed3 17609rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 17610{
a4f6c312
SS
17611 /* On machines (like the 750) which have asymmetric integer units,
17612 where one integer unit can do multiplies and divides and the other
17613 can't, reduce the priority of multiply/divide so it is scheduled
17614 before other integer operations. */
bef84347
VM
17615
17616#if 0
2c3c49de 17617 if (! INSN_P (insn))
bef84347
VM
17618 return priority;
17619
17620 if (GET_CODE (PATTERN (insn)) == USE)
17621 return priority;
17622
17623 switch (rs6000_cpu_attr) {
17624 case CPU_PPC750:
17625 switch (get_attr_type (insn))
17626 {
17627 default:
17628 break;
17629
17630 case TYPE_IMUL:
17631 case TYPE_IDIV:
3cb999d8
DE
17632 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
17633 priority, priority);
bef84347
VM
17634 if (priority >= 0 && priority < 0x01000000)
17635 priority >>= 3;
17636 break;
17637 }
17638 }
17639#endif
17640
44cd321e 17641 if (insn_must_be_first_in_group (insn)
79ae11c4 17642 && reload_completed
f676971a 17643 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
17644 && rs6000_sched_restricted_insns_priority)
17645 {
17646
c4ad648e
AM
17647 /* Prioritize insns that can be dispatched only in the first
17648 dispatch slot. */
79ae11c4 17649 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
17650 /* Attach highest priority to insn. This means that in
17651 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 17652 precede 'priority' (critical path) considerations. */
f676971a 17653 return current_sched_info->sched_max_insns_priority;
79ae11c4 17654 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 17655 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
17656 haifa-sched.c:ready_sort(), only 'priority' (critical path)
17657 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
17658 return (priority + 1);
17659 }
79ae11c4 17660
44cd321e
PS
17661 if (rs6000_cpu == PROCESSOR_POWER6
17662 && ((load_store_pendulum == -2 && is_load_insn (insn))
17663 || (load_store_pendulum == 2 && is_store_insn (insn))))
17664 /* Attach highest priority to insn if the scheduler has just issued two
17665 stores and this instruction is a load, or two loads and this instruction
17666 is a store. Power6 wants loads and stores scheduled alternately
17667 when possible */
17668 return current_sched_info->sched_max_insns_priority;
17669
bef84347
VM
17670 return priority;
17671}
17672
d296e02e
AP
17673/* Return true if the instruction is nonpipelined on the Cell. */
17674static bool
17675is_nonpipeline_insn (rtx insn)
17676{
17677 enum attr_type type;
17678 if (!insn || !INSN_P (insn)
17679 || GET_CODE (PATTERN (insn)) == USE
17680 || GET_CODE (PATTERN (insn)) == CLOBBER)
17681 return false;
17682
17683 type = get_attr_type (insn);
17684 if (type == TYPE_IMUL
17685 || type == TYPE_IMUL2
17686 || type == TYPE_IMUL3
17687 || type == TYPE_LMUL
17688 || type == TYPE_IDIV
17689 || type == TYPE_LDIV
17690 || type == TYPE_SDIV
17691 || type == TYPE_DDIV
17692 || type == TYPE_SSQRT
17693 || type == TYPE_DSQRT
17694 || type == TYPE_MFCR
17695 || type == TYPE_MFCRF
17696 || type == TYPE_MFJMPR)
17697 {
17698 return true;
17699 }
17700 return false;
17701}
17702
17703
a4f6c312
SS
17704/* Return how many instructions the machine can issue per cycle. */
17705
c237e94a 17706static int
863d938c 17707rs6000_issue_rate (void)
b6c9286a 17708{
3317bab1
DE
17709 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
17710 if (!reload_completed)
17711 return 1;
17712
b6c9286a 17713 switch (rs6000_cpu_attr) {
3cb999d8
DE
17714 case CPU_RIOS1: /* ? */
17715 case CPU_RS64A:
17716 case CPU_PPC601: /* ? */
ed947a96 17717 case CPU_PPC7450:
3cb999d8 17718 return 3;
b54cf83a 17719 case CPU_PPC440:
b6c9286a 17720 case CPU_PPC603:
bef84347 17721 case CPU_PPC750:
ed947a96 17722 case CPU_PPC7400:
be12c2b0 17723 case CPU_PPC8540:
d296e02e 17724 case CPU_CELL:
f676971a 17725 return 2;
3cb999d8 17726 case CPU_RIOS2:
b6c9286a 17727 case CPU_PPC604:
19684119 17728 case CPU_PPC604E:
b6c9286a 17729 case CPU_PPC620:
3cb999d8 17730 case CPU_PPC630:
b6c9286a 17731 return 4;
cbe26ab8 17732 case CPU_POWER4:
ec507f2d 17733 case CPU_POWER5:
44cd321e 17734 case CPU_POWER6:
cbe26ab8 17735 return 5;
b6c9286a
MM
17736 default:
17737 return 1;
17738 }
17739}
17740
be12c2b0
VM
17741/* Return how many instructions to look ahead for better insn
17742 scheduling. */
17743
17744static int
863d938c 17745rs6000_use_sched_lookahead (void)
be12c2b0
VM
17746{
17747 if (rs6000_cpu_attr == CPU_PPC8540)
17748 return 4;
d296e02e
AP
17749 if (rs6000_cpu_attr == CPU_CELL)
17750 return (reload_completed ? 8 : 0);
be12c2b0
VM
17751 return 0;
17752}
17753
d296e02e
AP
17754/* We are choosing an insn from the ready queue. Return nonzero if INSN can be chosen. */
17755static int
17756rs6000_use_sched_lookahead_guard (rtx insn)
17757{
17758 if (rs6000_cpu_attr != CPU_CELL)
17759 return 1;
17760
17761 gcc_assert (insn != NULL_RTX && INSN_P (insn));
982afe02 17763
d296e02e
AP
17764 if (!reload_completed
17765 || is_nonpipeline_insn (insn)
17766 || is_microcoded_insn (insn))
17767 return 0;
17768
17769 return 1;
17770}
17771
569fa502
DN
17772/* Determine if PAT refers to memory. */
17773
17774static bool
17775is_mem_ref (rtx pat)
17776{
17777 const char * fmt;
17778 int i, j;
17779 bool ret = false;
17780
17781 if (GET_CODE (pat) == MEM)
17782 return true;
17783
17784 /* Recursively process the pattern. */
17785 fmt = GET_RTX_FORMAT (GET_CODE (pat));
17786
17787 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
17788 {
17789 if (fmt[i] == 'e')
17790 ret |= is_mem_ref (XEXP (pat, i));
17791 else if (fmt[i] == 'E')
17792 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
17793 ret |= is_mem_ref (XVECEXP (pat, i, j));
17794 }
17795
17796 return ret;
17797}
17798
17799/* Determine if PAT is a PATTERN of a load insn. */
f676971a 17800
569fa502
DN
17801static bool
17802is_load_insn1 (rtx pat)
17803{
17804 if (!pat || pat == NULL_RTX)
17805 return false;
17806
17807 if (GET_CODE (pat) == SET)
17808 return is_mem_ref (SET_SRC (pat));
17809
17810 if (GET_CODE (pat) == PARALLEL)
17811 {
17812 int i;
17813
17814 for (i = 0; i < XVECLEN (pat, 0); i++)
17815 if (is_load_insn1 (XVECEXP (pat, 0, i)))
17816 return true;
17817 }
17818
17819 return false;
17820}
17821
17822/* Determine if INSN loads from memory. */
17823
17824static bool
17825is_load_insn (rtx insn)
17826{
17827 if (!insn || !INSN_P (insn))
17828 return false;
17829
17830 if (GET_CODE (insn) == CALL_INSN)
17831 return false;
17832
17833 return is_load_insn1 (PATTERN (insn));
17834}
17835
17836/* Determine if PAT is a PATTERN of a store insn. */
17837
17838static bool
17839is_store_insn1 (rtx pat)
17840{
17841 if (!pat || pat == NULL_RTX)
17842 return false;
17843
17844 if (GET_CODE (pat) == SET)
17845 return is_mem_ref (SET_DEST (pat));
17846
17847 if (GET_CODE (pat) == PARALLEL)
17848 {
17849 int i;
17850
17851 for (i = 0; i < XVECLEN (pat, 0); i++)
17852 if (is_store_insn1 (XVECEXP (pat, 0, i)))
17853 return true;
17854 }
17855
17856 return false;
17857}
17858
17859/* Determine if INSN stores to memory. */
17860
17861static bool
17862is_store_insn (rtx insn)
17863{
17864 if (!insn || !INSN_P (insn))
17865 return false;
17866
17867 return is_store_insn1 (PATTERN (insn));
17868}
17869
e3a0e200
PB
17870/* Return the dest of a store insn. */
17871
17872static rtx
17873get_store_dest (rtx pat)
17874{
17875 gcc_assert (is_store_insn1 (pat));
17876
17877 if (GET_CODE (pat) == SET)
17878 return SET_DEST (pat);
17879 else if (GET_CODE (pat) == PARALLEL)
17880 {
17881 int i;
17882
17883 for (i = 0; i < XVECLEN (pat, 0); i++)
17884 {
17885 rtx inner_pat = XVECEXP (pat, 0, i);
17886 if (GET_CODE (inner_pat) == SET
17887 && is_mem_ref (SET_DEST (inner_pat)))
17888 return inner_pat;
17889 }
17890 }
17891 /* We shouldn't get here, because we should have either a simple
17892 store insn or a store with update which are covered above. */
17893 gcc_unreachable ();
17894}
17895
569fa502
DN
17896/* Returns whether the dependence between INSN and NEXT is considered
17897 costly by the given target. */
17898
17899static bool
b198261f 17900rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 17901{
b198261f
MK
17902 rtx insn;
17903 rtx next;
17904
aabcd309 17905 /* If the flag is not enabled, no dependence is considered costly;
f676971a 17906 allow all dependent insns in the same group.
569fa502
DN
17907 This is the most aggressive option. */
17908 if (rs6000_sched_costly_dep == no_dep_costly)
17909 return false;
17910
f676971a 17911 /* If the flag is set to 1, a dependence is always considered costly;
569fa502
DN
17912 do not allow dependent instructions in the same group.
17913 This is the most conservative option. */
17914 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 17915 return true;
569fa502 17916
b198261f
MK
17917 insn = DEP_PRO (dep);
17918 next = DEP_CON (dep);
17919
f676971a
EC
17920 if (rs6000_sched_costly_dep == store_to_load_dep_costly
17921 && is_load_insn (next)
569fa502
DN
17922 && is_store_insn (insn))
17923 /* Prevent load after store in the same group. */
17924 return true;
17925
17926 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 17927 && is_load_insn (next)
569fa502 17928 && is_store_insn (insn)
b198261f 17929 && DEP_KIND (dep) == REG_DEP_TRUE)
c4ad648e
AM
17930 /* Prevent load after store in the same group if it is a true
17931 dependence. */
569fa502 17932 return true;
f676971a
EC
17933
17934 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
17935 and will not be scheduled in the same group. */
17936 if (rs6000_sched_costly_dep <= max_dep_latency
17937 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
17938 return true;
17939
17940 return false;
17941}
17942
f676971a 17943/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
17944 skipping any "non-active" insns - insns that will not actually occupy
17945 an issue slot. Return NULL_RTX if such an insn is not found. */
17946
17947static rtx
17948get_next_active_insn (rtx insn, rtx tail)
17949{
f489aff8 17950 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
17951 return NULL_RTX;
17952
f489aff8 17953 while (1)
cbe26ab8 17954 {
f489aff8
AM
17955 insn = NEXT_INSN (insn);
17956 if (insn == NULL_RTX || insn == tail)
17957 return NULL_RTX;
cbe26ab8 17958
f489aff8
AM
17959 if (CALL_P (insn)
17960 || JUMP_P (insn)
17961 || (NONJUMP_INSN_P (insn)
17962 && GET_CODE (PATTERN (insn)) != USE
17963 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 17964 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
17965 break;
17966 }
17967 return insn;
cbe26ab8
DN
17968}
17969
44cd321e
PS
17970/* We are about to begin issuing insns for this clock cycle. */
17971
17972static int
17973rs6000_sched_reorder (FILE *dump, int sched_verbose,
17974 rtx *ready,
17975 int *pn_ready,
17976 int clock_var ATTRIBUTE_UNUSED)
17977{
d296e02e
AP
17978 int n_ready = *pn_ready;
17979
44cd321e
PS
17980 if (sched_verbose)
17981 fprintf (dump, "// rs6000_sched_reorder :\n");
17982
d296e02e
AP
17983 /* Reorder the ready list, if the second to last ready insn
17984 is a nonpipelined insn. */
17985 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
17986 {
17987 if (is_nonpipeline_insn (ready[n_ready - 1])
17988 && (recog_memoized (ready[n_ready - 2]) > 0))
17989 /* Simply swap first two insns. */
17990 {
17991 rtx tmp = ready[n_ready - 1];
17992 ready[n_ready - 1] = ready[n_ready - 2];
17993 ready[n_ready - 2] = tmp;
17994 }
17995 }
17996
44cd321e
PS
17997 if (rs6000_cpu == PROCESSOR_POWER6)
17998 load_store_pendulum = 0;
17999
18000 return rs6000_issue_rate ();
18001}
18002
18003/* Like rs6000_sched_reorder, but called after issuing each insn. */
18004
18005static int
18006rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
18007 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
18008{
18009 if (sched_verbose)
18010 fprintf (dump, "// rs6000_sched_reorder2 :\n");
18011
18012 /* For Power6, we need to handle some special cases to try and keep the
18013 store queue from overflowing and triggering expensive flushes.
18014
18015 This code monitors how load and store instructions are being issued
18016 and skews the ready list one way or the other to increase the likelihood
18017 that a desired instruction is issued at the proper time.
18018
18019 A couple of things are done. First, we maintain a "load_store_pendulum"
18020 to track the current state of load/store issue.
18021
18022 - If the pendulum is at zero, then no loads or stores have been
18023 issued in the current cycle so we do nothing.
18024
18025 - If the pendulum is 1, then a single load has been issued in this
18026 cycle and we attempt to locate another load in the ready list to
18027 issue with it.
18028
2f8e468b 18029 - If the pendulum is -2, then two stores have already been
44cd321e
PS
18030 issued in this cycle, so we increase the priority of the first load
18031 in the ready list to increase its likelihood of being chosen first
18032 in the next cycle.
18033
18034 - If the pendulum is -1, then a single store has been issued in this
18035 cycle and we attempt to locate another store in the ready list to
18036 issue with it, preferring a store to an adjacent memory location to
18037 facilitate store pairing in the store queue.
18038
18039 - If the pendulum is 2, then two loads have already been
18040 issued in this cycle, so we increase the priority of the first store
18041 in the ready list to increase its likelihood of being chosen first
18042 in the next cycle.
18043
18044 - If the pendulum < -2 or > 2, then do nothing.
18045
18046 Note: This code covers the most common scenarios. There exist non
18047 load/store instructions which make use of the LSU and which
18048 would need to be accounted for to strictly model the behavior
18049 of the machine. Those instructions are currently unaccounted
18050 for to help minimize compile time overhead of this code.
18051 */
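  /* Illustrative walk-through: two loads issued in one cycle drive the
     pendulum to 2, so the first store on the ready list gets a priority
     bump and is more likely to be picked in the next cycle, keeping
     loads and stores roughly alternating.  */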
18052 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
18053 {
18054 int pos;
18055 int i;
18056 rtx tmp;
18057
18058 if (is_store_insn (last_scheduled_insn))
18059 /* Issuing a store, swing the load_store_pendulum to the left */
18060 load_store_pendulum--;
18061 else if (is_load_insn (last_scheduled_insn))
18062 /* Issuing a load, swing the load_store_pendulum to the right */
18063 load_store_pendulum++;
18064 else
18065 return cached_can_issue_more;
18066
18067 /* If the pendulum is balanced, or there is only one instruction on
18068 the ready list, then all is well, so return. */
18069 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
18070 return cached_can_issue_more;
18071
18072 if (load_store_pendulum == 1)
18073 {
18074 /* A load has been issued in this cycle. Scan the ready list
18075 for another load to issue with it */
18076 pos = *pn_ready-1;
18077
18078 while (pos >= 0)
18079 {
18080 if (is_load_insn (ready[pos]))
18081 {
18082 /* Found a load. Move it to the head of the ready list,
18083 and adjust its priority so that it is more likely to
18084 stay there. */
18085 tmp = ready[pos];
18086 for (i=pos; i<*pn_ready-1; i++)
18087 ready[i] = ready[i + 1];
18088 ready[*pn_ready-1] = tmp;
18089 if (INSN_PRIORITY_KNOWN (tmp))
18090 INSN_PRIORITY (tmp)++;
18091 break;
18092 }
18093 pos--;
18094 }
18095 }
18096 else if (load_store_pendulum == -2)
18097 {
18098 /* Two stores have been issued in this cycle. Increase the
18099 priority of the first load in the ready list to favor it for
18100 issuing in the next cycle. */
18101 pos = *pn_ready-1;
18102
18103 while (pos >= 0)
18104 {
18105 if (is_load_insn (ready[pos])
18106 && INSN_PRIORITY_KNOWN (ready[pos]))
18107 {
18108 INSN_PRIORITY (ready[pos])++;
18109
18110 /* Adjust the pendulum to account for the fact that a load
18111 was found and increased in priority. This is to prevent
18112 increasing the priority of multiple loads */
18113 load_store_pendulum--;
18114
18115 break;
18116 }
18117 pos--;
18118 }
18119 }
18120 else if (load_store_pendulum == -1)
18121 {
18122 /* A store has been issued in this cycle. Scan the ready list for
18123 another store to issue with it, preferring a store to an adjacent
18124 memory location */
18125 int first_store_pos = -1;
18126
18127 pos = *pn_ready-1;
18128
18129 while (pos >= 0)
18130 {
18131 if (is_store_insn (ready[pos]))
18132 {
18133 /* Maintain the index of the first store found on the
18134 list */
18135 if (first_store_pos == -1)
18136 first_store_pos = pos;
18137
18138 if (is_store_insn (last_scheduled_insn)
18139 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
18140 {
18141 /* Found an adjacent store. Move it to the head of the
18142 ready list, and adjust its priority so that it is
18143 more likely to stay there. */
18144 tmp = ready[pos];
18145 for (i=pos; i<*pn_ready-1; i++)
18146 ready[i] = ready[i + 1];
18147 ready[*pn_ready-1] = tmp;
18148 if (INSN_PRIORITY_KNOWN (tmp))
18149 INSN_PRIORITY (tmp)++;
18150 first_store_pos = -1;
18151
18152 break;
18153 }
18154 }
18155 pos--;
18156 }
18157
18158 if (first_store_pos >= 0)
18159 {
18160 /* An adjacent store wasn't found, but a non-adjacent store was,
18161 so move the non-adjacent store to the front of the ready
18162 list, and adjust its priority so that it is more likely to
18163 stay there. */
18164 tmp = ready[first_store_pos];
18165 for (i=first_store_pos; i<*pn_ready-1; i++)
18166 ready[i] = ready[i + 1];
18167 ready[*pn_ready-1] = tmp;
18168 if (INSN_PRIORITY_KNOWN (tmp))
18169 INSN_PRIORITY (tmp)++;
18170 }
18171 }
18172 else if (load_store_pendulum == 2)
18173 {
18174 /* Two loads have been issued in this cycle. Increase the priority
18175 of the first store in the ready list to favor it for issuing in
18176 the next cycle. */
18177 pos = *pn_ready-1;
18178
18179 while (pos >= 0)
18180 {
18181 if (is_store_insn (ready[pos])
18182 && INSN_PRIORITY_KNOWN (ready[pos]))
18183 {
18184 INSN_PRIORITY (ready[pos])++;
18185
18186 /* Adjust the pendulum to account for the fact that a store
18187 was found and increased in priority. This is to prevent
18188 increasing the priority of multiple stores */
18189 load_store_pendulum++;
18190
18191 break;
18192 }
18193 pos--;
18194 }
18195 }
18196 }
18197
18198 return cached_can_issue_more;
18199}
18200
839a4992 18201/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
18202 of group WHICH_GROUP.
18203
18204 If WHICH_GROUP == current_group, this function will return true if INSN
18205 causes the termination of the current group (i.e., the dispatch group to
18206 which INSN belongs). This means that INSN will be the last insn in the
18207 group it belongs to.
18208
18209 If WHICH_GROUP == previous_group, this function will return true if INSN
18210 causes the termination of the previous group (i.e., the dispatch group that
18211 precedes the group to which INSN belongs). This means that INSN will be
18212 the first insn in the group it belongs to. */
18213
18214static bool
18215insn_terminates_group_p (rtx insn, enum group_termination which_group)
18216{
44cd321e 18217 bool first, last;
cbe26ab8
DN
18218
18219 if (! insn)
18220 return false;
569fa502 18221
44cd321e
PS
18222 first = insn_must_be_first_in_group (insn);
18223 last = insn_must_be_last_in_group (insn);
cbe26ab8 18224
44cd321e 18225 if (first && last)
cbe26ab8
DN
18226 return true;
18227
18228 if (which_group == current_group)
44cd321e 18229 return last;
cbe26ab8 18230 else if (which_group == previous_group)
44cd321e
PS
18231 return first;
18232
18233 return false;
18234}
18235
18236
18237static bool
18238insn_must_be_first_in_group (rtx insn)
18239{
18240 enum attr_type type;
18241
18242 if (!insn
18243 || insn == NULL_RTX
18244 || GET_CODE (insn) == NOTE
18245 || GET_CODE (PATTERN (insn)) == USE
18246 || GET_CODE (PATTERN (insn)) == CLOBBER)
18247 return false;
18248
18249 switch (rs6000_cpu)
cbe26ab8 18250 {
44cd321e
PS
18251 case PROCESSOR_POWER5:
18252 if (is_cracked_insn (insn))
18253 return true;
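      /* POWER5 also performs the POWER4 checks below; fall through.  */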
18254 case PROCESSOR_POWER4:
18255 if (is_microcoded_insn (insn))
18256 return true;
18257
18258 if (!rs6000_sched_groups)
18259 return false;
18260
18261 type = get_attr_type (insn);
18262
18263 switch (type)
18264 {
18265 case TYPE_MFCR:
18266 case TYPE_MFCRF:
18267 case TYPE_MTCR:
18268 case TYPE_DELAYED_CR:
18269 case TYPE_CR_LOGICAL:
18270 case TYPE_MTJMPR:
18271 case TYPE_MFJMPR:
18272 case TYPE_IDIV:
18273 case TYPE_LDIV:
18274 case TYPE_LOAD_L:
18275 case TYPE_STORE_C:
18276 case TYPE_ISYNC:
18277 case TYPE_SYNC:
18278 return true;
18279 default:
18280 break;
18281 }
18282 break;
18283 case PROCESSOR_POWER6:
18284 type = get_attr_type (insn);
18285
18286 switch (type)
18287 {
18288 case TYPE_INSERT_DWORD:
18289 case TYPE_EXTS:
18290 case TYPE_CNTLZ:
18291 case TYPE_SHIFT:
18292 case TYPE_VAR_SHIFT_ROTATE:
18293 case TYPE_TRAP:
18294 case TYPE_IMUL:
18295 case TYPE_IMUL2:
18296 case TYPE_IMUL3:
18297 case TYPE_LMUL:
18298 case TYPE_IDIV:
18299 case TYPE_INSERT_WORD:
18300 case TYPE_DELAYED_COMPARE:
18301 case TYPE_IMUL_COMPARE:
18302 case TYPE_LMUL_COMPARE:
18303 case TYPE_FPCOMPARE:
18304 case TYPE_MFCR:
18305 case TYPE_MTCR:
18306 case TYPE_MFJMPR:
18307 case TYPE_MTJMPR:
18308 case TYPE_ISYNC:
18309 case TYPE_SYNC:
18310 case TYPE_LOAD_L:
18311 case TYPE_STORE_C:
18312 case TYPE_LOAD_U:
18313 case TYPE_LOAD_UX:
18314 case TYPE_LOAD_EXT_UX:
18315 case TYPE_STORE_U:
18316 case TYPE_STORE_UX:
18317 case TYPE_FPLOAD_U:
18318 case TYPE_FPLOAD_UX:
18319 case TYPE_FPSTORE_U:
18320 case TYPE_FPSTORE_UX:
18321 return true;
18322 default:
18323 break;
18324 }
18325 break;
18326 default:
18327 break;
18328 }
18329
18330 return false;
18331}
18332
18333static bool
18334insn_must_be_last_in_group (rtx insn)
18335{
18336 enum attr_type type;
18337
18338 if (!insn
18339 || insn == NULL_RTX
18340 || GET_CODE (insn) == NOTE
18341 || GET_CODE (PATTERN (insn)) == USE
18342 || GET_CODE (PATTERN (insn)) == CLOBBER)
18343 return false;
18344
18345 switch (rs6000_cpu) {
18346 case PROCESSOR_POWER4:
18347 case PROCESSOR_POWER5:
18348 if (is_microcoded_insn (insn))
18349 return true;
18350
18351 if (is_branch_slot_insn (insn))
18352 return true;
18353
18354 break;
18355 case PROCESSOR_POWER6:
18356 type = get_attr_type (insn);
18357
18358 switch (type)
18359 {
18360 case TYPE_EXTS:
18361 case TYPE_CNTLZ:
18362 case TYPE_SHIFT:
18363 case TYPE_VAR_SHIFT_ROTATE:
18364 case TYPE_TRAP:
18365 case TYPE_IMUL:
18366 case TYPE_IMUL2:
18367 case TYPE_IMUL3:
18368 case TYPE_LMUL:
18369 case TYPE_IDIV:
18370 case TYPE_DELAYED_COMPARE:
18371 case TYPE_IMUL_COMPARE:
18372 case TYPE_LMUL_COMPARE:
18373 case TYPE_FPCOMPARE:
18374 case TYPE_MFCR:
18375 case TYPE_MTCR:
18376 case TYPE_MFJMPR:
18377 case TYPE_MTJMPR:
18378 case TYPE_ISYNC:
18379 case TYPE_SYNC:
18380 case TYPE_LOAD_L:
18381 case TYPE_STORE_C:
18382 return true;
18383 default:
18384 break;
cbe26ab8 18385 }
44cd321e
PS
18386 break;
18387 default:
18388 break;
18389 }
cbe26ab8
DN
18390
18391 return false;
18392}
18393
839a4992 18394/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
18395 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
18396
18397static bool
18398is_costly_group (rtx *group_insns, rtx next_insn)
18399{
18400 int i;
cbe26ab8
DN
18401 int issue_rate = rs6000_issue_rate ();
18402
18403 for (i = 0; i < issue_rate; i++)
18404 {
b198261f 18405 dep_link_t link;
cbe26ab8 18406 rtx insn = group_insns[i];
b198261f 18407
cbe26ab8 18408 if (!insn)
c4ad648e 18409 continue;
b198261f
MK
18410
18411 FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
c4ad648e 18412 {
b198261f
MK
18413 dep_t dep = DEP_LINK_DEP (link);
18414 rtx next = DEP_CON (dep);
18415
18416 if (next == next_insn
18417 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
18418 return true;
c4ad648e 18419 }
cbe26ab8
DN
18420 }
18421
18422 return false;
18423}
18424
f676971a 18425/* Utility function used by redefine_groups.
cbe26ab8
DN
18426 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
18427 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
18428 to keep it "far" (in a separate group) from GROUP_INSNS, following
18429 one of the following schemes, depending on the value of the flag
18430 -minsert-sched-nops=X:
18431 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 18432 in order to force NEXT_INSN into a separate group.
f676971a
EC
18433 (2) X < sched_finish_regroup_exact: insert exactly X nops.
18434 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
18435 insertion (whether a group has just ended, how many vacant issue slots remain in the
18436 last group, and how many dispatch groups were encountered so far). */
18437
f676971a 18438static int
c4ad648e
AM
18439force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
18440 rtx next_insn, bool *group_end, int can_issue_more,
18441 int *group_count)
cbe26ab8
DN
18442{
18443 rtx nop;
18444 bool force;
18445 int issue_rate = rs6000_issue_rate ();
18446 bool end = *group_end;
18447 int i;
18448
18449 if (next_insn == NULL_RTX)
18450 return can_issue_more;
18451
18452 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
18453 return can_issue_more;
18454
18455 force = is_costly_group (group_insns, next_insn);
18456 if (!force)
18457 return can_issue_more;
18458
18459 if (sched_verbose > 6)
18460 fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 18461 *group_count, can_issue_more);
cbe26ab8
DN
18462
18463 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
18464 {
18465 if (*group_end)
c4ad648e 18466 can_issue_more = 0;
cbe26ab8
DN
18467
18468 /* Since only a branch can be issued in the last issue_slot, it is
18469 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
18470 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
18471 in this case the last nop will start a new group and the branch
18472 will be forced to the new group. */
cbe26ab8 18473 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 18474 can_issue_more--;
cbe26ab8
DN
18475
18476 while (can_issue_more > 0)
c4ad648e 18477 {
9390387d 18478 nop = gen_nop ();
c4ad648e
AM
18479 emit_insn_before (nop, next_insn);
18480 can_issue_more--;
18481 }
cbe26ab8
DN
18482
18483 *group_end = true;
18484 return 0;
f676971a 18485 }
cbe26ab8
DN
18486
18487 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
18488 {
18489 int n_nops = rs6000_sched_insert_nops;
18490
f676971a 18491 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 18492 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 18493 if (can_issue_more == 0)
c4ad648e 18494 can_issue_more = issue_rate;
cbe26ab8
DN
18495 can_issue_more--;
18496 if (can_issue_more == 0)
c4ad648e
AM
18497 {
18498 can_issue_more = issue_rate - 1;
18499 (*group_count)++;
18500 end = true;
18501 for (i = 0; i < issue_rate; i++)
18502 {
18503 group_insns[i] = 0;
18504 }
18505 }
cbe26ab8
DN
18506
18507 while (n_nops > 0)
c4ad648e
AM
18508 {
18509 nop = gen_nop ();
18510 emit_insn_before (nop, next_insn);
18511 if (can_issue_more == issue_rate - 1) /* new group begins */
18512 end = false;
18513 can_issue_more--;
18514 if (can_issue_more == 0)
18515 {
18516 can_issue_more = issue_rate - 1;
18517 (*group_count)++;
18518 end = true;
18519 for (i = 0; i < issue_rate; i++)
18520 {
18521 group_insns[i] = 0;
18522 }
18523 }
18524 n_nops--;
18525 }
cbe26ab8
DN
18526
18527 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 18528 can_issue_more++;
cbe26ab8 18529
c4ad648e
AM
18530 /* Is next_insn going to start a new group? */
18531 *group_end
18532 = (end
cbe26ab8
DN
18533 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18534 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18535 || (can_issue_more < issue_rate &&
c4ad648e 18536 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18537 if (*group_end && end)
c4ad648e 18538 (*group_count)--;
cbe26ab8
DN
18539
18540 if (sched_verbose > 6)
c4ad648e
AM
18541 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
18542 *group_count, can_issue_more);
f676971a
EC
18543 return can_issue_more;
18544 }
cbe26ab8
DN
18545
18546 return can_issue_more;
18547}
18548
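/* Illustrative sketch, added for exposition and not used by the port
   (the function name and parameters below are made up):  the
   sched_finish_regroup_exact path of force_new_group above emits one nop
   per vacant issue slot, except that a non-branch next insn can never
   occupy the final, branch-only slot, so one fewer nop suffices.  The
   helper just restates that arithmetic.  */

static int
exact_regroup_nop_count (int vacant_slots, int next_insn_is_branch)
{
  if (vacant_slots > 0 && !next_insn_is_branch)
    /* The branch slot cannot be used by a non-branch anyway, so no nop
       is needed to fill it.  */
    vacant_slots--;
  return vacant_slots;
}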
18549/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 18550 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
18551 form in practice. It tries to achieve this synchronization by forcing the
18552 estimated processor grouping on the compiler (as opposed to the function
18553 'pad_groups' which tries to force the scheduler's grouping on the processor).
18554
18555 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
18556 examines the (estimated) dispatch groups that will be formed by the processor
18557 dispatcher. It marks these group boundaries to reflect the estimated
18558 processor grouping, overriding the grouping that the scheduler had marked.
18559 Depending on the value of the flag '-minsert-sched-nops' this function can
18560 force certain insns into separate groups or force a certain distance between
18561 them by inserting nops, for example, if there exists a "costly dependence"
18562 between the insns.
18563
18564 The function estimates the group boundaries that the processor will form as
0fa2e4df 18565 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
18566 each insn. A subsequent insn will start a new group if one of the following
18567 4 cases applies:
18568 - no more vacant issue slots remain in the current dispatch group.
18569 - only the last issue slot, which is the branch slot, is vacant, but the next
18570 insn is not a branch.
18571 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
18572 which means that a cracked insn (which occupies two issue slots) can't be
18573 issued in this group.
f676971a 18574 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
18575 start a new group. */
18576
18577static int
18578redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18579{
18580 rtx insn, next_insn;
18581 int issue_rate;
18582 int can_issue_more;
18583 int slot, i;
18584 bool group_end;
18585 int group_count = 0;
18586 rtx *group_insns;
18587
18588 /* Initialize. */
18589 issue_rate = rs6000_issue_rate ();
18590 group_insns = alloca (issue_rate * sizeof (rtx));
f676971a 18591 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
18592 {
18593 group_insns[i] = 0;
18594 }
18595 can_issue_more = issue_rate;
18596 slot = 0;
18597 insn = get_next_active_insn (prev_head_insn, tail);
18598 group_end = false;
18599
18600 while (insn != NULL_RTX)
18601 {
18602 slot = (issue_rate - can_issue_more);
18603 group_insns[slot] = insn;
18604 can_issue_more =
c4ad648e 18605 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 18606 if (insn_terminates_group_p (insn, current_group))
c4ad648e 18607 can_issue_more = 0;
cbe26ab8
DN
18608
18609 next_insn = get_next_active_insn (insn, tail);
18610 if (next_insn == NULL_RTX)
c4ad648e 18611 return group_count + 1;
cbe26ab8 18612
c4ad648e
AM
18613 /* Is next_insn going to start a new group? */
18614 group_end
18615 = (can_issue_more == 0
18616 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
18617 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
18618 || (can_issue_more < issue_rate &&
18619 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 18620
f676971a 18621 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
18622 next_insn, &group_end, can_issue_more,
18623 &group_count);
cbe26ab8
DN
18624
18625 if (group_end)
c4ad648e
AM
18626 {
18627 group_count++;
18628 can_issue_more = 0;
18629 for (i = 0; i < issue_rate; i++)
18630 {
18631 group_insns[i] = 0;
18632 }
18633 }
cbe26ab8
DN
18634
18635 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 18636 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 18637 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 18638 PUT_MODE (next_insn, TImode);
cbe26ab8
DN
18639
18640 insn = next_insn;
18641 if (can_issue_more == 0)
c4ad648e
AM
18642 can_issue_more = issue_rate;
18643 } /* while */
cbe26ab8
DN
18644
18645 return group_count;
18646}
18647
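/* Illustrative sketch, added for exposition and not used by the port
   (the name and parameters below are made up): the group_end test
   computed inside redefine_groups above boils down to this predicate,
   where vacant_slots is the number of issue slots still free in the
   current dispatch group.  */

static int
starts_new_dispatch_group_p (int vacant_slots, int issue_rate,
                             int next_is_branch, int next_is_cracked,
                             int next_must_be_first)
{
  return (vacant_slots == 0
          || (vacant_slots == 1 && !next_is_branch)
          || (vacant_slots <= 2 && next_is_cracked)
          || (vacant_slots < issue_rate && next_must_be_first));
}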
18648/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
18649 dispatch group boundaries that the scheduler had marked. Pad with nops
18650 any dispatch groups which have vacant issue slots, in order to force the
18651 scheduler's grouping on the processor dispatcher. The function
18652 returns the number of dispatch groups found. */
18653
18654static int
18655pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
18656{
18657 rtx insn, next_insn;
18658 rtx nop;
18659 int issue_rate;
18660 int can_issue_more;
18661 int group_end;
18662 int group_count = 0;
18663
18664 /* Initialize issue_rate. */
18665 issue_rate = rs6000_issue_rate ();
18666 can_issue_more = issue_rate;
18667
18668 insn = get_next_active_insn (prev_head_insn, tail);
18669 next_insn = get_next_active_insn (insn, tail);
18670
18671 while (insn != NULL_RTX)
18672 {
18673 can_issue_more =
18674 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
18675
18676 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
18677
18678 if (next_insn == NULL_RTX)
c4ad648e 18679 break;
cbe26ab8
DN
18680
18681 if (group_end)
c4ad648e
AM
18682 {
18683 /* If the scheduler had marked group termination at this location
18684 (between insn and next_insn), and neither insn nor next_insn will
18685 force group termination, pad the group with nops to force group
18686 termination. */
18687 if (can_issue_more
18688 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
18689 && !insn_terminates_group_p (insn, current_group)
18690 && !insn_terminates_group_p (next_insn, previous_group))
18691 {
9390387d 18692 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
18693 can_issue_more--;
18694
18695 while (can_issue_more)
18696 {
18697 nop = gen_nop ();
18698 emit_insn_before (nop, next_insn);
18699 can_issue_more--;
18700 }
18701 }
18702
18703 can_issue_more = issue_rate;
18704 group_count++;
18705 }
cbe26ab8
DN
18706
18707 insn = next_insn;
18708 next_insn = get_next_active_insn (insn, tail);
18709 }
18710
18711 return group_count;
18712}
18713
44cd321e
PS
18714/* We're beginning a new block. Initialize data structures as necessary. */
18715
18716static void
18717rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
18718 int sched_verbose ATTRIBUTE_UNUSED,
18719 int max_ready ATTRIBUTE_UNUSED)
982afe02 18720{
44cd321e
PS
18721 last_scheduled_insn = NULL_RTX;
18722 load_store_pendulum = 0;
18723}
18724
cbe26ab8
DN
18725/* The following function is called at the end of scheduling BB.
18726 After reload, it inserts nops to enforce insn group bundling. */
18727
18728static void
38f391a5 18729rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
18730{
18731 int n_groups;
18732
18733 if (sched_verbose)
18734 fprintf (dump, "=== Finishing schedule.\n");
18735
ec507f2d 18736 if (reload_completed && rs6000_sched_groups)
cbe26ab8
DN
18737 {
18738 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 18739 return;
cbe26ab8
DN
18740
18741 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
18742 n_groups = pad_groups (dump, sched_verbose,
18743 current_sched_info->prev_head,
18744 current_sched_info->next_tail);
cbe26ab8 18745 else
c4ad648e
AM
18746 n_groups = redefine_groups (dump, sched_verbose,
18747 current_sched_info->prev_head,
18748 current_sched_info->next_tail);
cbe26ab8
DN
18749
18750 if (sched_verbose >= 6)
18751 {
18752 fprintf (dump, "ngroups = %d\n", n_groups);
18753 print_rtl (dump, current_sched_info->prev_head);
18754 fprintf (dump, "Done finish_sched\n");
18755 }
18756 }
18757}
b6c9286a 18758\f
b6c9286a
MM
18759/* Length in units of the trampoline for entering a nested function. */
18760
18761int
863d938c 18762rs6000_trampoline_size (void)
b6c9286a
MM
18763{
18764 int ret = 0;
18765
18766 switch (DEFAULT_ABI)
18767 {
18768 default:
37409796 18769 gcc_unreachable ();
b6c9286a
MM
18770
18771 case ABI_AIX:
8f802bfb 18772 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
18773 break;
18774
4dabc42d 18775 case ABI_DARWIN:
b6c9286a 18776 case ABI_V4:
03a7e1a5 18777 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 18778 break;
b6c9286a
MM
18779 }
18780
18781 return ret;
18782}
18783
18784/* Emit RTL insns to initialize the variable parts of a trampoline.
18785 FNADDR is an RTX for the address of the function's pure code.
18786 CXT is an RTX for the static chain value for the function. */
18787
18788void
a2369ed3 18789rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 18790{
8bd04c56 18791 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 18792 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
18793
18794 switch (DEFAULT_ABI)
18795 {
18796 default:
37409796 18797 gcc_unreachable ();
b6c9286a 18798
8bd04c56 18799/* Macros to shorten the code expansions below. */
9613eaff 18800#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 18801#define MEM_PLUS(addr,offset) \
9613eaff 18802 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 18803
b6c9286a
MM
18804 /* Under AIX, just build the 3-word function descriptor. */
18805 case ABI_AIX:
8bd04c56 18806 {
9613eaff
SH
18807 rtx fn_reg = gen_reg_rtx (Pmode);
18808 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 18809 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 18810 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
18811 emit_move_insn (MEM_DEREF (addr), fn_reg);
18812 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
18813 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
18814 }
b6c9286a
MM
18815 break;
18816
4dabc42d
TC
18817 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
18818 case ABI_DARWIN:
b6c9286a 18819 case ABI_V4:
9613eaff 18820 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 18821 FALSE, VOIDmode, 4,
9613eaff 18822 addr, Pmode,
eaf1bcf1 18823 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
18824 fnaddr, Pmode,
18825 ctx_reg, Pmode);
b6c9286a 18826 break;
b6c9286a
MM
18827 }
18828
18829 return;
18830}
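/* Added note (illustration only): for ABI_AIX the code above fills in a
   three-word function descriptor at ADDR, where each word is regsize
   (4 or 8) bytes:

     offset 0            entry point,   copied from *FNADDR
     offset regsize      TOC value,     copied from *(FNADDR + regsize)
     offset 2*regsize    static chain,  the CXT value  */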
7509c759
MM
18831
18832\f
91d231cb 18833/* Table of valid machine attributes. */
a4f6c312 18834
91d231cb 18835const struct attribute_spec rs6000_attribute_table[] =
7509c759 18836{
91d231cb 18837 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 18838 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
18839 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
18840 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
18841 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
18842 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
18843#ifdef SUBTARGET_ATTRIBUTE_TABLE
18844 SUBTARGET_ATTRIBUTE_TABLE,
18845#endif
a5c76ee6 18846 { NULL, 0, 0, false, false, false, NULL }
91d231cb 18847};
7509c759 18848
8bb418a3
ZL
18849/* Handle the "altivec" attribute. The attribute may have
18850 arguments as follows:
f676971a 18851
8bb418a3
ZL
18852 __attribute__((altivec(vector__)))
18853 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
18854 __attribute__((altivec(bool__))) (always followed by 'unsigned')
18855
18856 and may appear more than once (e.g., 'vector bool char') in a
18857 given declaration. */
18858
18859static tree
f90ac3f0
UP
18860rs6000_handle_altivec_attribute (tree *node,
18861 tree name ATTRIBUTE_UNUSED,
18862 tree args,
8bb418a3
ZL
18863 int flags ATTRIBUTE_UNUSED,
18864 bool *no_add_attrs)
18865{
18866 tree type = *node, result = NULL_TREE;
18867 enum machine_mode mode;
18868 int unsigned_p;
18869 char altivec_type
18870 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
18871 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
18872 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 18873 : '?');
8bb418a3
ZL
18874
18875 while (POINTER_TYPE_P (type)
18876 || TREE_CODE (type) == FUNCTION_TYPE
18877 || TREE_CODE (type) == METHOD_TYPE
18878 || TREE_CODE (type) == ARRAY_TYPE)
18879 type = TREE_TYPE (type);
18880
18881 mode = TYPE_MODE (type);
18882
f90ac3f0
UP
18883 /* Check for invalid AltiVec type qualifiers. */
18884 if (type == long_unsigned_type_node || type == long_integer_type_node)
18885 {
18886 if (TARGET_64BIT)
18887 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
18888 else if (rs6000_warn_altivec_long)
d4ee4d25 18889 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
18890 }
18891 else if (type == long_long_unsigned_type_node
18892 || type == long_long_integer_type_node)
18893 error ("use of %<long long%> in AltiVec types is invalid");
18894 else if (type == double_type_node)
18895 error ("use of %<double%> in AltiVec types is invalid");
18896 else if (type == long_double_type_node)
18897 error ("use of %<long double%> in AltiVec types is invalid");
18898 else if (type == boolean_type_node)
18899 error ("use of boolean types in AltiVec types is invalid");
18900 else if (TREE_CODE (type) == COMPLEX_TYPE)
18901 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
18902 else if (DECIMAL_FLOAT_MODE_P (mode))
18903 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
18904
18905 switch (altivec_type)
18906 {
18907 case 'v':
8df83eae 18908 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
18909 switch (mode)
18910 {
c4ad648e
AM
18911 case SImode:
18912 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
18913 break;
18914 case HImode:
18915 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
18916 break;
18917 case QImode:
18918 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
18919 break;
18920 case SFmode: result = V4SF_type_node; break;
18921 /* If the user says 'vector int bool', we may be handed the 'bool'
18922 attribute _before_ the 'vector' attribute, and so select the
18923 proper type in the 'b' case below. */
18924 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
18925 result = type;
18926 default: break;
8bb418a3
ZL
18927 }
18928 break;
18929 case 'b':
18930 switch (mode)
18931 {
c4ad648e
AM
18932 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
18933 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
18934 case QImode: case V16QImode: result = bool_V16QI_type_node;
18935 default: break;
8bb418a3
ZL
18936 }
18937 break;
18938 case 'p':
18939 switch (mode)
18940 {
c4ad648e
AM
18941 case V8HImode: result = pixel_V8HI_type_node;
18942 default: break;
8bb418a3
ZL
18943 }
18944 default: break;
18945 }
18946
7958a2a6
FJ
18947 if (result && result != type && TYPE_READONLY (type))
18948 result = build_qualified_type (result, TYPE_QUAL_CONST);
18949
8bb418a3
ZL
18950 *no_add_attrs = true; /* No need to hang on to the attribute. */
18951
f90ac3f0 18952 if (result)
8bb418a3
ZL
18953 *node = reconstruct_complex_type (*node, result);
18954
18955 return NULL_TREE;
18956}
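/* Added examples (illustration only, not exhaustive) of how the handler
   above rewrites types when the front end attaches the internal altivec
   attribute for the AltiVec keywords:

     vector unsigned int  ->  unsigned_V4SI_type_node   ('v', SImode)
     vector bool short    ->  bool_V8HI_type_node       ('b', HImode)
     vector pixel         ->  pixel_V8HI_type_node      ('p', V8HImode)  */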
18957
f18eca82
ZL
18958/* AltiVec defines four built-in scalar types that serve as vector
18959 elements; we must teach the compiler how to mangle them. */
18960
18961static const char *
608063c3 18962rs6000_mangle_type (tree type)
f18eca82 18963{
608063c3
JB
18964 type = TYPE_MAIN_VARIANT (type);
18965
18966 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
18967 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
18968 return NULL;
18969
f18eca82
ZL
18970 if (type == bool_char_type_node) return "U6__boolc";
18971 if (type == bool_short_type_node) return "U6__bools";
18972 if (type == pixel_type_node) return "u7__pixel";
18973 if (type == bool_int_type_node) return "U6__booli";
18974
337bde91
DE
18975 /* Mangle IBM extended float long double as `g' (__float128) on
18976 powerpc*-linux where long-double-64 previously was the default. */
18977 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
18978 && TARGET_ELF
18979 && TARGET_LONG_DOUBLE_128
18980 && !TARGET_IEEEQUAD)
18981 return "g";
18982
f18eca82
ZL
18983 /* For all other types, use normal C++ mangling. */
18984 return NULL;
18985}
18986
a5c76ee6
ZW
18987/* Handle a "longcall" or "shortcall" attribute; arguments as in
18988 struct attribute_spec.handler. */
a4f6c312 18989
91d231cb 18990static tree
f676971a
EC
18991rs6000_handle_longcall_attribute (tree *node, tree name,
18992 tree args ATTRIBUTE_UNUSED,
18993 int flags ATTRIBUTE_UNUSED,
a2369ed3 18994 bool *no_add_attrs)
91d231cb
JM
18995{
18996 if (TREE_CODE (*node) != FUNCTION_TYPE
18997 && TREE_CODE (*node) != FIELD_DECL
18998 && TREE_CODE (*node) != TYPE_DECL)
18999 {
5c498b10 19000 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
19001 IDENTIFIER_POINTER (name));
19002 *no_add_attrs = true;
19003 }
6a4cee5f 19004
91d231cb 19005 return NULL_TREE;
7509c759
MM
19006}
19007
a5c76ee6
ZW
19008/* Set longcall attributes on all functions declared when
19009 rs6000_default_long_calls is true. */
19010static void
a2369ed3 19011rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
19012{
19013 if (rs6000_default_long_calls
19014 && (TREE_CODE (type) == FUNCTION_TYPE
19015 || TREE_CODE (type) == METHOD_TYPE))
19016 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
19017 NULL_TREE,
19018 TYPE_ATTRIBUTES (type));
16d6f994
EC
19019
19020#if TARGET_MACHO
19021 darwin_set_default_type_attributes (type);
19022#endif
a5c76ee6
ZW
19023}
19024
3cb999d8
DE
19025/* Return a reference suitable for calling a function with the
19026 longcall attribute. */
a4f6c312 19027
9390387d 19028rtx
a2369ed3 19029rs6000_longcall_ref (rtx call_ref)
6a4cee5f 19030{
d330fd93 19031 const char *call_name;
6a4cee5f
MM
19032 tree node;
19033
19034 if (GET_CODE (call_ref) != SYMBOL_REF)
19035 return call_ref;
19036
19037 /* System V adds '.' to the internal name, so skip them. */
19038 call_name = XSTR (call_ref, 0);
19039 if (*call_name == '.')
19040 {
19041 while (*call_name == '.')
19042 call_name++;
19043
19044 node = get_identifier (call_name);
39403d82 19045 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
19046 }
19047
19048 return force_reg (Pmode, call_ref);
19049}
7509c759 19050\f
77ccdfed
EC
19051#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
19052#define TARGET_USE_MS_BITFIELD_LAYOUT 0
19053#endif
19054
19055/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
19056 struct attribute_spec.handler. */
19057static tree
19058rs6000_handle_struct_attribute (tree *node, tree name,
19059 tree args ATTRIBUTE_UNUSED,
19060 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
19061{
19062 tree *type = NULL;
19063 if (DECL_P (*node))
19064 {
19065 if (TREE_CODE (*node) == TYPE_DECL)
19066 type = &TREE_TYPE (*node);
19067 }
19068 else
19069 type = node;
19070
19071 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
19072 || TREE_CODE (*type) == UNION_TYPE)))
19073 {
19074 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
19075 *no_add_attrs = true;
19076 }
19077
19078 else if ((is_attribute_p ("ms_struct", name)
19079 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
19080 || ((is_attribute_p ("gcc_struct", name)
19081 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
19082 {
19083 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
19084 IDENTIFIER_POINTER (name));
19085 *no_add_attrs = true;
19086 }
19087
19088 return NULL_TREE;
19089}
19090
19091static bool
19092rs6000_ms_bitfield_layout_p (tree record_type)
19093{
19094 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
19095 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
19096 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
19097}
19098\f
b64a1b53
RH
19099#ifdef USING_ELFOS_H
19100
d6b5193b 19101/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 19102
d6b5193b
RS
19103static void
19104rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19105{
19106 if (DEFAULT_ABI == ABI_AIX
19107 && TARGET_MINIMAL_TOC
19108 && !TARGET_RELOCATABLE)
19109 {
19110 if (!toc_initialized)
19111 {
19112 toc_initialized = 1;
19113 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19114 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
19115 fprintf (asm_out_file, "\t.tc ");
19116 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
19117 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19118 fprintf (asm_out_file, "\n");
19119
19120 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19121 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19122 fprintf (asm_out_file, " = .+32768\n");
19123 }
19124 else
19125 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19126 }
19127 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
19128 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
19129 else
19130 {
19131 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19132 if (!toc_initialized)
19133 {
19134 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
19135 fprintf (asm_out_file, " = .+32768\n");
19136 toc_initialized = 1;
19137 }
19138 }
19139}
19140
19141/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 19142
b64a1b53 19143static void
d6b5193b
RS
19144rs6000_elf_asm_init_sections (void)
19145{
19146 toc_section
19147 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
19148
19149 sdata2_section
19150 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
19151 SDATA2_SECTION_ASM_OP);
19152}
19153
19154/* Implement TARGET_SELECT_RTX_SECTION. */
19155
19156static section *
f676971a 19157rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 19158 unsigned HOST_WIDE_INT align)
7509c759 19159{
a9098fd0 19160 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19161 return toc_section;
7509c759 19162 else
d6b5193b 19163 return default_elf_select_rtx_section (mode, x, align);
7509c759 19164}
d9407988 19165\f
d1908feb
JJ
19166/* For a SYMBOL_REF, set generic flags and then perform some
19167 target-specific processing.
19168
d1908feb
JJ
19169 When the AIX ABI is requested on a non-AIX system, replace the
19170 function name with the real name (with a leading .) rather than the
19171 function descriptor name. This saves a lot of overriding code to
19172 read the prefixes. */
d9407988 19173
fb49053f 19174static void
a2369ed3 19175rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 19176{
d1908feb 19177 default_encode_section_info (decl, rtl, first);
b2003250 19178
d1908feb
JJ
19179 if (first
19180 && TREE_CODE (decl) == FUNCTION_DECL
19181 && !TARGET_AIX
19182 && DEFAULT_ABI == ABI_AIX)
d9407988 19183 {
c6a2438a 19184 rtx sym_ref = XEXP (rtl, 0);
d1908feb
JJ
19185 size_t len = strlen (XSTR (sym_ref, 0));
19186 char *str = alloca (len + 2);
19187 str[0] = '.';
19188 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
19189 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 19190 }
d9407988
MM
19191}
19192
c1b7d95a 19193bool
a2369ed3 19194rs6000_elf_in_small_data_p (tree decl)
0e5dbd9b
DE
19195{
19196 if (rs6000_sdata == SDATA_NONE)
19197 return false;
19198
7482ad25
AF
19199 /* We want to merge strings, so we never consider them small data. */
19200 if (TREE_CODE (decl) == STRING_CST)
19201 return false;
19202
19203 /* Functions are never in the small data area. */
19204 if (TREE_CODE (decl) == FUNCTION_DECL)
19205 return false;
19206
0e5dbd9b
DE
19207 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
19208 {
19209 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
19210 if (strcmp (section, ".sdata") == 0
19211 || strcmp (section, ".sdata2") == 0
20bfcd69
GK
19212 || strcmp (section, ".sbss") == 0
19213 || strcmp (section, ".sbss2") == 0
19214 || strcmp (section, ".PPC.EMB.sdata0") == 0
19215 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
19216 return true;
19217 }
19218 else
19219 {
19220 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
19221
19222 if (size > 0
307b599c 19223 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
19224 /* If it's not public, and we're not going to reference it there,
19225 there's no need to put it in the small data section. */
0e5dbd9b
DE
19226 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
19227 return true;
19228 }
19229
19230 return false;
19231}
19232
b91da81f 19233#endif /* USING_ELFOS_H */
aacd3885
RS
19234\f
19235/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 19236
aacd3885
RS
19237static bool
19238rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
19239{
19240 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
19241}
a6c2a102 19242\f
000034eb 19243/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
19244 ADDR can be effectively incremented by incrementing REG.
19245
19246 r0 is special and we must not select it as an address
19247 register by this routine since our caller will try to
19248 increment the returned register via an "la" instruction. */
000034eb 19249
9390387d 19250rtx
a2369ed3 19251find_addr_reg (rtx addr)
000034eb
DE
19252{
19253 while (GET_CODE (addr) == PLUS)
19254 {
02441cd6
JL
19255 if (GET_CODE (XEXP (addr, 0)) == REG
19256 && REGNO (XEXP (addr, 0)) != 0)
000034eb 19257 addr = XEXP (addr, 0);
02441cd6
JL
19258 else if (GET_CODE (XEXP (addr, 1)) == REG
19259 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
19260 addr = XEXP (addr, 1);
19261 else if (CONSTANT_P (XEXP (addr, 0)))
19262 addr = XEXP (addr, 1);
19263 else if (CONSTANT_P (XEXP (addr, 1)))
19264 addr = XEXP (addr, 0);
19265 else
37409796 19266 gcc_unreachable ();
000034eb 19267 }
37409796
NS
19268 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
19269 return addr;
000034eb
DE
19270}
19271
a6c2a102 19272void
a2369ed3 19273rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
19274{
19275 fatal_insn ("bad address", op);
19276}
c8023011 19277
ee890fe2
SS
19278#if TARGET_MACHO
19279
efdba735 19280static tree branch_island_list = 0;
ee890fe2 19281
efdba735
SH
19282/* Remember to generate a branch island for far calls to the given
19283 function. */
ee890fe2 19284
f676971a 19285static void
c4ad648e
AM
19286add_compiler_branch_island (tree label_name, tree function_name,
19287 int line_number)
ee890fe2 19288{
efdba735 19289 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 19290 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
19291 TREE_CHAIN (branch_island) = branch_island_list;
19292 branch_island_list = branch_island;
ee890fe2
SS
19293}
19294
efdba735
SH
19295#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
19296#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
19297#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
19298 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 19299
efdba735
SH
19300/* Generate far-jump branch islands for everything on the
19301 branch_island_list. Invoked immediately after the last instruction
19302 of the epilogue has been emitted; the branch-islands must be
19303 appended to, and contiguous with, the function body. Mach-O stubs
19304 are generated in machopic_output_stub(). */
ee890fe2 19305
efdba735
SH
19306static void
19307macho_branch_islands (void)
19308{
19309 char tmp_buf[512];
19310 tree branch_island;
19311
19312 for (branch_island = branch_island_list;
19313 branch_island;
19314 branch_island = TREE_CHAIN (branch_island))
19315 {
19316 const char *label =
19317 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
19318 const char *name =
11abc112 19319 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
19320 char name_buf[512];
19321 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
19322 if (name[0] == '*' || name[0] == '&')
19323 strcpy (name_buf, name+1);
19324 else
19325 {
19326 name_buf[0] = '_';
19327 strcpy (name_buf+1, name);
19328 }
19329 strcpy (tmp_buf, "\n");
19330 strcat (tmp_buf, label);
ee890fe2 19331#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19332 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19333 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19334#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
19335 if (flag_pic)
19336 {
19337 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
19338 strcat (tmp_buf, label);
19339 strcat (tmp_buf, "_pic\n");
19340 strcat (tmp_buf, label);
19341 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 19342
efdba735
SH
19343 strcat (tmp_buf, "\taddis r11,r11,ha16(");
19344 strcat (tmp_buf, name_buf);
19345 strcat (tmp_buf, " - ");
19346 strcat (tmp_buf, label);
19347 strcat (tmp_buf, "_pic)\n");
f676971a 19348
efdba735 19349 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 19350
efdba735
SH
19351 strcat (tmp_buf, "\taddi r12,r11,lo16(");
19352 strcat (tmp_buf, name_buf);
19353 strcat (tmp_buf, " - ");
19354 strcat (tmp_buf, label);
19355 strcat (tmp_buf, "_pic)\n");
f676971a 19356
efdba735
SH
19357 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
19358 }
19359 else
19360 {
19361 strcat (tmp_buf, ":\nlis r12,hi16(");
19362 strcat (tmp_buf, name_buf);
19363 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
19364 strcat (tmp_buf, name_buf);
19365 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
19366 }
19367 output_asm_insn (tmp_buf, 0);
ee890fe2 19368#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 19369 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 19370 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 19371#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 19372 }
ee890fe2 19373
efdba735 19374 branch_island_list = 0;
ee890fe2
SS
19375}
19376
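/* Added note (illustration only): for a non-PIC island the loop above
   emits a sequence of roughly this shape, using the same example names
   as the comment in output_call below ("foo" reached via island "L42";
   name_buf prepends the '_'):

     L42:
	lis r12,hi16(_foo)
	ori r12,r12,lo16(_foo)
	mtctr r12
	bctr  */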
19377/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
19378 already there or not. */
19379
efdba735 19380static int
a2369ed3 19381no_previous_def (tree function_name)
ee890fe2 19382{
efdba735
SH
19383 tree branch_island;
19384 for (branch_island = branch_island_list;
19385 branch_island;
19386 branch_island = TREE_CHAIN (branch_island))
19387 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
19388 return 0;
19389 return 1;
19390}
19391
19392/* GET_PREV_LABEL gets the label name from the previous definition of
19393 the function. */
19394
efdba735 19395static tree
a2369ed3 19396get_prev_label (tree function_name)
ee890fe2 19397{
efdba735
SH
19398 tree branch_island;
19399 for (branch_island = branch_island_list;
19400 branch_island;
19401 branch_island = TREE_CHAIN (branch_island))
19402 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
19403 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
19404 return 0;
19405}
19406
75b1b789
MS
19407#ifndef DARWIN_LINKER_GENERATES_ISLANDS
19408#define DARWIN_LINKER_GENERATES_ISLANDS 0
19409#endif
19410
19411/* KEXTs still need branch islands. */
19412#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
19413 || flag_mkernel || flag_apple_kext)
19414
ee890fe2 19415/* INSN is either a function call or a millicode call. It may have an
f676971a 19416 unconditional jump in its delay slot.
ee890fe2
SS
19417
19418 CALL_DEST is the routine we are calling. */
19419
19420char *
c4ad648e
AM
19421output_call (rtx insn, rtx *operands, int dest_operand_number,
19422 int cookie_operand_number)
ee890fe2
SS
19423{
19424 static char buf[256];
75b1b789
MS
19425 if (DARWIN_GENERATE_ISLANDS
19426 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 19427 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
19428 {
19429 tree labelname;
efdba735 19430 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 19431
ee890fe2
SS
19432 if (no_previous_def (funname))
19433 {
ee890fe2
SS
19434 rtx label_rtx = gen_label_rtx ();
19435 char *label_buf, temp_buf[256];
19436 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
19437 CODE_LABEL_NUMBER (label_rtx));
19438 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
19439 labelname = get_identifier (label_buf);
a38e7aa5 19440 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
19441 }
19442 else
19443 labelname = get_prev_label (funname);
19444
efdba735
SH
19445 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
19446 instruction will reach 'foo', otherwise link as 'bl L42'".
19447 "L42" should be a 'branch island', that will do a far jump to
19448 'foo'. Branch islands are generated in
19449 macho_branch_islands(). */
ee890fe2 19450 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 19451 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
19452 }
19453 else
efdba735
SH
19454 sprintf (buf, "bl %%z%d", dest_operand_number);
19455 return buf;
ee890fe2
SS
19456}
19457
ee890fe2
SS
19458/* Generate PIC and indirect symbol stubs. */
19459
19460void
a2369ed3 19461machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
19462{
19463 unsigned int length;
a4f6c312
SS
19464 char *symbol_name, *lazy_ptr_name;
19465 char *local_label_0;
ee890fe2
SS
19466 static int label = 0;
19467
df56a27f 19468 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 19469 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 19470
ee890fe2 19471
ee890fe2
SS
19472 length = strlen (symb);
19473 symbol_name = alloca (length + 32);
19474 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
19475
19476 lazy_ptr_name = alloca (length + 32);
19477 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
19478
ee890fe2 19479 if (flag_pic == 2)
56c779bc 19480 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 19481 else
56c779bc 19482 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
19483
19484 if (flag_pic == 2)
19485 {
d974312d
DJ
19486 fprintf (file, "\t.align 5\n");
19487
19488 fprintf (file, "%s:\n", stub);
19489 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19490
876455fa 19491 label++;
89da1f32 19492 local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
876455fa 19493 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 19494
ee890fe2
SS
19495 fprintf (file, "\tmflr r0\n");
19496 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
19497 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
19498 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
19499 lazy_ptr_name, local_label_0);
19500 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
19501 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
19502 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
19503 lazy_ptr_name, local_label_0);
19504 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
19505 fprintf (file, "\tbctr\n");
19506 }
19507 else
d974312d
DJ
19508 {
19509 fprintf (file, "\t.align 4\n");
19510
19511 fprintf (file, "%s:\n", stub);
19512 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
19513
19514 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
19515 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
19516 (TARGET_64BIT ? "ldu" : "lwzu"),
19517 lazy_ptr_name);
d974312d
DJ
19518 fprintf (file, "\tmtctr r12\n");
19519 fprintf (file, "\tbctr\n");
19520 }
f676971a 19521
56c779bc 19522 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
19523 fprintf (file, "%s:\n", lazy_ptr_name);
19524 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
19525 fprintf (file, "%sdyld_stub_binding_helper\n",
19526 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
19527}
19528
19529/* Legitimize PIC addresses. If the address is already
19530 position-independent, we return ORIG. Newly generated
19531 position-independent addresses go into a reg. This is REG if
19532 nonzero, otherwise we allocate register(s) as necessary. */
19533
4fbbe694 19534#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
19535
19536rtx
f676971a 19537rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 19538 rtx reg)
ee890fe2
SS
19539{
19540 rtx base, offset;
19541
19542 if (reg == NULL && ! reload_in_progress && ! reload_completed)
19543 reg = gen_reg_rtx (Pmode);
19544
19545 if (GET_CODE (orig) == CONST)
19546 {
37409796
NS
19547 rtx reg_temp;
19548
ee890fe2
SS
19549 if (GET_CODE (XEXP (orig, 0)) == PLUS
19550 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
19551 return orig;
19552
37409796 19553 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 19554
37409796
NS
19555 /* Use a different reg for the intermediate value, as
19556 it will be marked UNCHANGING. */
b3a13419 19557 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
19558 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
19559 Pmode, reg_temp);
19560 offset =
19561 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
19562 Pmode, reg);
bb8df8a6 19563
ee890fe2
SS
19564 if (GET_CODE (offset) == CONST_INT)
19565 {
19566 if (SMALL_INT (offset))
ed8908e7 19567 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
19568 else if (! reload_in_progress && ! reload_completed)
19569 offset = force_reg (Pmode, offset);
19570 else
c859cda6
DJ
19571 {
19572 rtx mem = force_const_mem (Pmode, orig);
19573 return machopic_legitimize_pic_address (mem, Pmode, reg);
19574 }
ee890fe2 19575 }
f1c25d3b 19576 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
19577 }
19578
19579 /* Fall back on generic machopic code. */
19580 return machopic_legitimize_pic_address (orig, mode, reg);
19581}
19582
c4e18b1c
GK
19583/* Output a .machine directive for the Darwin assembler, and call
19584 the generic start_file routine. */
19585
19586static void
19587rs6000_darwin_file_start (void)
19588{
94ff898d 19589 static const struct
c4e18b1c
GK
19590 {
19591 const char *arg;
19592 const char *name;
19593 int if_set;
19594 } mapping[] = {
55dbfb48 19595 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
19596 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
19597 { "power4", "ppc970", 0 },
19598 { "G5", "ppc970", 0 },
19599 { "7450", "ppc7450", 0 },
19600 { "7400", "ppc7400", MASK_ALTIVEC },
19601 { "G4", "ppc7400", 0 },
19602 { "750", "ppc750", 0 },
19603 { "740", "ppc750", 0 },
19604 { "G3", "ppc750", 0 },
19605 { "604e", "ppc604e", 0 },
19606 { "604", "ppc604", 0 },
19607 { "603e", "ppc603", 0 },
19608 { "603", "ppc603", 0 },
19609 { "601", "ppc601", 0 },
19610 { NULL, "ppc", 0 } };
19611 const char *cpu_id = "";
19612 size_t i;
94ff898d 19613
9390387d 19614 rs6000_file_start ();
192d0f89 19615 darwin_file_start ();
c4e18b1c
GK
19616
19617 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
19618 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
19619 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
19620 && rs6000_select[i].string[0] != '\0')
19621 cpu_id = rs6000_select[i].string;
19622
19623 /* Look through the mapping array. Pick the first name that either
19624 matches the argument, has a bit set in IF_SET that is also set
19625 in the target flags, or has a NULL name. */
19626
19627 i = 0;
19628 while (mapping[i].arg != NULL
19629 && strcmp (mapping[i].arg, cpu_id) != 0
19630 && (mapping[i].if_set & target_flags) == 0)
19631 i++;
19632
19633 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
19634}
19635
ee890fe2 19636#endif /* TARGET_MACHO */
7c262518
RH
19637
19638#if TARGET_ELF
9b580a0b
RH
19639static int
19640rs6000_elf_reloc_rw_mask (void)
7c262518 19641{
9b580a0b
RH
19642 if (flag_pic)
19643 return 3;
19644 else if (DEFAULT_ABI == ABI_AIX)
19645 return 2;
19646 else
19647 return 0;
7c262518 19648}
d9f6800d
RH
19649
19650/* Record an element in the table of global constructors. SYMBOL is
19651 a SYMBOL_REF of the function to be called; PRIORITY is a number
19652 between 0 and MAX_INIT_PRIORITY.
19653
19654 This differs from default_named_section_asm_out_constructor in
19655 that we have special handling for -mrelocatable. */
19656
19657static void
a2369ed3 19658rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
19659{
19660 const char *section = ".ctors";
19661 char buf[16];
19662
19663 if (priority != DEFAULT_INIT_PRIORITY)
19664 {
19665 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
19666 /* Invert the numbering so the linker puts us in the proper
19667 order; constructors are run from right to left, and the
19668 linker sorts in increasing order. */
19669 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19670 section = buf;
19671 }
19672
d6b5193b 19673 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19674 assemble_align (POINTER_SIZE);
d9f6800d
RH
19675
19676 if (TARGET_RELOCATABLE)
19677 {
19678 fputs ("\t.long (", asm_out_file);
19679 output_addr_const (asm_out_file, symbol);
19680 fputs (")@fixup\n", asm_out_file);
19681 }
19682 else
c8af3574 19683 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
19684}
19685
19686static void
a2369ed3 19687rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
19688{
19689 const char *section = ".dtors";
19690 char buf[16];
19691
19692 if (priority != DEFAULT_INIT_PRIORITY)
19693 {
19694 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
19695 /* Invert the numbering so the linker puts us in the proper
19696 order; constructors are run from right to left, and the
19697 linker sorts in increasing order. */
19698 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
19699 section = buf;
19700 }
19701
d6b5193b 19702 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 19703 assemble_align (POINTER_SIZE);
d9f6800d
RH
19704
19705 if (TARGET_RELOCATABLE)
19706 {
19707 fputs ("\t.long (", asm_out_file);
19708 output_addr_const (asm_out_file, symbol);
19709 fputs (")@fixup\n", asm_out_file);
19710 }
19711 else
c8af3574 19712 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 19713}
9739c90c
JJ
19714
19715void
a2369ed3 19716rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
19717{
19718 if (TARGET_64BIT)
19719 {
19720 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
19721 ASM_OUTPUT_LABEL (file, name);
19722 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
19723 rs6000_output_function_entry (file, name);
19724 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
19725 if (DOT_SYMBOLS)
9739c90c 19726 {
85b776df 19727 fputs ("\t.size\t", file);
9739c90c 19728 assemble_name (file, name);
85b776df
AM
19729 fputs (",24\n\t.type\t.", file);
19730 assemble_name (file, name);
19731 fputs (",@function\n", file);
19732 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
19733 {
19734 fputs ("\t.globl\t.", file);
19735 assemble_name (file, name);
19736 putc ('\n', file);
19737 }
9739c90c 19738 }
85b776df
AM
19739 else
19740 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 19741 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
19742 rs6000_output_function_entry (file, name);
19743 fputs (":\n", file);
9739c90c
JJ
19744 return;
19745 }
19746
19747 if (TARGET_RELOCATABLE
7f970b70 19748 && !TARGET_SECURE_PLT
9739c90c 19749 && (get_pool_size () != 0 || current_function_profile)
3c9eb5f4 19750 && uses_TOC ())
9739c90c
JJ
19751 {
19752 char buf[256];
19753
19754 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
19755
19756 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
19757 fprintf (file, "\t.long ");
19758 assemble_name (file, buf);
19759 putc ('-', file);
19760 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
19761 assemble_name (file, buf);
19762 putc ('\n', file);
19763 }
19764
19765 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
19766 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
19767
19768 if (DEFAULT_ABI == ABI_AIX)
19769 {
19770 const char *desc_name, *orig_name;
19771
19772 orig_name = (*targetm.strip_name_encoding) (name);
19773 desc_name = orig_name;
19774 while (*desc_name == '.')
19775 desc_name++;
19776
19777 if (TREE_PUBLIC (decl))
19778 fprintf (file, "\t.globl %s\n", desc_name);
19779
19780 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
19781 fprintf (file, "%s:\n", desc_name);
19782 fprintf (file, "\t.long %s\n", orig_name);
19783 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
19784 if (DEFAULT_ABI == ABI_AIX)
19785 fputs ("\t.long 0\n", file);
19786 fprintf (file, "\t.previous\n");
19787 }
19788 ASM_OUTPUT_LABEL (file, name);
19789}
1334b570
AM
19790
19791static void
19792rs6000_elf_end_indicate_exec_stack (void)
19793{
19794 if (TARGET_32BIT)
19795 file_end_indicate_exec_stack ();
19796}
7c262518
RH
19797#endif
19798
cbaaba19 19799#if TARGET_XCOFF
0d5817b2
DE
19800static void
19801rs6000_xcoff_asm_output_anchor (rtx symbol)
19802{
19803 char buffer[100];
19804
19805 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
19806 SYMBOL_REF_BLOCK_OFFSET (symbol));
19807 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
19808}
19809
7c262518 19810static void
a2369ed3 19811rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
19812{
19813 fputs (GLOBAL_ASM_OP, stream);
19814 RS6000_OUTPUT_BASENAME (stream, name);
19815 putc ('\n', stream);
19816}
19817
d6b5193b
RS
19818/* A get_unnamed_section callback, used for read-only sections. DIRECTIVE
19819 points to the section string variable. */
19820
19821static void
19822rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
19823{
19824 fprintf (asm_out_file, "\t.csect %s[RO],3\n",
19825 *(const char *const *) directive);
19826}
19827
19828/* Likewise for read-write sections. */
19829
19830static void
19831rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
19832{
19833 fprintf (asm_out_file, "\t.csect %s[RW],3\n",
19834 *(const char *const *) directive);
19835}
19836
19837/* A get_unnamed_section callback, used for switching to toc_section. */
19838
19839static void
19840rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
19841{
19842 if (TARGET_MINIMAL_TOC)
19843 {
19844 /* toc_section is always selected at least once from
19845 rs6000_xcoff_file_start, so this is guaranteed to
19846 always be defined once and only once in each file. */
19847 if (!toc_initialized)
19848 {
19849 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
19850 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
19851 toc_initialized = 1;
19852 }
19853 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
19854 (TARGET_32BIT ? "" : ",3"));
19855 }
19856 else
19857 fputs ("\t.toc\n", asm_out_file);
19858}
19859
19860/* Implement TARGET_ASM_INIT_SECTIONS. */
19861
19862static void
19863rs6000_xcoff_asm_init_sections (void)
19864{
19865 read_only_data_section
19866 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19867 &xcoff_read_only_section_name);
19868
19869 private_data_section
19870 = get_unnamed_section (SECTION_WRITE,
19871 rs6000_xcoff_output_readwrite_section_asm_op,
19872 &xcoff_private_data_section_name);
19873
19874 read_only_private_data_section
19875 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
19876 &xcoff_private_data_section_name);
19877
19878 toc_section
19879 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
19880
19881 readonly_data_section = read_only_data_section;
19882 exception_section = data_section;
19883}
19884
9b580a0b
RH
19885static int
19886rs6000_xcoff_reloc_rw_mask (void)
19887{
19888 return 3;
19889}
19890
b275d088 19891static void
c18a5b6c
MM
19892rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
19893 tree decl ATTRIBUTE_UNUSED)
7c262518 19894{
0e5dbd9b
DE
19895 int smclass;
19896 static const char * const suffix[3] = { "PR", "RO", "RW" };
19897
19898 if (flags & SECTION_CODE)
19899 smclass = 0;
19900 else if (flags & SECTION_WRITE)
19901 smclass = 2;
19902 else
19903 smclass = 1;
19904
5b5198f7 19905 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 19906 (flags & SECTION_CODE) ? "." : "",
5b5198f7 19907 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 19908}
ae46c4e0 19909
d6b5193b 19910static section *
f676971a 19911rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 19912 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 19913{
9b580a0b 19914 if (decl_readonly_section (decl, reloc))
ae46c4e0 19915 {
0e5dbd9b 19916 if (TREE_PUBLIC (decl))
d6b5193b 19917 return read_only_data_section;
ae46c4e0 19918 else
d6b5193b 19919 return read_only_private_data_section;
ae46c4e0
RH
19920 }
19921 else
19922 {
0e5dbd9b 19923 if (TREE_PUBLIC (decl))
d6b5193b 19924 return data_section;
ae46c4e0 19925 else
d6b5193b 19926 return private_data_section;
ae46c4e0
RH
19927 }
19928}
19929
19930static void
a2369ed3 19931rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
19932{
19933 const char *name;
ae46c4e0 19934
5b5198f7
DE
19935 /* Use select_section for private and uninitialized data. */
19936 if (!TREE_PUBLIC (decl)
19937 || DECL_COMMON (decl)
0e5dbd9b
DE
19938 || DECL_INITIAL (decl) == NULL_TREE
19939 || DECL_INITIAL (decl) == error_mark_node
19940 || (flag_zero_initialized_in_bss
19941 && initializer_zerop (DECL_INITIAL (decl))))
19942 return;
19943
19944 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
19945 name = (*targetm.strip_name_encoding) (name);
19946 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 19947}
b64a1b53 19948
fb49053f
RH
19949/* Select section for constant in constant pool.
19950
19951 On RS/6000, all constants are in the private read-only data area.
19952 However, if this is being placed in the TOC it must be output as a
19953 toc entry. */
19954
d6b5193b 19955static section *
f676971a 19956rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 19957 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
19958{
19959 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 19960 return toc_section;
b64a1b53 19961 else
d6b5193b 19962 return read_only_private_data_section;
b64a1b53 19963}
772c5265
RH
19964
19965/* Remove any trailing [DS] or the like from the symbol name. */
19966
19967static const char *
a2369ed3 19968rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
19969{
19970 size_t len;
19971 if (*name == '*')
19972 name++;
19973 len = strlen (name);
19974 if (name[len - 1] == ']')
19975 return ggc_alloc_string (name, len - 4);
19976 else
19977 return name;
19978}
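/* Editorial note (illustration only, not in the original source): a
   leading '*' is skipped and, when the name ends in ']', the final four
   characters are dropped, so "*foo[DS]" and "foo[RW]" both strip to
   "foo".  */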
19979
5add3202
DE
19980/* Section attributes. AIX is always PIC. */
19981
19982static unsigned int
a2369ed3 19983rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 19984{
5b5198f7 19985 unsigned int align;
9b580a0b 19986 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
19987
19988 /* Align to at least UNIT size. */
19989 if (flags & SECTION_CODE)
19990 align = MIN_UNITS_PER_WORD;
19991 else
19992 /* Increase alignment of large objects if not already stricter. */
19993 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
19994 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
19995 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
19996
19997 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 19998}
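/* Worked example (editorial illustration, not in the original source):
   a non-code section holding an object aligned to 16 bytes gets
   ALIGN == 16, so exact_log2 (16) == 4 lands in the SECTION_ENTSIZE
   bits of the returned flags; rs6000_xcoff_asm_named_section later
   prints that value as the ",%u" alignment operand of the .csect
   directive.  */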
a5fe455b 19999
1bc7c5b6
ZW
20000/* Output at beginning of assembler file.
20001
20002 Initialize the section names for the RS/6000 at this point.
20003
20004 Specify filename, including full path, to assembler.
20005
20006 We want to go into the TOC section so at least one .toc will be emitted.
20007 Also, in order to output proper .bs/.es pairs, we need at least one static
20008 [RW] section emitted.
20009
20010 Finally, declare mcount when profiling to make the assembler happy. */
20011
20012static void
863d938c 20013rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
20014{
20015 rs6000_gen_section_name (&xcoff_bss_section_name,
20016 main_input_filename, ".bss_");
20017 rs6000_gen_section_name (&xcoff_private_data_section_name,
20018 main_input_filename, ".rw_");
20019 rs6000_gen_section_name (&xcoff_read_only_section_name,
20020 main_input_filename, ".ro_");
20021
20022 fputs ("\t.file\t", asm_out_file);
20023 output_quoted_string (asm_out_file, main_input_filename);
20024 fputc ('\n', asm_out_file);
1bc7c5b6 20025 if (write_symbols != NO_DEBUG)
d6b5193b
RS
20026 switch_to_section (private_data_section);
20027 switch_to_section (text_section);
1bc7c5b6
ZW
20028 if (profile_flag)
20029 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
20030 rs6000_file_start ();
20031}
20032
a5fe455b
ZW
20033/* Output at end of assembler file.
20034 On the RS/6000, referencing data should automatically pull in text. */
20035
20036static void
863d938c 20037rs6000_xcoff_file_end (void)
a5fe455b 20038{
d6b5193b 20039 switch_to_section (text_section);
a5fe455b 20040 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 20041 switch_to_section (data_section);
a5fe455b
ZW
20042 fputs (TARGET_32BIT
20043 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
20044 asm_out_file);
20045}
f1384257 20046#endif /* TARGET_XCOFF */
0e5dbd9b 20047
3c50106f
RH
20048/* Compute a (partial) cost for rtx X. Return true if the complete
20049 cost has been computed, and false if subexpressions should be
20050 scanned. In either case, *TOTAL contains the cost result. */
20051
20052static bool
1494c534 20053rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f 20054{
f0517163
RS
20055 enum machine_mode mode = GET_MODE (x);
20056
3c50106f
RH
20057 switch (code)
20058 {
30a555d9 20059 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 20060 case CONST_INT:
066cd967
DE
20061 if (((outer_code == SET
20062 || outer_code == PLUS
20063 || outer_code == MINUS)
279bb624
DE
20064 && (satisfies_constraint_I (x)
20065 || satisfies_constraint_L (x)))
066cd967 20066 || (outer_code == AND
279bb624
DE
20067 && (satisfies_constraint_K (x)
20068 || (mode == SImode
20069 ? satisfies_constraint_L (x)
20070 : satisfies_constraint_J (x))
1990cd79
AM
20071 || mask_operand (x, mode)
20072 || (mode == DImode
20073 && mask64_operand (x, DImode))))
22e54023 20074 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
20075 && (satisfies_constraint_K (x)
20076 || (mode == SImode
20077 ? satisfies_constraint_L (x)
20078 : satisfies_constraint_J (x))))
066cd967
DE
20079 || outer_code == ASHIFT
20080 || outer_code == ASHIFTRT
20081 || outer_code == LSHIFTRT
20082 || outer_code == ROTATE
20083 || outer_code == ROTATERT
d5861a7a 20084 || outer_code == ZERO_EXTRACT
066cd967 20085 || (outer_code == MULT
279bb624 20086 && satisfies_constraint_I (x))
22e54023
DE
20087 || ((outer_code == DIV || outer_code == UDIV
20088 || outer_code == MOD || outer_code == UMOD)
20089 && exact_log2 (INTVAL (x)) >= 0)
066cd967 20090 || (outer_code == COMPARE
279bb624
DE
20091 && (satisfies_constraint_I (x)
20092 || satisfies_constraint_K (x)))
22e54023 20093 || (outer_code == EQ
279bb624
DE
20094 && (satisfies_constraint_I (x)
20095 || satisfies_constraint_K (x)
20096 || (mode == SImode
20097 ? satisfies_constraint_L (x)
20098 : satisfies_constraint_J (x))))
22e54023 20099 || (outer_code == GTU
279bb624 20100 && satisfies_constraint_I (x))
22e54023 20101 || (outer_code == LTU
279bb624 20102 && satisfies_constraint_P (x)))
066cd967
DE
20103 {
20104 *total = 0;
20105 return true;
20106 }
20107 else if ((outer_code == PLUS
4ae234b0 20108 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 20109 || (outer_code == MINUS
4ae234b0 20110 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
20111 || ((outer_code == SET
20112 || outer_code == IOR
20113 || outer_code == XOR)
20114 && (INTVAL (x)
20115 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
20116 {
20117 *total = COSTS_N_INSNS (1);
20118 return true;
20119 }
20120 /* FALLTHRU */
20121
20122 case CONST_DOUBLE:
f6fe3a22 20123 if (mode == DImode && code == CONST_DOUBLE)
066cd967 20124 {
f6fe3a22
DE
20125 if ((outer_code == IOR || outer_code == XOR)
20126 && CONST_DOUBLE_HIGH (x) == 0
20127 && (CONST_DOUBLE_LOW (x)
20128 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
20129 {
20130 *total = 0;
20131 return true;
20132 }
20133 else if ((outer_code == AND && and64_2_operand (x, DImode))
20134 || ((outer_code == SET
20135 || outer_code == IOR
20136 || outer_code == XOR)
20137 && CONST_DOUBLE_HIGH (x) == 0))
20138 {
20139 *total = COSTS_N_INSNS (1);
20140 return true;
20141 }
066cd967
DE
20142 }
20143 /* FALLTHRU */
20144
3c50106f 20145 case CONST:
066cd967 20146 case HIGH:
3c50106f 20147 case SYMBOL_REF:
066cd967
DE
20148 case MEM:
20149 /* When optimizing for size, MEM should be slightly more expensive
20150 than generating an address, e.g., (plus (reg) (const)).
c112cf2b 20151 L1 cache latency is about two instructions. */
066cd967 20152 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
20153 return true;
20154
30a555d9
DE
20155 case LABEL_REF:
20156 *total = 0;
20157 return true;
20158
3c50106f 20159 case PLUS:
f0517163 20160 if (mode == DFmode)
066cd967
DE
20161 {
20162 if (GET_CODE (XEXP (x, 0)) == MULT)
20163 {
20164 /* FNMA accounted in outer NEG. */
20165 if (outer_code == NEG)
20166 *total = rs6000_cost->dmul - rs6000_cost->fp;
20167 else
20168 *total = rs6000_cost->dmul;
20169 }
20170 else
20171 *total = rs6000_cost->fp;
20172 }
f0517163 20173 else if (mode == SFmode)
066cd967
DE
20174 {
20175 /* FNMA accounted in outer NEG. */
20176 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20177 *total = 0;
20178 else
20179 *total = rs6000_cost->fp;
20180 }
f0517163 20181 else
066cd967
DE
20182 *total = COSTS_N_INSNS (1);
20183 return false;
3c50106f 20184
52190329 20185 case MINUS:
f0517163 20186 if (mode == DFmode)
066cd967 20187 {
762c919f
JM
20188 if (GET_CODE (XEXP (x, 0)) == MULT
20189 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
20190 {
20191 /* FNMA accounted in outer NEG. */
20192 if (outer_code == NEG)
762c919f 20193 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
20194 else
20195 *total = rs6000_cost->dmul;
20196 }
20197 else
20198 *total = rs6000_cost->fp;
20199 }
f0517163 20200 else if (mode == SFmode)
066cd967
DE
20201 {
20202 /* FNMA accounted in outer NEG. */
20203 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
20204 *total = 0;
20205 else
20206 *total = rs6000_cost->fp;
20207 }
f0517163 20208 else
c4ad648e 20209 *total = COSTS_N_INSNS (1);
066cd967 20210 return false;
3c50106f
RH
20211
20212 case MULT:
c9dbf840 20213 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 20214 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 20215 {
8b897cfa
RS
20216 if (INTVAL (XEXP (x, 1)) >= -256
20217 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 20218 *total = rs6000_cost->mulsi_const9;
8b897cfa 20219 else
06a67bdd 20220 *total = rs6000_cost->mulsi_const;
3c50106f 20221 }
066cd967
DE
20222 /* FMA accounted in outer PLUS/MINUS. */
20223 else if ((mode == DFmode || mode == SFmode)
20224 && (outer_code == PLUS || outer_code == MINUS))
20225 *total = 0;
f0517163 20226 else if (mode == DFmode)
06a67bdd 20227 *total = rs6000_cost->dmul;
f0517163 20228 else if (mode == SFmode)
06a67bdd 20229 *total = rs6000_cost->fp;
f0517163 20230 else if (mode == DImode)
06a67bdd 20231 *total = rs6000_cost->muldi;
8b897cfa 20232 else
06a67bdd 20233 *total = rs6000_cost->mulsi;
066cd967 20234 return false;
3c50106f
RH
20235
20236 case DIV:
20237 case MOD:
f0517163
RS
20238 if (FLOAT_MODE_P (mode))
20239 {
06a67bdd
RS
20240 *total = mode == DFmode ? rs6000_cost->ddiv
20241 : rs6000_cost->sdiv;
066cd967 20242 return false;
f0517163 20243 }
5efb1046 20244 /* FALLTHRU */
3c50106f
RH
20245
20246 case UDIV:
20247 case UMOD:
627b6fe2
DJ
20248 if (GET_CODE (XEXP (x, 1)) == CONST_INT
20249 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
20250 {
20251 if (code == DIV || code == MOD)
20252 /* Shift, addze */
20253 *total = COSTS_N_INSNS (2);
20254 else
20255 /* Shift */
20256 *total = COSTS_N_INSNS (1);
20257 }
c4ad648e 20258 else
627b6fe2
DJ
20259 {
20260 if (GET_MODE (XEXP (x, 1)) == DImode)
20261 *total = rs6000_cost->divdi;
20262 else
20263 *total = rs6000_cost->divsi;
20264 }
20265 /* Add in shift and subtract for MOD. */
20266 if (code == MOD || code == UMOD)
20267 *total += COSTS_N_INSNS (2);
066cd967 20268 return false;
3c50106f
RH
20269
20270 case FFS:
20271 *total = COSTS_N_INSNS (4);
066cd967 20272 return false;
3c50106f 20273
06a67bdd 20274 case NOT:
066cd967
DE
20275 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
20276 {
20277 *total = 0;
20278 return false;
20279 }
20280 /* FALLTHRU */
20281
20282 case AND:
20283 case IOR:
20284 case XOR:
d5861a7a
DE
20285 case ZERO_EXTRACT:
20286 *total = COSTS_N_INSNS (1);
20287 return false;
20288
066cd967
DE
20289 case ASHIFT:
20290 case ASHIFTRT:
20291 case LSHIFTRT:
20292 case ROTATE:
20293 case ROTATERT:
d5861a7a 20294 /* Handle mul_highpart. */
066cd967
DE
20295 if (outer_code == TRUNCATE
20296 && GET_CODE (XEXP (x, 0)) == MULT)
20297 {
20298 if (mode == DImode)
20299 *total = rs6000_cost->muldi;
20300 else
20301 *total = rs6000_cost->mulsi;
20302 return true;
20303 }
d5861a7a
DE
20304 else if (outer_code == AND)
20305 *total = 0;
20306 else
20307 *total = COSTS_N_INSNS (1);
20308 return false;
20309
20310 case SIGN_EXTEND:
20311 case ZERO_EXTEND:
20312 if (GET_CODE (XEXP (x, 0)) == MEM)
20313 *total = 0;
20314 else
20315 *total = COSTS_N_INSNS (1);
066cd967 20316 return false;
06a67bdd 20317
066cd967
DE
20318 case COMPARE:
20319 case NEG:
20320 case ABS:
20321 if (!FLOAT_MODE_P (mode))
20322 {
20323 *total = COSTS_N_INSNS (1);
20324 return false;
20325 }
20326 /* FALLTHRU */
20327
20328 case FLOAT:
20329 case UNSIGNED_FLOAT:
20330 case FIX:
20331 case UNSIGNED_FIX:
06a67bdd
RS
20332 case FLOAT_TRUNCATE:
20333 *total = rs6000_cost->fp;
066cd967 20334 return false;
06a67bdd 20335
a2af5043
DJ
20336 case FLOAT_EXTEND:
20337 if (mode == DFmode)
20338 *total = 0;
20339 else
20340 *total = rs6000_cost->fp;
20341 return false;
20342
06a67bdd
RS
20343 case UNSPEC:
20344 switch (XINT (x, 1))
20345 {
20346 case UNSPEC_FRSP:
20347 *total = rs6000_cost->fp;
20348 return true;
20349
20350 default:
20351 break;
20352 }
20353 break;
20354
20355 case CALL:
20356 case IF_THEN_ELSE:
20357 if (optimize_size)
20358 {
20359 *total = COSTS_N_INSNS (1);
20360 return true;
20361 }
066cd967
DE
20362 else if (FLOAT_MODE_P (mode)
20363 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
20364 {
20365 *total = rs6000_cost->fp;
20366 return false;
20367 }
06a67bdd
RS
20368 break;
20369
c0600ecd
DE
20370 case EQ:
20371 case GTU:
20372 case LTU:
22e54023
DE
20373 /* Carry bit requires mode == Pmode.
20374 NEG or PLUS already counted so only add one. */
20375 if (mode == Pmode
20376 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 20377 {
22e54023
DE
20378 *total = COSTS_N_INSNS (1);
20379 return true;
20380 }
20381 if (outer_code == SET)
20382 {
20383 if (XEXP (x, 1) == const0_rtx)
c0600ecd 20384 {
22e54023 20385 *total = COSTS_N_INSNS (2);
c0600ecd 20386 return true;
c0600ecd 20387 }
22e54023
DE
20388 else if (mode == Pmode)
20389 {
20390 *total = COSTS_N_INSNS (3);
20391 return false;
20392 }
20393 }
20394 /* FALLTHRU */
20395
20396 case GT:
20397 case LT:
20398 case UNORDERED:
20399 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
20400 {
20401 *total = COSTS_N_INSNS (2);
20402 return true;
c0600ecd 20403 }
22e54023
DE
20404 /* CC COMPARE. */
20405 if (outer_code == COMPARE)
20406 {
20407 *total = 0;
20408 return true;
20409 }
20410 break;
c0600ecd 20411
3c50106f 20412 default:
06a67bdd 20413 break;
3c50106f 20414 }
06a67bdd
RS
20415
20416 return false;
3c50106f
RH
20417}
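/* Editorial note (illustration only, not in the original source): in
   the CONST_INT handling above, a constant satisfying the 'I'
   constraint (a signed 16-bit immediate) used directly as a PLUS or
   SET operand costs 0 because it folds into the instruction, while a
   constant that only fits in 32 bits and feeds a SET costs
   COSTS_N_INSNS (1) to materialize.  */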
20418
34bb030a
DE
20419/* A C expression returning the cost of moving data from a register of class
20420 CLASS1 to one of CLASS2. */
20421
20422int
f676971a 20423rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 20424 enum reg_class from, enum reg_class to)
34bb030a
DE
20425{
20426 /* Moves from/to GENERAL_REGS. */
20427 if (reg_classes_intersect_p (to, GENERAL_REGS)
20428 || reg_classes_intersect_p (from, GENERAL_REGS))
20429 {
20430 if (! reg_classes_intersect_p (to, GENERAL_REGS))
20431 from = to;
20432
20433 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
20434 return (rs6000_memory_move_cost (mode, from, 0)
20435 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
20436
c4ad648e
AM
20437 /* It's more expensive to move CR_REGS than CR0_REGS because of the
20438 shift. */
34bb030a
DE
20439 else if (from == CR_REGS)
20440 return 4;
20441
20442 else
c4ad648e 20443 /* A move will cost one instruction per GPR moved. */
c8b622ff 20444 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
20445 }
20446
c4ad648e 20447 /* Moving between two similar registers is just one instruction. */
34bb030a 20448 else if (reg_classes_intersect_p (to, from))
7393f7f8 20449 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 20450
c4ad648e 20451 /* Everything else has to go through GENERAL_REGS. */
34bb030a 20452 else
f676971a 20453 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
20454 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
20455}
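/* Editorial note (illustration only, not in the original source): with
   the scheme above a GPR <-> FPR or GPR <-> AltiVec move is priced as a
   trip through memory (the two rs6000_memory_move_cost terms), a CR ->
   GPR move is a flat 4, and a GPR -> GPR copy costs 2 per hard register
   moved.  */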
20456
20457/* A C expression returning the cost of moving data of MODE from a register to
20458 or from memory. */
20459
20460int
f676971a 20461rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
a2369ed3 20462 int in ATTRIBUTE_UNUSED)
34bb030a
DE
20463{
20464 if (reg_classes_intersect_p (class, GENERAL_REGS))
c8b622ff 20465 return 4 * hard_regno_nregs[0][mode];
34bb030a 20466 else if (reg_classes_intersect_p (class, FLOAT_REGS))
c8b622ff 20467 return 4 * hard_regno_nregs[32][mode];
34bb030a 20468 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
c8b622ff 20469 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a
DE
20470 else
20471 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
20472}
20473
ef765ea9
DE
20474/* Newton-Raphson approximation of single-precision floating point divide n/d.
20475 Assumes no trapping math and finite arguments. */
20476
20477void
20478rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
20479{
20480 rtx x0, e0, e1, y1, u0, v0, one;
20481
20482 x0 = gen_reg_rtx (SFmode);
20483 e0 = gen_reg_rtx (SFmode);
20484 e1 = gen_reg_rtx (SFmode);
20485 y1 = gen_reg_rtx (SFmode);
20486 u0 = gen_reg_rtx (SFmode);
20487 v0 = gen_reg_rtx (SFmode);
20488 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
20489
20490 /* x0 = 1./d estimate */
20491 emit_insn (gen_rtx_SET (VOIDmode, x0,
20492 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
20493 UNSPEC_FRES)));
20494 /* e0 = 1. - d * x0 */
20495 emit_insn (gen_rtx_SET (VOIDmode, e0,
20496 gen_rtx_MINUS (SFmode, one,
20497 gen_rtx_MULT (SFmode, d, x0))));
20498 /* e1 = e0 + e0 * e0 */
20499 emit_insn (gen_rtx_SET (VOIDmode, e1,
20500 gen_rtx_PLUS (SFmode,
20501 gen_rtx_MULT (SFmode, e0, e0), e0)));
20502 /* y1 = x0 + e1 * x0 */
20503 emit_insn (gen_rtx_SET (VOIDmode, y1,
20504 gen_rtx_PLUS (SFmode,
20505 gen_rtx_MULT (SFmode, e1, x0), x0)));
20506 /* u0 = n * y1 */
20507 emit_insn (gen_rtx_SET (VOIDmode, u0,
20508 gen_rtx_MULT (SFmode, n, y1)));
20509 /* v0 = n - d * u0 */
20510 emit_insn (gen_rtx_SET (VOIDmode, v0,
20511 gen_rtx_MINUS (SFmode, n,
20512 gen_rtx_MULT (SFmode, d, u0))));
20513 /* res = u0 + v0 * y1 */
20514 emit_insn (gen_rtx_SET (VOIDmode, res,
20515 gen_rtx_PLUS (SFmode,
20516 gen_rtx_MULT (SFmode, v0, y1), u0)));
20517}
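/* Editorial sketch (hypothetical helper, illustration only, not part of
   the original source): the refinement above rewritten in scalar C,
   with 1.0f/d standing in for the fres reciprocal estimate.  It only
   mirrors the algebra of the RTL emitted by rs6000_emit_swdivsf.  */
static float
rs6000_swdivsf_model (float n, float d)
{
  float x0 = 1.0f / d;          /* estimate; fres in the real sequence */
  float e0 = 1.0f - d * x0;     /* relative error of the estimate */
  float e1 = e0 + e0 * e0;      /* fold two error terms together */
  float y1 = x0 + e1 * x0;      /* refined reciprocal */
  float u0 = n * y1;            /* first quotient approximation */
  float v0 = n - d * u0;        /* residual */
  return u0 + v0 * y1;          /* corrected quotient */
}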
20518
20519/* Newton-Raphson approximation of double-precision floating point divide n/d.
20520 Assumes no trapping math and finite arguments. */
20521
20522void
20523rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
20524{
20525 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
20526
20527 x0 = gen_reg_rtx (DFmode);
20528 e0 = gen_reg_rtx (DFmode);
20529 e1 = gen_reg_rtx (DFmode);
20530 e2 = gen_reg_rtx (DFmode);
20531 y1 = gen_reg_rtx (DFmode);
20532 y2 = gen_reg_rtx (DFmode);
20533 y3 = gen_reg_rtx (DFmode);
20534 u0 = gen_reg_rtx (DFmode);
20535 v0 = gen_reg_rtx (DFmode);
20536 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
20537
20538 /* x0 = 1./d estimate */
20539 emit_insn (gen_rtx_SET (VOIDmode, x0,
20540 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
20541 UNSPEC_FRES)));
20542 /* e0 = 1. - d * x0 */
20543 emit_insn (gen_rtx_SET (VOIDmode, e0,
20544 gen_rtx_MINUS (DFmode, one,
20545 gen_rtx_MULT (DFmode, d, x0))));
20546 /* y1 = x0 + e0 * x0 */
20547 emit_insn (gen_rtx_SET (VOIDmode, y1,
20548 gen_rtx_PLUS (DFmode,
20549 gen_rtx_MULT (DFmode, e0, x0), x0)));
20550 /* e1 = e0 * e0 */
20551 emit_insn (gen_rtx_SET (VOIDmode, e1,
20552 gen_rtx_MULT (DFmode, e0, e0)));
20553 /* y2 = y1 + e1 * y1 */
20554 emit_insn (gen_rtx_SET (VOIDmode, y2,
20555 gen_rtx_PLUS (DFmode,
20556 gen_rtx_MULT (DFmode, e1, y1), y1)));
20557 /* e2 = e1 * e1 */
20558 emit_insn (gen_rtx_SET (VOIDmode, e2,
20559 gen_rtx_MULT (DFmode, e1, e1)));
20560 /* y3 = y2 + e2 * y2 */
20561 emit_insn (gen_rtx_SET (VOIDmode, y3,
20562 gen_rtx_PLUS (DFmode,
20563 gen_rtx_MULT (DFmode, e2, y2), y2)));
20564 /* u0 = n * y3 */
20565 emit_insn (gen_rtx_SET (VOIDmode, u0,
20566 gen_rtx_MULT (DFmode, n, y3)));
20567 /* v0 = n - d * u0 */
20568 emit_insn (gen_rtx_SET (VOIDmode, v0,
20569 gen_rtx_MINUS (DFmode, n,
20570 gen_rtx_MULT (DFmode, d, u0))));
20571 /* res = u0 + v0 * y3 */
20572 emit_insn (gen_rtx_SET (VOIDmode, res,
20573 gen_rtx_PLUS (DFmode,
20574 gen_rtx_MULT (DFmode, v0, y3), u0)));
20575}
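/* Editorial note (not in the original source): each y refinement above
   squares the relative error of the reciprocal estimate (e1 = e0*e0,
   e2 = e1*e1), which is why the double-precision sequence needs the
   extra y2/y3 steps that the single-precision version gets away
   without.  */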
20576
565ef4ba
RS
20577
20578/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
20579 target, and SRC is the argument operand. */
20580
20581void
20582rs6000_emit_popcount (rtx dst, rtx src)
20583{
20584 enum machine_mode mode = GET_MODE (dst);
20585 rtx tmp1, tmp2;
20586
20587 tmp1 = gen_reg_rtx (mode);
20588
20589 if (mode == SImode)
20590 {
20591 emit_insn (gen_popcntbsi2 (tmp1, src));
20592 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
20593 NULL_RTX, 0);
20594 tmp2 = force_reg (SImode, tmp2);
20595 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
20596 }
20597 else
20598 {
20599 emit_insn (gen_popcntbdi2 (tmp1, src));
20600 tmp2 = expand_mult (DImode, tmp1,
20601 GEN_INT ((HOST_WIDE_INT)
20602 0x01010101 << 32 | 0x01010101),
20603 NULL_RTX, 0);
20604 tmp2 = force_reg (DImode, tmp2);
20605 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
20606 }
20607}
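/* Editorial sketch (hypothetical helper, illustration only, not part of
   the original source): the 32-bit arithmetic behind the sequence
   above, with a portable loop standing in for popcntb.  popcntb leaves
   the population count of each byte in the corresponding result byte;
   multiplying by 0x01010101 sums those counts into the top byte, which
   the final shift by 24 extracts.  */
static unsigned int
rs6000_popcountsi_model (unsigned int src)
{
  unsigned int counts = 0;
  int i;

  /* Per-byte population counts, as popcntb would produce them.  */
  for (i = 0; i < 32; i += 8)
    {
      unsigned int byte = (src >> i) & 0xff;
      unsigned int c = 0;
      while (byte)
        {
          c += byte & 1;
          byte >>= 1;
        }
      counts |= c << i;
    }

  /* Multiply to sum the byte counts into the high byte, then extract it.  */
  return (counts * 0x01010101u) >> 24;
}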
20608
20609
20610/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
20611 target, and SRC is the argument operand. */
20612
20613void
20614rs6000_emit_parity (rtx dst, rtx src)
20615{
20616 enum machine_mode mode = GET_MODE (dst);
20617 rtx tmp;
20618
20619 tmp = gen_reg_rtx (mode);
20620 if (mode == SImode)
20621 {
20622 /* Is mult+shift >= shift+xor+shift+xor? */
20623 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
20624 {
20625 rtx tmp1, tmp2, tmp3, tmp4;
20626
20627 tmp1 = gen_reg_rtx (SImode);
20628 emit_insn (gen_popcntbsi2 (tmp1, src));
20629
20630 tmp2 = gen_reg_rtx (SImode);
20631 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
20632 tmp3 = gen_reg_rtx (SImode);
20633 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
20634
20635 tmp4 = gen_reg_rtx (SImode);
20636 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
20637 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
20638 }
20639 else
20640 rs6000_emit_popcount (tmp, src);
20641 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
20642 }
20643 else
20644 {
20645 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
20646 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
20647 {
20648 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
20649
20650 tmp1 = gen_reg_rtx (DImode);
20651 emit_insn (gen_popcntbdi2 (tmp1, src));
20652
20653 tmp2 = gen_reg_rtx (DImode);
20654 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
20655 tmp3 = gen_reg_rtx (DImode);
20656 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
20657
20658 tmp4 = gen_reg_rtx (DImode);
20659 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
20660 tmp5 = gen_reg_rtx (DImode);
20661 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
20662
20663 tmp6 = gen_reg_rtx (DImode);
20664 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
20665 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
20666 }
20667 else
20668 rs6000_emit_popcount (tmp, src);
20669 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
20670 }
20671}
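/* Editorial sketch (hypothetical helper, illustration only, not part of
   the original source): the shift/xor variant used above when a
   constant multiply is expensive.  COUNTS is assumed to already hold
   the per-byte population counts of the source word, as popcntb
   produces them; xor-folding the halves preserves the parity, which
   ends up in bit 0.  */
static unsigned int
rs6000_paritysi_model (unsigned int counts)
{
  counts ^= counts >> 16;       /* fold the upper halfword in */
  counts ^= counts >> 8;        /* fold the remaining byte in */
  return counts & 1;            /* parity of the original word */
}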
20672
ded9bf77
AH
20673/* Return an RTX representing where to find the function value of a
20674 function returning MODE. */
20675static rtx
20676rs6000_complex_function_value (enum machine_mode mode)
20677{
20678 unsigned int regno;
20679 rtx r1, r2;
20680 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 20681 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 20682
18f63bfa
AH
20683 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
20684 regno = FP_ARG_RETURN;
354ed18f
AH
20685 else
20686 {
18f63bfa 20687 regno = GP_ARG_RETURN;
ded9bf77 20688
18f63bfa
AH
20689 /* 32-bit is OK since it'll go in r3/r4. */
20690 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
20691 return gen_rtx_REG (mode, regno);
20692 }
20693
18f63bfa
AH
20694 if (inner_bytes >= 8)
20695 return gen_rtx_REG (mode, regno);
20696
ded9bf77
AH
20697 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
20698 const0_rtx);
20699 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 20700 GEN_INT (inner_bytes));
ded9bf77
AH
20701 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
20702}
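/* Editorial note (not in the original source): when neither
   single-register shortcut above applies, the PARALLEL describes the
   real part in register REGNO at offset 0 and the imaginary part in
   REGNO + 1 at offset INNER_BYTES; e.g. with hard float an SCmode
   value comes back as two SFmode pieces in consecutive FP return
   registers.  */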
20703
a6ebc39a
AH
20704/* Define how to find the value returned by a function.
20705 VALTYPE is the data type of the value (as a tree).
20706 If the precise function being called is known, FUNC is its FUNCTION_DECL;
20707 otherwise, FUNC is 0.
20708
20709 On the SPE, both FPs and vectors are returned in r3.
20710
20711 On RS/6000 an integer value is in r3 and a floating-point value is in
20712 fp1, unless -msoft-float. */
20713
20714rtx
20715rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
20716{
20717 enum machine_mode mode;
2a8fa26c 20718 unsigned int regno;
a6ebc39a 20719
594a51fe
SS
20720 /* Special handling for structs in darwin64. */
20721 if (rs6000_darwin64_abi
20722 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
20723 && TREE_CODE (valtype) == RECORD_TYPE
20724 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
20725 {
20726 CUMULATIVE_ARGS valcum;
20727 rtx valret;
20728
0b5383eb 20729 valcum.words = 0;
594a51fe
SS
20730 valcum.fregno = FP_ARG_MIN_REG;
20731 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
20732 /* Do a trial code generation as if this were going to be passed as
20733 an argument; if any part goes in memory, we return NULL. */
20734 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
20735 if (valret)
20736 return valret;
20737 /* Otherwise fall through to standard ABI rules. */
20738 }
20739
0e67400a
FJ
20740 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
20741 {
20742 /* A long long return value needs to be split under the 32-bit -mpowerpc64 ABI. */
20743 return gen_rtx_PARALLEL (DImode,
20744 gen_rtvec (2,
20745 gen_rtx_EXPR_LIST (VOIDmode,
20746 gen_rtx_REG (SImode, GP_ARG_RETURN),
20747 const0_rtx),
20748 gen_rtx_EXPR_LIST (VOIDmode,
20749 gen_rtx_REG (SImode,
20750 GP_ARG_RETURN + 1),
20751 GEN_INT (4))));
20752 }
0f086e42
FJ
20753 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
20754 {
20755 return gen_rtx_PARALLEL (DCmode,
20756 gen_rtvec (4,
20757 gen_rtx_EXPR_LIST (VOIDmode,
20758 gen_rtx_REG (SImode, GP_ARG_RETURN),
20759 const0_rtx),
20760 gen_rtx_EXPR_LIST (VOIDmode,
20761 gen_rtx_REG (SImode,
20762 GP_ARG_RETURN + 1),
20763 GEN_INT (4)),
20764 gen_rtx_EXPR_LIST (VOIDmode,
20765 gen_rtx_REG (SImode,
20766 GP_ARG_RETURN + 2),
20767 GEN_INT (8)),
20768 gen_rtx_EXPR_LIST (VOIDmode,
20769 gen_rtx_REG (SImode,
20770 GP_ARG_RETURN + 3),
20771 GEN_INT (12))));
20772 }
602ea4d3 20773
7348aa7f
FXC
20774 mode = TYPE_MODE (valtype);
20775 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 20776 || POINTER_TYPE_P (valtype))
b78d48dd 20777 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 20778
00b79d54 20779 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20780 {
20781 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20782 {
20783 switch (mode)
20784 {
20785 default:
20786 gcc_unreachable ();
20787 case SDmode:
20788 regno = GP_ARG_RETURN;
20789 break;
20790 case DDmode:
20791 regno = FP_ARG_RETURN;
20792 break;
20793 case TDmode:
20794 /* Use f2:f3 specified by the ABI. */
20795 regno = FP_ARG_RETURN + 1;
20796 break;
20797 }
20798 }
20799 else
20800 regno = GP_ARG_RETURN;
20801 }
00b79d54 20802 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 20803 regno = FP_ARG_RETURN;
ded9bf77 20804 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 20805 && targetm.calls.split_complex_arg)
ded9bf77 20806 return rs6000_complex_function_value (mode);
44688022 20807 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 20808 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 20809 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 20810 regno = ALTIVEC_ARG_RETURN;
18f63bfa 20811 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20812 && (mode == DFmode || mode == DCmode
20813 || mode == TFmode || mode == TCmode))
18f63bfa 20814 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
20815 else
20816 regno = GP_ARG_RETURN;
20817
20818 return gen_rtx_REG (mode, regno);
20819}
20820
ded9bf77
AH
20821/* Define how to find the value returned by a library function
20822 assuming the value has mode MODE. */
20823rtx
20824rs6000_libcall_value (enum machine_mode mode)
20825{
20826 unsigned int regno;
20827
2e6c9641
FJ
20828 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
20829 {
20830 /* A long long return value needs to be split under the 32-bit -mpowerpc64 ABI. */
20831 return gen_rtx_PARALLEL (DImode,
20832 gen_rtvec (2,
20833 gen_rtx_EXPR_LIST (VOIDmode,
20834 gen_rtx_REG (SImode, GP_ARG_RETURN),
20835 const0_rtx),
20836 gen_rtx_EXPR_LIST (VOIDmode,
20837 gen_rtx_REG (SImode,
20838 GP_ARG_RETURN + 1),
20839 GEN_INT (4))));
20840 }
20841
00b79d54 20842 if (DECIMAL_FLOAT_MODE_P (mode))
7393f7f8
BE
20843 {
20844 if (TARGET_HARD_FLOAT && TARGET_FPRS)
20845 {
20846 switch (mode)
20847 {
20848 default:
20849 gcc_unreachable ();
20850 case SDmode:
20851 regno = GP_ARG_RETURN;
20852 break;
20853 case DDmode:
20854 regno = FP_ARG_RETURN;
20855 break;
20856 case TDmode:
20857 /* Use f2:f3 specified by the ABI. */
20858 regno = FP_ARG_RETURN + 1;
20859 break;
20860 }
20861 }
20862 else
20863 regno = GP_ARG_RETURN;
20864 }
00b79d54 20865 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
20866 && TARGET_HARD_FLOAT && TARGET_FPRS)
20867 regno = FP_ARG_RETURN;
44688022
AM
20868 else if (ALTIVEC_VECTOR_MODE (mode)
20869 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 20870 regno = ALTIVEC_ARG_RETURN;
42ba5130 20871 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 20872 return rs6000_complex_function_value (mode);
18f63bfa 20873 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
17caeff2
JM
20874 && (mode == DFmode || mode == DCmode
20875 || mode == TFmode || mode == TCmode))
18f63bfa 20876 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
20877 else
20878 regno = GP_ARG_RETURN;
20879
20880 return gen_rtx_REG (mode, regno);
20881}
20882
d1d0c603
JJ
20883/* Define the offset between two registers, FROM to be eliminated and its
20884 replacement TO, at the start of a routine. */
20885HOST_WIDE_INT
20886rs6000_initial_elimination_offset (int from, int to)
20887{
20888 rs6000_stack_t *info = rs6000_stack_info ();
20889 HOST_WIDE_INT offset;
20890
7d5175e1 20891 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 20892 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
20893 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20894 {
20895 offset = info->push_p ? 0 : -info->total_size;
20896 if (FRAME_GROWS_DOWNWARD)
5b667039 20897 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
20898 }
20899 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
20900 offset = FRAME_GROWS_DOWNWARD
5b667039 20901 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
20902 : 0;
20903 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
20904 offset = info->total_size;
20905 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
20906 offset = info->push_p ? info->total_size : 0;
20907 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
20908 offset = 0;
20909 else
37409796 20910 gcc_unreachable ();
d1d0c603
JJ
20911
20912 return offset;
20913}
20914
58646b77 20915/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 20916
c8e4f0e9 20917static bool
58646b77 20918rs6000_is_opaque_type (tree type)
62e1dfcf 20919{
58646b77 20920 return (type == opaque_V2SI_type_node
2abe3e28 20921 || type == opaque_V2SF_type_node
58646b77
PB
20922 || type == opaque_p_V2SI_type_node
20923 || type == opaque_V4SI_type_node);
62e1dfcf
NC
20924}
20925
96714395 20926static rtx
a2369ed3 20927rs6000_dwarf_register_span (rtx reg)
96714395
AH
20928{
20929 unsigned regno;
20930
4d4cbc0e
AH
20931 if (TARGET_SPE
20932 && (SPE_VECTOR_MODE (GET_MODE (reg))
20933 || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
20934 ;
20935 else
96714395
AH
20936 return NULL_RTX;
20937
20938 regno = REGNO (reg);
20939
20940 /* The duality of the SPE register size wreaks all kinds of havoc.
20941 This is a way of distinguishing r0 in 32-bits from r0 in
20942 64-bits. */
20943 return
20944 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
20945 BYTES_BIG_ENDIAN
20946 ? gen_rtvec (2,
20947 gen_rtx_REG (SImode, regno + 1200),
20948 gen_rtx_REG (SImode, regno))
20949 : gen_rtvec (2,
20950 gen_rtx_REG (SImode, regno),
20951 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
20952}
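/* Editorial note (not in the original source): for a 64-bit SPE value
   in GPR N this yields a two-element PARALLEL pairing the low word
   (register N) with its high half (pseudo DWARF register N + 1200),
   ordered to match memory layout for the current endianness.  */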
20953
37ea0b7e
JM
20954/* Fill in sizes for SPE register high parts in table used by unwinder. */
20955
20956static void
20957rs6000_init_dwarf_reg_sizes_extra (tree address)
20958{
20959 if (TARGET_SPE)
20960 {
20961 int i;
20962 enum machine_mode mode = TYPE_MODE (char_type_node);
20963 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
20964 rtx mem = gen_rtx_MEM (BLKmode, addr);
20965 rtx value = gen_int_mode (4, mode);
20966
20967 for (i = 1201; i < 1232; i++)
20968 {
20969 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
20970 HOST_WIDE_INT offset
20971 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
20972
20973 emit_move_insn (adjust_address (mem, mode, offset), value);
20974 }
20975 }
20976}
20977
93c9d1ba
AM
20978/* Map internal gcc register numbers to DWARF2 register numbers. */
20979
20980unsigned int
20981rs6000_dbx_register_number (unsigned int regno)
20982{
20983 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
20984 return regno;
20985 if (regno == MQ_REGNO)
20986 return 100;
1de43f85 20987 if (regno == LR_REGNO)
93c9d1ba 20988 return 108;
1de43f85 20989 if (regno == CTR_REGNO)
93c9d1ba
AM
20990 return 109;
20991 if (CR_REGNO_P (regno))
20992 return regno - CR0_REGNO + 86;
20993 if (regno == XER_REGNO)
20994 return 101;
20995 if (ALTIVEC_REGNO_P (regno))
20996 return regno - FIRST_ALTIVEC_REGNO + 1124;
20997 if (regno == VRSAVE_REGNO)
20998 return 356;
20999 if (regno == VSCR_REGNO)
21000 return 67;
21001 if (regno == SPE_ACC_REGNO)
21002 return 99;
21003 if (regno == SPEFSCR_REGNO)
21004 return 612;
21005 /* SPE high reg number. We get these values of regno from
21006 rs6000_dwarf_register_span. */
37409796
NS
21007 gcc_assert (regno >= 1200 && regno < 1232);
21008 return regno;
93c9d1ba
AM
21009}
21010
93f90be6 21011/* Target hook for eh_return_filter_mode. */
f676971a 21012static enum machine_mode
93f90be6
FJ
21013rs6000_eh_return_filter_mode (void)
21014{
21015 return TARGET_32BIT ? SImode : word_mode;
21016}
21017
00b79d54
BE
21018/* Target hook for scalar_mode_supported_p. */
21019static bool
21020rs6000_scalar_mode_supported_p (enum machine_mode mode)
21021{
21022 if (DECIMAL_FLOAT_MODE_P (mode))
21023 return true;
21024 else
21025 return default_scalar_mode_supported_p (mode);
21026}
21027
f676971a
EC
21028/* Target hook for vector_mode_supported_p. */
21029static bool
21030rs6000_vector_mode_supported_p (enum machine_mode mode)
21031{
21032
21033 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
21034 return true;
21035
21036 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
21037 return true;
21038
21039 else
21040 return false;
21041}
21042
bb8df8a6
EC
21043/* Target hook for invalid_arg_for_unprototyped_fn. */
21044static const char *
4d3e6fae
FJ
21045invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
21046{
21047 return (!rs6000_darwin64_abi
21048 && typelist == 0
21049 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
21050 && (funcdecl == NULL_TREE
21051 || (TREE_CODE (funcdecl) == FUNCTION_DECL
21052 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
21053 ? N_("AltiVec argument passed to unprototyped function")
21054 : NULL;
21055}
21056
3aebbe5f
JJ
21057/* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
21058 setup by calling the hidden __stack_chk_fail_local function instead
21059 of calling __stack_chk_fail directly. Otherwise it is better to call
21060 __stack_chk_fail directly. */
21061
21062static tree
21063rs6000_stack_protect_fail (void)
21064{
21065 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
21066 ? default_hidden_stack_protect_fail ()
21067 : default_external_stack_protect_fail ();
21068}
21069
17211ab5 21070#include "gt-rs6000.h"