9878760c 1/* Subroutines used for code generation on IBM RS/6000.
f676971a 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
1c9c5e43 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
337bde91 4 Free Software Foundation, Inc.
fab3bcc3 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
9878760c 6
5de601cf 7 This file is part of GCC.
9878760c 8
5de601cf
NC
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
2f83c7d6 11 by the Free Software Foundation; either version 3, or (at your
5de601cf 12 option) any later version.
9878760c 13
5de601cf
NC
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
9878760c 18
5de601cf 19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
9878760c 22
956d6950 23#include "config.h"
c4d38ccb 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
9878760c
RK
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
9878760c
RK
33#include "insn-attr.h"
34#include "flags.h"
35#include "recog.h"
9878760c 36#include "obstack.h"
9b30bae2 37#include "tree.h"
dfafc897 38#include "expr.h"
2fc1c679 39#include "optabs.h"
2a430ec1 40#include "except.h"
a7df97e6 41#include "function.h"
296b8152 42#include "output.h"
d5fa86ba 43#include "basic-block.h"
d0101753 44#include "integrate.h"
296b8152 45#include "toplev.h"
c8023011 46#include "ggc.h"
9ebbca7d
GK
47#include "hashtab.h"
48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
3ac88239 51#include "langhooks.h"
24ea750e 52#include "reload.h"
117dca74 53#include "cfglayout.h"
79ae11c4 54#include "sched-int.h"
726a989a 55#include "gimple.h"
e41b2a33 56#include "tree-flow.h"
4d3e6fae 57#include "intl.h"
59d6560b 58#include "params.h"
279bb624 59#include "tm-constrs.h"
1bc7c5b6
ZW
60#if TARGET_XCOFF
61#include "xcoffout.h" /* get declarations of xcoff_*_section_name */
62#endif
93a27b7b
ZW
63#if TARGET_MACHO
64#include "gstab.h" /* for N_SLINE */
65#endif
9b30bae2 66
7509c759
MM
67#ifndef TARGET_NO_PROTOTYPE
68#define TARGET_NO_PROTOTYPE 0
69#endif
70
9878760c
RK
71#define min(A,B) ((A) < (B) ? (A) : (B))
72#define max(A,B) ((A) > (B) ? (A) : (B))
73
d1d0c603
JJ
74/* Structure used to define the rs6000 stack */
75typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
d1d0c603
JJ
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
c4ad648e 84 int world_save_p; /* true if we're saving *everything*:
d62294f5 85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
d1d0c603
JJ
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
d1d0c603
JJ
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
d1d0c603
JJ
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
d1d0c603
JJ
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
107 not in save_size */
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size;
d1d0c603
JJ
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112} rs6000_stack_t;
113
5b667039
JJ
114/* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116typedef struct machine_function GTY(())
117{
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
125 int ra_need_lr;
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
e41b2a33
PB
129 /* Temporary stack slot to use for SDmode copies. This slot is
 130 64 bits wide and is allocated early enough so that the offset
131 does not overflow the 16-bit load/store offset field. */
132 rtx sdmode_stack_slot;
5b667039
JJ
133} machine_function;
134
5248c961
RK
135/* Target cpu type */
136
137enum processor_type rs6000_cpu;
8e3f41e7
MM
138struct rs6000_cpu_select rs6000_select[3] =
139{
815cdc52
MM
140 /* switch name, tune arch */
141 { (const char *)0, "--with-cpu=", 1, 1 },
142 { (const char *)0, "-mcpu=", 1, 1 },
143 { (const char *)0, "-mtune=", 1, 0 },
8e3f41e7 144};
5248c961 145
d296e02e
AP
146static GTY(()) bool rs6000_cell_dont_microcode;
147
ec507f2d
DE
148/* Always emit branch hint bits. */
149static GTY(()) bool rs6000_always_hint;
150
151/* Schedule instructions for group formation. */
152static GTY(()) bool rs6000_sched_groups;
153
44cd321e
PS
154/* Align branch targets. */
155static GTY(()) bool rs6000_align_branch_targets;
156
569fa502
DN
157/* Support for -msched-costly-dep option. */
158const char *rs6000_sched_costly_dep_str;
159enum rs6000_dependence_cost rs6000_sched_costly_dep;
160
cbe26ab8
DN
161/* Support for -minsert-sched-nops option. */
162const char *rs6000_sched_insert_nops_str;
163enum rs6000_nop_insertion rs6000_sched_insert_nops;
164
7ccf35ed 165/* Support targetm.vectorize.builtin_mask_for_load. */
13c62176 166static GTY(()) tree altivec_builtin_mask_for_load;
7ccf35ed 167
602ea4d3 168/* Size of long double. */
6fa3f289
ZW
169int rs6000_long_double_type_size;
170
602ea4d3
JJ
171/* IEEE quad extended precision long double. */
172int rs6000_ieeequad;
173
a2db2771 174/* Nonzero to use AltiVec ABI. */
6fa3f289
ZW
175int rs6000_altivec_abi;
176
94f4765c
NF
177/* Nonzero if we want SPE SIMD instructions. */
178int rs6000_spe;
179
a3170dc6
AH
180/* Nonzero if we want SPE ABI extensions. */
181int rs6000_spe_abi;
182
94f4765c
NF
183/* Nonzero to use isel instructions. */
184int rs6000_isel;
185
5da702b1
AH
186/* Nonzero if floating point operations are done in the GPRs. */
187int rs6000_float_gprs = 0;
188
594a51fe
SS
189/* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
190int rs6000_darwin64_abi;
191
a0ab749a 192/* Set to nonzero once AIX common-mode calls have been defined. */
bbfb86aa 193static GTY(()) int common_mode_defined;
c81bebd7 194
9878760c
RK
195/* Save information from a "cmpxx" operation until the branch or scc is
196 emitted. */
9878760c
RK
197rtx rs6000_compare_op0, rs6000_compare_op1;
198int rs6000_compare_fp_p;
874a0744 199
874a0744
MM
 200/* Label number of the label created for -mrelocatable; we call to it to
 201 get the address of the GOT section. */
202int rs6000_pic_labelno;
c81bebd7 203
b91da81f 204#ifdef USING_ELFOS_H
c81bebd7 205/* Which abi to adhere to */
9739c90c 206const char *rs6000_abi_name;
d9407988
MM
207
208/* Semantics of the small data area */
209enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
210
211/* Which small data model to use */
815cdc52 212const char *rs6000_sdata_name = (char *)0;
9ebbca7d
GK
213
214/* Counter for labels which are to be placed in .fixup. */
215int fixuplabelno = 0;
874a0744 216#endif
4697a36c 217
c4501e62
JJ
218/* Bit size of immediate TLS offsets and string from which it is decoded. */
219int rs6000_tls_size = 32;
220const char *rs6000_tls_size_string;
221
b6c9286a
MM
222/* ABI enumeration available for subtarget to use. */
223enum rs6000_abi rs6000_current_abi;
224
85b776df
AM
225/* Whether to use variant of AIX ABI for PowerPC64 Linux. */
226int dot_symbols;
227
38c1f2d7 228/* Debug flags */
815cdc52 229const char *rs6000_debug_name;
38c1f2d7
MM
230int rs6000_debug_stack; /* debug stack applications */
231int rs6000_debug_arg; /* debug argument handling */
232
aabcd309 233/* Value is TRUE if register/mode pair is acceptable. */
0d1fbc8c
AH
234bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
235
58646b77
PB
236/* Built in types. */
237
238tree rs6000_builtin_types[RS6000_BTI_MAX];
239tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
8bb418a3 240
57ac7be9
AM
241const char *rs6000_traceback_name;
242static enum {
243 traceback_default = 0,
244 traceback_none,
245 traceback_part,
246 traceback_full
247} rs6000_traceback;
248
38c1f2d7
MM
249/* Flag to say the TOC is initialized */
250int toc_initialized;
9ebbca7d 251char toc_label_name[10];
38c1f2d7 252
44cd321e
PS
253/* Cached value of rs6000_variable_issue. This is cached in
254 rs6000_variable_issue hook and returned from rs6000_sched_reorder2. */
255static short cached_can_issue_more;
256
d6b5193b
RS
257static GTY(()) section *read_only_data_section;
258static GTY(()) section *private_data_section;
259static GTY(()) section *read_only_private_data_section;
260static GTY(()) section *sdata2_section;
261static GTY(()) section *toc_section;
262
a3c9585f
KH
263/* Control alignment for fields within structures. */
264/* String from -malign-XXXXX. */
025d9908
KH
265int rs6000_alignment_flags;
266
78f5898b
AH
267/* True for any options that were explicitly set. */
268struct {
df01da37 269 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
78f5898b 270 bool alignment; /* True if -malign- was used. */
a2db2771
JJ
271 bool spe_abi; /* True if -mabi=spe/no-spe was used. */
272 bool altivec_abi; /* True if -mabi=altivec/no-altivec used. */
78f5898b
AH
273 bool spe; /* True if -mspe= was used. */
274 bool float_gprs; /* True if -mfloat-gprs= was used. */
275 bool isel; /* True if -misel was used. */
276 bool long_double; /* True if -mlong-double- was used. */
d3603e8c 277 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
a2db2771 278 bool vrsave; /* True if -mvrsave was used. */
78f5898b
AH
279} rs6000_explicit_options;
280
a3170dc6
AH
281struct builtin_description
282{
283 /* mask is not const because we're going to alter it below. This
284 nonsense will go away when we rewrite the -march infrastructure
285 to give us more target flag bits. */
286 unsigned int mask;
287 const enum insn_code icode;
288 const char *const name;
289 const enum rs6000_builtins code;
290};
8b897cfa
RS
291\f
292/* Target cpu costs. */
293
294struct processor_costs {
c4ad648e 295 const int mulsi; /* cost of SImode multiplication. */
8b897cfa
RS
296 const int mulsi_const; /* cost of SImode multiplication by constant. */
297 const int mulsi_const9; /* cost of SImode mult by short constant. */
c4ad648e
AM
298 const int muldi; /* cost of DImode multiplication. */
299 const int divsi; /* cost of SImode division. */
300 const int divdi; /* cost of DImode division. */
301 const int fp; /* cost of simple SFmode and DFmode insns. */
302 const int dmul; /* cost of DFmode multiplication (and fmadd). */
303 const int sdiv; /* cost of SFmode division (fdivs). */
304 const int ddiv; /* cost of DFmode division (fdiv). */
5f732aba
DE
305 const int cache_line_size; /* cache line size in bytes. */
306 const int l1_cache_size; /* size of l1 cache, in kilobytes. */
307 const int l2_cache_size; /* size of l2 cache, in kilobytes. */
0b11da67
DE
308 const int simultaneous_prefetches; /* number of parallel prefetch
309 operations. */
8b897cfa
RS
310};
311
312const struct processor_costs *rs6000_cost;
313
314/* Processor costs (relative to an add) */
315
316/* Instruction size costs on 32bit processors. */
317static const
318struct processor_costs size32_cost = {
06a67bdd
RS
319 COSTS_N_INSNS (1), /* mulsi */
320 COSTS_N_INSNS (1), /* mulsi_const */
321 COSTS_N_INSNS (1), /* mulsi_const9 */
322 COSTS_N_INSNS (1), /* muldi */
323 COSTS_N_INSNS (1), /* divsi */
324 COSTS_N_INSNS (1), /* divdi */
325 COSTS_N_INSNS (1), /* fp */
326 COSTS_N_INSNS (1), /* dmul */
327 COSTS_N_INSNS (1), /* sdiv */
328 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
 329 32, /* cache line size */
 330 0, /* l1 cache */
 331 0, /* l2 cache */
5f732aba 332 0, /* streams */
8b897cfa
RS
333};
334
335/* Instruction size costs on 64bit processors. */
336static const
337struct processor_costs size64_cost = {
06a67bdd
RS
338 COSTS_N_INSNS (1), /* mulsi */
339 COSTS_N_INSNS (1), /* mulsi_const */
340 COSTS_N_INSNS (1), /* mulsi_const9 */
341 COSTS_N_INSNS (1), /* muldi */
342 COSTS_N_INSNS (1), /* divsi */
343 COSTS_N_INSNS (1), /* divdi */
344 COSTS_N_INSNS (1), /* fp */
345 COSTS_N_INSNS (1), /* dmul */
346 COSTS_N_INSNS (1), /* sdiv */
347 COSTS_N_INSNS (1), /* ddiv */
0b11da67
DE
348 128,
349 0,
350 0,
5f732aba 351 0,
8b897cfa
RS
352};
353
354/* Instruction costs on RIOS1 processors. */
355static const
356struct processor_costs rios1_cost = {
06a67bdd
RS
357 COSTS_N_INSNS (5), /* mulsi */
358 COSTS_N_INSNS (4), /* mulsi_const */
359 COSTS_N_INSNS (3), /* mulsi_const9 */
360 COSTS_N_INSNS (5), /* muldi */
361 COSTS_N_INSNS (19), /* divsi */
362 COSTS_N_INSNS (19), /* divdi */
363 COSTS_N_INSNS (2), /* fp */
364 COSTS_N_INSNS (2), /* dmul */
365 COSTS_N_INSNS (19), /* sdiv */
366 COSTS_N_INSNS (19), /* ddiv */
0d158b6e 367 128, /* cache line size */
5f732aba
DE
368 64, /* l1 cache */
369 512, /* l2 cache */
0b11da67 370 0, /* streams */
8b897cfa
RS
371};
372
373/* Instruction costs on RIOS2 processors. */
374static const
375struct processor_costs rios2_cost = {
06a67bdd
RS
376 COSTS_N_INSNS (2), /* mulsi */
377 COSTS_N_INSNS (2), /* mulsi_const */
378 COSTS_N_INSNS (2), /* mulsi_const9 */
379 COSTS_N_INSNS (2), /* muldi */
380 COSTS_N_INSNS (13), /* divsi */
381 COSTS_N_INSNS (13), /* divdi */
382 COSTS_N_INSNS (2), /* fp */
383 COSTS_N_INSNS (2), /* dmul */
384 COSTS_N_INSNS (17), /* sdiv */
385 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 386 256, /* cache line size */
5f732aba
DE
387 256, /* l1 cache */
388 1024, /* l2 cache */
0b11da67 389 0, /* streams */
8b897cfa
RS
390};
391
392/* Instruction costs on RS64A processors. */
393static const
394struct processor_costs rs64a_cost = {
06a67bdd
RS
395 COSTS_N_INSNS (20), /* mulsi */
396 COSTS_N_INSNS (12), /* mulsi_const */
397 COSTS_N_INSNS (8), /* mulsi_const9 */
398 COSTS_N_INSNS (34), /* muldi */
399 COSTS_N_INSNS (65), /* divsi */
400 COSTS_N_INSNS (67), /* divdi */
401 COSTS_N_INSNS (4), /* fp */
402 COSTS_N_INSNS (4), /* dmul */
403 COSTS_N_INSNS (31), /* sdiv */
404 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 405 128, /* cache line size */
5f732aba
DE
406 128, /* l1 cache */
407 2048, /* l2 cache */
0b11da67 408 1, /* streams */
8b897cfa
RS
409};
410
411/* Instruction costs on MPCCORE processors. */
412static const
413struct processor_costs mpccore_cost = {
06a67bdd
RS
414 COSTS_N_INSNS (2), /* mulsi */
415 COSTS_N_INSNS (2), /* mulsi_const */
416 COSTS_N_INSNS (2), /* mulsi_const9 */
417 COSTS_N_INSNS (2), /* muldi */
418 COSTS_N_INSNS (6), /* divsi */
419 COSTS_N_INSNS (6), /* divdi */
420 COSTS_N_INSNS (4), /* fp */
421 COSTS_N_INSNS (5), /* dmul */
422 COSTS_N_INSNS (10), /* sdiv */
423 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 424 32, /* cache line size */
5f732aba
DE
425 4, /* l1 cache */
426 16, /* l2 cache */
0b11da67 427 1, /* streams */
8b897cfa
RS
428};
429
430/* Instruction costs on PPC403 processors. */
431static const
432struct processor_costs ppc403_cost = {
06a67bdd
RS
433 COSTS_N_INSNS (4), /* mulsi */
434 COSTS_N_INSNS (4), /* mulsi_const */
435 COSTS_N_INSNS (4), /* mulsi_const9 */
436 COSTS_N_INSNS (4), /* muldi */
437 COSTS_N_INSNS (33), /* divsi */
438 COSTS_N_INSNS (33), /* divdi */
439 COSTS_N_INSNS (11), /* fp */
440 COSTS_N_INSNS (11), /* dmul */
441 COSTS_N_INSNS (11), /* sdiv */
442 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 443 32, /* cache line size */
5f732aba
DE
444 4, /* l1 cache */
445 16, /* l2 cache */
0b11da67 446 1, /* streams */
8b897cfa
RS
447};
448
449/* Instruction costs on PPC405 processors. */
450static const
451struct processor_costs ppc405_cost = {
06a67bdd
RS
452 COSTS_N_INSNS (5), /* mulsi */
453 COSTS_N_INSNS (4), /* mulsi_const */
454 COSTS_N_INSNS (3), /* mulsi_const9 */
455 COSTS_N_INSNS (5), /* muldi */
456 COSTS_N_INSNS (35), /* divsi */
457 COSTS_N_INSNS (35), /* divdi */
458 COSTS_N_INSNS (11), /* fp */
459 COSTS_N_INSNS (11), /* dmul */
460 COSTS_N_INSNS (11), /* sdiv */
461 COSTS_N_INSNS (11), /* ddiv */
0d158b6e 462 32, /* cache line size */
5f732aba
DE
463 16, /* l1 cache */
464 128, /* l2 cache */
0b11da67 465 1, /* streams */
8b897cfa
RS
466};
467
468/* Instruction costs on PPC440 processors. */
469static const
470struct processor_costs ppc440_cost = {
06a67bdd
RS
471 COSTS_N_INSNS (3), /* mulsi */
472 COSTS_N_INSNS (2), /* mulsi_const */
473 COSTS_N_INSNS (2), /* mulsi_const9 */
474 COSTS_N_INSNS (3), /* muldi */
475 COSTS_N_INSNS (34), /* divsi */
476 COSTS_N_INSNS (34), /* divdi */
477 COSTS_N_INSNS (5), /* fp */
478 COSTS_N_INSNS (5), /* dmul */
479 COSTS_N_INSNS (19), /* sdiv */
480 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 481 32, /* cache line size */
5f732aba
DE
482 32, /* l1 cache */
483 256, /* l2 cache */
0b11da67 484 1, /* streams */
8b897cfa
RS
485};
486
487/* Instruction costs on PPC601 processors. */
488static const
489struct processor_costs ppc601_cost = {
06a67bdd
RS
490 COSTS_N_INSNS (5), /* mulsi */
491 COSTS_N_INSNS (5), /* mulsi_const */
492 COSTS_N_INSNS (5), /* mulsi_const9 */
493 COSTS_N_INSNS (5), /* muldi */
494 COSTS_N_INSNS (36), /* divsi */
495 COSTS_N_INSNS (36), /* divdi */
496 COSTS_N_INSNS (4), /* fp */
497 COSTS_N_INSNS (5), /* dmul */
498 COSTS_N_INSNS (17), /* sdiv */
499 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 500 32, /* cache line size */
5f732aba
DE
501 32, /* l1 cache */
502 256, /* l2 cache */
0b11da67 503 1, /* streams */
8b897cfa
RS
504};
505
506/* Instruction costs on PPC603 processors. */
507static const
508struct processor_costs ppc603_cost = {
06a67bdd
RS
509 COSTS_N_INSNS (5), /* mulsi */
510 COSTS_N_INSNS (3), /* mulsi_const */
511 COSTS_N_INSNS (2), /* mulsi_const9 */
512 COSTS_N_INSNS (5), /* muldi */
513 COSTS_N_INSNS (37), /* divsi */
514 COSTS_N_INSNS (37), /* divdi */
515 COSTS_N_INSNS (3), /* fp */
516 COSTS_N_INSNS (4), /* dmul */
517 COSTS_N_INSNS (18), /* sdiv */
518 COSTS_N_INSNS (33), /* ddiv */
0d158b6e 519 32, /* cache line size */
5f732aba
DE
520 8, /* l1 cache */
521 64, /* l2 cache */
0b11da67 522 1, /* streams */
8b897cfa
RS
523};
524
525/* Instruction costs on PPC604 processors. */
526static const
527struct processor_costs ppc604_cost = {
06a67bdd
RS
528 COSTS_N_INSNS (4), /* mulsi */
529 COSTS_N_INSNS (4), /* mulsi_const */
530 COSTS_N_INSNS (4), /* mulsi_const9 */
531 COSTS_N_INSNS (4), /* muldi */
532 COSTS_N_INSNS (20), /* divsi */
533 COSTS_N_INSNS (20), /* divdi */
534 COSTS_N_INSNS (3), /* fp */
535 COSTS_N_INSNS (3), /* dmul */
536 COSTS_N_INSNS (18), /* sdiv */
537 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 538 32, /* cache line size */
5f732aba
DE
539 16, /* l1 cache */
540 512, /* l2 cache */
0b11da67 541 1, /* streams */
8b897cfa
RS
542};
543
544/* Instruction costs on PPC604e processors. */
545static const
546struct processor_costs ppc604e_cost = {
06a67bdd
RS
547 COSTS_N_INSNS (2), /* mulsi */
548 COSTS_N_INSNS (2), /* mulsi_const */
549 COSTS_N_INSNS (2), /* mulsi_const9 */
550 COSTS_N_INSNS (2), /* muldi */
551 COSTS_N_INSNS (20), /* divsi */
552 COSTS_N_INSNS (20), /* divdi */
553 COSTS_N_INSNS (3), /* fp */
554 COSTS_N_INSNS (3), /* dmul */
555 COSTS_N_INSNS (18), /* sdiv */
556 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 557 32, /* cache line size */
5f732aba
DE
558 32, /* l1 cache */
559 1024, /* l2 cache */
0b11da67 560 1, /* streams */
8b897cfa
RS
561};
562
f0517163 563/* Instruction costs on PPC620 processors. */
8b897cfa
RS
564static const
565struct processor_costs ppc620_cost = {
06a67bdd
RS
566 COSTS_N_INSNS (5), /* mulsi */
567 COSTS_N_INSNS (4), /* mulsi_const */
568 COSTS_N_INSNS (3), /* mulsi_const9 */
569 COSTS_N_INSNS (7), /* muldi */
570 COSTS_N_INSNS (21), /* divsi */
571 COSTS_N_INSNS (37), /* divdi */
572 COSTS_N_INSNS (3), /* fp */
573 COSTS_N_INSNS (3), /* dmul */
574 COSTS_N_INSNS (18), /* sdiv */
575 COSTS_N_INSNS (32), /* ddiv */
0d158b6e 576 128, /* cache line size */
5f732aba
DE
577 32, /* l1 cache */
578 1024, /* l2 cache */
0b11da67 579 1, /* streams */
f0517163
RS
580};
581
582/* Instruction costs on PPC630 processors. */
583static const
584struct processor_costs ppc630_cost = {
06a67bdd
RS
585 COSTS_N_INSNS (5), /* mulsi */
586 COSTS_N_INSNS (4), /* mulsi_const */
587 COSTS_N_INSNS (3), /* mulsi_const9 */
588 COSTS_N_INSNS (7), /* muldi */
589 COSTS_N_INSNS (21), /* divsi */
590 COSTS_N_INSNS (37), /* divdi */
591 COSTS_N_INSNS (3), /* fp */
592 COSTS_N_INSNS (3), /* dmul */
593 COSTS_N_INSNS (17), /* sdiv */
594 COSTS_N_INSNS (21), /* ddiv */
0d158b6e 595 128, /* cache line size */
5f732aba
DE
596 64, /* l1 cache */
597 1024, /* l2 cache */
0b11da67 598 1, /* streams */
8b897cfa
RS
599};
600
d296e02e
AP
601/* Instruction costs on Cell processor. */
602/* COSTS_N_INSNS (1) ~ one add. */
603static const
604struct processor_costs ppccell_cost = {
605 COSTS_N_INSNS (9/2)+2, /* mulsi */
606 COSTS_N_INSNS (6/2), /* mulsi_const */
607 COSTS_N_INSNS (6/2), /* mulsi_const9 */
608 COSTS_N_INSNS (15/2)+2, /* muldi */
609 COSTS_N_INSNS (38/2), /* divsi */
610 COSTS_N_INSNS (70/2), /* divdi */
611 COSTS_N_INSNS (10/2), /* fp */
612 COSTS_N_INSNS (10/2), /* dmul */
613 COSTS_N_INSNS (74/2), /* sdiv */
614 COSTS_N_INSNS (74/2), /* ddiv */
0d158b6e 615 128, /* cache line size */
5f732aba
DE
616 32, /* l1 cache */
617 512, /* l2 cache */
618 6, /* streams */
d296e02e
AP
619};
620
8b897cfa
RS
621/* Instruction costs on PPC750 and PPC7400 processors. */
622static const
623struct processor_costs ppc750_cost = {
06a67bdd
RS
624 COSTS_N_INSNS (5), /* mulsi */
625 COSTS_N_INSNS (3), /* mulsi_const */
626 COSTS_N_INSNS (2), /* mulsi_const9 */
627 COSTS_N_INSNS (5), /* muldi */
628 COSTS_N_INSNS (17), /* divsi */
629 COSTS_N_INSNS (17), /* divdi */
630 COSTS_N_INSNS (3), /* fp */
631 COSTS_N_INSNS (3), /* dmul */
632 COSTS_N_INSNS (17), /* sdiv */
633 COSTS_N_INSNS (31), /* ddiv */
0d158b6e 634 32, /* cache line size */
5f732aba
DE
635 32, /* l1 cache */
636 512, /* l2 cache */
0b11da67 637 1, /* streams */
8b897cfa
RS
638};
639
640/* Instruction costs on PPC7450 processors. */
641static const
642struct processor_costs ppc7450_cost = {
06a67bdd
RS
643 COSTS_N_INSNS (4), /* mulsi */
644 COSTS_N_INSNS (3), /* mulsi_const */
645 COSTS_N_INSNS (3), /* mulsi_const9 */
646 COSTS_N_INSNS (4), /* muldi */
647 COSTS_N_INSNS (23), /* divsi */
648 COSTS_N_INSNS (23), /* divdi */
649 COSTS_N_INSNS (5), /* fp */
650 COSTS_N_INSNS (5), /* dmul */
651 COSTS_N_INSNS (21), /* sdiv */
652 COSTS_N_INSNS (35), /* ddiv */
0d158b6e 653 32, /* cache line size */
5f732aba
DE
654 32, /* l1 cache */
655 1024, /* l2 cache */
0b11da67 656 1, /* streams */
8b897cfa 657};
a3170dc6 658
8b897cfa
RS
659/* Instruction costs on PPC8540 processors. */
660static const
661struct processor_costs ppc8540_cost = {
06a67bdd
RS
662 COSTS_N_INSNS (4), /* mulsi */
663 COSTS_N_INSNS (4), /* mulsi_const */
664 COSTS_N_INSNS (4), /* mulsi_const9 */
665 COSTS_N_INSNS (4), /* muldi */
666 COSTS_N_INSNS (19), /* divsi */
667 COSTS_N_INSNS (19), /* divdi */
668 COSTS_N_INSNS (4), /* fp */
669 COSTS_N_INSNS (4), /* dmul */
670 COSTS_N_INSNS (29), /* sdiv */
671 COSTS_N_INSNS (29), /* ddiv */
0d158b6e 672 32, /* cache line size */
5f732aba
DE
673 32, /* l1 cache */
674 256, /* l2 cache */
0b11da67 675 1, /* prefetch streams */
8b897cfa
RS
676};
677
fa41c305
EW
678/* Instruction costs on E300C2 and E300C3 cores. */
679static const
680struct processor_costs ppce300c2c3_cost = {
681 COSTS_N_INSNS (4), /* mulsi */
682 COSTS_N_INSNS (4), /* mulsi_const */
683 COSTS_N_INSNS (4), /* mulsi_const9 */
684 COSTS_N_INSNS (4), /* muldi */
685 COSTS_N_INSNS (19), /* divsi */
686 COSTS_N_INSNS (19), /* divdi */
687 COSTS_N_INSNS (3), /* fp */
688 COSTS_N_INSNS (4), /* dmul */
689 COSTS_N_INSNS (18), /* sdiv */
690 COSTS_N_INSNS (33), /* ddiv */
642639ce 691 32, /* cache line size */
a19b7d46
EW
692 16, /* l1 cache */
693 16, /* l2 cache */
642639ce 694 1, /* prefetch streams */
fa41c305
EW
695};
696
edae5fe3
DE
697/* Instruction costs on PPCE500MC processors. */
698static const
699struct processor_costs ppce500mc_cost = {
700 COSTS_N_INSNS (4), /* mulsi */
701 COSTS_N_INSNS (4), /* mulsi_const */
702 COSTS_N_INSNS (4), /* mulsi_const9 */
703 COSTS_N_INSNS (4), /* muldi */
704 COSTS_N_INSNS (14), /* divsi */
705 COSTS_N_INSNS (14), /* divdi */
706 COSTS_N_INSNS (8), /* fp */
707 COSTS_N_INSNS (10), /* dmul */
708 COSTS_N_INSNS (36), /* sdiv */
709 COSTS_N_INSNS (66), /* ddiv */
710 64, /* cache line size */
711 32, /* l1 cache */
712 128, /* l2 cache */
 713 1, /* prefetch streams */
714};
715
8b897cfa
RS
716/* Instruction costs on POWER4 and POWER5 processors. */
717static const
718struct processor_costs power4_cost = {
06a67bdd
RS
719 COSTS_N_INSNS (3), /* mulsi */
720 COSTS_N_INSNS (2), /* mulsi_const */
721 COSTS_N_INSNS (2), /* mulsi_const9 */
722 COSTS_N_INSNS (4), /* muldi */
723 COSTS_N_INSNS (18), /* divsi */
724 COSTS_N_INSNS (34), /* divdi */
725 COSTS_N_INSNS (3), /* fp */
726 COSTS_N_INSNS (3), /* dmul */
727 COSTS_N_INSNS (17), /* sdiv */
728 COSTS_N_INSNS (17), /* ddiv */
0d158b6e 729 128, /* cache line size */
5f732aba
DE
730 32, /* l1 cache */
731 1024, /* l2 cache */
0b11da67 732 8, /* prefetch streams */
8b897cfa
RS
733};
734
44cd321e
PS
735/* Instruction costs on POWER6 processors. */
736static const
737struct processor_costs power6_cost = {
738 COSTS_N_INSNS (8), /* mulsi */
739 COSTS_N_INSNS (8), /* mulsi_const */
740 COSTS_N_INSNS (8), /* mulsi_const9 */
741 COSTS_N_INSNS (8), /* muldi */
742 COSTS_N_INSNS (22), /* divsi */
743 COSTS_N_INSNS (28), /* divdi */
744 COSTS_N_INSNS (3), /* fp */
745 COSTS_N_INSNS (3), /* dmul */
746 COSTS_N_INSNS (13), /* sdiv */
747 COSTS_N_INSNS (16), /* ddiv */
0d158b6e 748 128, /* cache line size */
5f732aba
DE
749 64, /* l1 cache */
750 2048, /* l2 cache */
0b11da67 751 16, /* prefetch streams */
44cd321e
PS
752};
753
8b897cfa 754\f
a2369ed3 755static bool rs6000_function_ok_for_sibcall (tree, tree);
3101faab 756static const char *rs6000_invalid_within_doloop (const_rtx);
a2369ed3 757static rtx rs6000_generate_compare (enum rtx_code);
a2369ed3
DJ
758static void rs6000_emit_stack_tie (void);
759static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
a2369ed3 760static bool spe_func_has_64bit_regs_p (void);
b20a9cca 761static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
d1d0c603 762 int, HOST_WIDE_INT);
a2369ed3 763static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
f78c3290 764static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
a2369ed3
DJ
765static unsigned rs6000_hash_constant (rtx);
766static unsigned toc_hash_function (const void *);
767static int toc_hash_eq (const void *, const void *);
768static int constant_pool_expr_1 (rtx, int *, int *);
769static bool constant_pool_expr_p (rtx);
d04b6e6e 770static bool legitimate_small_data_p (enum machine_mode, rtx);
a2369ed3
DJ
771static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
772static struct machine_function * rs6000_init_machine_status (void);
773static bool rs6000_assemble_integer (rtx, unsigned int, int);
f78c3290 774static bool no_global_regs_above (int, bool);
5add3202 775#ifdef HAVE_GAS_HIDDEN
a2369ed3 776static void rs6000_assemble_visibility (tree, int);
5add3202 777#endif
a2369ed3
DJ
778static int rs6000_ra_ever_killed (void);
779static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
8bb418a3 780static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
3101faab 781static bool rs6000_ms_bitfield_layout_p (const_tree);
77ccdfed 782static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
76d2b81d 783static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
3101faab 784static const char *rs6000_mangle_type (const_tree);
b86fe7b4 785extern const struct attribute_spec rs6000_attribute_table[];
a2369ed3 786static void rs6000_set_default_type_attributes (tree);
f78c3290
NF
787static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
788static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
789static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
790 enum machine_mode, bool, bool, bool);
52ff33d0 791static bool rs6000_reg_live_or_pic_offset_p (int);
f78c3290
NF
792static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
793static void rs6000_restore_saved_cr (rtx, int);
a2369ed3
DJ
794static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
795static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
b20a9cca
AM
796static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
797 tree);
a2369ed3 798static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
586de218 799static bool rs6000_return_in_memory (const_tree, const_tree);
a2369ed3 800static void rs6000_file_start (void);
7c262518 801#if TARGET_ELF
9b580a0b 802static int rs6000_elf_reloc_rw_mask (void);
a2369ed3
DJ
803static void rs6000_elf_asm_out_constructor (rtx, int);
804static void rs6000_elf_asm_out_destructor (rtx, int);
1334b570 805static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
d6b5193b 806static void rs6000_elf_asm_init_sections (void);
d6b5193b
RS
807static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
808 unsigned HOST_WIDE_INT);
a56d7372 809static void rs6000_elf_encode_section_info (tree, rtx, int)
0e5dbd9b 810 ATTRIBUTE_UNUSED;
7c262518 811#endif
3101faab 812static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
e41b2a33
PB
813static void rs6000_alloc_sdmode_stack_slot (void);
814static void rs6000_instantiate_decls (void);
cbaaba19 815#if TARGET_XCOFF
0d5817b2 816static void rs6000_xcoff_asm_output_anchor (rtx);
a2369ed3 817static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
d6b5193b 818static void rs6000_xcoff_asm_init_sections (void);
9b580a0b 819static int rs6000_xcoff_reloc_rw_mask (void);
8210e4c4 820static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
d6b5193b 821static section *rs6000_xcoff_select_section (tree, int,
b20a9cca 822 unsigned HOST_WIDE_INT);
d6b5193b
RS
823static void rs6000_xcoff_unique_section (tree, int);
824static section *rs6000_xcoff_select_rtx_section
825 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
a2369ed3
DJ
826static const char * rs6000_xcoff_strip_name_encoding (const char *);
827static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
828static void rs6000_xcoff_file_start (void);
829static void rs6000_xcoff_file_end (void);
f1384257 830#endif
a2369ed3 831static int rs6000_variable_issue (FILE *, int, rtx, int);
f40751dd 832static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
a2369ed3 833static int rs6000_adjust_cost (rtx, rtx, rtx, int);
44cd321e 834static void rs6000_sched_init (FILE *, int, int);
cbe26ab8 835static bool is_microcoded_insn (rtx);
d296e02e 836static bool is_nonpipeline_insn (rtx);
cbe26ab8
DN
837static bool is_cracked_insn (rtx);
838static bool is_branch_slot_insn (rtx);
44cd321e 839static bool is_load_insn (rtx);
e3a0e200 840static rtx get_store_dest (rtx pat);
44cd321e
PS
841static bool is_store_insn (rtx);
842static bool set_to_load_agen (rtx,rtx);
982afe02 843static bool adjacent_mem_locations (rtx,rtx);
a2369ed3
DJ
844static int rs6000_adjust_priority (rtx, int);
845static int rs6000_issue_rate (void);
b198261f 846static bool rs6000_is_costly_dependence (dep_t, int, int);
cbe26ab8
DN
847static rtx get_next_active_insn (rtx, rtx);
848static bool insn_terminates_group_p (rtx , enum group_termination);
44cd321e
PS
849static bool insn_must_be_first_in_group (rtx);
850static bool insn_must_be_last_in_group (rtx);
cbe26ab8
DN
851static bool is_costly_group (rtx *, rtx);
852static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
853static int redefine_groups (FILE *, int, rtx, rtx);
854static int pad_groups (FILE *, int, rtx, rtx);
855static void rs6000_sched_finish (FILE *, int);
44cd321e
PS
856static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
857static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
a2369ed3 858static int rs6000_use_sched_lookahead (void);
d296e02e 859static int rs6000_use_sched_lookahead_guard (rtx);
e855c69d
AB
860static void * rs6000_alloc_sched_context (void);
861static void rs6000_init_sched_context (void *, bool);
862static void rs6000_set_sched_context (void *);
863static void rs6000_free_sched_context (void *);
9c78b944 864static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
7ccf35ed 865static tree rs6000_builtin_mask_for_load (void);
89d67cca
DN
866static tree rs6000_builtin_mul_widen_even (tree);
867static tree rs6000_builtin_mul_widen_odd (tree);
f57d17f1 868static tree rs6000_builtin_conversion (enum tree_code, tree);
0fca40f5 869static tree rs6000_builtin_vec_perm (tree, tree *);
a2369ed3 870
58646b77 871static void def_builtin (int, const char *, tree, int);
3101faab 872static bool rs6000_vector_alignment_reachable (const_tree, bool);
a2369ed3
DJ
873static void rs6000_init_builtins (void);
874static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
875static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
876static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
877static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
878static void altivec_init_builtins (void);
879static void rs6000_common_init_builtins (void);
c15c90bb 880static void rs6000_init_libfuncs (void);
a2369ed3 881
96038623
DE
882static void paired_init_builtins (void);
883static rtx paired_expand_builtin (tree, rtx, bool *);
884static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
885static rtx paired_expand_stv_builtin (enum insn_code, tree);
886static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);
887
b20a9cca
AM
888static void enable_mask_for_builtins (struct builtin_description *, int,
889 enum rs6000_builtins,
890 enum rs6000_builtins);
7c62e993 891static tree build_opaque_vector_type (tree, int);
a2369ed3
DJ
892static void spe_init_builtins (void);
893static rtx spe_expand_builtin (tree, rtx, bool *);
61bea3b0 894static rtx spe_expand_stv_builtin (enum insn_code, tree);
a2369ed3
DJ
895static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
896static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
897static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
d1d0c603
JJ
898static rs6000_stack_t *rs6000_stack_info (void);
899static void debug_stack_info (rs6000_stack_t *);
a2369ed3
DJ
900
901static rtx altivec_expand_builtin (tree, rtx, bool *);
902static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
903static rtx altivec_expand_st_builtin (tree, rtx, bool *);
904static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
905static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
f676971a 906static rtx altivec_expand_predicate_builtin (enum insn_code,
c4ad648e 907 const char *, tree, rtx);
b4a62fa0 908static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
a2369ed3 909static rtx altivec_expand_stv_builtin (enum insn_code, tree);
7a4eca66
DE
910static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
911static rtx altivec_expand_vec_set_builtin (tree);
912static rtx altivec_expand_vec_ext_builtin (tree, rtx);
913static int get_element_number (tree, tree);
78f5898b 914static bool rs6000_handle_option (size_t, const char *, int);
a2369ed3 915static void rs6000_parse_tls_size_option (void);
5da702b1 916static void rs6000_parse_yes_no_option (const char *, const char *, int *);
a2369ed3
DJ
917static int first_altivec_reg_to_save (void);
918static unsigned int compute_vrsave_mask (void);
9390387d 919static void compute_save_world_info (rs6000_stack_t *info_ptr);
a2369ed3
DJ
920static void is_altivec_return_reg (rtx, void *);
921static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
922int easy_vector_constant (rtx, enum machine_mode);
3101faab 923static bool rs6000_is_opaque_type (const_tree);
a2369ed3 924static rtx rs6000_dwarf_register_span (rtx);
37ea0b7e 925static void rs6000_init_dwarf_reg_sizes_extra (tree);
a2369ed3 926static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
fdbe66f2 927static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
a2369ed3
DJ
928static rtx rs6000_tls_get_addr (void);
929static rtx rs6000_got_sym (void);
9390387d 930static int rs6000_tls_symbol_ref_1 (rtx *, void *);
a2369ed3
DJ
931static const char *rs6000_get_some_local_dynamic_name (void);
932static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
ded9bf77 933static rtx rs6000_complex_function_value (enum machine_mode);
b20a9cca 934static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
a2369ed3 935 enum machine_mode, tree);
0b5383eb
DJ
936static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
937 HOST_WIDE_INT);
938static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
939 tree, HOST_WIDE_INT);
940static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
941 HOST_WIDE_INT,
942 rtx[], int *);
943static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
944 const_tree, HOST_WIDE_INT,
945 rtx[], int *);
946static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
ec6376ab 947static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
b1917422 948static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
c6e8c921
GK
949static void setup_incoming_varargs (CUMULATIVE_ARGS *,
950 enum machine_mode, tree,
951 int *, int);
8cd5a4e0 952static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
586de218 953 const_tree, bool);
78a52f11
RH
954static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
955 tree, bool);
3101faab 956static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
efdba735
SH
957#if TARGET_MACHO
958static void macho_branch_islands (void);
efdba735
SH
959static int no_previous_def (tree function_name);
960static tree get_prev_label (tree function_name);
c4e18b1c 961static void rs6000_darwin_file_start (void);
efdba735
SH
962#endif
963
c35d187f 964static tree rs6000_build_builtin_va_list (void);
d7bd8aeb 965static void rs6000_va_start (tree, rtx);
726a989a 966static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
586de218 967static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
00b79d54 968static bool rs6000_scalar_mode_supported_p (enum machine_mode);
f676971a 969static bool rs6000_vector_mode_supported_p (enum machine_mode);
94ff898d 970static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
21213b4c 971 enum machine_mode);
94ff898d 972static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
973 enum machine_mode);
974static int get_vsel_insn (enum machine_mode);
975static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
3aebbe5f 976static tree rs6000_stack_protect_fail (void);
21213b4c
DP
977
978const int INSN_NOT_AVAILABLE = -1;
93f90be6
FJ
979static enum machine_mode rs6000_eh_return_filter_mode (void);
980
17211ab5
GK
981/* Hash table stuff for keeping track of TOC entries. */
982
983struct toc_hash_struct GTY(())
984{
985 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
986 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
987 rtx key;
988 enum machine_mode key_mode;
989 int labelno;
990};
991
992static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
c81bebd7
MM
993\f
994/* Default register names. */
995char rs6000_reg_names[][8] =
996{
802a0058
MM
997 "0", "1", "2", "3", "4", "5", "6", "7",
998 "8", "9", "10", "11", "12", "13", "14", "15",
999 "16", "17", "18", "19", "20", "21", "22", "23",
1000 "24", "25", "26", "27", "28", "29", "30", "31",
1001 "0", "1", "2", "3", "4", "5", "6", "7",
1002 "8", "9", "10", "11", "12", "13", "14", "15",
1003 "16", "17", "18", "19", "20", "21", "22", "23",
1004 "24", "25", "26", "27", "28", "29", "30", "31",
1005 "mq", "lr", "ctr","ap",
1006 "0", "1", "2", "3", "4", "5", "6", "7",
0ac081f6
AH
1007 "xer",
1008 /* AltiVec registers. */
0cd5e3a1
AH
1009 "0", "1", "2", "3", "4", "5", "6", "7",
1010 "8", "9", "10", "11", "12", "13", "14", "15",
1011 "16", "17", "18", "19", "20", "21", "22", "23",
1012 "24", "25", "26", "27", "28", "29", "30", "31",
59a4c851
AH
1013 "vrsave", "vscr",
1014 /* SPE registers. */
7d5175e1
JJ
1015 "spe_acc", "spefscr",
1016 /* Soft frame pointer. */
1017 "sfp"
c81bebd7
MM
1018};
1019
1020#ifdef TARGET_REGNAMES
8b60264b 1021static const char alt_reg_names[][8] =
c81bebd7 1022{
802a0058
MM
1023 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
1024 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
1025 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
1026 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
1027 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
1028 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
1029 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
1030 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
1031 "mq", "lr", "ctr", "ap",
1032 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
0ac081f6 1033 "xer",
59a4c851 1034 /* AltiVec registers. */
0ac081f6 1035 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
59a4c851
AH
1036 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1037 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1038 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1039 "vrsave", "vscr",
1040 /* SPE registers. */
7d5175e1
JJ
1041 "spe_acc", "spefscr",
1042 /* Soft frame pointer. */
1043 "sfp"
c81bebd7
MM
1044};
1045#endif
9878760c 1046\f
daf11973
MM
1047#ifndef MASK_STRICT_ALIGN
1048#define MASK_STRICT_ALIGN 0
1049#endif
ffcfcb5f
AM
1050#ifndef TARGET_PROFILE_KERNEL
1051#define TARGET_PROFILE_KERNEL 0
1052#endif
3961e8fe
RH
1053
1054/* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
1055#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
672a6f42
NB
1056\f
1057/* Initialize the GCC target structure. */
91d231cb
JM
1058#undef TARGET_ATTRIBUTE_TABLE
1059#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
a5c76ee6
ZW
1060#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
1061#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
daf11973 1062
301d03af
RS
1063#undef TARGET_ASM_ALIGNED_DI_OP
1064#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
1065
1066/* Default unaligned ops are only provided for ELF. Find the ops needed
1067 for non-ELF systems. */
1068#ifndef OBJECT_FORMAT_ELF
cbaaba19 1069#if TARGET_XCOFF
ae6c1efd 1070/* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
301d03af
RS
1071 64-bit targets. */
1072#undef TARGET_ASM_UNALIGNED_HI_OP
1073#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
1074#undef TARGET_ASM_UNALIGNED_SI_OP
1075#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
1076#undef TARGET_ASM_UNALIGNED_DI_OP
1077#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
1078#else
1079/* For Darwin. */
1080#undef TARGET_ASM_UNALIGNED_HI_OP
1081#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
1082#undef TARGET_ASM_UNALIGNED_SI_OP
1083#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
49bd1d27
SS
1084#undef TARGET_ASM_UNALIGNED_DI_OP
1085#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
1086#undef TARGET_ASM_ALIGNED_DI_OP
1087#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
301d03af
RS
1088#endif
1089#endif
1090
1091/* This hook deals with fixups for relocatable code and DI-mode objects
1092 in 64-bit code. */
1093#undef TARGET_ASM_INTEGER
1094#define TARGET_ASM_INTEGER rs6000_assemble_integer
1095
93638d7a
AM
1096#ifdef HAVE_GAS_HIDDEN
1097#undef TARGET_ASM_ASSEMBLE_VISIBILITY
1098#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
1099#endif
1100
c4501e62
JJ
1101#undef TARGET_HAVE_TLS
1102#define TARGET_HAVE_TLS HAVE_AS_TLS
1103
1104#undef TARGET_CANNOT_FORCE_CONST_MEM
a7e0b075 1105#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
c4501e62 1106
08c148a8
NB
1107#undef TARGET_ASM_FUNCTION_PROLOGUE
1108#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1109#undef TARGET_ASM_FUNCTION_EPILOGUE
1110#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1111
b54cf83a
DE
1112#undef TARGET_SCHED_VARIABLE_ISSUE
1113#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1114
c237e94a
ZW
1115#undef TARGET_SCHED_ISSUE_RATE
1116#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
1117#undef TARGET_SCHED_ADJUST_COST
1118#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
1119#undef TARGET_SCHED_ADJUST_PRIORITY
1120#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
f676971a 1121#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
569fa502 1122#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
44cd321e
PS
1123#undef TARGET_SCHED_INIT
1124#define TARGET_SCHED_INIT rs6000_sched_init
cbe26ab8
DN
1125#undef TARGET_SCHED_FINISH
1126#define TARGET_SCHED_FINISH rs6000_sched_finish
44cd321e
PS
1127#undef TARGET_SCHED_REORDER
1128#define TARGET_SCHED_REORDER rs6000_sched_reorder
1129#undef TARGET_SCHED_REORDER2
1130#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2
c237e94a 1131
be12c2b0
VM
1132#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
1133#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
1134
d296e02e
AP
1135#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
1136#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard
1137
e855c69d
AB
1138#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
1139#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
1140#undef TARGET_SCHED_INIT_SCHED_CONTEXT
1141#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
1142#undef TARGET_SCHED_SET_SCHED_CONTEXT
1143#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
1144#undef TARGET_SCHED_FREE_SCHED_CONTEXT
1145#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context
1146
7ccf35ed
DN
1147#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
1148#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
89d67cca
DN
1149#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
1150#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
1151#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
1152#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
f57d17f1
TM
1153#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1154#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
0fca40f5
IR
1155#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
1156#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm
7ccf35ed 1157
5b900a4c
DN
1158#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
1159#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1160
0ac081f6
AH
1161#undef TARGET_INIT_BUILTINS
1162#define TARGET_INIT_BUILTINS rs6000_init_builtins
1163
1164#undef TARGET_EXPAND_BUILTIN
1165#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
1166
608063c3
JB
1167#undef TARGET_MANGLE_TYPE
1168#define TARGET_MANGLE_TYPE rs6000_mangle_type
f18eca82 1169
c15c90bb
ZW
1170#undef TARGET_INIT_LIBFUNCS
1171#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
1172
f1384257 1173#if TARGET_MACHO
0e5dbd9b 1174#undef TARGET_BINDS_LOCAL_P
31920d83 1175#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
f1384257 1176#endif
0e5dbd9b 1177
77ccdfed
EC
1178#undef TARGET_MS_BITFIELD_LAYOUT_P
1179#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
1180
3961e8fe
RH
1181#undef TARGET_ASM_OUTPUT_MI_THUNK
1182#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
1183
3961e8fe 1184#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3101faab 1185#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
00b960c7 1186
4977bab6
ZW
1187#undef TARGET_FUNCTION_OK_FOR_SIBCALL
1188#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1189
2e3f0db6
DJ
1190#undef TARGET_INVALID_WITHIN_DOLOOP
1191#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
9419649c 1192
3c50106f
RH
1193#undef TARGET_RTX_COSTS
1194#define TARGET_RTX_COSTS rs6000_rtx_costs
dcefdf67 1195#undef TARGET_ADDRESS_COST
f40751dd 1196#define TARGET_ADDRESS_COST hook_int_rtx_bool_0
3c50106f 1197
c8e4f0e9 1198#undef TARGET_VECTOR_OPAQUE_P
58646b77 1199#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
62e1dfcf 1200
96714395
AH
1201#undef TARGET_DWARF_REGISTER_SPAN
1202#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
1203
37ea0b7e
JM
1204#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
1205#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
1206
c6e8c921
GK
1207/* On rs6000, function arguments are promoted, as are function return
1208 values. */
1209#undef TARGET_PROMOTE_FUNCTION_ARGS
586de218 1210#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
c6e8c921 1211#undef TARGET_PROMOTE_FUNCTION_RETURN
586de218 1212#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
c6e8c921 1213
c6e8c921
GK
1214#undef TARGET_RETURN_IN_MEMORY
1215#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
1216
1217#undef TARGET_SETUP_INCOMING_VARARGS
1218#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
1219
1220/* Always strict argument naming on rs6000. */
1221#undef TARGET_STRICT_ARGUMENT_NAMING
1222#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
1223#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
1224#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
42ba5130 1225#undef TARGET_SPLIT_COMPLEX_ARG
3101faab 1226#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
fe984136
RH
1227#undef TARGET_MUST_PASS_IN_STACK
1228#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
8cd5a4e0
RH
1229#undef TARGET_PASS_BY_REFERENCE
1230#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
78a52f11
RH
1231#undef TARGET_ARG_PARTIAL_BYTES
1232#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
c6e8c921 1233
c35d187f
RH
1234#undef TARGET_BUILD_BUILTIN_VA_LIST
1235#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1236
d7bd8aeb
JJ
1237#undef TARGET_EXPAND_BUILTIN_VA_START
1238#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start
1239
cd3ce9b4
JM
1240#undef TARGET_GIMPLIFY_VA_ARG_EXPR
1241#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
1242
93f90be6
FJ
1243#undef TARGET_EH_RETURN_FILTER_MODE
1244#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
1245
00b79d54
BE
1246#undef TARGET_SCALAR_MODE_SUPPORTED_P
1247#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1248
f676971a
EC
1249#undef TARGET_VECTOR_MODE_SUPPORTED_P
1250#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1251
4d3e6fae
FJ
1252#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1253#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1254
78f5898b
AH
1255#undef TARGET_HANDLE_OPTION
1256#define TARGET_HANDLE_OPTION rs6000_handle_option
1257
1258#undef TARGET_DEFAULT_TARGET_FLAGS
1259#define TARGET_DEFAULT_TARGET_FLAGS \
716019c0 1260 (TARGET_DEFAULT)
78f5898b 1261
3aebbe5f
JJ
1262#undef TARGET_STACK_PROTECT_FAIL
1263#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1264
445cf5eb
JM
1265/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1266 The PowerPC architecture requires only weak consistency among
1267 processors--that is, memory accesses between processors need not be
1268 sequentially consistent and memory accesses among processors can occur
1269 in any order. The ability to order memory accesses weakly provides
1270 opportunities for more efficient use of the system bus. Unless a
1271 dependency exists, the 604e allows read operations to precede store
1272 operations. */
1273#undef TARGET_RELAXED_ORDERING
1274#define TARGET_RELAXED_ORDERING true
1275
fdbe66f2
EB
1276#ifdef HAVE_AS_TLS
1277#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1278#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1279#endif
1280
aacd3885
RS
1281/* Use a 32-bit anchor range. This leads to sequences like:
1282
1283 addis tmp,anchor,high
1284 add dest,tmp,low
1285
1286 where tmp itself acts as an anchor, and can be shared between
1287 accesses to the same 64k page. */
1288#undef TARGET_MIN_ANCHOR_OFFSET
1289#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1290#undef TARGET_MAX_ANCHOR_OFFSET
1291#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1292#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1293#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
1294
9c78b944
DE
1295#undef TARGET_BUILTIN_RECIPROCAL
1296#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal
1297
e41b2a33
PB
1298#undef TARGET_EXPAND_TO_RTL_HOOK
1299#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot
1300
1301#undef TARGET_INSTANTIATE_DECLS
1302#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
1303
f6897b10 1304struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 1305\f
0d1fbc8c
AH
1306
1307/* Value is 1 if hard register REGNO can hold a value of machine-mode
1308 MODE. */
1309static int
1310rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1311{
1312 /* The GPRs can hold any mode, but values bigger than one register
1313 cannot go past R31. */
1314 if (INT_REGNO_P (regno))
1315 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1316
a5a97921 1317 /* The float registers can only hold floating modes and DImode.
7393f7f8 1318 This excludes the 32-bit decimal float mode for now. */
0d1fbc8c
AH
1319 if (FP_REGNO_P (regno))
1320 return
96038623 1321 ((SCALAR_FLOAT_MODE_P (mode)
c092b045 1322 && (mode != TDmode || (regno % 2) == 0)
1323 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1324 || (GET_MODE_CLASS (mode) == MODE_INT
1325 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
1326 || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
1327 && PAIRED_VECTOR_MODE (mode)));
0d1fbc8c
AH
1328
1329 /* The CR register can only hold CC modes. */
1330 if (CR_REGNO_P (regno))
1331 return GET_MODE_CLASS (mode) == MODE_CC;
1332
1333 if (XER_REGNO_P (regno))
1334 return mode == PSImode;
1335
 1336 /* AltiVec modes only in AltiVec registers. */
1337 if (ALTIVEC_REGNO_P (regno))
1338 return ALTIVEC_VECTOR_MODE (mode);
1339
1340 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1341 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1342 return 1;
1343
 1344 /* We cannot put TImode anywhere except the general registers, and it
 1345 must be able to fit within the register set. */
1346
1347 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1348}
1349
1350/* Initialize rs6000_hard_regno_mode_ok_p table. */
1351static void
1352rs6000_init_hard_regno_mode_ok (void)
1353{
1354 int r, m;
1355
1356 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1357 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1358 if (rs6000_hard_regno_mode_ok (r, m))
1359 rs6000_hard_regno_mode_ok_p[m][r] = true;
1360}
1361
e4cad568
GK
1362#if TARGET_MACHO
1363/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
1364
1365static void
1366darwin_rs6000_override_options (void)
1367{
 1368 /* The Darwin ABI always includes AltiVec; it can't be (validly) turned
 1369 off. */
1370 rs6000_altivec_abi = 1;
1371 TARGET_ALTIVEC_VRSAVE = 1;
1372 if (DEFAULT_ABI == ABI_DARWIN)
1373 {
1374 if (MACHO_DYNAMIC_NO_PIC_P)
1375 {
1376 if (flag_pic)
1377 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
1378 flag_pic = 0;
1379 }
1380 else if (flag_pic == 1)
1381 {
1382 flag_pic = 2;
1383 }
1384 }
1385 if (TARGET_64BIT && ! TARGET_POWERPC64)
1386 {
1387 target_flags |= MASK_POWERPC64;
1388 warning (0, "-m64 requires PowerPC64 architecture, enabling");
1389 }
1390 if (flag_mkernel)
1391 {
1392 rs6000_default_long_calls = 1;
1393 target_flags |= MASK_SOFT_FLOAT;
1394 }
1395
1396 /* Make -m64 imply -maltivec. Darwin's 64-bit ABI includes
1397 Altivec. */
1398 if (!flag_mkernel && !flag_apple_kext
1399 && TARGET_64BIT
1400 && ! (target_flags_explicit & MASK_ALTIVEC))
1401 target_flags |= MASK_ALTIVEC;
1402
 1403 /* Unless the user (not the configurer) has explicitly overridden
 1404 it with -mcpu=G3 or -mno-altivec, 10.5+ targets default to
 1405 G4 unless targeting the kernel. */
1406 if (!flag_mkernel
1407 && !flag_apple_kext
1408 && strverscmp (darwin_macosx_version_min, "10.5") >= 0
1409 && ! (target_flags_explicit & MASK_ALTIVEC)
1410 && ! rs6000_select[1].string)
1411 {
1412 target_flags |= MASK_ALTIVEC;
1413 }
1414}
1415#endif
1416
c1e55850
GK
1417/* If not otherwise specified by a target, make 'long double' equivalent to
1418 'double'. */
1419
1420#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1421#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1422#endif
1423
5248c961
RK
1424/* Override command line options. Mostly we process the processor
1425 type and sometimes adjust other TARGET_ options. */
1426
1427void
d779d0dc 1428rs6000_override_options (const char *default_cpu)
5248c961 1429{
c4d38ccb 1430 size_t i, j;
8e3f41e7 1431 struct rs6000_cpu_select *ptr;
66188a7e 1432 int set_masks;
5248c961 1433
66188a7e 1434 /* Simplifications for entries below. */
85638c0d 1435
66188a7e
GK
1436 enum {
1437 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1438 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1439 };
85638c0d 1440
66188a7e
GK
1441 /* This table occasionally claims that a processor does not support
1442 a particular feature even though it does, but the feature is slower
1443 than the alternative. Thus, it shouldn't be relied on as a
f676971a 1444 complete description of the processor's support.
66188a7e
GK
1445
1446 Please keep this list in order, and don't forget to update the
1447 documentation in invoke.texi when adding a new processor or
1448 flag. */
5248c961
RK
1449 static struct ptt
1450 {
8b60264b
KG
1451 const char *const name; /* Canonical processor name. */
1452 const enum processor_type processor; /* Processor type enum value. */
1453 const int target_enable; /* Target flags to enable. */
8b60264b 1454 } const processor_target_table[]
66188a7e 1455 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
49a0b204 1456 {"403", PROCESSOR_PPC403,
66188a7e 1457 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
131aeb82 1458 {"405", PROCESSOR_PPC405,
716019c0
JM
1459 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1460 {"405fp", PROCESSOR_PPC405,
1461 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
131aeb82 1462 {"440", PROCESSOR_PPC440,
716019c0
JM
1463 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1464 {"440fp", PROCESSOR_PPC440,
1465 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
4adf8008
PB
1466 {"464", PROCESSOR_PPC440,
1467 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1468 {"464fp", PROCESSOR_PPC440,
1469 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
66188a7e 1470 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
5248c961 1471 {"601", PROCESSOR_PPC601,
66188a7e
GK
1472 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1473 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1474 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1475 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1476 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1477 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
7ddb6568
AM
1478 {"620", PROCESSOR_PPC620,
1479 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1480 {"630", PROCESSOR_PPC630,
1481 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1482 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1483 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1484 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1485 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1486 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1487 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1488 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
a45bce6e 1489 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
4d4cbc0e 1490 /* 8548 has a dummy entry for now. */
a45bce6e 1491 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
fa41c305
EW
1492 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1493 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
edae5fe3 1494 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
66188a7e 1495 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
7177e720 1496 {"970", PROCESSOR_POWER4,
66188a7e 1497 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
d296e02e
AP
1498 {"cell", PROCESSOR_CELL,
1499 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
66188a7e
GK
1500 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1501 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1502 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1503 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
49ffe578 1504 {"G5", PROCESSOR_POWER4,
66188a7e
GK
1505 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1506 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1507 {"power2", PROCESSOR_POWER,
1508 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
7ddb6568
AM
1509 {"power3", PROCESSOR_PPC630,
1510 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1511 {"power4", PROCESSOR_POWER4,
9a8d7941 1512 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1bc39d2f 1513 | MASK_MFCRF},
ec507f2d 1514 {"power5", PROCESSOR_POWER5,
9a8d7941 1515 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
432218ba 1516 | MASK_MFCRF | MASK_POPCNTB},
9719f3b7 1517 {"power5+", PROCESSOR_POWER5,
9a8d7941 1518 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
9719f3b7 1519 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
44cd321e 1520 {"power6", PROCESSOR_POWER6,
9a8d7941
DE
1521 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1522 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
44cd321e 1523 {"power6x", PROCESSOR_POWER6,
9a8d7941
DE
1524 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1525 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP | MASK_MFPGPR},
d40c9e33
PB
1526 {"power7", PROCESSOR_POWER5,
1527 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1528 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
66188a7e
GK
1529 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1530 {"powerpc64", PROCESSOR_POWERPC64,
98c41d98 1531 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
66188a7e
GK
1532 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1533 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1534 {"rios2", PROCESSOR_RIOS2,
1535 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1536 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1537 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
98c41d98
DE
1538 {"rs64", PROCESSOR_RS64A,
1539 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
66188a7e 1540 };
5248c961 1541
ca7558fc 1542 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
5248c961 1543
66188a7e
GK
1544 /* Some OSs don't support saving the high part of 64-bit registers on
1545 context switch. Other OSs don't support saving Altivec registers.
1546 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1547 settings; if the user wants either, the user must explicitly specify
1548 them and we won't interfere with the user's specification. */
1549
1550 enum {
1551 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
db2675d3 1552 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
66188a7e 1553 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
716019c0 1554 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
b639c3c2 1555 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
66188a7e 1556 };
0d1fbc8c
AH
1557
1558 rs6000_init_hard_regno_mode_ok ();
1559
c4ad648e 1560 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
66188a7e
GK
1561#ifdef OS_MISSING_POWERPC64
1562 if (OS_MISSING_POWERPC64)
1563 set_masks &= ~MASK_POWERPC64;
1564#endif
1565#ifdef OS_MISSING_ALTIVEC
1566 if (OS_MISSING_ALTIVEC)
1567 set_masks &= ~MASK_ALTIVEC;
1568#endif
1569
768875a8
AM
 1570 /* Don't let the processor default override options given explicitly. */
1571 set_masks &= ~target_flags_explicit;
957211c3 1572
a4f6c312 1573 /* Identify the processor type. */
8e3f41e7 1574 rs6000_select[0].string = default_cpu;
3cb999d8 1575 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
8e3f41e7 1576
b6a1cbae 1577 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
5248c961 1578 {
8e3f41e7
MM
1579 ptr = &rs6000_select[i];
1580 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
5248c961 1581 {
8e3f41e7
MM
1582 for (j = 0; j < ptt_size; j++)
1583 if (! strcmp (ptr->string, processor_target_table[j].name))
1584 {
1585 if (ptr->set_tune_p)
1586 rs6000_cpu = processor_target_table[j].processor;
1587
1588 if (ptr->set_arch_p)
1589 {
66188a7e
GK
1590 target_flags &= ~set_masks;
1591 target_flags |= (processor_target_table[j].target_enable
1592 & set_masks);
8e3f41e7
MM
1593 }
1594 break;
1595 }
1596
4406229e 1597 if (j == ptt_size)
8e3f41e7 1598 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
5248c961
RK
1599 }
1600 }
8a61d227 1601
edae5fe3
DE
1602 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1603 && !rs6000_explicit_options.isel)
a3170dc6
AH
1604 rs6000_isel = 1;
1605
edae5fe3
DE
1606 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1607 || rs6000_cpu == PROCESSOR_PPCE500MC)
fa41c305
EW
1608 {
1609 if (TARGET_ALTIVEC)
1610 error ("AltiVec not supported in this target");
1611 if (TARGET_SPE)
1612 error ("Spe not supported in this target");
1613 }
1614
dff9f1b6
DE
1615 /* If we are optimizing big endian systems for space, use the load/store
1616 multiple and string instructions. */
ef792183 1617 if (BYTES_BIG_ENDIAN && optimize_size)
957211c3 1618 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
938937d8 1619
a4f6c312
SS
1620 /* Don't allow -mmultiple or -mstring on little endian systems
1621 unless the cpu is a 750, because the hardware doesn't support the
 1622 instructions used in little endian mode, and they cause an alignment
1623 trap. The 750 does not cause an alignment trap (except when the
1624 target is unaligned). */
bef84347 1625
b21fb038 1626 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
7e69e155
MM
1627 {
1628 if (TARGET_MULTIPLE)
1629 {
1630 target_flags &= ~MASK_MULTIPLE;
b21fb038 1631 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
d4ee4d25 1632 warning (0, "-mmultiple is not supported on little endian systems");
7e69e155
MM
1633 }
1634
1635 if (TARGET_STRING)
1636 {
1637 target_flags &= ~MASK_STRING;
b21fb038 1638 if ((target_flags_explicit & MASK_STRING) != 0)
d4ee4d25 1639 warning (0, "-mstring is not supported on little endian systems");
7e69e155
MM
1640 }
1641 }
3933e0e1 1642
38c1f2d7
MM
1643 /* Set debug flags */
1644 if (rs6000_debug_name)
1645 {
bfc79d3b 1646 if (! strcmp (rs6000_debug_name, "all"))
38c1f2d7 1647 rs6000_debug_stack = rs6000_debug_arg = 1;
bfc79d3b 1648 else if (! strcmp (rs6000_debug_name, "stack"))
38c1f2d7 1649 rs6000_debug_stack = 1;
bfc79d3b 1650 else if (! strcmp (rs6000_debug_name, "arg"))
38c1f2d7
MM
1651 rs6000_debug_arg = 1;
1652 else
c725bd79 1653 error ("unknown -mdebug-%s switch", rs6000_debug_name);
38c1f2d7
MM
1654 }
1655
57ac7be9
AM
1656 if (rs6000_traceback_name)
1657 {
1658 if (! strncmp (rs6000_traceback_name, "full", 4))
1659 rs6000_traceback = traceback_full;
1660 else if (! strncmp (rs6000_traceback_name, "part", 4))
1661 rs6000_traceback = traceback_part;
1662 else if (! strncmp (rs6000_traceback_name, "no", 2))
1663 rs6000_traceback = traceback_none;
1664 else
9e637a26 1665 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
57ac7be9
AM
1666 rs6000_traceback_name);
1667 }
1668
78f5898b
AH
1669 if (!rs6000_explicit_options.long_double)
1670 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
6fa3f289 1671
602ea4d3 1672#ifndef POWERPC_LINUX
d3603e8c 1673 if (!rs6000_explicit_options.ieee)
602ea4d3
JJ
1674 rs6000_ieeequad = 1;
1675#endif
1676
0db747be
DE
1677 /* Enable Altivec ABI for AIX -maltivec. */
1678 if (TARGET_XCOFF && TARGET_ALTIVEC)
1679 rs6000_altivec_abi = 1;
1680
a2db2771
JJ
1681 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1682 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1683 be explicitly overridden in either case. */
1684 if (TARGET_ELF)
6d0ef01e 1685 {
a2db2771
JJ
1686 if (!rs6000_explicit_options.altivec_abi
1687 && (TARGET_64BIT || TARGET_ALTIVEC))
1688 rs6000_altivec_abi = 1;
1689
1690 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1691 if (!rs6000_explicit_options.vrsave)
1692 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
6d0ef01e
HP
1693 }
1694
594a51fe
SS
1695 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1696 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1697 {
1698 rs6000_darwin64_abi = 1;
9c7956fd 1699#if TARGET_MACHO
6ac49599 1700 darwin_one_byte_bool = 1;
9c7956fd 1701#endif
d9168963
SS
1702 /* Default to natural alignment, for better performance. */
1703 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
594a51fe
SS
1704 }
1705
194c524a
DE
1706 /* Place FP constants in the constant pool instead of TOC
 1707 if section anchors are enabled. */
1708 if (flag_section_anchors)
1709 TARGET_NO_FP_IN_TOC = 1;
1710
c4501e62
JJ
1711 /* Handle -mtls-size option. */
1712 rs6000_parse_tls_size_option ();
1713
a7ae18e2
AH
1714#ifdef SUBTARGET_OVERRIDE_OPTIONS
1715 SUBTARGET_OVERRIDE_OPTIONS;
1716#endif
1717#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1718 SUBSUBTARGET_OVERRIDE_OPTIONS;
1719#endif
4d4cbc0e
AH
1720#ifdef SUB3TARGET_OVERRIDE_OPTIONS
1721 SUB3TARGET_OVERRIDE_OPTIONS;
1722#endif
a7ae18e2 1723
edae5fe3 1724 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
5da702b1 1725 {
edae5fe3 1726 /* The e500 and e500mc do not have string instructions, and we set
5da702b1
AH
1727 MASK_STRING above when optimizing for size. */
1728 if ((target_flags & MASK_STRING) != 0)
1729 target_flags = target_flags & ~MASK_STRING;
1730 }
1731 else if (rs6000_select[1].string != NULL)
1732 {
1733 /* For the powerpc-eabispe configuration, we set all these by
1734 default, so let's unset them if we manually set another
1735 CPU that is not the E500. */
a2db2771 1736 if (!rs6000_explicit_options.spe_abi)
5da702b1 1737 rs6000_spe_abi = 0;
78f5898b 1738 if (!rs6000_explicit_options.spe)
5da702b1 1739 rs6000_spe = 0;
78f5898b 1740 if (!rs6000_explicit_options.float_gprs)
5da702b1 1741 rs6000_float_gprs = 0;
78f5898b 1742 if (!rs6000_explicit_options.isel)
5da702b1
AH
1743 rs6000_isel = 0;
1744 }
b5044283 1745
eca0d5e8
JM
1746 /* Detect invalid option combinations with E500. */
1747 CHECK_E500_OPTIONS;
1748
ec507f2d 1749 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
44cd321e 1750 && rs6000_cpu != PROCESSOR_POWER5
d296e02e
AP
1751 && rs6000_cpu != PROCESSOR_POWER6
1752 && rs6000_cpu != PROCESSOR_CELL);
ec507f2d
DE
1753 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1754 || rs6000_cpu == PROCESSOR_POWER5);
44cd321e
PS
1755 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1756 || rs6000_cpu == PROCESSOR_POWER5
1757 || rs6000_cpu == PROCESSOR_POWER6);
ec507f2d 1758
ec507f2d
DE
1759 rs6000_sched_restricted_insns_priority
1760 = (rs6000_sched_groups ? 1 : 0);
79ae11c4 1761
569fa502 1762 /* Handle -msched-costly-dep option. */
ec507f2d
DE
1763 rs6000_sched_costly_dep
1764 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
432218ba 1765
569fa502
DN
1766 if (rs6000_sched_costly_dep_str)
1767 {
f676971a 1768 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
c4ad648e 1769 rs6000_sched_costly_dep = no_dep_costly;
569fa502 1770 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
c4ad648e 1771 rs6000_sched_costly_dep = all_deps_costly;
569fa502 1772 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
c4ad648e 1773 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
569fa502 1774 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
c4ad648e 1775 rs6000_sched_costly_dep = store_to_load_dep_costly;
f676971a 1776 else
c4ad648e 1777 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
cbe26ab8
DN
1778 }
1779
1780 /* Handle -minsert-sched-nops option. */
ec507f2d
DE
1781 rs6000_sched_insert_nops
1782 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
432218ba 1783
cbe26ab8
DN
1784 if (rs6000_sched_insert_nops_str)
1785 {
1786 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
c4ad648e 1787 rs6000_sched_insert_nops = sched_finish_none;
cbe26ab8 1788 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
c4ad648e 1789 rs6000_sched_insert_nops = sched_finish_pad_groups;
cbe26ab8 1790 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
c4ad648e 1791 rs6000_sched_insert_nops = sched_finish_regroup_exact;
cbe26ab8 1792 else
c4ad648e 1793 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
569fa502
DN
1794 }
1795
c81bebd7 1796#ifdef TARGET_REGNAMES
a4f6c312
SS
1797 /* If the user desires alternate register names, copy in the
1798 alternate names now. */
c81bebd7 1799 if (TARGET_REGNAMES)
4e135bdd 1800 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
c81bebd7
MM
1801#endif
1802
df01da37 1803 /* Set aix_struct_return last, after the ABI is determined.
6fa3f289
ZW
1804 If -maix-struct-return or -msvr4-struct-return was explicitly
1805 used, don't override with the ABI default. */
df01da37
DE
1806 if (!rs6000_explicit_options.aix_struct_ret)
1807 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
6fa3f289 1808
602ea4d3 1809 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
70a01792 1810 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
fcce224d 1811
f676971a 1812 if (TARGET_TOC)
9ebbca7d 1813 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
71f123ca 1814
301d03af
RS
1815 /* We can only guarantee the availability of DI pseudo-ops when
1816 assembling for 64-bit targets. */
ae6c1efd 1817 if (!TARGET_64BIT)
301d03af
RS
1818 {
1819 targetm.asm_out.aligned_op.di = NULL;
1820 targetm.asm_out.unaligned_op.di = NULL;
1821 }
1822
1494c534
DE
1823 /* Set branch target alignment, if not optimizing for size. */
1824 if (!optimize_size)
1825 {
d296e02e
AP
 1826 /* Cell wants to be aligned to 8 bytes for dual issue. */
1827 if (rs6000_cpu == PROCESSOR_CELL)
1828 {
1829 if (align_functions <= 0)
1830 align_functions = 8;
1831 if (align_jumps <= 0)
1832 align_jumps = 8;
1833 if (align_loops <= 0)
1834 align_loops = 8;
1835 }
44cd321e 1836 if (rs6000_align_branch_targets)
1494c534
DE
1837 {
1838 if (align_functions <= 0)
1839 align_functions = 16;
1840 if (align_jumps <= 0)
1841 align_jumps = 16;
1842 if (align_loops <= 0)
1843 align_loops = 16;
1844 }
1845 if (align_jumps_max_skip <= 0)
1846 align_jumps_max_skip = 15;
1847 if (align_loops_max_skip <= 0)
1848 align_loops_max_skip = 15;
1849 }
2792d578 1850
71f123ca
FS
1851 /* Arrange to save and restore machine status around nested functions. */
1852 init_machine_status = rs6000_init_machine_status;
42ba5130
RH
1853
1854 /* We should always be splitting complex arguments, but we can't break
1855 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
18f63bfa 1856 if (DEFAULT_ABI != ABI_AIX)
42ba5130 1857 targetm.calls.split_complex_arg = NULL;
8b897cfa
RS
1858
1859 /* Initialize rs6000_cost with the appropriate target costs. */
1860 if (optimize_size)
1861 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1862 else
1863 switch (rs6000_cpu)
1864 {
1865 case PROCESSOR_RIOS1:
1866 rs6000_cost = &rios1_cost;
1867 break;
1868
1869 case PROCESSOR_RIOS2:
1870 rs6000_cost = &rios2_cost;
1871 break;
1872
1873 case PROCESSOR_RS64A:
1874 rs6000_cost = &rs64a_cost;
1875 break;
1876
1877 case PROCESSOR_MPCCORE:
1878 rs6000_cost = &mpccore_cost;
1879 break;
1880
1881 case PROCESSOR_PPC403:
1882 rs6000_cost = &ppc403_cost;
1883 break;
1884
1885 case PROCESSOR_PPC405:
1886 rs6000_cost = &ppc405_cost;
1887 break;
1888
1889 case PROCESSOR_PPC440:
1890 rs6000_cost = &ppc440_cost;
1891 break;
1892
1893 case PROCESSOR_PPC601:
1894 rs6000_cost = &ppc601_cost;
1895 break;
1896
1897 case PROCESSOR_PPC603:
1898 rs6000_cost = &ppc603_cost;
1899 break;
1900
1901 case PROCESSOR_PPC604:
1902 rs6000_cost = &ppc604_cost;
1903 break;
1904
1905 case PROCESSOR_PPC604e:
1906 rs6000_cost = &ppc604e_cost;
1907 break;
1908
1909 case PROCESSOR_PPC620:
8b897cfa
RS
1910 rs6000_cost = &ppc620_cost;
1911 break;
1912
f0517163
RS
1913 case PROCESSOR_PPC630:
1914 rs6000_cost = &ppc630_cost;
1915 break;
1916
982afe02 1917 case PROCESSOR_CELL:
d296e02e
AP
1918 rs6000_cost = &ppccell_cost;
1919 break;
1920
8b897cfa
RS
1921 case PROCESSOR_PPC750:
1922 case PROCESSOR_PPC7400:
1923 rs6000_cost = &ppc750_cost;
1924 break;
1925
1926 case PROCESSOR_PPC7450:
1927 rs6000_cost = &ppc7450_cost;
1928 break;
1929
1930 case PROCESSOR_PPC8540:
1931 rs6000_cost = &ppc8540_cost;
1932 break;
1933
fa41c305
EW
1934 case PROCESSOR_PPCE300C2:
1935 case PROCESSOR_PPCE300C3:
1936 rs6000_cost = &ppce300c2c3_cost;
1937 break;
1938
edae5fe3
DE
1939 case PROCESSOR_PPCE500MC:
1940 rs6000_cost = &ppce500mc_cost;
1941 break;
1942
8b897cfa
RS
1943 case PROCESSOR_POWER4:
1944 case PROCESSOR_POWER5:
1945 rs6000_cost = &power4_cost;
1946 break;
1947
44cd321e
PS
1948 case PROCESSOR_POWER6:
1949 rs6000_cost = &power6_cost;
1950 break;
1951
8b897cfa 1952 default:
37409796 1953 gcc_unreachable ();
8b897cfa 1954 }
0b11da67
DE
1955
1956 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1957 set_param_value ("simultaneous-prefetches",
1958 rs6000_cost->simultaneous_prefetches);
1959 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
5f732aba 1960 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
0b11da67
DE
1961 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1962 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
5f732aba
DE
1963 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1964 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
d7bd8aeb
JJ
1965
1966 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1967 can be optimized to ap = __builtin_next_arg (0). */
1968 if (DEFAULT_ABI != ABI_V4)
1969 targetm.expand_builtin_va_start = NULL;
5248c961 1970}
5accd822 1971
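/* A condensed sketch of the flag handling performed above (function and
   parameter names are illustrative only): flags that the user set
   explicitly are removed from SET_MASKS first, so a -mcpu= table entry
   can only touch the remaining, defaultable bits.  */
static int
example_apply_cpu_flags (int flags, int explicit_flags,
			 int cpu_enable, int set_masks)
{
  set_masks &= ~explicit_flags;      /* explicit options win over -mcpu */
  flags &= ~set_masks;               /* clear the defaultable bits      */
  flags |= cpu_enable & set_masks;   /* apply the CPU's default bits    */
  return flags;
}
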
7ccf35ed
DN
1972/* Implement targetm.vectorize.builtin_mask_for_load. */
1973static tree
1974rs6000_builtin_mask_for_load (void)
1975{
1976 if (TARGET_ALTIVEC)
1977 return altivec_builtin_mask_for_load;
1978 else
1979 return 0;
1980}
1981
7910ae0c
DN
1982/* Implement targetm.vectorize.builtin_conversion.
 1983 Return a decl of a function that implements conversion of an integer vector
 1984 into a floating-point vector, or vice versa. TYPE is the type of the integer
1985 side of the conversion.
1986 Return NULL_TREE if it is not available. */
f57d17f1
TM
1987static tree
1988rs6000_builtin_conversion (enum tree_code code, tree type)
1989{
1990 if (!TARGET_ALTIVEC)
1991 return NULL_TREE;
982afe02 1992
f57d17f1
TM
1993 switch (code)
1994 {
7910ae0c
DN
1995 case FIX_TRUNC_EXPR:
1996 switch (TYPE_MODE (type))
1997 {
1998 case V4SImode:
1999 return TYPE_UNSIGNED (type)
2000 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2001 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2002 default:
2003 return NULL_TREE;
2004 }
2005
f57d17f1
TM
2006 case FLOAT_EXPR:
2007 switch (TYPE_MODE (type))
2008 {
2009 case V4SImode:
7910ae0c
DN
2010 return TYPE_UNSIGNED (type)
2011 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2012 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
f57d17f1
TM
2013 default:
2014 return NULL_TREE;
2015 }
7910ae0c 2016
f57d17f1
TM
2017 default:
2018 return NULL_TREE;
2019 }
2020}
2021
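/* Usage sketch (helper name is hypothetical): a NULL_TREE answer from the
   hook above means "no vector conversion instruction", so an availability
   check is simply a null test on the returned decl.  */
static bool
example_have_vector_fix_trunc (tree type)
{
  return rs6000_builtin_conversion (FIX_TRUNC_EXPR, type) != NULL_TREE;
}
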
89d67cca
DN
2022/* Implement targetm.vectorize.builtin_mul_widen_even. */
2023static tree
2024rs6000_builtin_mul_widen_even (tree type)
2025{
2026 if (!TARGET_ALTIVEC)
2027 return NULL_TREE;
2028
2029 switch (TYPE_MODE (type))
2030 {
2031 case V8HImode:
7910ae0c
DN
2032 return TYPE_UNSIGNED (type)
2033 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2034 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
89d67cca
DN
2035
2036 case V16QImode:
7910ae0c
DN
2037 return TYPE_UNSIGNED (type)
2038 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2039 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
89d67cca
DN
2040 default:
2041 return NULL_TREE;
2042 }
2043}
2044
2045/* Implement targetm.vectorize.builtin_mul_widen_odd. */
2046static tree
2047rs6000_builtin_mul_widen_odd (tree type)
2048{
2049 if (!TARGET_ALTIVEC)
2050 return NULL_TREE;
2051
2052 switch (TYPE_MODE (type))
2053 {
2054 case V8HImode:
7910ae0c
DN
2055 return TYPE_UNSIGNED (type)
2056 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2057 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
89d67cca
DN
2058
2059 case V16QImode:
7910ae0c
DN
2060 return TYPE_UNSIGNED (type)
2061 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2062 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
89d67cca
DN
2063 default:
2064 return NULL_TREE;
2065 }
2066}
2067
5b900a4c
DN
2068
2069/* Return true iff a data reference of TYPE can reach vector alignment (16)
 2070 after applying N iterations. This routine does not determine
 2071 how many iterations are required to reach the desired alignment. */
2072
2073static bool
3101faab 2074rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
5b900a4c
DN
2075{
2076 if (is_packed)
2077 return false;
2078
2079 if (TARGET_32BIT)
2080 {
2081 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2082 return true;
2083
2084 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2085 return true;
2086
2087 return false;
2088 }
2089 else
2090 {
2091 if (TARGET_MACHO)
2092 return false;
2093
 2094 /* Assume that all other types are naturally aligned. CHECKME! */
2095 return true;
2096 }
2097}
2098
0fca40f5
IR
2099/* Implement targetm.vectorize.builtin_vec_perm. */
2100tree
2101rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2102{
2103 tree d;
2104
2105 *mask_element_type = unsigned_char_type_node;
2106
2107 switch (TYPE_MODE (type))
2108 {
2109 case V16QImode:
2110 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2111 break;
2112
2113 case V8HImode:
2114 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2115 break;
2116
2117 case V4SImode:
2118 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2119 break;
2120
2121 case V4SFmode:
2122 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2123 break;
2124
2125 default:
2126 return NULL_TREE;
2127 }
2128
2129 gcc_assert (d);
2130 return d;
2131}
2132
5da702b1
AH
2133/* Handle generic options of the form -mfoo=yes/no.
2134 NAME is the option name.
2135 VALUE is the option value.
 2136 FLAG points to the flag in which to store 1 or 0, depending on
 2137 whether the option value is 'yes' or 'no' respectively. */
993f19a8 2138static void
5da702b1 2139rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
993f19a8 2140{
5da702b1 2141 if (value == 0)
993f19a8 2142 return;
5da702b1
AH
2143 else if (!strcmp (value, "yes"))
2144 *flag = 1;
2145 else if (!strcmp (value, "no"))
2146 *flag = 0;
08b57fb3 2147 else
5da702b1 2148 error ("unknown -m%s= option specified: '%s'", name, value);
08b57fb3
AH
2149}
2150
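/* Usage sketch: how the helper above is wired up for a hypothetical
   -mfoo=yes/no option; the real callers are the OPT_mvrsave_, OPT_misel_
   and OPT_mspe_ cases in rs6000_handle_option below.  */
static int example_foo_flag;

static void
example_handle_mfoo (const char *arg)
{
  rs6000_parse_yes_no_option ("foo", arg, &example_foo_flag);
}
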
c4501e62
JJ
2151/* Validate and record the size specified with the -mtls-size option. */
2152
2153static void
863d938c 2154rs6000_parse_tls_size_option (void)
c4501e62
JJ
2155{
2156 if (rs6000_tls_size_string == 0)
2157 return;
2158 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2159 rs6000_tls_size = 16;
2160 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2161 rs6000_tls_size = 32;
2162 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2163 rs6000_tls_size = 64;
2164 else
9e637a26 2165 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
c4501e62
JJ
2166}
2167
5accd822 2168void
a2369ed3 2169optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
5accd822 2170{
2e3f0db6
DJ
2171 if (DEFAULT_ABI == ABI_DARWIN)
2172 /* The Darwin libraries never set errno, so we might as well
2173 avoid calling them when that's the only reason we would. */
2174 flag_errno_math = 0;
59d6560b
DE
2175
2176 /* Double growth factor to counter reduced min jump length. */
2177 set_param_value ("max-grow-copy-bb-insns", 16);
194c524a
DE
2178
2179 /* Enable section anchors by default.
2180 Skip section anchors for Objective C and Objective C++
e57903b8
JJ
 2181 until the front ends are fixed.
2182 Do not enable section anchors without toplevel reorder. */
2183 if (!TARGET_MACHO
2184 && lang_hooks.name[4] != 'O'
2185 && flag_toplevel_reorder != 0)
d6cc6ec9 2186 flag_section_anchors = 2;
5accd822 2187}
78f5898b
AH
2188
2189/* Implement TARGET_HANDLE_OPTION. */
2190
2191static bool
2192rs6000_handle_option (size_t code, const char *arg, int value)
2193{
2194 switch (code)
2195 {
2196 case OPT_mno_power:
2197 target_flags &= ~(MASK_POWER | MASK_POWER2
2198 | MASK_MULTIPLE | MASK_STRING);
c2dba4ab
AH
2199 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2200 | MASK_MULTIPLE | MASK_STRING);
78f5898b
AH
2201 break;
2202 case OPT_mno_powerpc:
2203 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2204 | MASK_PPC_GFXOPT | MASK_POWERPC64);
c2dba4ab
AH
2205 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2206 | MASK_PPC_GFXOPT | MASK_POWERPC64);
78f5898b
AH
2207 break;
2208 case OPT_mfull_toc:
d2894ab5
DE
2209 target_flags &= ~MASK_MINIMAL_TOC;
2210 TARGET_NO_FP_IN_TOC = 0;
2211 TARGET_NO_SUM_IN_TOC = 0;
2212 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2213#ifdef TARGET_USES_SYSV4_OPT
2214 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
2215 just the same as -mminimal-toc. */
2216 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2217 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2218#endif
2219 break;
2220
2221#ifdef TARGET_USES_SYSV4_OPT
2222 case OPT_mtoc:
2223 /* Make -mtoc behave like -mminimal-toc. */
2224 target_flags |= MASK_MINIMAL_TOC;
c2dba4ab 2225 target_flags_explicit |= MASK_MINIMAL_TOC;
78f5898b
AH
2226 break;
2227#endif
2228
2229#ifdef TARGET_USES_AIX64_OPT
2230 case OPT_maix64:
2231#else
2232 case OPT_m64:
2233#endif
2c9c9afd
AM
2234 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2235 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2236 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
78f5898b
AH
2237 break;
2238
2239#ifdef TARGET_USES_AIX64_OPT
2240 case OPT_maix32:
2241#else
2242 case OPT_m32:
2243#endif
2244 target_flags &= ~MASK_POWERPC64;
c2dba4ab 2245 target_flags_explicit |= MASK_POWERPC64;
78f5898b
AH
2246 break;
2247
2248 case OPT_minsert_sched_nops_:
2249 rs6000_sched_insert_nops_str = arg;
2250 break;
2251
2252 case OPT_mminimal_toc:
2253 if (value == 1)
2254 {
d2894ab5
DE
2255 TARGET_NO_FP_IN_TOC = 0;
2256 TARGET_NO_SUM_IN_TOC = 0;
78f5898b
AH
2257 }
2258 break;
2259
2260 case OPT_mpower:
2261 if (value == 1)
c2dba4ab
AH
2262 {
2263 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2264 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2265 }
78f5898b
AH
2266 break;
2267
2268 case OPT_mpower2:
2269 if (value == 1)
c2dba4ab
AH
2270 {
2271 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2272 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2273 }
78f5898b
AH
2274 break;
2275
2276 case OPT_mpowerpc_gpopt:
2277 case OPT_mpowerpc_gfxopt:
2278 if (value == 1)
c2dba4ab
AH
2279 {
2280 target_flags |= MASK_POWERPC;
2281 target_flags_explicit |= MASK_POWERPC;
2282 }
78f5898b
AH
2283 break;
2284
df01da37
DE
2285 case OPT_maix_struct_return:
2286 case OPT_msvr4_struct_return:
2287 rs6000_explicit_options.aix_struct_ret = true;
2288 break;
2289
78f5898b 2290 case OPT_mvrsave_:
a2db2771 2291 rs6000_explicit_options.vrsave = true;
78f5898b
AH
2292 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2293 break;
78f5898b 2294
94f4765c
NF
2295 case OPT_misel:
2296 rs6000_explicit_options.isel = true;
2297 rs6000_isel = value;
2298 break;
2299
78f5898b
AH
2300 case OPT_misel_:
2301 rs6000_explicit_options.isel = true;
2302 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2303 break;
2304
94f4765c
NF
2305 case OPT_mspe:
2306 rs6000_explicit_options.spe = true;
2307 rs6000_spe = value;
2308 break;
2309
78f5898b
AH
2310 case OPT_mspe_:
2311 rs6000_explicit_options.spe = true;
2312 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
78f5898b
AH
2313 break;
2314
2315 case OPT_mdebug_:
2316 rs6000_debug_name = arg;
2317 break;
2318
2319#ifdef TARGET_USES_SYSV4_OPT
2320 case OPT_mcall_:
2321 rs6000_abi_name = arg;
2322 break;
2323
2324 case OPT_msdata_:
2325 rs6000_sdata_name = arg;
2326 break;
2327
2328 case OPT_mtls_size_:
2329 rs6000_tls_size_string = arg;
2330 break;
2331
2332 case OPT_mrelocatable:
2333 if (value == 1)
c2dba4ab 2334 {
e0bf274f
AM
2335 target_flags |= MASK_MINIMAL_TOC;
2336 target_flags_explicit |= MASK_MINIMAL_TOC;
2337 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2338 }
78f5898b
AH
2339 break;
2340
2341 case OPT_mrelocatable_lib:
2342 if (value == 1)
c2dba4ab 2343 {
e0bf274f
AM
2344 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2345 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2346 TARGET_NO_FP_IN_TOC = 1;
c2dba4ab 2347 }
78f5898b 2348 else
c2dba4ab
AH
2349 {
2350 target_flags &= ~MASK_RELOCATABLE;
2351 target_flags_explicit |= MASK_RELOCATABLE;
2352 }
78f5898b
AH
2353 break;
2354#endif
2355
2356 case OPT_mabi_:
78f5898b
AH
2357 if (!strcmp (arg, "altivec"))
2358 {
a2db2771 2359 rs6000_explicit_options.altivec_abi = true;
78f5898b 2360 rs6000_altivec_abi = 1;
a2db2771
JJ
2361
2362 /* Enabling the AltiVec ABI turns off the SPE ABI. */
78f5898b
AH
2363 rs6000_spe_abi = 0;
2364 }
2365 else if (! strcmp (arg, "no-altivec"))
d3603e8c 2366 {
a2db2771 2367 rs6000_explicit_options.altivec_abi = true;
d3603e8c
AM
2368 rs6000_altivec_abi = 0;
2369 }
78f5898b
AH
2370 else if (! strcmp (arg, "spe"))
2371 {
a2db2771 2372 rs6000_explicit_options.spe_abi = true;
78f5898b
AH
2373 rs6000_spe_abi = 1;
2374 rs6000_altivec_abi = 0;
2375 if (!TARGET_SPE_ABI)
2376 error ("not configured for ABI: '%s'", arg);
2377 }
2378 else if (! strcmp (arg, "no-spe"))
d3603e8c 2379 {
a2db2771 2380 rs6000_explicit_options.spe_abi = true;
d3603e8c
AM
2381 rs6000_spe_abi = 0;
2382 }
78f5898b
AH
2383
 2384 /* These are here for testing during development only; please do not
 2385 document them in the manual. */
2386 else if (! strcmp (arg, "d64"))
2387 {
2388 rs6000_darwin64_abi = 1;
2389 warning (0, "Using darwin64 ABI");
2390 }
2391 else if (! strcmp (arg, "d32"))
2392 {
2393 rs6000_darwin64_abi = 0;
2394 warning (0, "Using old darwin ABI");
2395 }
2396
602ea4d3
JJ
2397 else if (! strcmp (arg, "ibmlongdouble"))
2398 {
d3603e8c 2399 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2400 rs6000_ieeequad = 0;
2401 warning (0, "Using IBM extended precision long double");
2402 }
2403 else if (! strcmp (arg, "ieeelongdouble"))
2404 {
d3603e8c 2405 rs6000_explicit_options.ieee = true;
602ea4d3
JJ
2406 rs6000_ieeequad = 1;
2407 warning (0, "Using IEEE extended precision long double");
2408 }
2409
78f5898b
AH
2410 else
2411 {
2412 error ("unknown ABI specified: '%s'", arg);
2413 return false;
2414 }
2415 break;
2416
2417 case OPT_mcpu_:
2418 rs6000_select[1].string = arg;
2419 break;
2420
2421 case OPT_mtune_:
2422 rs6000_select[2].string = arg;
2423 break;
2424
2425 case OPT_mtraceback_:
2426 rs6000_traceback_name = arg;
2427 break;
2428
2429 case OPT_mfloat_gprs_:
2430 rs6000_explicit_options.float_gprs = true;
2431 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2432 rs6000_float_gprs = 1;
2433 else if (! strcmp (arg, "double"))
2434 rs6000_float_gprs = 2;
2435 else if (! strcmp (arg, "no"))
2436 rs6000_float_gprs = 0;
2437 else
2438 {
2439 error ("invalid option for -mfloat-gprs: '%s'", arg);
2440 return false;
2441 }
2442 break;
2443
2444 case OPT_mlong_double_:
2445 rs6000_explicit_options.long_double = true;
2446 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2447 if (value != 64 && value != 128)
2448 {
2449 error ("Unknown switch -mlong-double-%s", arg);
2450 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2451 return false;
2452 }
2453 else
2454 rs6000_long_double_type_size = value;
2455 break;
2456
2457 case OPT_msched_costly_dep_:
2458 rs6000_sched_costly_dep_str = arg;
2459 break;
2460
2461 case OPT_malign_:
2462 rs6000_explicit_options.alignment = true;
2463 if (! strcmp (arg, "power"))
2464 {
2465 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2466 some C library functions, so warn about it. The flag may be
2467 useful for performance studies from time to time though, so
2468 don't disable it entirely. */
2469 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2470 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2471 " it is incompatible with the installed C and C++ libraries");
2472 rs6000_alignment_flags = MASK_ALIGN_POWER;
2473 }
2474 else if (! strcmp (arg, "natural"))
2475 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2476 else
2477 {
2478 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2479 return false;
2480 }
2481 break;
2482 }
2483 return true;
2484}
3cfa4909
MM
2485\f
2486/* Do anything needed at the start of the asm file. */
2487
1bc7c5b6 2488static void
863d938c 2489rs6000_file_start (void)
3cfa4909 2490{
c4d38ccb 2491 size_t i;
3cfa4909 2492 char buffer[80];
d330fd93 2493 const char *start = buffer;
3cfa4909 2494 struct rs6000_cpu_select *ptr;
1bc7c5b6
ZW
2495 const char *default_cpu = TARGET_CPU_DEFAULT;
2496 FILE *file = asm_out_file;
2497
2498 default_file_start ();
2499
2500#ifdef TARGET_BI_ARCH
2501 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2502 default_cpu = 0;
2503#endif
3cfa4909
MM
2504
2505 if (flag_verbose_asm)
2506 {
2507 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2508 rs6000_select[0].string = default_cpu;
2509
b6a1cbae 2510 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
3cfa4909
MM
2511 {
2512 ptr = &rs6000_select[i];
2513 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2514 {
2515 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2516 start = "";
2517 }
2518 }
2519
9c6b4ed9 2520 if (PPC405_ERRATUM77)
b0bfee6e 2521 {
9c6b4ed9 2522 fprintf (file, "%s PPC405CR_ERRATUM77", start);
b0bfee6e
DE
2523 start = "";
2524 }
b0bfee6e 2525
b91da81f 2526#ifdef USING_ELFOS_H
3cfa4909
MM
2527 switch (rs6000_sdata)
2528 {
2529 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2530 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2531 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2532 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2533 }
2534
2535 if (rs6000_sdata && g_switch_value)
2536 {
307b599c
MK
2537 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2538 g_switch_value);
3cfa4909
MM
2539 start = "";
2540 }
2541#endif
2542
2543 if (*start == '\0')
949ea356 2544 putc ('\n', file);
3cfa4909 2545 }
b723e82f 2546
e51917ae
JM
2547#ifdef HAVE_AS_GNU_ATTRIBUTE
2548 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
aaa42494
DJ
2549 {
2550 fprintf (file, "\t.gnu_attribute 4, %d\n",
2551 (TARGET_HARD_FLOAT && TARGET_FPRS) ? 1 : 2);
2552 fprintf (file, "\t.gnu_attribute 8, %d\n",
2553 (TARGET_ALTIVEC_ABI ? 2
2554 : TARGET_SPE_ABI ? 3
2555 : 1));
2556 }
e51917ae
JM
2557#endif
2558
b723e82f
JJ
2559 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2560 {
d6b5193b
RS
2561 switch_to_section (toc_section);
2562 switch_to_section (text_section);
b723e82f 2563 }
3cfa4909 2564}
c4e18b1c 2565
5248c961 2566\f
a0ab749a 2567/* Return nonzero if this function is known to have a null epilogue. */
9878760c
RK
2568
2569int
863d938c 2570direct_return (void)
9878760c 2571{
4697a36c
MM
2572 if (reload_completed)
2573 {
2574 rs6000_stack_t *info = rs6000_stack_info ();
2575
2576 if (info->first_gp_reg_save == 32
2577 && info->first_fp_reg_save == 64
00b960c7 2578 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
c81fc13e
DE
2579 && ! info->lr_save_p
2580 && ! info->cr_save_p
00b960c7 2581 && info->vrsave_mask == 0
c81fc13e 2582 && ! info->push_p)
4697a36c
MM
2583 return 1;
2584 }
2585
2586 return 0;
9878760c
RK
2587}
2588
4e74d8ec
MM
2589/* Return the number of instructions it takes to form a constant in an
2590 integer register. */
2591
48d72335 2592int
a2369ed3 2593num_insns_constant_wide (HOST_WIDE_INT value)
4e74d8ec
MM
2594{
2595 /* signed constant loadable with {cal|addi} */
547b216d 2596 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
0865c631
GK
2597 return 1;
2598
4e74d8ec 2599 /* constant loadable with {cau|addis} */
547b216d
DE
2600 else if ((value & 0xffff) == 0
2601 && (value >> 31 == -1 || value >> 31 == 0))
4e74d8ec
MM
2602 return 1;
2603
5f59ecb7 2604#if HOST_BITS_PER_WIDE_INT == 64
c81fc13e 2605 else if (TARGET_POWERPC64)
4e74d8ec 2606 {
a65c591c
DE
2607 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2608 HOST_WIDE_INT high = value >> 31;
4e74d8ec 2609
a65c591c 2610 if (high == 0 || high == -1)
4e74d8ec
MM
2611 return 2;
2612
a65c591c 2613 high >>= 1;
4e74d8ec 2614
a65c591c 2615 if (low == 0)
4e74d8ec 2616 return num_insns_constant_wide (high) + 1;
4e74d8ec
MM
2617 else
2618 return (num_insns_constant_wide (high)
e396202a 2619 + num_insns_constant_wide (low) + 1);
4e74d8ec
MM
2620 }
2621#endif
2622
2623 else
2624 return 2;
2625}
2626
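/* A few sanity examples for the counting above (hypothetical, never
   called): small signed constants and addis-shaped constants take one
   instruction, anything else at least two.  */
static void
example_check_num_insns_constant_wide (void)
{
  gcc_assert (num_insns_constant_wide (0x7fff) == 1);      /* addi        */
  gcc_assert (num_insns_constant_wide (-0x8000) == 1);     /* addi        */
  gcc_assert (num_insns_constant_wide (0x12340000) == 1);  /* addis       */
  gcc_assert (num_insns_constant_wide (0x12345678) == 2);  /* addis + ori */
}
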
2627int
a2369ed3 2628num_insns_constant (rtx op, enum machine_mode mode)
4e74d8ec 2629{
37409796 2630 HOST_WIDE_INT low, high;
bb8df8a6 2631
37409796 2632 switch (GET_CODE (op))
0d30d435 2633 {
37409796 2634 case CONST_INT:
0d30d435 2635#if HOST_BITS_PER_WIDE_INT == 64
4e2c1c44 2636 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1990cd79 2637 && mask64_operand (op, mode))
c4ad648e 2638 return 2;
0d30d435
DE
2639 else
2640#endif
2641 return num_insns_constant_wide (INTVAL (op));
4e74d8ec 2642
37409796 2643 case CONST_DOUBLE:
e41b2a33 2644 if (mode == SFmode || mode == SDmode)
37409796
NS
2645 {
2646 long l;
2647 REAL_VALUE_TYPE rv;
bb8df8a6 2648
37409796 2649 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
e41b2a33
PB
2650 if (DECIMAL_FLOAT_MODE_P (mode))
2651 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2652 else
2653 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
37409796
NS
2654 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2655 }
a260abc9 2656
37409796
NS
2657 if (mode == VOIDmode || mode == DImode)
2658 {
2659 high = CONST_DOUBLE_HIGH (op);
2660 low = CONST_DOUBLE_LOW (op);
2661 }
2662 else
2663 {
2664 long l[2];
2665 REAL_VALUE_TYPE rv;
bb8df8a6 2666
37409796 2667 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
7393f7f8
BE
2668 if (DECIMAL_FLOAT_MODE_P (mode))
2669 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2670 else
2671 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
37409796
NS
2672 high = l[WORDS_BIG_ENDIAN == 0];
2673 low = l[WORDS_BIG_ENDIAN != 0];
2674 }
47ad8c61 2675
37409796
NS
2676 if (TARGET_32BIT)
2677 return (num_insns_constant_wide (low)
2678 + num_insns_constant_wide (high));
2679 else
2680 {
2681 if ((high == 0 && low >= 0)
2682 || (high == -1 && low < 0))
2683 return num_insns_constant_wide (low);
bb8df8a6 2684
1990cd79 2685 else if (mask64_operand (op, mode))
37409796 2686 return 2;
bb8df8a6 2687
37409796
NS
2688 else if (low == 0)
2689 return num_insns_constant_wide (high) + 1;
bb8df8a6 2690
37409796
NS
2691 else
2692 return (num_insns_constant_wide (high)
2693 + num_insns_constant_wide (low) + 1);
2694 }
bb8df8a6 2695
37409796
NS
2696 default:
2697 gcc_unreachable ();
4e74d8ec 2698 }
4e74d8ec
MM
2699}
2700
0972012c
RS
2701/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2702 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2703 corresponding element of the vector, but for V4SFmode and V2SFmode,
2704 the corresponding "float" is interpreted as an SImode integer. */
2705
847535b6 2706HOST_WIDE_INT
0972012c
RS
2707const_vector_elt_as_int (rtx op, unsigned int elt)
2708{
2709 rtx tmp = CONST_VECTOR_ELT (op, elt);
2710 if (GET_MODE (op) == V4SFmode
2711 || GET_MODE (op) == V2SFmode)
2712 tmp = gen_lowpart (SImode, tmp);
2713 return INTVAL (tmp);
2714}
452a7d36 2715
77ccdfed 2716/* Return true if OP can be synthesized with a particular vspltisb, vspltish
66180ff3
PB
2717 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2718 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2719 all items are set to the same value and contain COPIES replicas of the
 2720 vsplt's operand; if STEP > 1, one in every STEP elements is set to the vsplt's
2721 operand and the others are set to the value of the operand's msb. */
2722
2723static bool
2724vspltis_constant (rtx op, unsigned step, unsigned copies)
452a7d36 2725{
66180ff3
PB
2726 enum machine_mode mode = GET_MODE (op);
2727 enum machine_mode inner = GET_MODE_INNER (mode);
2728
2729 unsigned i;
2730 unsigned nunits = GET_MODE_NUNITS (mode);
2731 unsigned bitsize = GET_MODE_BITSIZE (inner);
2732 unsigned mask = GET_MODE_MASK (inner);
2733
0972012c 2734 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
66180ff3
PB
2735 HOST_WIDE_INT splat_val = val;
2736 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2737
2738 /* Construct the value to be splatted, if possible. If not, return 0. */
2739 for (i = 2; i <= copies; i *= 2)
452a7d36 2740 {
66180ff3
PB
2741 HOST_WIDE_INT small_val;
2742 bitsize /= 2;
2743 small_val = splat_val >> bitsize;
2744 mask >>= bitsize;
2745 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2746 return false;
2747 splat_val = small_val;
2748 }
c4ad648e 2749
66180ff3
PB
2750 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2751 if (EASY_VECTOR_15 (splat_val))
2752 ;
2753
2754 /* Also check if we can splat, and then add the result to itself. Do so if
 2755 the value is positive, or if the splat instruction is using OP's mode;
2756 for splat_val < 0, the splat and the add should use the same mode. */
2757 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2758 && (splat_val >= 0 || (step == 1 && copies == 1)))
2759 ;
2760
2761 else
2762 return false;
2763
2764 /* Check if VAL is present in every STEP-th element, and the
2765 other elements are filled with its most significant bit. */
2766 for (i = 0; i < nunits - 1; ++i)
2767 {
2768 HOST_WIDE_INT desired_val;
2769 if (((i + 1) & (step - 1)) == 0)
2770 desired_val = val;
2771 else
2772 desired_val = msb_val;
2773
0972012c 2774 if (desired_val != const_vector_elt_as_int (op, i))
66180ff3 2775 return false;
452a7d36 2776 }
66180ff3
PB
2777
2778 return true;
452a7d36
HP
2779}
2780
69ef87e2 2781
77ccdfed 2782/* Return true if OP is of the given MODE and can be synthesized
66180ff3
PB
2783 with a vspltisb, vspltish or vspltisw. */
2784
2785bool
2786easy_altivec_constant (rtx op, enum machine_mode mode)
d744e06e 2787{
66180ff3 2788 unsigned step, copies;
d744e06e 2789
66180ff3
PB
2790 if (mode == VOIDmode)
2791 mode = GET_MODE (op);
2792 else if (mode != GET_MODE (op))
2793 return false;
d744e06e 2794
66180ff3
PB
2795 /* Start with a vspltisw. */
2796 step = GET_MODE_NUNITS (mode) / 4;
2797 copies = 1;
2798
2799 if (vspltis_constant (op, step, copies))
2800 return true;
2801
2802 /* Then try with a vspltish. */
2803 if (step == 1)
2804 copies <<= 1;
2805 else
2806 step >>= 1;
2807
2808 if (vspltis_constant (op, step, copies))
2809 return true;
2810
2811 /* And finally a vspltisb. */
2812 if (step == 1)
2813 copies <<= 1;
2814 else
2815 step >>= 1;
2816
2817 if (vspltis_constant (op, step, copies))
2818 return true;
2819
2820 return false;
d744e06e
AH
2821}
2822
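/* Worked example for the cascade above (hypothetical, never called): a
   V8HImode constant whose halfwords are all 0x0101 is rejected by the
   vspltisw and vspltish attempts, but viewed as bytes it is 0x01
   replicated, so the final vspltisb attempt accepts it.  */
static void
example_check_easy_altivec_constant (void)
{
  rtx c = GEN_INT (0x0101);
  rtx v = gen_rtx_CONST_VECTOR (V8HImode,
				gen_rtvec (8, c, c, c, c, c, c, c, c));
  gcc_assert (easy_altivec_constant (v, V8HImode));
}
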
66180ff3
PB
2823/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2824 result is OP. Abort if it is not possible. */
d744e06e 2825
f676971a 2826rtx
66180ff3 2827gen_easy_altivec_constant (rtx op)
452a7d36 2828{
66180ff3
PB
2829 enum machine_mode mode = GET_MODE (op);
2830 int nunits = GET_MODE_NUNITS (mode);
2831 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2832 unsigned step = nunits / 4;
2833 unsigned copies = 1;
2834
2835 /* Start with a vspltisw. */
2836 if (vspltis_constant (op, step, copies))
2837 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2838
2839 /* Then try with a vspltish. */
2840 if (step == 1)
2841 copies <<= 1;
2842 else
2843 step >>= 1;
2844
2845 if (vspltis_constant (op, step, copies))
2846 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2847
2848 /* And finally a vspltisb. */
2849 if (step == 1)
2850 copies <<= 1;
2851 else
2852 step >>= 1;
2853
2854 if (vspltis_constant (op, step, copies))
2855 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2856
2857 gcc_unreachable ();
d744e06e
AH
2858}
2859
2860const char *
a2369ed3 2861output_vec_const_move (rtx *operands)
d744e06e
AH
2862{
2863 int cst, cst2;
2864 enum machine_mode mode;
2865 rtx dest, vec;
2866
2867 dest = operands[0];
2868 vec = operands[1];
d744e06e 2869 mode = GET_MODE (dest);
69ef87e2 2870
d744e06e
AH
2871 if (TARGET_ALTIVEC)
2872 {
66180ff3 2873 rtx splat_vec;
d744e06e
AH
2874 if (zero_constant (vec, mode))
2875 return "vxor %0,%0,%0";
37409796 2876
66180ff3
PB
2877 splat_vec = gen_easy_altivec_constant (vec);
2878 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2879 operands[1] = XEXP (splat_vec, 0);
2880 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2881 return "#";
bb8df8a6 2882
66180ff3 2883 switch (GET_MODE (splat_vec))
98ef3137 2884 {
37409796 2885 case V4SImode:
66180ff3 2886 return "vspltisw %0,%1";
c4ad648e 2887
37409796 2888 case V8HImode:
66180ff3 2889 return "vspltish %0,%1";
c4ad648e 2890
37409796 2891 case V16QImode:
66180ff3 2892 return "vspltisb %0,%1";
bb8df8a6 2893
37409796
NS
2894 default:
2895 gcc_unreachable ();
98ef3137 2896 }
69ef87e2
AH
2897 }
2898
37409796 2899 gcc_assert (TARGET_SPE);
bb8df8a6 2900
37409796
NS
2901 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2902 pattern of V1DI, V4HI, and V2SF.
2903
2904 FIXME: We should probably return # and add post reload
2905 splitters for these, but this way is so easy ;-). */
e20dcbef
PB
2906 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2907 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2908 operands[1] = CONST_VECTOR_ELT (vec, 0);
2909 operands[2] = CONST_VECTOR_ELT (vec, 1);
37409796
NS
2910 if (cst == cst2)
2911 return "li %0,%1\n\tevmergelo %0,%0,%0";
2912 else
2913 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
69ef87e2
AH
2914}
2915
f5027409
RE
2916/* Initialize vector TARGET of the paired-single unit to VALS. */
2917
2918void
2919paired_expand_vector_init (rtx target, rtx vals)
2920{
2921 enum machine_mode mode = GET_MODE (target);
2922 int n_elts = GET_MODE_NUNITS (mode);
2923 int n_var = 0;
0a2aaacc 2924 rtx x, new_rtx, tmp, constant_op, op1, op2;
f5027409
RE
2925 int i;
2926
2927 for (i = 0; i < n_elts; ++i)
2928 {
2929 x = XVECEXP (vals, 0, i);
2930 if (!CONSTANT_P (x))
2931 ++n_var;
2932 }
2933 if (n_var == 0)
2934 {
2935 /* Load from constant pool. */
2936 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2937 return;
2938 }
2939
2940 if (n_var == 2)
2941 {
2942 /* The vector is initialized only with non-constants. */
0a2aaacc 2943 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
f5027409
RE
2944 XVECEXP (vals, 0, 1));
2945
0a2aaacc 2946 emit_move_insn (target, new_rtx);
f5027409
RE
2947 return;
2948 }
2949
2950 /* One field is non-constant and the other one is a constant. Load the
 2951 constant from the constant pool and use the ps_merge instruction to
2952 construct the whole vector. */
2953 op1 = XVECEXP (vals, 0, 0);
2954 op2 = XVECEXP (vals, 0, 1);
2955
2956 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
2957
2958 tmp = gen_reg_rtx (GET_MODE (constant_op));
2959 emit_move_insn (tmp, constant_op);
2960
2961 if (CONSTANT_P (op1))
0a2aaacc 2962 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
f5027409 2963 else
0a2aaacc 2964 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
f5027409 2965
0a2aaacc 2966 emit_move_insn (target, new_rtx);
f5027409
RE
2967}
2968
e2e95f45
RE
2969void
2970paired_expand_vector_move (rtx operands[])
2971{
2972 rtx op0 = operands[0], op1 = operands[1];
2973
2974 emit_move_insn (op0, op1);
2975}
2976
2977/* Emit vector compare for code RCODE. DEST is destination, OP1 and
2978 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
 2979 operands for the relation operation RCODE. This is a recursive
2980 function. */
2981
2982static void
2983paired_emit_vector_compare (enum rtx_code rcode,
2984 rtx dest, rtx op0, rtx op1,
2985 rtx cc_op0, rtx cc_op1)
2986{
2987 rtx tmp = gen_reg_rtx (V2SFmode);
2988 rtx tmp1, max, min, equal_zero;
2989
2990 gcc_assert (TARGET_PAIRED_FLOAT);
2991 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
2992
2993 switch (rcode)
2994 {
2995 case LT:
2996 case LTU:
2997 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
2998 return;
2999 case GE:
3000 case GEU:
3001 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3002 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3003 return;
3004 case LE:
3005 case LEU:
3006 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3007 return;
3008 case GT:
3009 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3010 return;
3011 case EQ:
3012 tmp1 = gen_reg_rtx (V2SFmode);
3013 max = gen_reg_rtx (V2SFmode);
3014 min = gen_reg_rtx (V2SFmode);
3015 equal_zero = gen_reg_rtx (V2SFmode);
3016
3017 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3018 emit_insn (gen_selv2sf4
3019 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3020 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3021 emit_insn (gen_selv2sf4
3022 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3023 emit_insn (gen_subv2sf3 (tmp1, min, max));
3024 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3025 return;
3026 case NE:
3027 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3028 return;
3029 case UNLE:
3030 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3031 return;
3032 case UNLT:
3033 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3034 return;
3035 case UNGE:
3036 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3037 return;
3038 case UNGT:
3039 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3040 return;
3041 default:
3042 gcc_unreachable ();
3043 }
3044
3045 return;
3046}
3047
3048/* Emit vector conditional expression.
3049 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3050 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3051
3052int
3053paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3054 rtx cond, rtx cc_op0, rtx cc_op1)
3055{
3056 enum rtx_code rcode = GET_CODE (cond);
3057
3058 if (!TARGET_PAIRED_FLOAT)
3059 return 0;
3060
3061 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3062
3063 return 1;
3064}
3065
7a4eca66
DE
3066/* Initialize vector TARGET to VALS. */
3067
3068void
3069rs6000_expand_vector_init (rtx target, rtx vals)
3070{
3071 enum machine_mode mode = GET_MODE (target);
3072 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3073 int n_elts = GET_MODE_NUNITS (mode);
3074 int n_var = 0, one_var = -1;
3075 bool all_same = true, all_const_zero = true;
3076 rtx x, mem;
3077 int i;
3078
3079 for (i = 0; i < n_elts; ++i)
3080 {
3081 x = XVECEXP (vals, 0, i);
3082 if (!CONSTANT_P (x))
3083 ++n_var, one_var = i;
3084 else if (x != CONST0_RTX (inner_mode))
3085 all_const_zero = false;
3086
3087 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3088 all_same = false;
3089 }
3090
3091 if (n_var == 0)
3092 {
501fb355 3093 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
3094 if (mode != V4SFmode && all_const_zero)
3095 {
3096 /* Zero register. */
3097 emit_insn (gen_rtx_SET (VOIDmode, target,
3098 gen_rtx_XOR (mode, target, target)));
3099 return;
3100 }
501fb355 3101 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
3102 {
3103 /* Splat immediate. */
501fb355 3104 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
3105 return;
3106 }
3107 else if (all_same)
3108 ; /* Splat vector element. */
3109 else
3110 {
3111 /* Load from constant pool. */
501fb355 3112 emit_move_insn (target, const_vec);
3113 return;
3114 }
3115 }
3116
3117 /* Store value to stack temp. Load vector element. Splat. */
3118 if (all_same)
3119 {
3120 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3121 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3122 XVECEXP (vals, 0, 0));
3123 x = gen_rtx_UNSPEC (VOIDmode,
3124 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3125 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3126 gen_rtvec (2,
3127 gen_rtx_SET (VOIDmode,
3128 target, mem),
3129 x)));
3130 x = gen_rtx_VEC_SELECT (inner_mode, target,
3131 gen_rtx_PARALLEL (VOIDmode,
3132 gen_rtvec (1, const0_rtx)));
3133 emit_insn (gen_rtx_SET (VOIDmode, target,
3134 gen_rtx_VEC_DUPLICATE (mode, x)));
3135 return;
3136 }
3137
3138 /* One field is non-constant. Load constant then overwrite
3139 varying field. */
3140 if (n_var == 1)
3141 {
3142 rtx copy = copy_rtx (vals);
3143
57b51d4d 3144 /* Load constant part of vector, substitute neighboring value for
3145 varying element. */
3146 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3147 rs6000_expand_vector_init (target, copy);
3148
3149 /* Insert variable. */
3150 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3151 return;
3152 }
3153
3154 /* Construct the vector in memory one field at a time
3155 and load the whole vector. */
3156 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3157 for (i = 0; i < n_elts; i++)
3158 emit_move_insn (adjust_address_nv (mem, inner_mode,
3159 i * GET_MODE_SIZE (inner_mode)),
3160 XVECEXP (vals, 0, i));
3161 emit_move_insn (target, mem);
3162}
3163
3164/* Set field ELT of TARGET to VAL. */
3165
3166void
3167rs6000_expand_vector_set (rtx target, rtx val, int elt)
3168{
3169 enum machine_mode mode = GET_MODE (target);
3170 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3171 rtx reg = gen_reg_rtx (mode);
3172 rtx mask, mem, x;
3173 int width = GET_MODE_SIZE (inner_mode);
3174 int i;
3175
3176 /* Load single variable value. */
3177 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3178 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3179 x = gen_rtx_UNSPEC (VOIDmode,
3180 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3181 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3182 gen_rtvec (2,
3183 gen_rtx_SET (VOIDmode,
3184 reg, mem),
3185 x)));
3186
3187 /* Linear sequence. */
3188 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3189 for (i = 0; i < 16; ++i)
3190 XVECEXP (mask, 0, i) = GEN_INT (i);
3191
3192 /* Set permute mask to insert element into target. */
3193 for (i = 0; i < width; ++i)
3194 XVECEXP (mask, 0, elt*width + i)
3195 = GEN_INT (i + 0x10);
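  /* For example, inserting element 1 of a V4SImode vector (width == 4)
     leaves the mask as { 0,1,2,3, 16,17,18,19, 8,9,10,11, 12,13,14,15 }.
     In the vperm below, mask bytes 0-15 select from TARGET and bytes
     16-31 select from REG, so only the four bytes of element 1 are
     replaced; this assumes the stack temporary above is 16-byte aligned
     so that the scalar lands in the first lane of REG.  */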
3196 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3197 x = gen_rtx_UNSPEC (mode,
3198 gen_rtvec (3, target, reg,
3199 force_reg (V16QImode, x)),
3200 UNSPEC_VPERM);
3201 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3202}
3203
3204/* Extract field ELT from VEC into TARGET. */
3205
3206void
3207rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3208{
3209 enum machine_mode mode = GET_MODE (vec);
3210 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3211 rtx mem, x;
3212
3213 /* Allocate mode-sized buffer. */
3214 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3215
3216 /* Add offset to field within buffer matching vector element. */
3217 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3218
3219 /* Store single field into mode-sized buffer. */
3220 x = gen_rtx_UNSPEC (VOIDmode,
3221 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3222 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3223 gen_rtvec (2,
3224 gen_rtx_SET (VOIDmode,
3225 mem, vec),
3226 x)));
3227 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3228}
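/* Note on the intent of the sequence above: the UNSPEC_STVE store
   writes only the single element selected by the low bits of the
   effective address (an stvebx/stvehx/stvewx style store), so once MEM
   has been offset to element ELT the scalar load at the end reads back
   exactly that element.  The precise insn chosen comes from the
   UNSPEC_STVE patterns in the machine description.  */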
3229
3230/* Generates shifts and masks for a pair of rldicl or rldicr insns to
3231 implement ANDing by the mask IN. */
3232void
a2369ed3 3233build_mask64_2_operands (rtx in, rtx *out)
3234{
3235#if HOST_BITS_PER_WIDE_INT >= 64
3236 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3237 int shift;
3238
37409796 3239 gcc_assert (GET_CODE (in) == CONST_INT);
3240
3241 c = INTVAL (in);
3242 if (c & 1)
3243 {
3244 /* Assume c initially something like 0x00fff000000fffff. The idea
3245 is to rotate the word so that the middle ^^^^^^ group of zeros
3246 is at the MS end and can be cleared with an rldicl mask. We then
3247 rotate back and clear off the MS ^^ group of zeros with a
3248 second rldicl. */
3249 c = ~c; /* c == 0xff000ffffff00000 */
3250 lsb = c & -c; /* lsb == 0x0000000000100000 */
3251 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3252 c = ~c; /* c == 0x00fff000000fffff */
3253 c &= -lsb; /* c == 0x00fff00000000000 */
3254 lsb = c & -c; /* lsb == 0x0000100000000000 */
3255 c = ~c; /* c == 0xff000fffffffffff */
3256 c &= -lsb; /* c == 0xff00000000000000 */
3257 shift = 0;
3258 while ((lsb >>= 1) != 0)
3259 shift++; /* shift == 44 on exit from loop */
3260 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3261 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3262 m2 = ~c; /* m2 == 0x00ffffffffffffff */
3263 }
3264 else
3265 {
3266 /* Assume c initially something like 0xff000f0000000000. The idea
3267 is to rotate the word so that the ^^^ middle group of zeros
3268 is at the LS end and can be cleared with an rldicr mask. We then
3269 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3270 a second rldicr. */
3271 lsb = c & -c; /* lsb == 0x0000010000000000 */
3272 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3273 c = ~c; /* c == 0x00fff0ffffffffff */
3274 c &= -lsb; /* c == 0x00fff00000000000 */
3275 lsb = c & -c; /* lsb == 0x0000100000000000 */
3276 c = ~c; /* c == 0xff000fffffffffff */
3277 c &= -lsb; /* c == 0xff00000000000000 */
3278 shift = 0;
3279 while ((lsb >>= 1) != 0)
3280 shift++; /* shift == 44 on exit from loop */
3281 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3282 m1 >>= shift; /* m1 == 0x0000000000000fff */
3283 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3284 }
3285
3286 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3287 masks will be all 1's. We are guaranteed more than one transition. */
3288 out[0] = GEN_INT (64 - shift);
3289 out[1] = GEN_INT (m1);
3290 out[2] = GEN_INT (shift);
3291 out[3] = GEN_INT (m2);
3292#else
3293 (void)in;
3294 (void)out;
37409796 3295 gcc_unreachable ();
0ba1b2ff 3296#endif
3297}
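/* A worked example, following the trace in the comments above: for
   IN == 0x00fff000000fffff the outputs are OUT[0] == 20 (i.e. 64 - 44),
   OUT[1] == 0x000000ffffffffff, OUT[2] == 44 and
   OUT[3] == 0x00ffffffffffffff, giving the two rotate-and-mask insns
   that together implement the original AND.  */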
3298
54b695e7 3299/* Return TRUE if OP is an invalid SUBREG operation on the e500. */
3300
3301bool
3302invalid_e500_subreg (rtx op, enum machine_mode mode)
3303{
61c76239
JM
3304 if (TARGET_E500_DOUBLE)
3305 {
17caeff2 3306 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
4f011e1e
JM
3307 subreg:TI and reg:TF. Decimal float modes are like integer
3308 modes (only low part of each register used) for this
3309 purpose. */
61c76239 3310 if (GET_CODE (op) == SUBREG
4f011e1e
JM
3311 && (mode == SImode || mode == DImode || mode == TImode
3312 || mode == DDmode || mode == TDmode)
61c76239 3313 && REG_P (SUBREG_REG (op))
17caeff2 3314 && (GET_MODE (SUBREG_REG (op)) == DFmode
4f011e1e 3315 || GET_MODE (SUBREG_REG (op)) == TFmode))
61c76239
JM
3316 return true;
3317
17caeff2
JM
3318 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3319 reg:TI. */
61c76239 3320 if (GET_CODE (op) == SUBREG
4f011e1e 3321 && (mode == DFmode || mode == TFmode)
61c76239 3322 && REG_P (SUBREG_REG (op))
17caeff2 3323 && (GET_MODE (SUBREG_REG (op)) == DImode
4f011e1e
JM
3324 || GET_MODE (SUBREG_REG (op)) == TImode
3325 || GET_MODE (SUBREG_REG (op)) == DDmode
3326 || GET_MODE (SUBREG_REG (op)) == TDmode))
61c76239
JM
3327 return true;
3328 }
54b695e7 3329
61c76239
JM
3330 if (TARGET_SPE
3331 && GET_CODE (op) == SUBREG
3332 && mode == SImode
54b695e7 3333 && REG_P (SUBREG_REG (op))
14502dad 3334 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
54b695e7
AH
3335 return true;
3336
3337 return false;
3338}
3339
58182de3 3340/* AIX increases natural record alignment to doubleword if the first
3341 field is an FP double while the FP fields remain word aligned. */
3342
19d66194 3343unsigned int
fa5b0972
AM
3344rs6000_special_round_type_align (tree type, unsigned int computed,
3345 unsigned int specified)
95727fb8 3346{
fa5b0972 3347 unsigned int align = MAX (computed, specified);
95727fb8 3348 tree field = TYPE_FIELDS (type);
95727fb8 3349
bb8df8a6 3350  /* Skip all non-field decls.  */
85962ac8 3351 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3352 field = TREE_CHAIN (field);
3353
3354 if (field != NULL && field != type)
3355 {
3356 type = TREE_TYPE (field);
3357 while (TREE_CODE (type) == ARRAY_TYPE)
3358 type = TREE_TYPE (type);
3359
3360 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3361 align = MAX (align, 64);
3362 }
95727fb8 3363
fa5b0972 3364 return align;
3365}
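/* For example (a sketch of the rule, not text from the ABI manual):
     struct { double d; int i; };  record alignment is raised to 64 bits
     struct { int i; double d; };  alignment is left as computed, and the
                                   double itself stays word aligned
   because only a leading FP double triggers the bump above.  */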
3366
3367/* Darwin increases record alignment to the natural alignment of
3368 the first field. */
3369
3370unsigned int
3371darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3372 unsigned int specified)
3373{
3374 unsigned int align = MAX (computed, specified);
3375
3376 if (TYPE_PACKED (type))
3377 return align;
3378
3379 /* Find the first field, looking down into aggregates. */
3380 do {
3381 tree field = TYPE_FIELDS (type);
3382    /* Skip all non-field decls.  */
3383 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3384 field = TREE_CHAIN (field);
3385 if (! field)
3386 break;
3387 type = TREE_TYPE (field);
3388 while (TREE_CODE (type) == ARRAY_TYPE)
3389 type = TREE_TYPE (type);
3390 } while (AGGREGATE_TYPE_P (type));
3391
3392 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3393 align = MAX (align, TYPE_ALIGN (type));
3394
3395 return align;
3396}
3397
a4f6c312 3398/* Return 1 for an operand in small memory on V.4/eabi. */
7509c759
MM
3399
3400int
f676971a 3401small_data_operand (rtx op ATTRIBUTE_UNUSED,
a2369ed3 3402 enum machine_mode mode ATTRIBUTE_UNUSED)
7509c759 3403{
38c1f2d7 3404#if TARGET_ELF
5f59ecb7 3405 rtx sym_ref;
7509c759 3406
d9407988 3407 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
a54d04b7 3408 return 0;
a54d04b7 3409
f607bc57 3410 if (DEFAULT_ABI != ABI_V4)
7509c759
MM
3411 return 0;
3412
2aa42e6e
NF
3413 /* Vector and float memory instructions have a limited offset on the
3414 SPE, so using a vector or float variable directly as an operand is
3415 not useful. */
3416 if (TARGET_SPE
3417 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3418 return 0;
3419
88228c4b
MM
3420 if (GET_CODE (op) == SYMBOL_REF)
3421 sym_ref = op;
3422
3423 else if (GET_CODE (op) != CONST
3424 || GET_CODE (XEXP (op, 0)) != PLUS
3425 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3426 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
7509c759
MM
3427 return 0;
3428
88228c4b 3429 else
dbf55e53
MM
3430 {
3431 rtx sum = XEXP (op, 0);
3432 HOST_WIDE_INT summand;
3433
3434 /* We have to be careful here, because it is the referenced address
c4ad648e 3435 that must be 32k from _SDA_BASE_, not just the symbol. */
dbf55e53 3436 summand = INTVAL (XEXP (sum, 1));
307b599c 3437 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
9390387d 3438 return 0;
dbf55e53
MM
3439
3440 sym_ref = XEXP (sum, 0);
3441 }
88228c4b 3442
20bfcd69 3443 return SYMBOL_REF_SMALL_P (sym_ref);
d9407988
MM
3444#else
3445 return 0;
3446#endif
7509c759 3447}
46c07df8 3448
3a1f863f 3449/* Return true if either operand is a general purpose register. */
46c07df8 3450
3a1f863f
DE
3451bool
3452gpr_or_gpr_p (rtx op0, rtx op1)
46c07df8 3453{
3a1f863f
DE
3454 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3455 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
46c07df8
HP
3456}
3457
9ebbca7d 3458\f
4d588c14
RH
3459/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3460
f676971a
EC
3461static int
3462constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
9ebbca7d 3463{
9390387d 3464 switch (GET_CODE (op))
9ebbca7d
GK
3465 {
3466 case SYMBOL_REF:
c4501e62
JJ
3467 if (RS6000_SYMBOL_REF_TLS_P (op))
3468 return 0;
3469 else if (CONSTANT_POOL_ADDRESS_P (op))
a4f6c312
SS
3470 {
3471 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3472 {
3473 *have_sym = 1;
3474 return 1;
3475 }
3476 else
3477 return 0;
3478 }
3479 else if (! strcmp (XSTR (op, 0), toc_label_name))
3480 {
3481 *have_toc = 1;
3482 return 1;
3483 }
3484 else
3485 return 0;
9ebbca7d
GK
3486 case PLUS:
3487 case MINUS:
c1f11548
DE
3488 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3489 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
9ebbca7d 3490 case CONST:
a4f6c312 3491 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
9ebbca7d 3492 case CONST_INT:
a4f6c312 3493 return 1;
9ebbca7d 3494 default:
a4f6c312 3495 return 0;
9ebbca7d
GK
3496 }
3497}
3498
4d588c14 3499static bool
a2369ed3 3500constant_pool_expr_p (rtx op)
9ebbca7d
GK
3501{
3502 int have_sym = 0;
3503 int have_toc = 0;
3504 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3505}
3506
48d72335 3507bool
a2369ed3 3508toc_relative_expr_p (rtx op)
9ebbca7d 3509{
4d588c14
RH
3510 int have_sym = 0;
3511 int have_toc = 0;
3512 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3513}
3514
4d588c14 3515bool
a2369ed3 3516legitimate_constant_pool_address_p (rtx x)
3517{
3518 return (TARGET_TOC
3519 && GET_CODE (x) == PLUS
3520 && GET_CODE (XEXP (x, 0)) == REG
3521 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3522 && constant_pool_expr_p (XEXP (x, 1)));
3523}
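/* An illustrative shape accepted here (hypothetical RTL, not captured
   compiler output):

     (plus (reg 2) (symbol_ref ("*.LC0")))

   i.e. the TOC pointer plus a symbol satisfying constant_pool_expr_p,
   which is the kind of address create_TOC_reference returns.  */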
3524
d04b6e6e
EB
3525static bool
3526legitimate_small_data_p (enum machine_mode mode, rtx x)
4d588c14
RH
3527{
3528 return (DEFAULT_ABI == ABI_V4
3529 && !flag_pic && !TARGET_TOC
3530 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3531 && small_data_operand (x, mode));
3532}
3533
3534/* SPE offset addressing is limited to 5-bits worth of double words. */
3535#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
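/* That is, the only offsets accepted are the doubleword multiples
   0, 8, 16, ..., 248; any bit outside the 0xf8 mask, including the low
   three bits that would make the offset misaligned, fails the test.  */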
3536
76d2b81d
DJ
3537bool
3538rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3539{
3540 unsigned HOST_WIDE_INT offset, extra;
3541
3542 if (GET_CODE (x) != PLUS)
3543 return false;
3544 if (GET_CODE (XEXP (x, 0)) != REG)
3545 return false;
3546 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3547 return false;
60cdabab
DE
3548 if (legitimate_constant_pool_address_p (x))
3549 return true;
4d588c14
RH
3550 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3551 return false;
3552
3553 offset = INTVAL (XEXP (x, 1));
3554 extra = 0;
3555 switch (mode)
3556 {
3557 case V16QImode:
3558 case V8HImode:
3559 case V4SFmode:
3560 case V4SImode:
7a4eca66 3561 /* AltiVec vector modes. Only reg+reg addressing is valid and
1a23970d
DE
3562 constant offset zero should not occur due to canonicalization. */
3563 return false;
4d588c14
RH
3564
3565 case V4HImode:
3566 case V2SImode:
3567 case V1DImode:
3568 case V2SFmode:
d42a3bae 3569 /* Paired vector modes. Only reg+reg addressing is valid and
1a23970d 3570 constant offset zero should not occur due to canonicalization. */
d42a3bae 3571 if (TARGET_PAIRED_FLOAT)
1a23970d 3572 return false;
4d588c14
RH
3573 /* SPE vector modes. */
3574 return SPE_CONST_OFFSET_OK (offset);
3575
3576 case DFmode:
4d4cbc0e
AH
3577 if (TARGET_E500_DOUBLE)
3578 return SPE_CONST_OFFSET_OK (offset);
3579
4f011e1e 3580 case DDmode:
4d588c14 3581 case DImode:
54b695e7
AH
3582 /* On e500v2, we may have:
3583
3584 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
3585
3586 Which gets addressed with evldd instructions. */
3587 if (TARGET_E500_DOUBLE)
3588 return SPE_CONST_OFFSET_OK (offset);
3589
7393f7f8 3590 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
4d588c14
RH
3591 extra = 4;
3592 else if (offset & 3)
3593 return false;
3594 break;
3595
3596 case TFmode:
17caeff2
JM
3597 if (TARGET_E500_DOUBLE)
3598 return (SPE_CONST_OFFSET_OK (offset)
3599 && SPE_CONST_OFFSET_OK (offset + 8));
3600
4f011e1e 3601 case TDmode:
4d588c14 3602 case TImode:
7393f7f8 3603 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
4d588c14
RH
3604 extra = 12;
3605 else if (offset & 3)
3606 return false;
3607 else
3608 extra = 8;
3609 break;
3610
3611 default:
3612 break;
3613 }
3614
3615 offset += 0x8000;
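  /* Adding 0x8000 biases the allowed signed displacements
     [-0x8000, 0x7fff] into the unsigned range [0, 0xffff], so the
     unsigned comparisons below check both bounds at once; EXTRA covers
     the highest-addressed word of a multi-word access, which must still
     be reachable from the same 16-bit offset.  */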
3616 return (offset < 0x10000) && (offset + extra < 0x10000);
4d588c14
RH
3617}
3618
6fb5fa3c 3619bool
a2369ed3 3620legitimate_indexed_address_p (rtx x, int strict)
4d588c14
RH
3621{
3622 rtx op0, op1;
3623
3624 if (GET_CODE (x) != PLUS)
3625 return false;
850e8d3d 3626
4d588c14
RH
3627 op0 = XEXP (x, 0);
3628 op1 = XEXP (x, 1);
3629
bf00cc0f 3630 /* Recognize the rtl generated by reload which we know will later be
9024f4b8
AM
3631 replaced with proper base and index regs. */
3632 if (!strict
3633 && reload_in_progress
3634 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3635 && REG_P (op1))
3636 return true;
3637
3638 return (REG_P (op0) && REG_P (op1)
3639 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3640 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3641 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3642 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
9ebbca7d
GK
3643}
3644
48d72335 3645inline bool
a2369ed3 3646legitimate_indirect_address_p (rtx x, int strict)
4d588c14
RH
3647{
3648 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3649}
3650
48d72335 3651bool
4c81e946
FJ
3652macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3653{
c4ad648e 3654 if (!TARGET_MACHO || !flag_pic
9390387d 3655 || mode != SImode || GET_CODE (x) != MEM)
c4ad648e
AM
3656 return false;
3657 x = XEXP (x, 0);
4c81e946
FJ
3658
3659 if (GET_CODE (x) != LO_SUM)
3660 return false;
3661 if (GET_CODE (XEXP (x, 0)) != REG)
3662 return false;
3663 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3664 return false;
3665 x = XEXP (x, 1);
3666
3667 return CONSTANT_P (x);
3668}
3669
4d588c14 3670static bool
a2369ed3 3671legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
4d588c14
RH
3672{
3673 if (GET_CODE (x) != LO_SUM)
3674 return false;
3675 if (GET_CODE (XEXP (x, 0)) != REG)
3676 return false;
3677 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3678 return false;
54b695e7 3679 /* Restrict addressing for DI because of our SUBREG hackery. */
17caeff2 3680 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 3681 || mode == DDmode || mode == TDmode
17caeff2 3682 || mode == DImode))
f82f556d 3683 return false;
4d588c14
RH
3684 x = XEXP (x, 1);
3685
8622e235 3686 if (TARGET_ELF || TARGET_MACHO)
4d588c14 3687 {
a29077da 3688 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
4d588c14
RH
3689 return false;
3690 if (TARGET_TOC)
3691 return false;
3692 if (GET_MODE_NUNITS (mode) != 1)
3693 return false;
5e5f01b9 3694 if (GET_MODE_BITSIZE (mode) > 64
3c028f65 3695 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
4d4447b5
PB
3696 && !(TARGET_HARD_FLOAT && TARGET_FPRS
3697 && (mode == DFmode || mode == DDmode))))
4d588c14
RH
3698 return false;
3699
3700 return CONSTANT_P (x);
3701 }
3702
3703 return false;
3704}
3705
3706
9ebbca7d
GK
3707/* Try machine-dependent ways of modifying an illegitimate address
3708 to be legitimate. If we find one, return the new, valid address.
3709 This is used from only one place: `memory_address' in explow.c.
3710
a4f6c312
SS
3711 OLDX is the address as it was before break_out_memory_refs was
3712 called. In some cases it is useful to look at this to decide what
3713 needs to be done.
9ebbca7d 3714
a4f6c312 3715 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
9ebbca7d 3716
a4f6c312
SS
3717 It is always safe for this function to do nothing. It exists to
3718 recognize opportunities to optimize the output.
9ebbca7d
GK
3719
3720 On RS/6000, first check for the sum of a register with a constant
3721 integer that is out of range. If so, generate code to add the
3722 constant with the low-order 16 bits masked to the register and force
3723 this result into another register (this can be done with `cau').
3724 Then generate an address of REG+(CONST&0xffff), allowing for the
3725 possibility of bit 16 being a one.
3726
3727 Then check for the sum of a register and something not constant, try to
3728 load the other things into a register and return the sum. */
4d588c14 3729
9ebbca7d 3730rtx
3731rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3732 enum machine_mode mode)
0ac081f6 3733{
c4501e62
JJ
3734 if (GET_CODE (x) == SYMBOL_REF)
3735 {
3736 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3737 if (model != 0)
3738 return rs6000_legitimize_tls_address (x, model);
3739 }
3740
f676971a 3741 if (GET_CODE (x) == PLUS
9ebbca7d
GK
3742 && GET_CODE (XEXP (x, 0)) == REG
3743 && GET_CODE (XEXP (x, 1)) == CONST_INT
3c1eb9eb
JM
3744 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3745 && !(SPE_VECTOR_MODE (mode)
efc05e3c 3746 || ALTIVEC_VECTOR_MODE (mode)
3c1eb9eb 3747 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4f011e1e
JM
3748 || mode == DImode || mode == DDmode
3749 || mode == TDmode))))
f676971a 3750 {
9ebbca7d
GK
3751 HOST_WIDE_INT high_int, low_int;
3752 rtx sum;
3753 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3754 high_int = INTVAL (XEXP (x, 1)) - low_int;
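      /* A worked example of the split (hypothetical values, not captured
         compiler output): an offset of 0x12345 gives low_int == 0x2345
         and high_int == 0x10000, while an offset of 0x9000 gives
         low_int == -0x7000 and high_int == 0x10000, so the displacement
         left on the MEM always fits in a signed 16-bit field.  */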
3755 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3756 GEN_INT (high_int)), 0);
3757 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
3758 }
f676971a 3759 else if (GET_CODE (x) == PLUS
9ebbca7d
GK
3760 && GET_CODE (XEXP (x, 0)) == REG
3761 && GET_CODE (XEXP (x, 1)) != CONST_INT
6ac7bf2c 3762 && GET_MODE_NUNITS (mode) == 1
a3170dc6
AH
3763 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3764 || TARGET_POWERPC64
efc05e3c 3765 || ((mode != DImode && mode != DFmode && mode != DDmode)
4f011e1e 3766 || (TARGET_E500_DOUBLE && mode != DDmode)))
9ebbca7d 3767 && (TARGET_POWERPC64 || mode != DImode)
efc05e3c
PB
3768 && mode != TImode
3769 && mode != TFmode
3770 && mode != TDmode)
9ebbca7d
GK
3771 {
3772 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3773 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3774 }
0ac081f6
AH
3775 else if (ALTIVEC_VECTOR_MODE (mode))
3776 {
3777 rtx reg;
3778
3779 /* Make sure both operands are registers. */
3780 if (GET_CODE (x) == PLUS)
9f85ed45 3781 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
0ac081f6
AH
3782 force_reg (Pmode, XEXP (x, 1)));
3783
3784 reg = force_reg (Pmode, x);
3785 return reg;
3786 }
4d4cbc0e 3787 else if (SPE_VECTOR_MODE (mode)
17caeff2 3788 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
7393f7f8 3789 || mode == DDmode || mode == TDmode
54b695e7 3790 || mode == DImode)))
a3170dc6 3791 {
54b695e7
AH
3792 if (mode == DImode)
3793 return NULL_RTX;
a3170dc6
AH
3794 /* We accept [reg + reg] and [reg + OFFSET]. */
3795
3796 if (GET_CODE (x) == PLUS)
61dd226f
NF
3797 {
3798 rtx op1 = XEXP (x, 0);
3799 rtx op2 = XEXP (x, 1);
3800 rtx y;
3801
3802 op1 = force_reg (Pmode, op1);
3803
3804 if (GET_CODE (op2) != REG
3805 && (GET_CODE (op2) != CONST_INT
3806 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3807 || (GET_MODE_SIZE (mode) > 8
3808 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3809 op2 = force_reg (Pmode, op2);
3810
3811 /* We can't always do [reg + reg] for these, because [reg +
3812 reg + offset] is not a legitimate addressing mode. */
3813 y = gen_rtx_PLUS (Pmode, op1, op2);
3814
4f011e1e 3815 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
61dd226f
NF
3816 return force_reg (Pmode, y);
3817 else
3818 return y;
3819 }
a3170dc6
AH
3820
3821 return force_reg (Pmode, x);
3822 }
f1384257
AM
3823 else if (TARGET_ELF
3824 && TARGET_32BIT
3825 && TARGET_NO_TOC
3826 && ! flag_pic
9ebbca7d 3827 && GET_CODE (x) != CONST_INT
f676971a 3828 && GET_CODE (x) != CONST_DOUBLE
9ebbca7d 3829 && CONSTANT_P (x)
6ac7bf2c
GK
3830 && GET_MODE_NUNITS (mode) == 1
3831 && (GET_MODE_BITSIZE (mode) <= 32
4d4447b5
PB
3832 || ((TARGET_HARD_FLOAT && TARGET_FPRS)
3833 && (mode == DFmode || mode == DDmode))))
9ebbca7d
GK
3834 {
3835 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3836 emit_insn (gen_elf_high (reg, x));
3837 return gen_rtx_LO_SUM (Pmode, reg, x);
9ebbca7d 3838 }
ee890fe2
SS
3839 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3840 && ! flag_pic
ab82a49f
AP
3841#if TARGET_MACHO
3842 && ! MACHO_DYNAMIC_NO_PIC_P
3843#endif
ee890fe2 3844 && GET_CODE (x) != CONST_INT
f676971a 3845 && GET_CODE (x) != CONST_DOUBLE
ee890fe2 3846 && CONSTANT_P (x)
4d4447b5
PB
3847 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3848 || (mode != DFmode && mode != DDmode))
f676971a 3849 && mode != DImode
ee890fe2
SS
3850 && mode != TImode)
3851 {
3852 rtx reg = gen_reg_rtx (Pmode);
8a1977f3
GK
3853 emit_insn (gen_macho_high (reg, x));
3854 return gen_rtx_LO_SUM (Pmode, reg, x);
ee890fe2 3855 }
f676971a 3856 else if (TARGET_TOC
0cdc04e8 3857 && GET_CODE (x) == SYMBOL_REF
4d588c14 3858 && constant_pool_expr_p (x)
a9098fd0 3859 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
9ebbca7d
GK
3860 {
3861 return create_TOC_reference (x);
3862 }
3863 else
3864 return NULL_RTX;
3865}
258bfae2 3866
fdbe66f2 3867/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
c973d557
JJ
3868 We need to emit DTP-relative relocations. */
3869
fdbe66f2 3870static void
c973d557
JJ
3871rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3872{
3873 switch (size)
3874 {
3875 case 4:
3876 fputs ("\t.long\t", file);
3877 break;
3878 case 8:
3879 fputs (DOUBLE_INT_ASM_OP, file);
3880 break;
3881 default:
37409796 3882 gcc_unreachable ();
c973d557
JJ
3883 }
3884 output_addr_const (file, x);
3885 fputs ("@dtprel+0x8000", file);
3886}
3887
c4501e62
JJ
3888/* Construct the SYMBOL_REF for the tls_get_addr function. */
3889
3890static GTY(()) rtx rs6000_tls_symbol;
3891static rtx
863d938c 3892rs6000_tls_get_addr (void)
c4501e62
JJ
3893{
3894 if (!rs6000_tls_symbol)
3895 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3896
3897 return rs6000_tls_symbol;
3898}
3899
3900/* Construct the SYMBOL_REF for TLS GOT references. */
3901
3902static GTY(()) rtx rs6000_got_symbol;
3903static rtx
863d938c 3904rs6000_got_sym (void)
c4501e62
JJ
3905{
3906 if (!rs6000_got_symbol)
3907 {
3908 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3909 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3910 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
f676971a 3911 }
c4501e62
JJ
3912
3913 return rs6000_got_symbol;
3914}
3915
3916/* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3917 this (thread-local) address. */
3918
3919static rtx
a2369ed3 3920rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
c4501e62
JJ
3921{
3922 rtx dest, insn;
3923
3924 dest = gen_reg_rtx (Pmode);
3925 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3926 {
3927 rtx tlsreg;
3928
3929 if (TARGET_64BIT)
3930 {
3931 tlsreg = gen_rtx_REG (Pmode, 13);
3932 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3933 }
3934 else
3935 {
3936 tlsreg = gen_rtx_REG (Pmode, 2);
3937 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3938 }
3939 emit_insn (insn);
3940 }
3941 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3942 {
3943 rtx tlsreg, tmp;
3944
3945 tmp = gen_reg_rtx (Pmode);
3946 if (TARGET_64BIT)
3947 {
3948 tlsreg = gen_rtx_REG (Pmode, 13);
3949 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3950 }
3951 else
3952 {
3953 tlsreg = gen_rtx_REG (Pmode, 2);
3954 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3955 }
3956 emit_insn (insn);
3957 if (TARGET_64BIT)
3958 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3959 else
3960 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3961 emit_insn (insn);
3962 }
3963 else
3964 {
3965 rtx r3, got, tga, tmp1, tmp2, eqv;
3966
4fed8f8f
AM
3967 /* We currently use relocations like @got@tlsgd for tls, which
3968 means the linker will handle allocation of tls entries, placing
3969 them in the .got section. So use a pointer to the .got section,
3970 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3971 or to secondary GOT sections used by 32-bit -fPIC. */
c4501e62 3972 if (TARGET_64BIT)
972f427b 3973 got = gen_rtx_REG (Pmode, 2);
c4501e62
JJ
3974 else
3975 {
3976 if (flag_pic == 1)
3977 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3978 else
3979 {
3980 rtx gsym = rs6000_got_sym ();
3981 got = gen_reg_rtx (Pmode);
3982 if (flag_pic == 0)
3983 rs6000_emit_move (got, gsym, Pmode);
3984 else
3985 {
e65a3857 3986 rtx tmp3, mem;
c4501e62
JJ
3987 rtx first, last;
3988
c4501e62
JJ
3989 tmp1 = gen_reg_rtx (Pmode);
3990 tmp2 = gen_reg_rtx (Pmode);
3991 tmp3 = gen_reg_rtx (Pmode);
542a8afa 3992 mem = gen_const_mem (Pmode, tmp1);
c4501e62 3993
e65a3857
DE
3994 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
3995 emit_move_insn (tmp1,
1de43f85 3996 gen_rtx_REG (Pmode, LR_REGNO));
c4501e62
JJ
3997 emit_move_insn (tmp2, mem);
3998 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3999 last = emit_move_insn (got, tmp3);
bd94cb6e 4000 set_unique_reg_note (last, REG_EQUAL, gsym);
c4501e62
JJ
4001 }
4002 }
4003 }
4004
4005 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4006 {
4007 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4008 tga = rs6000_tls_get_addr ();
4009
4010 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4011 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4012 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4013 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4014 else if (DEFAULT_ABI == ABI_V4)
4015 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
c4501e62 4016 else
02135bc1
SB
4017 gcc_unreachable ();
4018
c4501e62 4019 start_sequence ();
c4501e62 4020 insn = emit_call_insn (insn);
becfd6e5 4021 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4022 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4023 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4024 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4025 insn = get_insns ();
4026 end_sequence ();
4027 emit_libcall_block (insn, dest, r3, addr);
4028 }
4029 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4030 {
4031 r3 = gen_rtx_REG (Pmode, 3);
02135bc1
SB
4032 tga = rs6000_tls_get_addr ();
4033
4034 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4035 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4036 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4037 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4038 else if (DEFAULT_ABI == ABI_V4)
4039 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
c4501e62 4040 else
02135bc1
SB
4041 gcc_unreachable ();
4042
c4501e62 4043 start_sequence ();
c4501e62 4044 insn = emit_call_insn (insn);
becfd6e5 4045 RTL_CONST_CALL_P (insn) = 1;
c4501e62 4046 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
d161cb2d
AM
4047 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4048 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
c4501e62
JJ
4049 insn = get_insns ();
4050 end_sequence ();
4051 tmp1 = gen_reg_rtx (Pmode);
4052 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4053 UNSPEC_TLSLD);
4054 emit_libcall_block (insn, tmp1, r3, eqv);
4055 if (rs6000_tls_size == 16)
4056 {
4057 if (TARGET_64BIT)
4058 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4059 else
4060 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4061 }
4062 else if (rs6000_tls_size == 32)
4063 {
4064 tmp2 = gen_reg_rtx (Pmode);
4065 if (TARGET_64BIT)
4066 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4067 else
4068 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4069 emit_insn (insn);
4070 if (TARGET_64BIT)
4071 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4072 else
4073 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4074 }
4075 else
4076 {
4077 tmp2 = gen_reg_rtx (Pmode);
4078 if (TARGET_64BIT)
4079 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4080 else
4081 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4082 emit_insn (insn);
4083 insn = gen_rtx_SET (Pmode, dest,
4084 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4085 }
4086 emit_insn (insn);
4087 }
4088 else
4089 {
a7b376ee 4090 /* IE, or 64-bit offset LE. */
c4501e62
JJ
4091 tmp2 = gen_reg_rtx (Pmode);
4092 if (TARGET_64BIT)
4093 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4094 else
4095 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4096 emit_insn (insn);
4097 if (TARGET_64BIT)
4098 insn = gen_tls_tls_64 (dest, tmp2, addr);
4099 else
4100 insn = gen_tls_tls_32 (dest, tmp2, addr);
4101 emit_insn (insn);
4102 }
4103 }
4104
4105 return dest;
4106}
4107
c4501e62
JJ
4108/* Return 1 if X contains a thread-local symbol. */
4109
4110bool
a2369ed3 4111rs6000_tls_referenced_p (rtx x)
c4501e62 4112{
cd413cab
AP
4113 if (! TARGET_HAVE_TLS)
4114 return false;
4115
c4501e62
JJ
4116 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4117}
4118
4119/* Return 1 if *X is a thread-local symbol. This is the same as
4120 rs6000_tls_symbol_ref except for the type of the unused argument. */
4121
9390387d 4122static int
a2369ed3 4123rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
c4501e62
JJ
4124{
4125 return RS6000_SYMBOL_REF_TLS_P (*x);
4126}
4127
24ea750e
DJ
4128/* The convention appears to be to define this wherever it is used.
4129 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4130 is now used here. */
4131#ifndef REG_MODE_OK_FOR_BASE_P
4132#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4133#endif
4134
4135/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4136 replace the input X, or the original X if no replacement is called for.
4137 The output parameter *WIN is 1 if the calling macro should goto WIN,
4138 0 if it should not.
4139
4140 For RS/6000, we wish to handle large displacements off a base
4141   register by splitting the addend across an addis insn and the mem insn.
4142   This cuts the number of extra insns needed from 3 to 1.
4143
4144 On Darwin, we use this to generate code for floating point constants.
4145 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4146 The Darwin code is inside #if TARGET_MACHO because only then is
4147 machopic_function_base_name() defined. */
4148rtx
f676971a 4149rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
c4ad648e
AM
4150 int opnum, int type,
4151 int ind_levels ATTRIBUTE_UNUSED, int *win)
24ea750e 4152{
f676971a 4153 /* We must recognize output that we have already generated ourselves. */
24ea750e
DJ
4154 if (GET_CODE (x) == PLUS
4155 && GET_CODE (XEXP (x, 0)) == PLUS
4156 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4157 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4158 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4159 {
4160 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4161 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4162 opnum, (enum reload_type)type);
24ea750e
DJ
4163 *win = 1;
4164 return x;
4165 }
3deb2758 4166
24ea750e
DJ
4167#if TARGET_MACHO
4168 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4169 && GET_CODE (x) == LO_SUM
4170 && GET_CODE (XEXP (x, 0)) == PLUS
4171 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4172 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4173 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
4174 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4175 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
4176 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
4177 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
4178 {
4179 /* Result of previous invocation of this function on Darwin
6f317ef3 4180 floating point constant. */
24ea750e 4181 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4182 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4183 opnum, (enum reload_type)type);
24ea750e
DJ
4184 *win = 1;
4185 return x;
4186 }
4187#endif
4937d02d
DE
4188
4189 /* Force ld/std non-word aligned offset into base register by wrapping
4190 in offset 0. */
4191 if (GET_CODE (x) == PLUS
4192 && GET_CODE (XEXP (x, 0)) == REG
4193 && REGNO (XEXP (x, 0)) < 32
4194 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4195 && GET_CODE (XEXP (x, 1)) == CONST_INT
4196 && (INTVAL (XEXP (x, 1)) & 3) != 0
78796ad5 4197 && !ALTIVEC_VECTOR_MODE (mode)
4937d02d
DE
4198 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4199 && TARGET_POWERPC64)
4200 {
4201 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4202 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4203 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4204 opnum, (enum reload_type) type);
4205 *win = 1;
4206 return x;
4207 }
4208
24ea750e
DJ
4209 if (GET_CODE (x) == PLUS
4210 && GET_CODE (XEXP (x, 0)) == REG
4211 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4212 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
78c875e8 4213 && GET_CODE (XEXP (x, 1)) == CONST_INT
93638d7a 4214 && !SPE_VECTOR_MODE (mode)
17caeff2 4215 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 4216 || mode == DDmode || mode == TDmode
54b695e7 4217 || mode == DImode))
78c875e8 4218 && !ALTIVEC_VECTOR_MODE (mode))
24ea750e
DJ
4219 {
4220 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4221 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4222 HOST_WIDE_INT high
c4ad648e 4223 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
24ea750e
DJ
4224
4225 /* Check for 32-bit overflow. */
4226 if (high + low != val)
c4ad648e 4227 {
24ea750e
DJ
4228 *win = 0;
4229 return x;
4230 }
4231
4232 /* Reload the high part into a base reg; leave the low part
c4ad648e 4233 in the mem directly. */
24ea750e
DJ
4234
4235 x = gen_rtx_PLUS (GET_MODE (x),
c4ad648e
AM
4236 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4237 GEN_INT (high)),
4238 GEN_INT (low));
24ea750e
DJ
4239
4240 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
c4ad648e
AM
4241 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4242 opnum, (enum reload_type)type);
24ea750e
DJ
4243 *win = 1;
4244 return x;
4245 }
4937d02d 4246
24ea750e 4247 if (GET_CODE (x) == SYMBOL_REF
69ef87e2 4248 && !ALTIVEC_VECTOR_MODE (mode)
1650e3f5 4249 && !SPE_VECTOR_MODE (mode)
8308679f
DE
4250#if TARGET_MACHO
4251 && DEFAULT_ABI == ABI_DARWIN
a29077da 4252 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
8308679f
DE
4253#else
4254 && DEFAULT_ABI == ABI_V4
4255 && !flag_pic
4256#endif
7393f7f8 4257 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4d4447b5 4258 The same goes for DImode without 64-bit gprs and DFmode and DDmode
7b5d92b2 4259 without fprs. */
0d8c1c97 4260 && mode != TFmode
7393f7f8 4261 && mode != TDmode
7b5d92b2 4262 && (mode != DImode || TARGET_POWERPC64)
4d4447b5 4263 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
7b5d92b2 4264 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
24ea750e 4265 {
8308679f 4266#if TARGET_MACHO
a29077da
GK
4267 if (flag_pic)
4268 {
4269 rtx offset = gen_rtx_CONST (Pmode,
4270 gen_rtx_MINUS (Pmode, x,
11abc112 4271 machopic_function_base_sym ()));
a29077da
GK
4272 x = gen_rtx_LO_SUM (GET_MODE (x),
4273 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4274 gen_rtx_HIGH (Pmode, offset)), offset);
4275 }
4276 else
8308679f 4277#endif
a29077da 4278 x = gen_rtx_LO_SUM (GET_MODE (x),
c4ad648e 4279 gen_rtx_HIGH (Pmode, x), x);
a29077da 4280
24ea750e 4281 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
a29077da
GK
4282 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4283 opnum, (enum reload_type)type);
24ea750e
DJ
4284 *win = 1;
4285 return x;
4286 }
4937d02d 4287
dec1f3aa
DE
4288 /* Reload an offset address wrapped by an AND that represents the
4289 masking of the lower bits. Strip the outer AND and let reload
4290 convert the offset address into an indirect address. */
4291 if (TARGET_ALTIVEC
4292 && ALTIVEC_VECTOR_MODE (mode)
4293 && GET_CODE (x) == AND
4294 && GET_CODE (XEXP (x, 0)) == PLUS
4295 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4296 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4297 && GET_CODE (XEXP (x, 1)) == CONST_INT
4298 && INTVAL (XEXP (x, 1)) == -16)
4299 {
4300 x = XEXP (x, 0);
4301 *win = 1;
4302 return x;
4303 }
4304
24ea750e 4305 if (TARGET_TOC
0cdc04e8 4306 && GET_CODE (x) == SYMBOL_REF
4d588c14 4307 && constant_pool_expr_p (x)
c1f11548 4308 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
24ea750e 4309 {
194c524a 4310 x = create_TOC_reference (x);
24ea750e
DJ
4311 *win = 1;
4312 return x;
4313 }
4314 *win = 0;
4315 return x;
f676971a 4316}
24ea750e 4317
258bfae2
FS
4318/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4319 that is a valid memory address for an instruction.
4320 The MODE argument is the machine mode for the MEM expression
4321 that wants to use this address.
4322
4323   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4324 refers to a constant pool entry of an address (or the sum of it
4325 plus a constant), a short (16-bit signed) constant plus a register,
4326 the sum of two registers, or a register indirect, possibly with an
4d4447b5
PB
4327 auto-increment. For DFmode, DDmode and DImode with a constant plus
4328 register, we must ensure that both words are addressable or PowerPC64
4329 with offset word aligned.
258bfae2 4330
4d4447b5 4331 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
7393f7f8
BE
4332 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4333 because adjacent memory cells are accessed by adding word-sized offsets
258bfae2
FS
4334 during assembly output. */
4335int
a2369ed3 4336rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
258bfae2 4337{
850e8d3d
DN
4338 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4339 if (TARGET_ALTIVEC
4340 && ALTIVEC_VECTOR_MODE (mode)
4341 && GET_CODE (x) == AND
4342 && GET_CODE (XEXP (x, 1)) == CONST_INT
4343 && INTVAL (XEXP (x, 1)) == -16)
4344 x = XEXP (x, 0);
4345
c4501e62
JJ
4346 if (RS6000_SYMBOL_REF_TLS_P (x))
4347 return 0;
4d588c14 4348 if (legitimate_indirect_address_p (x, reg_ok_strict))
258bfae2
FS
4349 return 1;
4350 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
0d6d6892 4351 && !ALTIVEC_VECTOR_MODE (mode)
a3170dc6 4352 && !SPE_VECTOR_MODE (mode)
429ec7dc 4353 && mode != TFmode
7393f7f8 4354 && mode != TDmode
54b695e7 4355 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4356 && !(TARGET_E500_DOUBLE
4357 && (mode == DFmode || mode == DDmode || mode == DImode))
258bfae2 4358 && TARGET_UPDATE
4d588c14 4359 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
258bfae2 4360 return 1;
d04b6e6e 4361 if (legitimate_small_data_p (mode, x))
258bfae2 4362 return 1;
4d588c14 4363 if (legitimate_constant_pool_address_p (x))
258bfae2
FS
4364 return 1;
4365 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4366 if (! reg_ok_strict
4367 && GET_CODE (x) == PLUS
4368 && GET_CODE (XEXP (x, 0)) == REG
708d2456 4369 && (XEXP (x, 0) == virtual_stack_vars_rtx
c4ad648e 4370 || XEXP (x, 0) == arg_pointer_rtx)
258bfae2
FS
4371 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4372 return 1;
76d2b81d 4373 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4374 return 1;
4375 if (mode != TImode
76d2b81d 4376 && mode != TFmode
7393f7f8 4377 && mode != TDmode
a3170dc6
AH
4378 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4379 || TARGET_POWERPC64
4f011e1e
JM
4380 || (mode != DFmode && mode != DDmode)
4381 || (TARGET_E500_DOUBLE && mode != DDmode))
258bfae2 4382 && (TARGET_POWERPC64 || mode != DImode)
4d588c14 4383 && legitimate_indexed_address_p (x, reg_ok_strict))
258bfae2 4384 return 1;
6fb5fa3c
DB
4385 if (GET_CODE (x) == PRE_MODIFY
4386 && mode != TImode
4387 && mode != TFmode
4388 && mode != TDmode
4389 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4390 || TARGET_POWERPC64
4d4447b5 4391 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
6fb5fa3c
DB
4392 && (TARGET_POWERPC64 || mode != DImode)
4393 && !ALTIVEC_VECTOR_MODE (mode)
4394 && !SPE_VECTOR_MODE (mode)
4395 /* Restrict addressing for DI because of our SUBREG hackery. */
4d4447b5
PB
4396 && !(TARGET_E500_DOUBLE
4397 && (mode == DFmode || mode == DDmode || mode == DImode))
6fb5fa3c
DB
4398 && TARGET_UPDATE
4399 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4400 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4401 || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
4402 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4403 return 1;
4d588c14 4404 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
258bfae2
FS
4405 return 1;
4406 return 0;
4407}
4d588c14
RH
4408
4409/* Go to LABEL if ADDR (a legitimate address expression)
4410 has an effect that depends on the machine mode it is used for.
4411
4412 On the RS/6000 this is true of all integral offsets (since AltiVec
4413 modes don't allow them) or is a pre-increment or decrement.
4414
4415 ??? Except that due to conceptual problems in offsettable_address_p
4416 we can't really report the problems of integral offsets. So leave
f676971a 4417 this assuming that the adjustable offset must be valid for the
4d588c14
RH
4418 sub-words of a TFmode operand, which is what we had before. */
4419
4420bool
a2369ed3 4421rs6000_mode_dependent_address (rtx addr)
4d588c14
RH
4422{
4423 switch (GET_CODE (addr))
4424 {
4425 case PLUS:
4426 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4427 {
4428 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4429 return val + 12 + 0x8000 >= 0x10000;
4430 }
4431 break;
4432
4433 case LO_SUM:
4434 return true;
4435
619fe064 4436 /* Auto-increment cases are now treated generically in recog.c. */
6fb5fa3c
DB
4437 case PRE_MODIFY:
4438 return TARGET_UPDATE;
4d588c14
RH
4439
4440 default:
4441 break;
4442 }
4443
4444 return false;
4445}
d8ecbcdb 4446
d04b6e6e
EB
4447/* More elaborate version of recog's offsettable_memref_p predicate
4448 that works around the ??? note of rs6000_mode_dependent_address.
4449 In particular it accepts
4450
4451 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4452
4453   in 32-bit mode, which the recog predicate rejects.  */
4454
4455bool
4456rs6000_offsettable_memref_p (rtx op)
4457{
4458 if (!MEM_P (op))
4459 return false;
4460
4461 /* First mimic offsettable_memref_p. */
4462 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4463 return true;
4464
4465 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4466 the latter predicate knows nothing about the mode of the memory
4467 reference and, therefore, assumes that it is the largest supported
4468 mode (TFmode). As a consequence, legitimate offsettable memory
4469 references are rejected. rs6000_legitimate_offset_address_p contains
4470 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4471 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4472}
4473
d8ecbcdb
AH
4474/* Return number of consecutive hard regs needed starting at reg REGNO
4475 to hold something of mode MODE.
4476 This is ordinarily the length in words of a value of mode MODE
4477 but can be less for certain modes in special long registers.
4478
4479 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4480 scalar instructions. The upper 32 bits are only available to the
4481 SIMD instructions.
4482
4483 POWER and PowerPC GPRs hold 32 bits worth;
4484   PowerPC64 GPRs and FPRs hold 64 bits worth.  */
4485
4486int
4487rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4488{
4489 if (FP_REGNO_P (regno))
4490 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4491
4492 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4493 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4494
4495 if (ALTIVEC_REGNO_P (regno))
4496 return
4497 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4498
8521c414
JM
4499 /* The value returned for SCmode in the E500 double case is 2 for
4500 ABI compatibility; storing an SCmode value in a single register
4501 would require function_arg and rs6000_spe_function_arg to handle
4502 SCmode so as to pass the value correctly in a pair of
4503 registers. */
4f011e1e
JM
4504 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4505 && !DECIMAL_FLOAT_MODE_P (mode))
8521c414
JM
4506 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4507
d8ecbcdb
AH
4508 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4509}
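/* For example, by the formulas above a 16-byte V4SImode value occupies
   four 32-bit GPRs, two 64-bit GPRs, or a single AltiVec register.  */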
2aa4498c
AH
4510
4511/* Change register usage conditional on target flags. */
4512void
4513rs6000_conditional_register_usage (void)
4514{
4515 int i;
4516
4517 /* Set MQ register fixed (already call_used) if not POWER
4518 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4519 be allocated. */
4520 if (! TARGET_POWER)
4521 fixed_regs[64] = 1;
4522
7c9ac5c0 4523 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
2aa4498c
AH
4524 if (TARGET_64BIT)
4525 fixed_regs[13] = call_used_regs[13]
4526 = call_really_used_regs[13] = 1;
4527
4528 /* Conditionally disable FPRs. */
4529 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4530 for (i = 32; i < 64; i++)
4531 fixed_regs[i] = call_used_regs[i]
c4ad648e 4532 = call_really_used_regs[i] = 1;
2aa4498c 4533
7c9ac5c0
PH
4534 /* The TOC register is not killed across calls in a way that is
4535 visible to the compiler. */
4536 if (DEFAULT_ABI == ABI_AIX)
4537 call_really_used_regs[2] = 0;
4538
2aa4498c
AH
4539 if (DEFAULT_ABI == ABI_V4
4540 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4541 && flag_pic == 2)
4542 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4543
4544 if (DEFAULT_ABI == ABI_V4
4545 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4546 && flag_pic == 1)
4547 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4548 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4549 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4550
4551 if (DEFAULT_ABI == ABI_DARWIN
4552 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
6d0a8091 4553 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
2aa4498c
AH
4554 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4555 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4556
b4db40bf
JJ
4557 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4558 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4559 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4560
2aa4498c
AH
4561 if (TARGET_SPE)
4562 {
4563 global_regs[SPEFSCR_REGNO] = 1;
52ff33d0
NF
4564 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4565 registers in prologues and epilogues. We no longer use r14
4566 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4567 pool for link-compatibility with older versions of GCC. Once
4568 "old" code has died out, we can return r14 to the allocation
4569 pool. */
4570 fixed_regs[14]
4571 = call_used_regs[14]
4572 = call_really_used_regs[14] = 1;
2aa4498c
AH
4573 }
4574
0db747be 4575 if (!TARGET_ALTIVEC)
2aa4498c
AH
4576 {
4577 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4578 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4579 call_really_used_regs[VRSAVE_REGNO] = 1;
4580 }
4581
0db747be
DE
4582 if (TARGET_ALTIVEC)
4583 global_regs[VSCR_REGNO] = 1;
4584
2aa4498c 4585 if (TARGET_ALTIVEC_ABI)
0db747be
DE
4586 {
4587 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4588 call_used_regs[i] = call_really_used_regs[i] = 1;
4589
4590 /* AIX reserves VR20:31 in non-extended ABI mode. */
4591 if (TARGET_XCOFF)
4592 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4593 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4594 }
2aa4498c 4595}
fb4d4348 4596\f
4597/* Try to output insns to set DEST equal to the constant SOURCE if it can
4598   be done in less than N insns.  Do all computations in MODE.
4599   Returns the place where the output has been placed if it can be
4600   done and the insns have been emitted.  If it would take more than N
4601   insns, zero is returned and no insns are emitted.  */
4602
4603rtx
f676971a 4604rs6000_emit_set_const (rtx dest, enum machine_mode mode,
a2369ed3 4605 rtx source, int n ATTRIBUTE_UNUSED)
2bfcf297 4606{
af8cb5c5 4607 rtx result, insn, set;
2bfcf297
DB
4608 HOST_WIDE_INT c0, c1;
4609
37409796 4610 switch (mode)
2bfcf297 4611 {
37409796
NS
4612 case QImode:
4613 case HImode:
2bfcf297 4614 if (dest == NULL)
c4ad648e 4615 dest = gen_reg_rtx (mode);
2bfcf297
DB
4616 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4617 return dest;
bb8df8a6 4618
37409796 4619 case SImode:
b3a13419 4620 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
bb8df8a6 4621
d448860e 4622 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
af8cb5c5
DE
4623 GEN_INT (INTVAL (source)
4624 & (~ (HOST_WIDE_INT) 0xffff))));
4625 emit_insn (gen_rtx_SET (VOIDmode, dest,
d448860e 4626 gen_rtx_IOR (SImode, copy_rtx (result),
af8cb5c5
DE
4627 GEN_INT (INTVAL (source) & 0xffff))));
4628 result = dest;
37409796
NS
4629 break;
4630
4631 case DImode:
4632 switch (GET_CODE (source))
af8cb5c5 4633 {
37409796 4634 case CONST_INT:
af8cb5c5
DE
4635 c0 = INTVAL (source);
4636 c1 = -(c0 < 0);
37409796 4637 break;
bb8df8a6 4638
37409796 4639 case CONST_DOUBLE:
2bfcf297 4640#if HOST_BITS_PER_WIDE_INT >= 64
af8cb5c5
DE
4641 c0 = CONST_DOUBLE_LOW (source);
4642 c1 = -(c0 < 0);
2bfcf297 4643#else
af8cb5c5
DE
4644 c0 = CONST_DOUBLE_LOW (source);
4645 c1 = CONST_DOUBLE_HIGH (source);
2bfcf297 4646#endif
37409796
NS
4647 break;
4648
4649 default:
4650 gcc_unreachable ();
af8cb5c5 4651 }
af8cb5c5
DE
4652
4653 result = rs6000_emit_set_long_const (dest, c0, c1);
37409796
NS
4654 break;
4655
4656 default:
4657 gcc_unreachable ();
2bfcf297 4658 }
2bfcf297 4659
af8cb5c5
DE
4660 insn = get_last_insn ();
4661 set = single_set (insn);
4662 if (! CONSTANT_P (SET_SRC (set)))
4663 set_unique_reg_note (insn, REG_EQUAL, source);
4664
4665 return result;
4666}
4667
4668/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4669   fall back to a straightforward decomposition.  We do this to avoid
4670 exponential run times encountered when looking for longer sequences
4671 with rs6000_emit_set_const. */
4672static rtx
a2369ed3 4673rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
2bfcf297
DB
4674{
4675 if (!TARGET_POWERPC64)
4676 {
4677 rtx operand1, operand2;
4678
4679 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4680 DImode);
d448860e 4681 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
2bfcf297
DB
4682 DImode);
4683 emit_move_insn (operand1, GEN_INT (c1));
4684 emit_move_insn (operand2, GEN_INT (c2));
4685 }
4686 else
4687 {
bc06712d 4688 HOST_WIDE_INT ud1, ud2, ud3, ud4;
252b88f7 4689
bc06712d 4690 ud1 = c1 & 0xffff;
f921c9c9 4691 ud2 = (c1 & 0xffff0000) >> 16;
2bfcf297 4692#if HOST_BITS_PER_WIDE_INT >= 64
bc06712d 4693 c2 = c1 >> 32;
2bfcf297 4694#endif
bc06712d 4695 ud3 = c2 & 0xffff;
f921c9c9 4696 ud4 = (c2 & 0xffff0000) >> 16;
2bfcf297 4697
f676971a 4698 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
bc06712d 4699 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2bfcf297 4700 {
bc06712d 4701 if (ud1 & 0x8000)
b78d48dd 4702 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
bc06712d
TR
4703 else
4704 emit_move_insn (dest, GEN_INT (ud1));
2bfcf297 4705 }
2bfcf297 4706
f676971a 4707 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
bc06712d 4708 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
252b88f7 4709 {
bc06712d 4710 if (ud2 & 0x8000)
f676971a 4711 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
bc06712d 4712 - 0x80000000));
252b88f7 4713 else
bc06712d
TR
4714 emit_move_insn (dest, GEN_INT (ud2 << 16));
4715 if (ud1 != 0)
d448860e
JH
4716 emit_move_insn (copy_rtx (dest),
4717 gen_rtx_IOR (DImode, copy_rtx (dest),
4718 GEN_INT (ud1)));
252b88f7 4719 }
f676971a 4720 else if ((ud4 == 0xffff && (ud3 & 0x8000))
bc06712d
TR
4721 || (ud4 == 0 && ! (ud3 & 0x8000)))
4722 {
4723 if (ud3 & 0x8000)
f676971a 4724 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
bc06712d
TR
4725 - 0x80000000));
4726 else
4727 emit_move_insn (dest, GEN_INT (ud3 << 16));
4728
4729 if (ud2 != 0)
d448860e
JH
4730 emit_move_insn (copy_rtx (dest),
4731 gen_rtx_IOR (DImode, copy_rtx (dest),
4732 GEN_INT (ud2)));
4733 emit_move_insn (copy_rtx (dest),
4734 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4735 GEN_INT (16)));
bc06712d 4736 if (ud1 != 0)
d448860e
JH
4737 emit_move_insn (copy_rtx (dest),
4738 gen_rtx_IOR (DImode, copy_rtx (dest),
4739 GEN_INT (ud1)));
bc06712d 4740 }
f676971a 4741 else
bc06712d
TR
4742 {
4743 if (ud4 & 0x8000)
f676971a 4744 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
bc06712d
TR
4745 - 0x80000000));
4746 else
4747 emit_move_insn (dest, GEN_INT (ud4 << 16));
4748
4749 if (ud3 != 0)
d448860e
JH
4750 emit_move_insn (copy_rtx (dest),
4751 gen_rtx_IOR (DImode, copy_rtx (dest),
4752 GEN_INT (ud3)));
2bfcf297 4753
d448860e
JH
4754 emit_move_insn (copy_rtx (dest),
4755 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4756 GEN_INT (32)));
bc06712d 4757 if (ud2 != 0)
d448860e
JH
4758 emit_move_insn (copy_rtx (dest),
4759 gen_rtx_IOR (DImode, copy_rtx (dest),
4760 GEN_INT (ud2 << 16)));
bc06712d 4761 if (ud1 != 0)
d448860e
JH
4762 emit_move_insn (copy_rtx (dest),
4763 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
bc06712d
TR
4764 }
4765 }
2bfcf297
DB
4766 return dest;
4767}
4768
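/* A standalone illustrative sketch of the chunking arithmetic used by
   rs6000_emit_set_long_const above: the 64-bit constant is split into four
   16-bit chunks ud1..ud4, low to high, and rebuilt with at most five
   instructions.  The helper name and the mnemonic summary below are
   hypothetical, and the block is not part of the build.  */
#if 0
static void
example_split_into_ud_chunks (unsigned long long c, unsigned int ud[4])
{
  ud[0] = c & 0xffff;		/* ud1: bits  0..15 */
  ud[1] = (c >> 16) & 0xffff;	/* ud2: bits 16..31 */
  ud[2] = (c >> 32) & 0xffff;	/* ud3: bits 32..47 */
  ud[3] = (c >> 48) & 0xffff;	/* ud4: bits 48..63 */

  /* For c = 0x123456789abcdef0 this yields ud4=0x1234, ud3=0x5678,
     ud2=0x9abc, ud1=0xdef0, and the general case above emits roughly:

	lis   rD, 0x1234	; ud4 << 16 (sign-adjusted)
	ori   rD, rD, 0x5678	; | ud3
	sldi  rD, rD, 32	; shift left by 32
	oris  rD, rD, 0x9abc	; | ud2 << 16
	ori   rD, rD, 0xdef0	; | ud1

     The special cases earlier in the function fire when the high chunks
     are all zeros or all ones, so fewer instructions suffice.  */
}
#endif
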
76d2b81d 4769/* Helper for the following. Get rid of [r+r] memory refs
7393f7f8 4770 in cases where it won't work (TImode, TFmode, TDmode). */
76d2b81d
DJ
4771
4772static void
4773rs6000_eliminate_indexed_memrefs (rtx operands[2])
4774{
4775 if (GET_CODE (operands[0]) == MEM
4776 && GET_CODE (XEXP (operands[0], 0)) != REG
55aa0757 4777 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
76d2b81d
DJ
4778 && ! reload_in_progress)
4779 operands[0]
4780 = replace_equiv_address (operands[0],
4781 copy_addr_to_reg (XEXP (operands[0], 0)));
4782
4783 if (GET_CODE (operands[1]) == MEM
4784 && GET_CODE (XEXP (operands[1], 0)) != REG
55aa0757 4785 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
76d2b81d
DJ
4786 && ! reload_in_progress)
4787 operands[1]
4788 = replace_equiv_address (operands[1],
4789 copy_addr_to_reg (XEXP (operands[1], 0)));
4790}
4791
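/* An illustrative example of the rewrite above: an indexed address such as

     (mem:TI (plus:SI (reg:SI 3) (reg:SI 4)))

   becomes, once copy_addr_to_reg forces the sum into a fresh pseudo rT
   (rT is a placeholder, not a real register number),

     (mem:TI (reg:SI rT))	where rT holds r3 + r4

   so the TImode/TFmode/TDmode move patterns never see an [r+r] operand.  */
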
fb4d4348
GK
4792/* Emit a move from SOURCE to DEST in mode MODE. */
4793void
a2369ed3 4794rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
fb4d4348
GK
4795{
4796 rtx operands[2];
4797 operands[0] = dest;
4798 operands[1] = source;
f676971a 4799
fb4d4348
GK
4800 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4801 if (GET_CODE (operands[1]) == CONST_DOUBLE
4802 && ! FLOAT_MODE_P (mode)
4803 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4804 {
4805 /* FIXME. This should never happen. */
4806 /* Since it seems that it does, do the safe thing and convert
4807 to a CONST_INT. */
2496c7bd 4808 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
fb4d4348 4809 }
37409796
NS
4810 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4811 || FLOAT_MODE_P (mode)
4812 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4813 || CONST_DOUBLE_LOW (operands[1]) < 0)
4814 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4815 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
bb8df8a6 4816
c9e8cb32
DD
4817 /* Check if GCC is setting up a block move that will end up using FP
4818 registers as temporaries. We must make sure this is acceptable. */
4819 if (GET_CODE (operands[0]) == MEM
4820 && GET_CODE (operands[1]) == MEM
4821 && mode == DImode
41543739
GK
4822 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4823 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4824 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4825 ? 32 : MEM_ALIGN (operands[0])))
4826 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
f676971a 4827 ? 32
41543739
GK
4828 : MEM_ALIGN (operands[1]))))
4829 && ! MEM_VOLATILE_P (operands [0])
4830 && ! MEM_VOLATILE_P (operands [1]))
c9e8cb32 4831 {
41543739
GK
4832 emit_move_insn (adjust_address (operands[0], SImode, 0),
4833 adjust_address (operands[1], SImode, 0));
d448860e
JH
4834 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4835 adjust_address (copy_rtx (operands[1]), SImode, 4));
c9e8cb32
DD
4836 return;
4837 }
630d42a0 4838
b3a13419 4839 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
c9dbf840 4840 && !gpc_reg_operand (operands[1], mode))
f6219a5e 4841 operands[1] = force_reg (mode, operands[1]);
a9098fd0 4842
a3170dc6
AH
4843 if (mode == SFmode && ! TARGET_POWERPC
4844 && TARGET_HARD_FLOAT && TARGET_FPRS
ffc14f31 4845 && GET_CODE (operands[0]) == MEM)
fb4d4348 4846 {
ffc14f31
GK
4847 int regnum;
4848
4849 if (reload_in_progress || reload_completed)
4850 regnum = true_regnum (operands[1]);
4851 else if (GET_CODE (operands[1]) == REG)
4852 regnum = REGNO (operands[1]);
4853 else
4854 regnum = -1;
f676971a 4855
fb4d4348
GK
4856 /* If operands[1] is a register, on POWER it may have
4857 double-precision data in it, so truncate it to single
4858 precision. */
4859 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4860 {
4861 rtx newreg;
b3a13419 4862 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
d448860e 4863 : gen_reg_rtx (mode));
fb4d4348
GK
4864 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4865 operands[1] = newreg;
4866 }
4867 }
4868
c4501e62
JJ
4869 /* Recognize the case where operand[1] is a reference to thread-local
4870 data and load its address to a register. */
84f52ebd 4871 if (rs6000_tls_referenced_p (operands[1]))
c4501e62 4872 {
84f52ebd
RH
4873 enum tls_model model;
4874 rtx tmp = operands[1];
4875 rtx addend = NULL;
4876
4877 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4878 {
4879 addend = XEXP (XEXP (tmp, 0), 1);
4880 tmp = XEXP (XEXP (tmp, 0), 0);
4881 }
4882
4883 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4884 model = SYMBOL_REF_TLS_MODEL (tmp);
4885 gcc_assert (model != 0);
4886
4887 tmp = rs6000_legitimize_tls_address (tmp, model);
4888 if (addend)
4889 {
4890 tmp = gen_rtx_PLUS (mode, tmp, addend);
4891 tmp = force_operand (tmp, operands[0]);
4892 }
4893 operands[1] = tmp;
c4501e62
JJ
4894 }
4895
8f4e6caf
RH
4896 /* Handle the case where reload calls us with an invalid address. */
4897 if (reload_in_progress && mode == Pmode
69ef87e2 4898 && (! general_operand (operands[1], mode)
8f4e6caf
RH
4899 || ! nonimmediate_operand (operands[0], mode)))
4900 goto emit_set;
4901
a9baceb1
GK
4902 /* 128-bit constant floating-point values on Darwin should really be
4903 loaded as two parts. */
8521c414 4904 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
a9baceb1
GK
4905 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4906 {
4907 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4908 know how to get a DFmode SUBREG of a TFmode. */
17caeff2
JM
4909 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
4910 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
4911 simplify_gen_subreg (imode, operands[1], mode, 0),
4912 imode);
4913 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
4914 GET_MODE_SIZE (imode)),
4915 simplify_gen_subreg (imode, operands[1], mode,
4916 GET_MODE_SIZE (imode)),
4917 imode);
a9baceb1
GK
4918 return;
4919 }
4920
e41b2a33
PB
4921 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
4922 cfun->machine->sdmode_stack_slot =
4923 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
4924
4925 if (reload_in_progress
4926 && mode == SDmode
4927 && MEM_P (operands[0])
4928 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
4929 && REG_P (operands[1]))
4930 {
4931 if (FP_REGNO_P (REGNO (operands[1])))
4932 {
4933 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
4934 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4935 emit_insn (gen_movsd_store (mem, operands[1]));
4936 }
4937 else if (INT_REGNO_P (REGNO (operands[1])))
4938 {
4939 rtx mem = adjust_address_nv (operands[0], mode, 4);
4940 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4941 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
4942 }
4943 else
 4944 gcc_unreachable ();
4945 return;
4946 }
4947 if (reload_in_progress
4948 && mode == SDmode
4949 && REG_P (operands[0])
4950 && MEM_P (operands[1])
4951 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
4952 {
4953 if (FP_REGNO_P (REGNO (operands[0])))
4954 {
4955 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
4956 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4957 emit_insn (gen_movsd_load (operands[0], mem));
4958 }
4959 else if (INT_REGNO_P (REGNO (operands[0])))
4960 {
4961 rtx mem = adjust_address_nv (operands[1], mode, 4);
4962 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
4963 emit_insn (gen_movsd_hardfloat (operands[0], mem));
4964 }
4965 else
 4966 gcc_unreachable ();
4967 return;
4968 }
4969
fb4d4348
GK
4970 /* FIXME: In the long term, this switch statement should go away
4971 and be replaced by a sequence of tests based on things like
4972 mode == Pmode. */
4973 switch (mode)
4974 {
4975 case HImode:
4976 case QImode:
4977 if (CONSTANT_P (operands[1])
4978 && GET_CODE (operands[1]) != CONST_INT)
a9098fd0 4979 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348
GK
4980 break;
4981
06f4e019 4982 case TFmode:
7393f7f8 4983 case TDmode:
76d2b81d
DJ
4984 rs6000_eliminate_indexed_memrefs (operands);
4985 /* fall through */
4986
fb4d4348 4987 case DFmode:
7393f7f8 4988 case DDmode:
fb4d4348 4989 case SFmode:
e41b2a33 4990 case SDmode:
f676971a 4991 if (CONSTANT_P (operands[1])
fb4d4348 4992 && ! easy_fp_constant (operands[1], mode))
a9098fd0 4993 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 4994 break;
f676971a 4995
0ac081f6
AH
4996 case V16QImode:
4997 case V8HImode:
4998 case V4SFmode:
4999 case V4SImode:
a3170dc6
AH
5000 case V4HImode:
5001 case V2SFmode:
5002 case V2SImode:
00a892b8 5003 case V1DImode:
69ef87e2 5004 if (CONSTANT_P (operands[1])
d744e06e 5005 && !easy_vector_constant (operands[1], mode))
0ac081f6
AH
5006 operands[1] = force_const_mem (mode, operands[1]);
5007 break;
f676971a 5008
fb4d4348 5009 case SImode:
a9098fd0 5010 case DImode:
fb4d4348
GK
5011 /* Use default pattern for address of ELF small data */
5012 if (TARGET_ELF
a9098fd0 5013 && mode == Pmode
f607bc57 5014 && DEFAULT_ABI == ABI_V4
f676971a 5015 && (GET_CODE (operands[1]) == SYMBOL_REF
a9098fd0
GK
5016 || GET_CODE (operands[1]) == CONST)
5017 && small_data_operand (operands[1], mode))
fb4d4348
GK
5018 {
5019 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5020 return;
5021 }
5022
f607bc57 5023 if (DEFAULT_ABI == ABI_V4
a9098fd0
GK
5024 && mode == Pmode && mode == SImode
5025 && flag_pic == 1 && got_operand (operands[1], mode))
fb4d4348
GK
5026 {
5027 emit_insn (gen_movsi_got (operands[0], operands[1]));
5028 return;
5029 }
5030
ee890fe2 5031 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
f1384257
AM
5032 && TARGET_NO_TOC
5033 && ! flag_pic
a9098fd0 5034 && mode == Pmode
fb4d4348
GK
5035 && CONSTANT_P (operands[1])
5036 && GET_CODE (operands[1]) != HIGH
5037 && GET_CODE (operands[1]) != CONST_INT)
5038 {
b3a13419
ILT
5039 rtx target = (!can_create_pseudo_p ()
5040 ? operands[0]
5041 : gen_reg_rtx (mode));
fb4d4348
GK
5042
5043 /* If this is a function address on -mcall-aixdesc,
5044 convert it to the address of the descriptor. */
5045 if (DEFAULT_ABI == ABI_AIX
5046 && GET_CODE (operands[1]) == SYMBOL_REF
5047 && XSTR (operands[1], 0)[0] == '.')
5048 {
5049 const char *name = XSTR (operands[1], 0);
5050 rtx new_ref;
5051 while (*name == '.')
5052 name++;
5053 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5054 CONSTANT_POOL_ADDRESS_P (new_ref)
5055 = CONSTANT_POOL_ADDRESS_P (operands[1]);
d1908feb 5056 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
fb4d4348 5057 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
c185c797 5058 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
fb4d4348
GK
5059 operands[1] = new_ref;
5060 }
7509c759 5061
ee890fe2
SS
5062 if (DEFAULT_ABI == ABI_DARWIN)
5063 {
ab82a49f
AP
5064#if TARGET_MACHO
5065 if (MACHO_DYNAMIC_NO_PIC_P)
5066 {
5067 /* Take care of any required data indirection. */
5068 operands[1] = rs6000_machopic_legitimize_pic_address (
5069 operands[1], mode, operands[0]);
5070 if (operands[0] != operands[1])
5071 emit_insn (gen_rtx_SET (VOIDmode,
c4ad648e 5072 operands[0], operands[1]));
ab82a49f
AP
5073 return;
5074 }
5075#endif
b8a55285
AP
5076 emit_insn (gen_macho_high (target, operands[1]));
5077 emit_insn (gen_macho_low (operands[0], target, operands[1]));
ee890fe2
SS
5078 return;
5079 }
5080
fb4d4348
GK
5081 emit_insn (gen_elf_high (target, operands[1]));
5082 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5083 return;
5084 }
5085
a9098fd0
GK
5086 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5087 and we have put it in the TOC, we just need to make a TOC-relative
5088 reference to it. */
5089 if (TARGET_TOC
5090 && GET_CODE (operands[1]) == SYMBOL_REF
4d588c14 5091 && constant_pool_expr_p (operands[1])
a9098fd0
GK
5092 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5093 get_pool_mode (operands[1])))
fb4d4348 5094 {
a9098fd0 5095 operands[1] = create_TOC_reference (operands[1]);
fb4d4348 5096 }
a9098fd0
GK
5097 else if (mode == Pmode
5098 && CONSTANT_P (operands[1])
38886f37
AO
5099 && ((GET_CODE (operands[1]) != CONST_INT
5100 && ! easy_fp_constant (operands[1], mode))
5101 || (GET_CODE (operands[1]) == CONST_INT
5102 && num_insns_constant (operands[1], mode) > 2)
5103 || (GET_CODE (operands[0]) == REG
5104 && FP_REGNO_P (REGNO (operands[0]))))
a9098fd0 5105 && GET_CODE (operands[1]) != HIGH
4d588c14
RH
5106 && ! legitimate_constant_pool_address_p (operands[1])
5107 && ! toc_relative_expr_p (operands[1]))
fb4d4348
GK
5108 {
5109 /* Emit a USE operation so that the constant isn't deleted if
5110 expensive optimizations are turned on because nobody
5111 references it. This should only be done for operands that
5112 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5113 This should not be done for operands that contain LABEL_REFs.
5114 For now, we just handle the obvious case. */
5115 if (GET_CODE (operands[1]) != LABEL_REF)
c41c1387 5116 emit_use (operands[1]);
fb4d4348 5117
c859cda6 5118#if TARGET_MACHO
ee890fe2 5119 /* Darwin uses a special PIC legitimizer. */
ab82a49f 5120 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
ee890fe2 5121 {
ee890fe2
SS
5122 operands[1] =
5123 rs6000_machopic_legitimize_pic_address (operands[1], mode,
c859cda6
DJ
5124 operands[0]);
5125 if (operands[0] != operands[1])
5126 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
ee890fe2
SS
5127 return;
5128 }
c859cda6 5129#endif
ee890fe2 5130
fb4d4348
GK
5131 /* If we are to limit the number of things we put in the TOC and
5132 this is a symbol plus a constant we can add in one insn,
5133 just put the symbol in the TOC and add the constant. Don't do
5134 this if reload is in progress. */
5135 if (GET_CODE (operands[1]) == CONST
5136 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5137 && GET_CODE (XEXP (operands[1], 0)) == PLUS
a9098fd0 5138 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
fb4d4348
GK
5139 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5140 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5141 && ! side_effects_p (operands[0]))
5142 {
a4f6c312
SS
5143 rtx sym =
5144 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
fb4d4348
GK
5145 rtx other = XEXP (XEXP (operands[1], 0), 1);
5146
a9098fd0
GK
5147 sym = force_reg (mode, sym);
5148 if (mode == SImode)
5149 emit_insn (gen_addsi3 (operands[0], sym, other));
5150 else
5151 emit_insn (gen_adddi3 (operands[0], sym, other));
fb4d4348
GK
5152 return;
5153 }
5154
a9098fd0 5155 operands[1] = force_const_mem (mode, operands[1]);
fb4d4348 5156
f676971a 5157 if (TARGET_TOC
0cdc04e8 5158 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
4d588c14 5159 && constant_pool_expr_p (XEXP (operands[1], 0))
d34c5b80
DE
5160 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5161 get_pool_constant (XEXP (operands[1], 0)),
5162 get_pool_mode (XEXP (operands[1], 0))))
a9098fd0 5163 {
ba4828e0 5164 operands[1]
542a8afa 5165 = gen_const_mem (mode,
c4ad648e 5166 create_TOC_reference (XEXP (operands[1], 0)));
ba4828e0 5167 set_mem_alias_set (operands[1], get_TOC_alias_set ());
a9098fd0 5168 }
fb4d4348
GK
5169 }
5170 break;
a9098fd0 5171
fb4d4348 5172 case TImode:
76d2b81d
DJ
5173 rs6000_eliminate_indexed_memrefs (operands);
5174
27dc0551
DE
5175 if (TARGET_POWER)
5176 {
5177 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5178 gen_rtvec (2,
5179 gen_rtx_SET (VOIDmode,
5180 operands[0], operands[1]),
5181 gen_rtx_CLOBBER (VOIDmode,
5182 gen_rtx_SCRATCH (SImode)))));
5183 return;
5184 }
fb4d4348
GK
5185 break;
5186
5187 default:
37409796 5188 gcc_unreachable ();
fb4d4348
GK
5189 }
5190
a9098fd0
GK
5191 /* Above, we may have called force_const_mem which may have returned
5192 an invalid address. If we can, fix this up; otherwise, reload will
5193 have to deal with it. */
8f4e6caf
RH
5194 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5195 operands[1] = validize_mem (operands[1]);
a9098fd0 5196
8f4e6caf 5197 emit_set:
fb4d4348
GK
5198 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5199}
4697a36c 5200\f
2858f73a
GK
5201/* Nonzero if we can use a floating-point register to pass this arg. */
5202#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
ebb109ad 5203 (SCALAR_FLOAT_MODE_P (MODE) \
2858f73a
GK
5204 && (CUM)->fregno <= FP_ARG_MAX_REG \
5205 && TARGET_HARD_FLOAT && TARGET_FPRS)
5206
5207/* Nonzero if we can use an AltiVec register to pass this arg. */
5208#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5209 (ALTIVEC_VECTOR_MODE (MODE) \
5210 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5211 && TARGET_ALTIVEC_ABI \
83953138 5212 && (NAMED))
2858f73a 5213
c6e8c921
GK
5214/* Return a nonzero value to say to return the function value in
5215 memory, just as large structures are always returned. TYPE will be
5216 the data type of the value, and FNTYPE will be the type of the
5217 function doing the returning, or @code{NULL} for libcalls.
5218
5219 The AIX ABI for the RS/6000 specifies that all structures are
5220 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5221 specifies that structures <= 8 bytes are returned in r3/r4, but a
5222 draft put them in memory, and GCC used to implement the draft
df01da37 5223 instead of the final standard. Therefore, aix_struct_return
c6e8c921
GK
5224 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5225 compatibility can change DRAFT_V4_STRUCT_RET to override the
5226 default, and -m switches get the final word. See
5227 rs6000_override_options for more details.
5228
5229 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5230 long double support is enabled. These values are returned in memory.
5231
5232 int_size_in_bytes returns -1 for variable size objects, which go in
5233 memory always. The cast to unsigned makes -1 > 8. */
5234
5235static bool
586de218 5236rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
c6e8c921 5237{
594a51fe
SS
5238 /* In the darwin64 abi, try to use registers for larger structs
5239 if possible. */
0b5383eb 5240 if (rs6000_darwin64_abi
594a51fe 5241 && TREE_CODE (type) == RECORD_TYPE
0b5383eb
DJ
5242 && int_size_in_bytes (type) > 0)
5243 {
5244 CUMULATIVE_ARGS valcum;
5245 rtx valret;
5246
5247 valcum.words = 0;
5248 valcum.fregno = FP_ARG_MIN_REG;
5249 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5250 /* Do a trial code generation as if this were going to be passed
5251 as an argument; if any part goes in memory, we return NULL. */
5252 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5253 if (valret)
5254 return false;
5255 /* Otherwise fall through to more conventional ABI rules. */
5256 }
594a51fe 5257
c6e8c921 5258 if (AGGREGATE_TYPE_P (type)
df01da37 5259 && (aix_struct_return
c6e8c921
GK
5260 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5261 return true;
b693336b 5262
bada2eb8
DE
5263 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5264 modes only exist for GCC vector types if -maltivec. */
5265 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5266 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5267 return false;
5268
b693336b
PB
5269 /* Return synthetic vectors in memory. */
5270 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 5271 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
5272 {
5273 static bool warned_for_return_big_vectors = false;
5274 if (!warned_for_return_big_vectors)
5275 {
d4ee4d25 5276 warning (0, "GCC vector returned by reference: "
b693336b
PB
5277 "non-standard ABI extension with no compatibility guarantee");
5278 warned_for_return_big_vectors = true;
5279 }
5280 return true;
5281 }
5282
602ea4d3 5283 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
c6e8c921 5284 return true;
ad630bef 5285
c6e8c921
GK
5286 return false;
5287}
5288
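/* Some illustrative cases for the rules above, assuming default option
   settings (a sketch, not an exhaustive statement of the ABIs):

     struct { int a, b; }   (8 bytes)   memory when aix_struct_return is
					set (AIX/Darwin or the draft SVR4
					behaviour), r3/r4 under the final
					SVR4 rules;
     struct { char c[9]; }  (9 bytes)   memory everywhere, since
					(unsigned) 9 > 8;
     variable-sized type    (size -1)   memory, because the cast to
					unsigned makes -1 > 8;
     32-byte generic vector		memory, with the one-time
					"returned by reference" warning.  */
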
4697a36c
MM
5289/* Initialize a variable CUM of type CUMULATIVE_ARGS
5290 for a call to a function whose data type is FNTYPE.
5291 For a library call, FNTYPE is 0.
5292
 5293 For incoming args we set the number of arguments in the prototype to be large,
1c20ae99 5294 so we never return a PARALLEL. */
4697a36c
MM
5295
5296void
f676971a 5297init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
0f6937fe
AM
5298 rtx libname ATTRIBUTE_UNUSED, int incoming,
5299 int libcall, int n_named_args)
4697a36c
MM
5300{
5301 static CUMULATIVE_ARGS zero_cumulative;
5302
5303 *cum = zero_cumulative;
5304 cum->words = 0;
5305 cum->fregno = FP_ARG_MIN_REG;
0ac081f6 5306 cum->vregno = ALTIVEC_ARG_MIN_REG;
4697a36c 5307 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
ddcc8263
DE
5308 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5309 ? CALL_LIBCALL : CALL_NORMAL);
4cc833b7 5310 cum->sysv_gregno = GP_ARG_MIN_REG;
a6c9bed4
AH
5311 cum->stdarg = fntype
5312 && (TYPE_ARG_TYPES (fntype) != 0
5313 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5314 != void_type_node));
4697a36c 5315
0f6937fe
AM
5316 cum->nargs_prototype = 0;
5317 if (incoming || cum->prototype)
5318 cum->nargs_prototype = n_named_args;
4697a36c 5319
a5c76ee6 5320 /* Check for a longcall attribute. */
3eb4e360
AM
5321 if ((!fntype && rs6000_default_long_calls)
5322 || (fntype
5323 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5324 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5325 cum->call_cookie |= CALL_LONG;
6a4cee5f 5326
4697a36c
MM
5327 if (TARGET_DEBUG_ARG)
5328 {
5329 fprintf (stderr, "\ninit_cumulative_args:");
5330 if (fntype)
5331 {
5332 tree ret_type = TREE_TYPE (fntype);
5333 fprintf (stderr, " ret code = %s,",
5334 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5335 }
5336
6a4cee5f
MM
5337 if (cum->call_cookie & CALL_LONG)
5338 fprintf (stderr, " longcall,");
5339
4697a36c
MM
5340 fprintf (stderr, " proto = %d, nargs = %d\n",
5341 cum->prototype, cum->nargs_prototype);
5342 }
f676971a 5343
c4ad648e
AM
5344 if (fntype
5345 && !TARGET_ALTIVEC
5346 && TARGET_ALTIVEC_ABI
5347 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5348 {
c85ce869 5349 error ("cannot return value in vector register because"
c4ad648e 5350 " altivec instructions are disabled, use -maltivec"
c85ce869 5351 " to enable them");
c4ad648e 5352 }
4697a36c
MM
5353}
5354\f
fe984136
RH
5355/* Return true if TYPE must be passed on the stack and not in registers. */
5356
5357static bool
586de218 5358rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
fe984136
RH
5359{
5360 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5361 return must_pass_in_stack_var_size (mode, type);
5362 else
5363 return must_pass_in_stack_var_size_or_pad (mode, type);
5364}
5365
c229cba9
DE
5366/* If defined, a C expression which determines whether, and in which
5367 direction, to pad out an argument with extra space. The value
5368 should be of type `enum direction': either `upward' to pad above
5369 the argument, `downward' to pad below, or `none' to inhibit
5370 padding.
5371
5372 For the AIX ABI structs are always stored left shifted in their
5373 argument slot. */
5374
9ebbca7d 5375enum direction
586de218 5376function_arg_padding (enum machine_mode mode, const_tree type)
c229cba9 5377{
6e985040
AM
5378#ifndef AGGREGATE_PADDING_FIXED
5379#define AGGREGATE_PADDING_FIXED 0
5380#endif
5381#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5382#define AGGREGATES_PAD_UPWARD_ALWAYS 0
5383#endif
5384
5385 if (!AGGREGATE_PADDING_FIXED)
5386 {
5387 /* GCC used to pass structures of the same size as integer types as
5388 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
19525b57 5389 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
6e985040
AM
5390 passed padded downward, except that -mstrict-align further
5391 muddied the water in that multi-component structures of 2 and 4
5392 bytes in size were passed padded upward.
5393
5394 The following arranges for best compatibility with previous
5395 versions of gcc, but removes the -mstrict-align dependency. */
5396 if (BYTES_BIG_ENDIAN)
5397 {
5398 HOST_WIDE_INT size = 0;
5399
5400 if (mode == BLKmode)
5401 {
5402 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5403 size = int_size_in_bytes (type);
5404 }
5405 else
5406 size = GET_MODE_SIZE (mode);
5407
5408 if (size == 1 || size == 2 || size == 4)
5409 return downward;
5410 }
5411 return upward;
5412 }
5413
5414 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5415 {
5416 if (type != 0 && AGGREGATE_TYPE_P (type))
5417 return upward;
5418 }
c229cba9 5419
d3704c46
KH
5420 /* Fall back to the default. */
5421 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
c229cba9
DE
5422}
5423
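/* An illustrative reading of the defaults above (AGGREGATE_PADDING_FIXED
   left at 0): the decision reduces to

     size 2 aggregate, BYTES_BIG_ENDIAN   -> downward
     size 3 aggregate, BYTES_BIG_ENDIAN   -> upward
     any argument,    !BYTES_BIG_ENDIAN   -> upward

   i.e. only 1-, 2- and 4-byte arguments on big-endian targets are padded
   downward, matching what older GCC versions did for small structs.  */
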
b6c9286a 5424/* If defined, a C expression that gives the alignment boundary, in bits,
f676971a 5425 of an argument with the specified mode and type. If it is not defined,
b6c9286a 5426 PARM_BOUNDARY is used for all arguments.
f676971a 5427
84e9ad15
AM
5428 V.4 wants long longs and doubles to be double word aligned. Just
5429 testing the mode size is a boneheaded way to do this as it means
5430 that other types such as complex int are also double word aligned.
5431 However, we're stuck with this because changing the ABI might break
5432 existing library interfaces.
5433
b693336b
PB
5434 Doubleword align SPE vectors.
5435 Quadword align Altivec vectors.
5436 Quadword align large synthetic vector types. */
b6c9286a
MM
5437
5438int
b693336b 5439function_arg_boundary (enum machine_mode mode, tree type)
b6c9286a 5440{
84e9ad15
AM
5441 if (DEFAULT_ABI == ABI_V4
5442 && (GET_MODE_SIZE (mode) == 8
5443 || (TARGET_HARD_FLOAT
5444 && TARGET_FPRS
7393f7f8 5445 && (mode == TFmode || mode == TDmode))))
4ed78545 5446 return 64;
ad630bef
DE
5447 else if (SPE_VECTOR_MODE (mode)
5448 || (type && TREE_CODE (type) == VECTOR_TYPE
5449 && int_size_in_bytes (type) >= 8
5450 && int_size_in_bytes (type) < 16))
e1f83b4d 5451 return 64;
ad630bef
DE
5452 else if (ALTIVEC_VECTOR_MODE (mode)
5453 || (type && TREE_CODE (type) == VECTOR_TYPE
5454 && int_size_in_bytes (type) >= 16))
0ac081f6 5455 return 128;
0b5383eb
DJ
5456 else if (rs6000_darwin64_abi && mode == BLKmode
5457 && type && TYPE_ALIGN (type) > 64)
5458 return 128;
9ebbca7d 5459 else
b6c9286a 5460 return PARM_BOUNDARY;
b6c9286a 5461}
c53bdcf5 5462
294bd182
AM
5463/* For a function parm of MODE and TYPE, return the starting word in
5464 the parameter area. NWORDS of the parameter area are already used. */
5465
5466static unsigned int
5467rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5468{
5469 unsigned int align;
5470 unsigned int parm_offset;
5471
5472 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5473 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5474 return nwords + (-(parm_offset + nwords) & align);
5475}
5476
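/* A worked example of the arithmetic above, assuming a 32-bit AIX-style
   target where PARM_BOUNDARY is 32 bits:

     - function_arg_boundary returns 128 for an AltiVec vector, so
       align = 128/32 - 1 = 3, a mask of low word bits;
     - parm_offset is 6 because the parameter save area begins 24 bytes
       (6 words) into the frame;
     - with nwords = 1 already used,
	 1 + (-(6 + 1) & 3) = 1 + 1 = 2,
       i.e. the vector starts at parameter word 2, byte offset 24 + 8 = 32
       from the 16-byte aligned stack pointer, as required.  */
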
c53bdcf5
AM
5477/* Compute the size (in words) of a function argument. */
5478
5479static unsigned long
5480rs6000_arg_size (enum machine_mode mode, tree type)
5481{
5482 unsigned long size;
5483
5484 if (mode != BLKmode)
5485 size = GET_MODE_SIZE (mode);
5486 else
5487 size = int_size_in_bytes (type);
5488
5489 if (TARGET_32BIT)
5490 return (size + 3) >> 2;
5491 else
5492 return (size + 7) >> 3;
5493}
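
/* Example: the expressions above simply round up to whole words, so a
   10-byte argument takes (10 + 3) >> 2 = 3 words when TARGET_32BIT and
   (10 + 7) >> 3 = 2 doublewords otherwise.  */
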
b6c9286a 5494\f
0b5383eb 5495/* Use this to flush pending int fields. */
594a51fe
SS
5496
5497static void
0b5383eb
DJ
5498rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5499 HOST_WIDE_INT bitpos)
594a51fe 5500{
0b5383eb
DJ
5501 unsigned int startbit, endbit;
5502 int intregs, intoffset;
5503 enum machine_mode mode;
594a51fe 5504
0b5383eb
DJ
5505 if (cum->intoffset == -1)
5506 return;
594a51fe 5507
0b5383eb
DJ
5508 intoffset = cum->intoffset;
5509 cum->intoffset = -1;
5510
5511 if (intoffset % BITS_PER_WORD != 0)
5512 {
5513 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5514 MODE_INT, 0);
5515 if (mode == BLKmode)
594a51fe 5516 {
0b5383eb
DJ
5517 /* We couldn't find an appropriate mode, which happens,
5518 e.g., in packed structs when there are 3 bytes to load.
5519 Back intoffset back to the beginning of the word in this
5520 case. */
5521 intoffset = intoffset & -BITS_PER_WORD;
594a51fe 5522 }
594a51fe 5523 }
0b5383eb
DJ
5524
5525 startbit = intoffset & -BITS_PER_WORD;
5526 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5527 intregs = (endbit - startbit) / BITS_PER_WORD;
5528 cum->words += intregs;
5529}
5530
5531/* The darwin64 ABI calls for us to recurse down through structs,
5532 looking for elements passed in registers. Unfortunately, we have
5533 to track int register count here also because of misalignments
5534 in powerpc alignment mode. */
5535
5536static void
5537rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5538 tree type,
5539 HOST_WIDE_INT startbitpos)
5540{
5541 tree f;
5542
5543 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5544 if (TREE_CODE (f) == FIELD_DECL)
5545 {
5546 HOST_WIDE_INT bitpos = startbitpos;
5547 tree ftype = TREE_TYPE (f);
70fb00df
AP
5548 enum machine_mode mode;
5549 if (ftype == error_mark_node)
5550 continue;
5551 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5552
5553 if (DECL_SIZE (f) != 0
5554 && host_integerp (bit_position (f), 1))
5555 bitpos += int_bit_position (f);
5556
5557 /* ??? FIXME: else assume zero offset. */
5558
5559 if (TREE_CODE (ftype) == RECORD_TYPE)
5560 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5561 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5562 {
5563 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5564 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5565 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5566 }
5567 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5568 {
5569 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5570 cum->vregno++;
5571 cum->words += 2;
5572 }
5573 else if (cum->intoffset == -1)
5574 cum->intoffset = bitpos;
5575 }
594a51fe
SS
5576}
5577
4697a36c
MM
5578/* Update the data in CUM to advance over an argument
5579 of mode MODE and data type TYPE.
b2d04ecf
AM
5580 (TYPE is null for libcalls where that information may not be available.)
5581
5582 Note that for args passed by reference, function_arg will be called
5583 with MODE and TYPE set to that of the pointer to the arg, not the arg
5584 itself. */
4697a36c
MM
5585
5586void
f676971a 5587function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
594a51fe 5588 tree type, int named, int depth)
4697a36c 5589{
0b5383eb
DJ
5590 int size;
5591
594a51fe
SS
5592 /* Only tick off an argument if we're not recursing. */
5593 if (depth == 0)
5594 cum->nargs_prototype--;
4697a36c 5595
ad630bef
DE
5596 if (TARGET_ALTIVEC_ABI
5597 && (ALTIVEC_VECTOR_MODE (mode)
5598 || (type && TREE_CODE (type) == VECTOR_TYPE
5599 && int_size_in_bytes (type) == 16)))
0ac081f6 5600 {
4ed78545
AM
5601 bool stack = false;
5602
2858f73a 5603 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c4ad648e 5604 {
6d0ef01e
HP
5605 cum->vregno++;
5606 if (!TARGET_ALTIVEC)
c85ce869 5607 error ("cannot pass argument in vector register because"
6d0ef01e 5608 " altivec instructions are disabled, use -maltivec"
c85ce869 5609 " to enable them");
4ed78545
AM
5610
5611 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
f676971a 5612 even if it is going to be passed in a vector register.
4ed78545
AM
5613 Darwin does the same for variable-argument functions. */
5614 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5615 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5616 stack = true;
6d0ef01e 5617 }
4ed78545
AM
5618 else
5619 stack = true;
5620
5621 if (stack)
c4ad648e 5622 {
a594a19c 5623 int align;
f676971a 5624
2858f73a
GK
5625 /* Vector parameters must be 16-byte aligned. This places
5626 them at 2 mod 4 in terms of words in 32-bit mode, since
5627 the parameter save area starts at offset 24 from the
5628 stack. In 64-bit mode, they just have to start on an
5629 even word, since the parameter save area is 16-byte
5630 aligned. Space for GPRs is reserved even if the argument
5631 will be passed in memory. */
5632 if (TARGET_32BIT)
4ed78545 5633 align = (2 - cum->words) & 3;
2858f73a
GK
5634 else
5635 align = cum->words & 1;
c53bdcf5 5636 cum->words += align + rs6000_arg_size (mode, type);
f676971a 5637
a594a19c
GK
5638 if (TARGET_DEBUG_ARG)
5639 {
f676971a 5640 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
a594a19c
GK
5641 cum->words, align);
5642 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
f676971a 5643 cum->nargs_prototype, cum->prototype,
2858f73a 5644 GET_MODE_NAME (mode));
a594a19c
GK
5645 }
5646 }
0ac081f6 5647 }
a4b0320c 5648 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
a6c9bed4
AH
5649 && !cum->stdarg
5650 && cum->sysv_gregno <= GP_ARG_MAX_REG)
a4b0320c 5651 cum->sysv_gregno++;
594a51fe
SS
5652
5653 else if (rs6000_darwin64_abi
5654 && mode == BLKmode
0b5383eb
DJ
5655 && TREE_CODE (type) == RECORD_TYPE
5656 && (size = int_size_in_bytes (type)) > 0)
5657 {
5658 /* Variable sized types have size == -1 and are
5659 treated as if consisting entirely of ints.
5660 Pad to 16 byte boundary if needed. */
5661 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5662 && (cum->words % 2) != 0)
5663 cum->words++;
5664 /* For varargs, we can just go up by the size of the struct. */
5665 if (!named)
5666 cum->words += (size + 7) / 8;
5667 else
5668 {
5669 /* It is tempting to say int register count just goes up by
5670 sizeof(type)/8, but this is wrong in a case such as
5671 { int; double; int; } [powerpc alignment]. We have to
5672 grovel through the fields for these too. */
5673 cum->intoffset = 0;
5674 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
bb8df8a6 5675 rs6000_darwin64_record_arg_advance_flush (cum,
0b5383eb
DJ
5676 size * BITS_PER_UNIT);
5677 }
5678 }
f607bc57 5679 else if (DEFAULT_ABI == ABI_V4)
4697a36c 5680 {
a3170dc6 5681 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 5682 && (mode == SFmode || mode == DFmode
e41b2a33 5683 || mode == SDmode || mode == DDmode || mode == TDmode
602ea4d3 5684 || (mode == TFmode && !TARGET_IEEEQUAD)))
4697a36c 5685 {
2d83f070
JJ
5686 /* _Decimal128 must use an even/odd register pair. This assumes
5687 that the register number is odd when fregno is odd. */
5688 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
5689 cum->fregno++;
5690
5691 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5692 <= FP_ARG_V4_MAX_REG)
602ea4d3 5693 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4cc833b7
RH
5694 else
5695 {
602ea4d3 5696 cum->fregno = FP_ARG_V4_MAX_REG + 1;
4d4447b5
PB
5697 if (mode == DFmode || mode == TFmode
5698 || mode == DDmode || mode == TDmode)
c4ad648e 5699 cum->words += cum->words & 1;
c53bdcf5 5700 cum->words += rs6000_arg_size (mode, type);
4cc833b7 5701 }
4697a36c 5702 }
4cc833b7
RH
5703 else
5704 {
b2d04ecf 5705 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
5706 int gregno = cum->sysv_gregno;
5707
4ed78545
AM
5708 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5709 (r7,r8) or (r9,r10). As does any other 2 word item such
5710 as complex int due to a historical mistake. */
5711 if (n_words == 2)
5712 gregno += (1 - gregno) & 1;
4cc833b7 5713
4ed78545 5714 /* Multi-reg args are not split between registers and stack. */
4cc833b7
RH
5715 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5716 {
4ed78545
AM
5717 /* Long long and SPE vectors are aligned on the stack.
5718 So are other 2 word items such as complex int due to
5719 a historical mistake. */
4cc833b7
RH
5720 if (n_words == 2)
5721 cum->words += cum->words & 1;
5722 cum->words += n_words;
5723 }
4697a36c 5724
4cc833b7
RH
5725 /* Note: continuing to accumulate gregno past when we've started
5726 spilling to the stack indicates the fact that we've started
5727 spilling to the stack to expand_builtin_saveregs. */
5728 cum->sysv_gregno = gregno + n_words;
5729 }
4697a36c 5730
4cc833b7
RH
5731 if (TARGET_DEBUG_ARG)
5732 {
5733 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5734 cum->words, cum->fregno);
5735 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5736 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5737 fprintf (stderr, "mode = %4s, named = %d\n",
5738 GET_MODE_NAME (mode), named);
5739 }
4697a36c
MM
5740 }
5741 else
4cc833b7 5742 {
b2d04ecf 5743 int n_words = rs6000_arg_size (mode, type);
294bd182
AM
5744 int start_words = cum->words;
5745 int align_words = rs6000_parm_start (mode, type, start_words);
a4f6c312 5746
294bd182 5747 cum->words = align_words + n_words;
4697a36c 5748
ebb109ad 5749 if (SCALAR_FLOAT_MODE_P (mode)
a3170dc6 5750 && TARGET_HARD_FLOAT && TARGET_FPRS)
2d83f070
JJ
5751 {
5752 /* _Decimal128 must be passed in an even/odd float register pair.
5753 This assumes that the register number is odd when fregno is
5754 odd. */
5755 if (mode == TDmode && (cum->fregno % 2) == 1)
5756 cum->fregno++;
5757 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5758 }
4cc833b7
RH
5759
5760 if (TARGET_DEBUG_ARG)
5761 {
5762 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5763 cum->words, cum->fregno);
5764 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5765 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
594a51fe 5766 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
294bd182 5767 named, align_words - start_words, depth);
4cc833b7
RH
5768 }
5769 }
4697a36c 5770}
a6c9bed4 5771
f82f556d
AH
5772static rtx
5773spe_build_register_parallel (enum machine_mode mode, int gregno)
5774{
17caeff2 5775 rtx r1, r3, r5, r7;
f82f556d 5776
37409796 5777 switch (mode)
f82f556d 5778 {
37409796 5779 case DFmode:
54b695e7
AH
5780 r1 = gen_rtx_REG (DImode, gregno);
5781 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5782 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
37409796
NS
5783
5784 case DCmode:
17caeff2 5785 case TFmode:
54b695e7
AH
5786 r1 = gen_rtx_REG (DImode, gregno);
5787 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5788 r3 = gen_rtx_REG (DImode, gregno + 2);
5789 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5790 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
37409796 5791
17caeff2
JM
5792 case TCmode:
5793 r1 = gen_rtx_REG (DImode, gregno);
5794 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5795 r3 = gen_rtx_REG (DImode, gregno + 2);
5796 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5797 r5 = gen_rtx_REG (DImode, gregno + 4);
5798 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5799 r7 = gen_rtx_REG (DImode, gregno + 6);
5800 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5801 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5802
37409796
NS
5803 default:
5804 gcc_unreachable ();
f82f556d 5805 }
f82f556d 5806}
b78d48dd 5807
f82f556d 5808/* Determine where to put a SIMD argument on the SPE. */
a6c9bed4 5809static rtx
f676971a 5810rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 5811 tree type)
a6c9bed4 5812{
f82f556d
AH
5813 int gregno = cum->sysv_gregno;
5814
5815 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
600e1f95 5816 are passed and returned in a pair of GPRs for ABI compatibility. */
4d4447b5 5817 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4d4447b5 5818 || mode == DCmode || mode == TCmode))
f82f556d 5819 {
b5870bee
AH
5820 int n_words = rs6000_arg_size (mode, type);
5821
f82f556d 5822 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4f011e1e 5823 if (mode == DFmode)
b5870bee 5824 gregno += (1 - gregno) & 1;
f82f556d 5825
b5870bee
AH
5826 /* Multi-reg args are not split between registers and stack. */
5827 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
f82f556d
AH
5828 return NULL_RTX;
5829
5830 return spe_build_register_parallel (mode, gregno);
5831 }
a6c9bed4
AH
5832 if (cum->stdarg)
5833 {
c53bdcf5 5834 int n_words = rs6000_arg_size (mode, type);
a6c9bed4
AH
5835
5836 /* SPE vectors are put in odd registers. */
5837 if (n_words == 2 && (gregno & 1) == 0)
5838 gregno += 1;
5839
5840 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5841 {
5842 rtx r1, r2;
5843 enum machine_mode m = SImode;
5844
5845 r1 = gen_rtx_REG (m, gregno);
5846 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5847 r2 = gen_rtx_REG (m, gregno + 1);
5848 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5849 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5850 }
5851 else
b78d48dd 5852 return NULL_RTX;
a6c9bed4
AH
5853 }
5854 else
5855 {
f82f556d
AH
5856 if (gregno <= GP_ARG_MAX_REG)
5857 return gen_rtx_REG (mode, gregno);
a6c9bed4 5858 else
b78d48dd 5859 return NULL_RTX;
a6c9bed4
AH
5860 }
5861}
5862
0b5383eb
DJ
5863/* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5864 structure between cum->intoffset and bitpos to integer registers. */
594a51fe 5865
0b5383eb 5866static void
bb8df8a6 5867rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
0b5383eb 5868 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
594a51fe 5869{
0b5383eb
DJ
5870 enum machine_mode mode;
5871 unsigned int regno;
5872 unsigned int startbit, endbit;
5873 int this_regno, intregs, intoffset;
5874 rtx reg;
594a51fe 5875
0b5383eb
DJ
5876 if (cum->intoffset == -1)
5877 return;
5878
5879 intoffset = cum->intoffset;
5880 cum->intoffset = -1;
5881
5882 /* If this is the trailing part of a word, try to only load that
5883 much into the register. Otherwise load the whole register. Note
5884 that in the latter case we may pick up unwanted bits. It's not a
5885 problem at the moment but may wish to revisit. */
5886
5887 if (intoffset % BITS_PER_WORD != 0)
594a51fe 5888 {
0b5383eb
DJ
5889 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5890 MODE_INT, 0);
5891 if (mode == BLKmode)
5892 {
5893 /* We couldn't find an appropriate mode, which happens,
5894 e.g., in packed structs when there are 3 bytes to load.
5895 Back intoffset back to the beginning of the word in this
5896 case. */
5897 intoffset = intoffset & -BITS_PER_WORD;
5898 mode = word_mode;
5899 }
5900 }
5901 else
5902 mode = word_mode;
5903
5904 startbit = intoffset & -BITS_PER_WORD;
5905 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5906 intregs = (endbit - startbit) / BITS_PER_WORD;
5907 this_regno = cum->words + intoffset / BITS_PER_WORD;
5908
5909 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
5910 cum->use_stack = 1;
bb8df8a6 5911
0b5383eb
DJ
5912 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
5913 if (intregs <= 0)
5914 return;
5915
5916 intoffset /= BITS_PER_UNIT;
5917 do
5918 {
5919 regno = GP_ARG_MIN_REG + this_regno;
5920 reg = gen_rtx_REG (mode, regno);
5921 rvec[(*k)++] =
5922 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
5923
5924 this_regno += 1;
5925 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
5926 mode = word_mode;
5927 intregs -= 1;
5928 }
5929 while (intregs > 0);
5930}
5931
5932/* Recursive workhorse for the following. */
5933
5934static void
586de218 5935rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
0b5383eb
DJ
5936 HOST_WIDE_INT startbitpos, rtx rvec[],
5937 int *k)
5938{
5939 tree f;
5940
5941 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5942 if (TREE_CODE (f) == FIELD_DECL)
5943 {
5944 HOST_WIDE_INT bitpos = startbitpos;
5945 tree ftype = TREE_TYPE (f);
70fb00df
AP
5946 enum machine_mode mode;
5947 if (ftype == error_mark_node)
5948 continue;
5949 mode = TYPE_MODE (ftype);
0b5383eb
DJ
5950
5951 if (DECL_SIZE (f) != 0
5952 && host_integerp (bit_position (f), 1))
5953 bitpos += int_bit_position (f);
5954
5955 /* ??? FIXME: else assume zero offset. */
5956
5957 if (TREE_CODE (ftype) == RECORD_TYPE)
5958 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
5959 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
594a51fe 5960 {
0b5383eb
DJ
5961#if 0
5962 switch (mode)
594a51fe 5963 {
0b5383eb
DJ
5964 case SCmode: mode = SFmode; break;
5965 case DCmode: mode = DFmode; break;
5966 case TCmode: mode = TFmode; break;
5967 default: break;
594a51fe 5968 }
0b5383eb
DJ
5969#endif
5970 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5971 rvec[(*k)++]
bb8df8a6 5972 = gen_rtx_EXPR_LIST (VOIDmode,
0b5383eb
DJ
5973 gen_rtx_REG (mode, cum->fregno++),
5974 GEN_INT (bitpos / BITS_PER_UNIT));
7393f7f8 5975 if (mode == TFmode || mode == TDmode)
0b5383eb 5976 cum->fregno++;
594a51fe 5977 }
0b5383eb
DJ
5978 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
5979 {
5980 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
5981 rvec[(*k)++]
bb8df8a6
EC
5982 = gen_rtx_EXPR_LIST (VOIDmode,
5983 gen_rtx_REG (mode, cum->vregno++),
0b5383eb
DJ
5984 GEN_INT (bitpos / BITS_PER_UNIT));
5985 }
5986 else if (cum->intoffset == -1)
5987 cum->intoffset = bitpos;
5988 }
5989}
594a51fe 5990
0b5383eb
DJ
5991/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
5992 the register(s) to be used for each field and subfield of a struct
5993 being passed by value, along with the offset of where the
5994 register's value may be found in the block. FP fields go in FP
5995 register, vector fields go in vector registers, and everything
bb8df8a6 5996 else goes in int registers, packed as in memory.
8ff40a74 5997
0b5383eb
DJ
5998 This code is also used for function return values. RETVAL indicates
5999 whether this is the case.
8ff40a74 6000
a4d05547 6001 Much of this is taken from the SPARC V9 port, which has a similar
0b5383eb 6002 calling convention. */
594a51fe 6003
0b5383eb 6004static rtx
586de218 6005rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
0b5383eb
DJ
6006 int named, bool retval)
6007{
6008 rtx rvec[FIRST_PSEUDO_REGISTER];
6009 int k = 1, kbase = 1;
6010 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6011 /* This is a copy; modifications are not visible to our caller. */
6012 CUMULATIVE_ARGS copy_cum = *orig_cum;
6013 CUMULATIVE_ARGS *cum = &copy_cum;
6014
6015 /* Pad to 16 byte boundary if needed. */
6016 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6017 && (cum->words % 2) != 0)
6018 cum->words++;
6019
6020 cum->intoffset = 0;
6021 cum->use_stack = 0;
6022 cum->named = named;
6023
6024 /* Put entries into rvec[] for individual FP and vector fields, and
6025 for the chunks of memory that go in int regs. Note we start at
6026 element 1; 0 is reserved for an indication of using memory, and
6027 may or may not be filled in below. */
6028 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6029 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6030
6031 /* If any part of the struct went on the stack put all of it there.
6032 This hack is because the generic code for
6033 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6034 parts of the struct are not at the beginning. */
6035 if (cum->use_stack)
6036 {
6037 if (retval)
6038 return NULL_RTX; /* doesn't go in registers at all */
6039 kbase = 0;
6040 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6041 }
6042 if (k > 1 || cum->use_stack)
6043 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
594a51fe
SS
6044 else
6045 return NULL_RTX;
6046}
6047
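/* An illustrative sketch of the PARALLEL the recursion above would
   typically build for a darwin64 by-value argument of type
   struct { double d; int i; }:

     (parallel:BLK [(expr_list (reg:DF f1) (const_int 0))
		    (expr_list (reg:DI r4) (const_int 8))])

   i.e. the double goes in the next FP register at offset 0 within the
   block and the trailing int is flushed into an integer register holding
   the word at offset 8.  The register numbers shown assume no argument
   registers are in use yet; they are for illustration only.  */
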
b78d48dd
FJ
6048/* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6049
6050static rtx
ec6376ab 6051rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
b78d48dd 6052{
ec6376ab
AM
6053 int n_units;
6054 int i, k;
6055 rtx rvec[GP_ARG_NUM_REG + 1];
6056
6057 if (align_words >= GP_ARG_NUM_REG)
6058 return NULL_RTX;
6059
6060 n_units = rs6000_arg_size (mode, type);
6061
6062 /* Optimize the simple case where the arg fits in one gpr, except in
6063 the case of BLKmode due to assign_parms assuming that registers are
6064 BITS_PER_WORD wide. */
6065 if (n_units == 0
6066 || (n_units == 1 && mode != BLKmode))
6067 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6068
6069 k = 0;
6070 if (align_words + n_units > GP_ARG_NUM_REG)
6071 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6072 using a magic NULL_RTX component.
79773478
AM
6073 This is not strictly correct. Only some of the arg belongs in
6074 memory, not all of it. However, the normal scheme using
 6075 function_arg_partial_nregs can result in unusual subregs, e.g.
6076 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6077 store the whole arg to memory is often more efficient than code
6078 to store pieces, and we know that space is available in the right
6079 place for the whole arg. */
ec6376ab
AM
6080 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6081
6082 i = 0;
6083 do
36a454e1 6084 {
ec6376ab
AM
6085 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6086 rtx off = GEN_INT (i++ * 4);
6087 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
36a454e1 6088 }
ec6376ab
AM
6089 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6090
6091 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
b78d48dd
FJ
6092}
6093
4697a36c
MM
6094/* Determine where to put an argument to a function.
6095 Value is zero to push the argument on the stack,
6096 or a hard register in which to store the argument.
6097
6098 MODE is the argument's machine mode.
6099 TYPE is the data type of the argument (as a tree).
6100 This is null for libcalls where that information may
6101 not be available.
6102 CUM is a variable of type CUMULATIVE_ARGS which gives info about
0b5383eb
DJ
6103 the preceding args and about the function being called. It is
6104 not modified in this routine.
4697a36c
MM
6105 NAMED is nonzero if this argument is a named parameter
6106 (otherwise it is an extra parameter matching an ellipsis).
6107
6108 On RS/6000 the first eight words of non-FP are normally in registers
6109 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6110 Under V.4, the first 8 FP args are in registers.
6111
6112 If this is floating-point and no prototype is specified, we use
6113 both an FP and integer register (or possibly FP reg and stack). Library
b9599e46 6114 functions (when CALL_LIBCALL is set) always have the proper types for args,
4697a36c 6115 so we can pass the FP value just in one register. emit_library_function
b2d04ecf
AM
6116 doesn't support PARALLEL anyway.
6117
6118 Note that for args passed by reference, function_arg will be called
6119 with MODE and TYPE set to that of the pointer to the arg, not the arg
6120 itself. */
4697a36c 6121
9390387d 6122rtx
f676971a 6123function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
a2369ed3 6124 tree type, int named)
4697a36c 6125{
4cc833b7 6126 enum rs6000_abi abi = DEFAULT_ABI;
4697a36c 6127
a4f6c312
SS
6128 /* Return a marker to indicate whether CR1 needs to set or clear the
6129 bit that V.4 uses to say fp args were passed in registers.
6130 Assume that we don't need the marker for software floating point,
6131 or compiler generated library calls. */
4697a36c
MM
6132 if (mode == VOIDmode)
6133 {
f607bc57 6134 if (abi == ABI_V4
b9599e46 6135 && (cum->call_cookie & CALL_LIBCALL) == 0
c1fa753e
AM
6136 && (cum->stdarg
6137 || (cum->nargs_prototype < 0
6138 && (cum->prototype || TARGET_NO_PROTOTYPE))))
7509c759 6139 {
a3170dc6
AH
6140 /* For the SPE, we need to crxor CR6 always. */
6141 if (TARGET_SPE_ABI)
6142 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6143 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6144 return GEN_INT (cum->call_cookie
6145 | ((cum->fregno == FP_ARG_MIN_REG)
6146 ? CALL_V4_SET_FP_ARGS
6147 : CALL_V4_CLEAR_FP_ARGS));
7509c759 6148 }
4697a36c 6149
7509c759 6150 return GEN_INT (cum->call_cookie);
4697a36c
MM
6151 }
6152
0b5383eb
DJ
6153 if (rs6000_darwin64_abi && mode == BLKmode
6154 && TREE_CODE (type) == RECORD_TYPE)
8ff40a74 6155 {
0b5383eb 6156 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
8ff40a74
SS
6157 if (rslt != NULL_RTX)
6158 return rslt;
6159 /* Else fall through to usual handling. */
6160 }
6161
2858f73a 6162 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
c72d6c26
HP
6163 if (TARGET_64BIT && ! cum->prototype)
6164 {
c4ad648e
AM
6165 /* Vector parameters get passed in vector register
6166 and also in GPRs or memory, in absence of prototype. */
6167 int align_words;
6168 rtx slot;
6169 align_words = (cum->words + 1) & ~1;
6170
6171 if (align_words >= GP_ARG_NUM_REG)
6172 {
6173 slot = NULL_RTX;
6174 }
6175 else
6176 {
6177 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6178 }
6179 return gen_rtx_PARALLEL (mode,
6180 gen_rtvec (2,
6181 gen_rtx_EXPR_LIST (VOIDmode,
6182 slot, const0_rtx),
6183 gen_rtx_EXPR_LIST (VOIDmode,
6184 gen_rtx_REG (mode, cum->vregno),
6185 const0_rtx)));
c72d6c26
HP
6186 }
6187 else
6188 return gen_rtx_REG (mode, cum->vregno);
ad630bef
DE
6189 else if (TARGET_ALTIVEC_ABI
6190 && (ALTIVEC_VECTOR_MODE (mode)
6191 || (type && TREE_CODE (type) == VECTOR_TYPE
6192 && int_size_in_bytes (type) == 16)))
0ac081f6 6193 {
2858f73a 6194 if (named || abi == ABI_V4)
a594a19c 6195 return NULL_RTX;
0ac081f6 6196 else
a594a19c
GK
6197 {
6198 /* Vector parameters to varargs functions under AIX or Darwin
6199 get passed in memory and possibly also in GPRs. */
ec6376ab
AM
6200 int align, align_words, n_words;
6201 enum machine_mode part_mode;
a594a19c
GK
6202
6203 /* Vector parameters must be 16-byte aligned. This places them at
2858f73a
GK
6204 2 mod 4 in terms of words in 32-bit mode, since the parameter
6205 save area starts at offset 24 from the stack. In 64-bit mode,
6206 they just have to start on an even word, since the parameter
6207 save area is 16-byte aligned. */
6208 if (TARGET_32BIT)
4ed78545 6209 align = (2 - cum->words) & 3;
2858f73a
GK
6210 else
6211 align = cum->words & 1;
a594a19c
GK
6212 align_words = cum->words + align;
6213
6214 /* Out of registers? Memory, then. */
6215 if (align_words >= GP_ARG_NUM_REG)
6216 return NULL_RTX;
ec6376ab
AM
6217
6218 if (TARGET_32BIT && TARGET_POWERPC64)
6219 return rs6000_mixed_function_arg (mode, type, align_words);
6220
2858f73a
GK
6221 /* The vector value goes in GPRs. Only the part of the
6222 value in GPRs is reported here. */
ec6376ab
AM
6223 part_mode = mode;
6224 n_words = rs6000_arg_size (mode, type);
6225 if (align_words + n_words > GP_ARG_NUM_REG)
839a4992 6226 /* Fortunately, there are only two possibilities, the value
2858f73a
GK
6227 is either wholly in GPRs or half in GPRs and half not. */
6228 part_mode = DImode;
ec6376ab
AM
6229
6230 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
a594a19c 6231 }
0ac081f6 6232 }
f82f556d
AH
6233 else if (TARGET_SPE_ABI && TARGET_SPE
6234 && (SPE_VECTOR_MODE (mode)
18f63bfa 6235 || (TARGET_E500_DOUBLE && (mode == DFmode
17caeff2
JM
6236 || mode == DCmode
6237 || mode == TFmode
6238 || mode == TCmode))))
a6c9bed4 6239 return rs6000_spe_function_arg (cum, mode, type);
594a51fe 6240
f607bc57 6241 else if (abi == ABI_V4)
4697a36c 6242 {
a3170dc6 6243 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3 6244 && (mode == SFmode || mode == DFmode
7393f7f8 6245 || (mode == TFmode && !TARGET_IEEEQUAD)
e41b2a33 6246 || mode == SDmode || mode == DDmode || mode == TDmode))
4cc833b7 6247 {
2d83f070
JJ
6248 /* _Decimal128 must use an even/odd register pair. This assumes
6249 that the register number is odd when fregno is odd. */
6250 if (mode == TDmode && (cum->fregno % 2) == 1)
7393f7f8
BE
6251 cum->fregno++;
6252
6253 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6254 <= FP_ARG_V4_MAX_REG)
4cc833b7
RH
6255 return gen_rtx_REG (mode, cum->fregno);
6256 else
b78d48dd 6257 return NULL_RTX;
4cc833b7
RH
6258 }
6259 else
6260 {
b2d04ecf 6261 int n_words = rs6000_arg_size (mode, type);
4cc833b7
RH
6262 int gregno = cum->sysv_gregno;
6263
4ed78545
AM
6264 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6265 (r7,r8) or (r9,r10). As does any other 2 word item such
6266 as complex int due to a historical mistake. */
6267 if (n_words == 2)
6268 gregno += (1 - gregno) & 1;
4cc833b7 6269
4ed78545 6270 /* Multi-reg args are not split between registers and stack. */
ec6376ab 6271 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
b78d48dd 6272 return NULL_RTX;
ec6376ab
AM
6273
6274 if (TARGET_32BIT && TARGET_POWERPC64)
6275 return rs6000_mixed_function_arg (mode, type,
6276 gregno - GP_ARG_MIN_REG);
6277 return gen_rtx_REG (mode, gregno);
4cc833b7 6278 }
4697a36c 6279 }
4cc833b7
RH
6280 else
6281 {
294bd182 6282 int align_words = rs6000_parm_start (mode, type, cum->words);
b78d48dd 6283
2d83f070
JJ
6284 /* _Decimal128 must be passed in an even/odd float register pair.
6285 This assumes that the register number is odd when fregno is odd. */
6286 if (mode == TDmode && (cum->fregno % 2) == 1)
6287 cum->fregno++;
6288
2858f73a 6289 if (USE_FP_FOR_ARG_P (cum, mode, type))
4cc833b7 6290 {
ec6376ab
AM
6291 rtx rvec[GP_ARG_NUM_REG + 1];
6292 rtx r;
6293 int k;
c53bdcf5
AM
6294 bool needs_psave;
6295 enum machine_mode fmode = mode;
c53bdcf5
AM
6296 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6297
6298 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6299 {
c53bdcf5
AM
6300 /* Currently, we only ever need one reg here because complex
6301 doubles are split. */
7393f7f8
BE
6302 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6303 && (fmode == TFmode || fmode == TDmode));
ec6376ab 6304
7393f7f8
BE
6305 /* Long double or _Decimal128 split over regs and memory. */
6306 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
c53bdcf5 6307 }
c53bdcf5
AM
6308
6309 /* Do we also need to pass this arg in the parameter save
6310 area? */
6311 needs_psave = (type
6312 && (cum->nargs_prototype <= 0
6313 || (DEFAULT_ABI == ABI_AIX
de17c25f 6314 && TARGET_XL_COMPAT
c53bdcf5
AM
6315 && align_words >= GP_ARG_NUM_REG)));
6316
6317 if (!needs_psave && mode == fmode)
ec6376ab 6318 return gen_rtx_REG (fmode, cum->fregno);
c53bdcf5 6319
ec6376ab 6320 k = 0;
c53bdcf5
AM
6321 if (needs_psave)
6322 {
ec6376ab 6323 /* Describe the part that goes in gprs or the stack.
c53bdcf5 6324 This piece must come first, before the fprs. */
c53bdcf5
AM
6325 if (align_words < GP_ARG_NUM_REG)
6326 {
6327 unsigned long n_words = rs6000_arg_size (mode, type);
ec6376ab
AM
6328
6329 if (align_words + n_words > GP_ARG_NUM_REG
6330 || (TARGET_32BIT && TARGET_POWERPC64))
6331 {
6332 /* If this is partially on the stack, then we only
6333 include the portion actually in registers here. */
6334 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6335 rtx off;
79773478
AM
6336 int i = 0;
6337 if (align_words + n_words > GP_ARG_NUM_REG)
c4ad648e
AM
6338 /* Not all of the arg fits in gprs. Say that it
6339 goes in memory too, using a magic NULL_RTX
6340 component. Also see comment in
6341 rs6000_mixed_function_arg for why the normal
6342 function_arg_partial_nregs scheme doesn't work
6343 in this case. */
6344 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6345 const0_rtx);
ec6376ab
AM
6346 do
6347 {
6348 r = gen_rtx_REG (rmode,
6349 GP_ARG_MIN_REG + align_words);
2e6c9641 6350 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
ec6376ab
AM
6351 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6352 }
6353 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6354 }
6355 else
6356 {
6357 /* The whole arg fits in gprs. */
6358 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6359 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6360 }
c53bdcf5 6361 }
ec6376ab
AM
6362 else
6363 /* It's entirely in memory. */
6364 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
c53bdcf5
AM
6365 }
6366
ec6376ab
AM
6367 /* Describe where this piece goes in the fprs. */
6368 r = gen_rtx_REG (fmode, cum->fregno);
6369 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6370
6371 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4cc833b7
RH
6372 }
6373 else if (align_words < GP_ARG_NUM_REG)
b2d04ecf 6374 {
ec6376ab
AM
6375 if (TARGET_32BIT && TARGET_POWERPC64)
6376 return rs6000_mixed_function_arg (mode, type, align_words);
b2d04ecf 6377
4eeca74f
AM
6378 if (mode == BLKmode)
6379 mode = Pmode;
6380
b2d04ecf
AM
6381 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6382 }
4cc833b7
RH
6383 else
6384 return NULL_RTX;
4697a36c 6385 }
4697a36c
MM
6386}
6387\f
ec6376ab 6388/* For an arg passed partly in registers and partly in memory, this is
fb63c729
AM
6389 the number of bytes passed in registers. For args passed entirely in
6390 registers or entirely in memory, zero. When an arg is described by a
6391 PARALLEL, perhaps using more than one register type, this function
6392 returns the number of bytes used by the first element of the PARALLEL. */
4697a36c 6393
78a52f11
RH
6394static int
6395rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6396 tree type, bool named)
4697a36c 6397{
c53bdcf5 6398 int ret = 0;
ec6376ab 6399 int align_words;
c53bdcf5 6400
f607bc57 6401 if (DEFAULT_ABI == ABI_V4)
4697a36c 6402 return 0;
4697a36c 6403
c53bdcf5
AM
6404 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6405 && cum->nargs_prototype >= 0)
6406 return 0;
6407
0b5383eb
DJ
6408 /* In this complicated case we just disable the partial_nregs code. */
6409 if (rs6000_darwin64_abi && mode == BLKmode
6410 && TREE_CODE (type) == RECORD_TYPE
6411 && int_size_in_bytes (type) > 0)
6412 return 0;
6413
294bd182 6414 align_words = rs6000_parm_start (mode, type, cum->words);
ec6376ab 6415
79773478
AM
6416 if (USE_FP_FOR_ARG_P (cum, mode, type))
6417 {
fb63c729
AM
6418 /* If we are passing this arg in the fixed parameter save area
6419 (gprs or memory) as well as fprs, then this function should
79773478
AM
6420 return the number of partial bytes passed in the parameter
6421 save area rather than partial bytes passed in fprs. */
6422 if (type
6423 && (cum->nargs_prototype <= 0
6424 || (DEFAULT_ABI == ABI_AIX
6425 && TARGET_XL_COMPAT
6426 && align_words >= GP_ARG_NUM_REG)))
6427 return 0;
6428 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6429 > FP_ARG_MAX_REG + 1)
ac7e839c 6430 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
c53bdcf5 6431 else if (cum->nargs_prototype >= 0)
4697a36c
MM
6432 return 0;
6433 }
6434
ec6376ab
AM
6435 if (align_words < GP_ARG_NUM_REG
6436 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ac7e839c 6437 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
78a52f11 6438
c53bdcf5 6439 if (ret != 0 && TARGET_DEBUG_ARG)
78a52f11 6440 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
4697a36c 6441
c53bdcf5 6442 return ret;
4697a36c
MM
6443}
6444\f
6445/* A C expression that indicates when an argument must be passed by
6446 reference. If nonzero for an argument, a copy of that argument is
6447 made in memory and a pointer to the argument is passed instead of
6448 the argument itself. The pointer is passed in whatever way is
6449 appropriate for passing a pointer to that type.
6450
b2d04ecf
AM
6451 Under V.4, aggregates and long double are passed by reference.
6452
6453 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6454 reference unless the AltiVec vector extension ABI is in force.
c8c99a68
DE
6455
6456 As an extension to all ABIs, variable sized types are passed by
6457 reference. */
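/* Some concrete consequences of the rules above (a sketch): under V.4 a
   "struct { int x, y; }" argument, and long double when it is the 128-bit
   IEEE format (TARGET_IEEEQUAD), are passed by reference; a variably sized
   type is passed by reference under every ABI; and on 32-bit targets a
   "vector float" is passed by reference only when the AltiVec ABI is not
   in force.  */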
4697a36c 6458
8cd5a4e0 6459static bool
f676971a 6460rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
586de218 6461 enum machine_mode mode, const_tree type,
bada2eb8 6462 bool named ATTRIBUTE_UNUSED)
4697a36c 6463{
602ea4d3 6464 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
4697a36c
MM
6465 {
6466 if (TARGET_DEBUG_ARG)
bada2eb8
DE
6467 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6468 return 1;
6469 }
6470
6471 if (!type)
6472 return 0;
4697a36c 6473
bada2eb8
DE
6474 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6475 {
6476 if (TARGET_DEBUG_ARG)
6477 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6478 return 1;
6479 }
6480
6481 if (int_size_in_bytes (type) < 0)
6482 {
6483 if (TARGET_DEBUG_ARG)
6484 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6485 return 1;
6486 }
6487
6488 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6489 modes only exist for GCC vector types if -maltivec. */
6490 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6491 {
6492 if (TARGET_DEBUG_ARG)
6493 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
4697a36c
MM
6494 return 1;
6495 }
b693336b
PB
6496
6497 /* Pass synthetic vectors in memory. */
bada2eb8 6498 if (TREE_CODE (type) == VECTOR_TYPE
ad630bef 6499 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
b693336b
PB
6500 {
6501 static bool warned_for_pass_big_vectors = false;
6502 if (TARGET_DEBUG_ARG)
6503 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6504 if (!warned_for_pass_big_vectors)
6505 {
d4ee4d25 6506 warning (0, "GCC vector passed by reference: "
b693336b
PB
6507 "non-standard ABI extension with no compatibility guarantee");
6508 warned_for_pass_big_vectors = true;
6509 }
6510 return 1;
6511 }
6512
b2d04ecf 6513 return 0;
4697a36c 6514}
5985c7a6
FJ
6515
6516static void
2d9db8eb 6517rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5985c7a6
FJ
6518{
6519 int i;
6520 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6521
6522 if (nregs == 0)
6523 return;
6524
c4ad648e 6525 for (i = 0; i < nregs; i++)
5985c7a6 6526 {
9390387d 6527 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
5985c7a6 6528 if (reload_completed)
c4ad648e
AM
6529 {
6530 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6531 tem = NULL_RTX;
6532 else
6533 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
9390387d 6534 i * GET_MODE_SIZE (reg_mode));
c4ad648e 6535 }
5985c7a6
FJ
6536 else
6537 tem = replace_equiv_address (tem, XEXP (tem, 0));
6538
37409796 6539 gcc_assert (tem);
5985c7a6
FJ
6540
6541 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6542 }
6543}
4697a36c
MM
6544\f
6545/* Perform any actions needed for a function that is receiving a
f676971a 6546 variable number of arguments.
4697a36c
MM
6547
6548 CUM is as above.
6549
6550 MODE and TYPE are the mode and type of the current parameter.
6551
6552 PRETEND_SIZE is a variable that should be set to the amount of stack
6553 that must be pushed by the prolog to pretend that our caller pushed
6554 it.
6555
6556 Normally, this macro will push all remaining incoming registers on the
6557 stack and set PRETEND_SIZE to the length of the registers pushed. */
6558
c6e8c921 6559static void
f676971a 6560setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
c4ad648e
AM
6561 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6562 int no_rtl)
4697a36c 6563{
4cc833b7
RH
6564 CUMULATIVE_ARGS next_cum;
6565 int reg_size = TARGET_32BIT ? 4 : 8;
ca5adc63 6566 rtx save_area = NULL_RTX, mem;
4862826d
ILT
6567 int first_reg_offset;
6568 alias_set_type set;
4697a36c 6569
f31bf321 6570 /* Skip the last named argument. */
d34c5b80 6571 next_cum = *cum;
594a51fe 6572 function_arg_advance (&next_cum, mode, type, 1, 0);
4cc833b7 6573
f607bc57 6574 if (DEFAULT_ABI == ABI_V4)
d34c5b80 6575 {
5b667039
JJ
6576 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6577
60e2d0ca 6578 if (! no_rtl)
5b667039
JJ
6579 {
6580 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6581 HOST_WIDE_INT offset = 0;
6582
6583 /* Try to optimize the size of the varargs save area.
6584 The ABI requires that ap.reg_save_area is doubleword
6585 aligned, but we don't need to allocate space for all
 6586	         the bytes, only for those into which we will actually save
 6587	         anything.  */
6588 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6589 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6590 if (TARGET_HARD_FLOAT && TARGET_FPRS
6591 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6592 && cfun->va_list_fpr_size)
6593 {
6594 if (gpr_reg_num)
6595 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6596 * UNITS_PER_FP_WORD;
6597 if (cfun->va_list_fpr_size
6598 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6599 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6600 else
6601 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6602 * UNITS_PER_FP_WORD;
6603 }
6604 if (gpr_reg_num)
6605 {
6606 offset = -((first_reg_offset * reg_size) & ~7);
6607 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6608 {
6609 gpr_reg_num = cfun->va_list_gpr_size;
6610 if (reg_size == 4 && (first_reg_offset & 1))
6611 gpr_reg_num++;
6612 }
6613 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
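	      /* E.g. (a sketch) with reg_size == 4 and first_reg_offset == 3,
	         offset == -((3*4) & ~7) == -8, and with gpr_reg_num == 5,
	         gpr_size == (5*4 + 7) & ~7 == 24.  */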
6614 }
6615 else if (fpr_size)
6616 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6617 * UNITS_PER_FP_WORD
6618 - (int) (GP_ARG_NUM_REG * reg_size);
4cc833b7 6619
5b667039
JJ
6620 if (gpr_size + fpr_size)
6621 {
6622 rtx reg_save_area
6623 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6624 gcc_assert (GET_CODE (reg_save_area) == MEM);
6625 reg_save_area = XEXP (reg_save_area, 0);
6626 if (GET_CODE (reg_save_area) == PLUS)
6627 {
6628 gcc_assert (XEXP (reg_save_area, 0)
6629 == virtual_stack_vars_rtx);
6630 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6631 offset += INTVAL (XEXP (reg_save_area, 1));
6632 }
6633 else
6634 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6635 }
6636
6637 cfun->machine->varargs_save_offset = offset;
6638 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6639 }
4697a36c 6640 }
60e2d0ca 6641 else
4697a36c 6642 {
d34c5b80 6643 first_reg_offset = next_cum.words;
4cc833b7 6644 save_area = virtual_incoming_args_rtx;
4697a36c 6645
fe984136 6646 if (targetm.calls.must_pass_in_stack (mode, type))
c53bdcf5 6647 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4cc833b7 6648 }
4697a36c 6649
dfafc897 6650 set = get_varargs_alias_set ();
9d30f3c1
JJ
6651 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6652 && cfun->va_list_gpr_size)
4cc833b7 6653 {
9d30f3c1
JJ
6654 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6655
6656 if (va_list_gpr_counter_field)
6657 {
6658 /* V4 va_list_gpr_size counts number of registers needed. */
6659 if (nregs > cfun->va_list_gpr_size)
6660 nregs = cfun->va_list_gpr_size;
6661 }
6662 else
6663 {
6664 /* char * va_list instead counts number of bytes needed. */
6665 if (nregs > cfun->va_list_gpr_size / reg_size)
6666 nregs = cfun->va_list_gpr_size / reg_size;
6667 }
6668
dfafc897 6669 mem = gen_rtx_MEM (BLKmode,
c4ad648e 6670 plus_constant (save_area,
13e2e16e
DE
6671 first_reg_offset * reg_size));
6672 MEM_NOTRAP_P (mem) = 1;
ba4828e0 6673 set_mem_alias_set (mem, set);
8ac61af7 6674 set_mem_align (mem, BITS_PER_WORD);
dfafc897 6675
f676971a 6676 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
9d30f3c1 6677 nregs);
4697a36c
MM
6678 }
6679
4697a36c 6680 /* Save FP registers if needed. */
f607bc57 6681 if (DEFAULT_ABI == ABI_V4
a3170dc6
AH
6682 && TARGET_HARD_FLOAT && TARGET_FPRS
6683 && ! no_rtl
9d30f3c1
JJ
6684 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6685 && cfun->va_list_fpr_size)
4697a36c 6686 {
9d30f3c1 6687 int fregno = next_cum.fregno, nregs;
9ebbca7d 6688 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4cc833b7 6689 rtx lab = gen_label_rtx ();
5b667039
JJ
6690 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6691 * UNITS_PER_FP_WORD);
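      /* Layout sketch for the offsets computed above, assuming the 32-bit
         V.4 values (GP_ARG_NUM_REG == 8, reg_size == 4,
         UNITS_PER_FP_WORD == 8): the FP slots start at byte 32 of the
         register save area, right after the eight GPR words, so e.g.
         f3 (fregno == FP_ARG_MIN_REG + 2) is saved at offset
         32 + 2*8 == 48.  */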
4697a36c 6692
c4ad648e
AM
6693 emit_jump_insn
6694 (gen_rtx_SET (VOIDmode,
6695 pc_rtx,
6696 gen_rtx_IF_THEN_ELSE (VOIDmode,
4cc833b7 6697 gen_rtx_NE (VOIDmode, cr1,
c4ad648e 6698 const0_rtx),
39403d82 6699 gen_rtx_LABEL_REF (VOIDmode, lab),
4697a36c
MM
6700 pc_rtx)));
6701
9d30f3c1
JJ
6702 for (nregs = 0;
6703 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
5b667039 6704 fregno++, off += UNITS_PER_FP_WORD, nregs++)
4cc833b7 6705 {
5496b36f 6706 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
13e2e16e 6707 MEM_NOTRAP_P (mem) = 1;
c4ad648e 6708 set_mem_alias_set (mem, set);
94ff898d 6709 set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
dfafc897 6710 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4697a36c 6711 }
4cc833b7
RH
6712
6713 emit_label (lab);
4697a36c 6714 }
4697a36c 6715}
4697a36c 6716
dfafc897 6717/* Create the va_list data type. */
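/* For ABI_V4 the record built below is roughly equivalent to the following
   C declaration (a sketch; the actual fields are created just below):

     struct __va_list_tag {
       unsigned char gpr;          // count of GP arg registers used
       unsigned char fpr;          // count of FP arg registers used
       unsigned short reserved;    // padding, named to silence -Wpadded
       void *overflow_arg_area;    // arguments passed on the stack
       void *reg_save_area;        // start of the register save area
     };

   and va_list is a one-element array of that record.  For the other ABIs
   va_list is simply "char *".  */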
2c4974b7 6718
c35d187f
RH
6719static tree
6720rs6000_build_builtin_va_list (void)
dfafc897 6721{
64c2816f 6722 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4697a36c 6723
9ebbca7d
GK
6724 /* For AIX, prefer 'char *' because that's what the system
6725 header files like. */
f607bc57 6726 if (DEFAULT_ABI != ABI_V4)
9ebbca7d 6727 return build_pointer_type (char_type_node);
dfafc897 6728
f1e639b1 6729 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
bab45a51 6730 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
0f4fd75d 6731
f676971a 6732 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
9ebbca7d 6733 unsigned_char_type_node);
f676971a 6734 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
9ebbca7d 6735 unsigned_char_type_node);
64c2816f
DT
6736 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6737 every user file. */
6738 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6739 short_unsigned_type_node);
dfafc897
FS
6740 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6741 ptr_type_node);
6742 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6743 ptr_type_node);
6744
9d30f3c1
JJ
6745 va_list_gpr_counter_field = f_gpr;
6746 va_list_fpr_counter_field = f_fpr;
6747
dfafc897
FS
6748 DECL_FIELD_CONTEXT (f_gpr) = record;
6749 DECL_FIELD_CONTEXT (f_fpr) = record;
64c2816f 6750 DECL_FIELD_CONTEXT (f_res) = record;
dfafc897
FS
6751 DECL_FIELD_CONTEXT (f_ovf) = record;
6752 DECL_FIELD_CONTEXT (f_sav) = record;
6753
bab45a51
FS
6754 TREE_CHAIN (record) = type_decl;
6755 TYPE_NAME (record) = type_decl;
dfafc897
FS
6756 TYPE_FIELDS (record) = f_gpr;
6757 TREE_CHAIN (f_gpr) = f_fpr;
64c2816f
DT
6758 TREE_CHAIN (f_fpr) = f_res;
6759 TREE_CHAIN (f_res) = f_ovf;
dfafc897
FS
6760 TREE_CHAIN (f_ovf) = f_sav;
6761
6762 layout_type (record);
6763
6764 /* The correct type is an array type of one element. */
6765 return build_array_type (record, build_index_type (size_zero_node));
6766}
6767
6768/* Implement va_start. */
6769
d7bd8aeb 6770static void
a2369ed3 6771rs6000_va_start (tree valist, rtx nextarg)
4697a36c 6772{
dfafc897 6773 HOST_WIDE_INT words, n_gpr, n_fpr;
c566f9bd 6774 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
dfafc897 6775 tree gpr, fpr, ovf, sav, t;
2c4974b7 6776
dfafc897 6777 /* Only SVR4 needs something special. */
f607bc57 6778 if (DEFAULT_ABI != ABI_V4)
dfafc897 6779 {
e5faf155 6780 std_expand_builtin_va_start (valist, nextarg);
dfafc897
FS
6781 return;
6782 }
6783
973a648b 6784 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
dfafc897 6785 f_fpr = TREE_CHAIN (f_gpr);
c566f9bd
DT
6786 f_res = TREE_CHAIN (f_fpr);
6787 f_ovf = TREE_CHAIN (f_res);
dfafc897
FS
6788 f_sav = TREE_CHAIN (f_ovf);
6789
872a65b5 6790 valist = build_va_arg_indirect_ref (valist);
47a25a46 6791 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6792 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6793 f_fpr, NULL_TREE);
6794 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6795 f_ovf, NULL_TREE);
6796 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6797 f_sav, NULL_TREE);
dfafc897
FS
6798
6799 /* Count number of gp and fp argument registers used. */
38173d38
JH
6800 words = crtl->args.info.words;
6801 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
987732e0 6802 GP_ARG_NUM_REG);
38173d38 6803 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
987732e0 6804 FP_ARG_NUM_REG);
dfafc897
FS
6805
6806 if (TARGET_DEBUG_ARG)
4a0a75dd
KG
6807 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6808 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6809 words, n_gpr, n_fpr);
dfafc897 6810
9d30f3c1
JJ
6811 if (cfun->va_list_gpr_size)
6812 {
726a989a 6813 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
47a25a46 6814 build_int_cst (NULL_TREE, n_gpr));
9d30f3c1
JJ
6815 TREE_SIDE_EFFECTS (t) = 1;
6816 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6817 }
58c8adc1 6818
9d30f3c1
JJ
6819 if (cfun->va_list_fpr_size)
6820 {
726a989a 6821 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
47a25a46 6822 build_int_cst (NULL_TREE, n_fpr));
9d30f3c1
JJ
6823 TREE_SIDE_EFFECTS (t) = 1;
6824 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6825 }
dfafc897
FS
6826
6827 /* Find the overflow area. */
6828 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6829 if (words != 0)
5be014d5
AP
6830 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6831 size_int (words * UNITS_PER_WORD));
726a989a 6832 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
dfafc897
FS
6833 TREE_SIDE_EFFECTS (t) = 1;
6834 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6835
9d30f3c1
JJ
6836 /* If there were no va_arg invocations, don't set up the register
6837 save area. */
6838 if (!cfun->va_list_gpr_size
6839 && !cfun->va_list_fpr_size
6840 && n_gpr < GP_ARG_NUM_REG
6841 && n_fpr < FP_ARG_V4_MAX_REG)
6842 return;
6843
dfafc897
FS
6844 /* Find the register save area. */
6845 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5b667039 6846 if (cfun->machine->varargs_save_offset)
5be014d5
AP
6847 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6848 size_int (cfun->machine->varargs_save_offset));
726a989a 6849 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
dfafc897
FS
6850 TREE_SIDE_EFFECTS (t) = 1;
6851 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6852}
6853
6854/* Implement va_arg. */
6855
23a60a04 6856tree
726a989a
RB
6857rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6858 gimple_seq *post_p)
cd3ce9b4 6859{
cd3ce9b4
JM
6860 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6861 tree gpr, fpr, ovf, sav, reg, t, u;
08b0dc1b 6862 int size, rsize, n_reg, sav_ofs, sav_scale;
cd3ce9b4
JM
6863 tree lab_false, lab_over, addr;
6864 int align;
6865 tree ptrtype = build_pointer_type (type);
7393f7f8 6866 int regalign = 0;
726a989a 6867 gimple stmt;
cd3ce9b4 6868
08b0dc1b
RH
6869 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6870 {
6871 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
872a65b5 6872 return build_va_arg_indirect_ref (t);
08b0dc1b
RH
6873 }
6874
cd3ce9b4
JM
6875 if (DEFAULT_ABI != ABI_V4)
6876 {
08b0dc1b 6877 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
cd3ce9b4
JM
6878 {
6879 tree elem_type = TREE_TYPE (type);
6880 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6881 int elem_size = GET_MODE_SIZE (elem_mode);
6882
6883 if (elem_size < UNITS_PER_WORD)
6884 {
23a60a04 6885 tree real_part, imag_part;
726a989a 6886 gimple_seq post = NULL;
cd3ce9b4 6887
23a60a04
JM
6888 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6889 &post);
6890 /* Copy the value into a temporary, lest the formal temporary
6891 be reused out from under us. */
6892 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
726a989a 6893 gimple_seq_add_seq (pre_p, post);
cd3ce9b4 6894
23a60a04
JM
6895 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6896 post_p);
cd3ce9b4 6897
47a25a46 6898 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
cd3ce9b4
JM
6899 }
6900 }
6901
23a60a04 6902 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
cd3ce9b4
JM
6903 }
6904
6905 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6906 f_fpr = TREE_CHAIN (f_gpr);
6907 f_res = TREE_CHAIN (f_fpr);
6908 f_ovf = TREE_CHAIN (f_res);
6909 f_sav = TREE_CHAIN (f_ovf);
6910
872a65b5 6911 valist = build_va_arg_indirect_ref (valist);
47a25a46 6912 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
726a989a
RB
6913 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6914 f_fpr, NULL_TREE);
6915 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6916 f_ovf, NULL_TREE);
6917 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6918 f_sav, NULL_TREE);
cd3ce9b4
JM
6919
6920 size = int_size_in_bytes (type);
6921 rsize = (size + 3) / 4;
6922 align = 1;
6923
08b0dc1b 6924 if (TARGET_HARD_FLOAT && TARGET_FPRS
602ea4d3
JJ
6925 && (TYPE_MODE (type) == SFmode
6926 || TYPE_MODE (type) == DFmode
7393f7f8 6927 || TYPE_MODE (type) == TFmode
e41b2a33 6928 || TYPE_MODE (type) == SDmode
7393f7f8
BE
6929 || TYPE_MODE (type) == DDmode
6930 || TYPE_MODE (type) == TDmode))
cd3ce9b4
JM
6931 {
6932 /* FP args go in FP registers, if present. */
cd3ce9b4 6933 reg = fpr;
602ea4d3 6934 n_reg = (size + 7) / 8;
cd3ce9b4
JM
6935 sav_ofs = 8*4;
6936 sav_scale = 8;
e41b2a33 6937 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
cd3ce9b4
JM
6938 align = 8;
6939 }
6940 else
6941 {
6942 /* Otherwise into GP registers. */
cd3ce9b4
JM
6943 reg = gpr;
6944 n_reg = rsize;
6945 sav_ofs = 0;
6946 sav_scale = 4;
6947 if (n_reg == 2)
6948 align = 8;
6949 }
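  /* With the values chosen above, the in-register address computed further
     down is effectively

       addr = reg_save_area + sav_ofs + old_counter * sav_scale;

     e.g. (a sketch) the third double argument fetched from FPRs has
     fpr == 2, so addr = reg_save_area + 32 + 2*8 == reg_save_area + 48,
     the slot where f3 was spilled by setup_incoming_varargs.  */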
6950
6951 /* Pull the value out of the saved registers.... */
6952
6953 lab_over = NULL;
6954 addr = create_tmp_var (ptr_type_node, "addr");
6955 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
6956
6957 /* AltiVec vectors never go in registers when -mabi=altivec. */
6958 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
6959 align = 16;
6960 else
6961 {
6962 lab_false = create_artificial_label ();
6963 lab_over = create_artificial_label ();
6964
6965 /* Long long and SPE vectors are aligned in the registers.
 6966	 As is any other two-GPR item, such as complex int, due to a
 6967	 historical mistake.  */
6968 u = reg;
602ea4d3 6969 if (n_reg == 2 && reg == gpr)
cd3ce9b4 6970 {
7393f7f8 6971 regalign = 1;
726a989a 6972 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 6973 build_int_cst (TREE_TYPE (reg), n_reg - 1));
726a989a
RB
6974 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
6975 unshare_expr (reg), u);
cd3ce9b4 6976 }
7393f7f8
BE
6977 /* _Decimal128 is passed in even/odd fpr pairs; the stored
6978 reg number is 0 for f1, so we want to make it odd. */
6979 else if (reg == fpr && TYPE_MODE (type) == TDmode)
6980 {
6981 regalign = 1;
726a989a 6982 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
383e91e4 6983 build_int_cst (TREE_TYPE (reg), 1));
726a989a 6984 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7393f7f8 6985 }
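      /* The gimple built next amounts to (a sketch):

           if (reg >= 8 - n_reg + 1)
             goto lab_false;    -- not enough registers left, use overflow area

         e.g. for a two-register item (n_reg == 2) the overflow path is
         taken once the counter reaches 7, i.e. when only one of the eight
         argument registers remains.  */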
cd3ce9b4 6986
95674810 6987 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
cd3ce9b4
JM
6988 t = build2 (GE_EXPR, boolean_type_node, u, t);
6989 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6990 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6991 gimplify_and_add (t, pre_p);
6992
6993 t = sav;
6994 if (sav_ofs)
5be014d5 6995 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
cd3ce9b4 6996
726a989a 6997 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
8fb632eb 6998 build_int_cst (TREE_TYPE (reg), n_reg));
5be014d5
AP
6999 u = fold_convert (sizetype, u);
7000 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7001 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
cd3ce9b4 7002
e41b2a33
PB
7003 /* _Decimal32 varargs are located in the second word of the 64-bit
7004 FP register for 32-bit binaries. */
4f011e1e
JM
7005 if (!TARGET_POWERPC64
7006 && TARGET_HARD_FLOAT && TARGET_FPRS
7007 && TYPE_MODE (type) == SDmode)
e41b2a33
PB
7008 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7009
726a989a 7010 gimplify_assign (addr, t, pre_p);
cd3ce9b4 7011
726a989a 7012 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
cd3ce9b4 7013
726a989a
RB
7014 stmt = gimple_build_label (lab_false);
7015 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4 7016
7393f7f8 7017 if ((n_reg == 2 && !regalign) || n_reg > 2)
cd3ce9b4
JM
7018 {
7019 /* Ensure that we don't find any more args in regs.
7393f7f8 7020	     Alignment has already taken care of the special cases.  */
726a989a 7021 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
cd3ce9b4
JM
7022 }
7023 }
7024
7025 /* ... otherwise out of the overflow area. */
7026
7027 /* Care for on-stack alignment if needed. */
7028 t = ovf;
7029 if (align != 1)
7030 {
5be014d5
AP
7031 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7032 t = fold_convert (sizetype, t);
4a90aeeb 7033 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5be014d5
AP
7034 size_int (-align));
7035 t = fold_convert (TREE_TYPE (ovf), t);
cd3ce9b4
JM
7036 }
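  /* The rounding above is simply t = (ovf + align - 1) & -align; e.g. with
     align == 8 and ovf pointing at byte 20 of the overflow area, the
     argument is fetched from byte 24.  */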
7037 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7038
726a989a 7039 gimplify_assign (unshare_expr (addr), t, pre_p);
cd3ce9b4 7040
5be014d5 7041 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
726a989a 7042 gimplify_assign (unshare_expr (ovf), t, pre_p);
cd3ce9b4
JM
7043
7044 if (lab_over)
7045 {
726a989a
RB
7046 stmt = gimple_build_label (lab_over);
7047 gimple_seq_add_stmt (pre_p, stmt);
cd3ce9b4
JM
7048 }
7049
0cfbc62b
JM
7050 if (STRICT_ALIGNMENT
7051 && (TYPE_ALIGN (type)
7052 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7053 {
7054 /* The value (of type complex double, for example) may not be
7055 aligned in memory in the saved registers, so copy via a
7056 temporary. (This is the same code as used for SPARC.) */
7057 tree tmp = create_tmp_var (type, "va_arg_tmp");
7058 tree dest_addr = build_fold_addr_expr (tmp);
7059
5039610b
SL
7060 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7061 3, dest_addr, addr, size_int (rsize * 4));
0cfbc62b
JM
7062
7063 gimplify_and_add (copy, pre_p);
7064 addr = dest_addr;
7065 }
7066
08b0dc1b 7067 addr = fold_convert (ptrtype, addr);
872a65b5 7068 return build_va_arg_indirect_ref (addr);
cd3ce9b4
JM
7069}
7070
0ac081f6
AH
7071/* Builtins. */
7072
58646b77
PB
7073static void
7074def_builtin (int mask, const char *name, tree type, int code)
7075{
96038623 7076 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
58646b77
PB
7077 {
7078 if (rs6000_builtin_decls[code])
7079 abort ();
7080
7081 rs6000_builtin_decls[code] =
c79efc4d
RÁE
7082 add_builtin_function (name, type, code, BUILT_IN_MD,
7083 NULL, NULL_TREE);
58646b77
PB
7084 }
7085}
0ac081f6 7086
24408032
AH
7087/* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
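/* Each row below is a { target mask, insn code, user-visible name, builtin
   enum } tuple.  Roughly (a sketch; the registration loop lives elsewhere
   in this file), rows whose mask is enabled are handed to def_builtin with
   a function type derived from the insn's operand modes, while rows using
   CODE_FOR_nothing name the overloaded __builtin_vec_* forms that the
   front end resolves to one of the specific builtins.  */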
7088
2212663f 7089static const struct builtin_description bdesc_3arg[] =
24408032
AH
7090{
7091 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7092 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7093 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7094 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7095 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7096 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7097 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7098 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7099 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7100 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
f676971a 7101 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
aba5fb01
NS
7102 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7103 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7104 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7105 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7106 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7107 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7108 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7109 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7110 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7111 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7112 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7113 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
58646b77
PB
7114
7115 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7116 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7117 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7120 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7121 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7122 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7123 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7124 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7125 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7126 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7127 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7128 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7129 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
96038623
DE
7130
7131 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7132 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7133 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7134 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7135 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7136 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7137 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7138 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
49e39588 7139 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
24408032 7140};
2212663f 7141
95385cbb
AH
7142/* DST operations: void foo (void *, const int, const char). */
7143
7144static const struct builtin_description bdesc_dst[] =
7145{
7146 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7147 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7148 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
58646b77
PB
7149 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7150
7151 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7152 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7153 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7154 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
95385cbb
AH
7155};
7156
2212663f 7157/* Simple binary operations: VECc = foo (VECa, VECb). */
24408032 7158
a3170dc6 7159static struct builtin_description bdesc_2arg[] =
0ac081f6 7160{
f18c054f
DB
7161 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7162 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7163 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7164 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
0ac081f6
AH
7165 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7166 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7167 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7168 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7169 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7170 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7171 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
f18c054f 7172 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
aba5fb01 7173 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
0ac081f6
AH
7174 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7175 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7176 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7177 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7178 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7179 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
617e0e1d
DB
7180 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7181 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
0ac081f6
AH
7182 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7183 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7184 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7185 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7186 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7187 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7188 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7189 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7190 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7191 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7192 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7193 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7194 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
617e0e1d
DB
7195 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7196 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
f18c054f
DB
7197 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7198 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
df966bff
AH
7199 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7200 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7201 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7202 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7203 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
0ac081f6
AH
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7207 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
f18c054f
DB
7210 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7211 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7212 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7213 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7214 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7215 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7216 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
0ac081f6
AH
7217 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7218 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7219 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7220 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7221 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7222 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7223 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7224 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
f96bc213 7225 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
f18c054f 7226 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
0ac081f6
AH
7227 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7228 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7229 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
0ac081f6 7230 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
0ac081f6
AH
7231 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7232 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7233 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7234 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7235 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7236 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7237 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7238 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
71d46ca5
MM
7239 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7240 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7241 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
0ac081f6
AH
7242 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7243 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
2212663f
DB
7244 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7245 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7246 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
71d46ca5
MM
7247 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7248 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7249 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7250 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7251 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7252 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
0ac081f6
AH
7253 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7254 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
f18c054f
DB
7255 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7256 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7257 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7258 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
0ac081f6
AH
7259 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7260 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7261 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7262 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7263 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7264 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7265 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7266 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7267 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7268 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7269 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7270 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
f18c054f 7271 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
a3170dc6 7272
58646b77
PB
7273 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7274 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7275 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7276 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7277 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7278 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7279 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7280 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7281 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7282 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7283 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7284 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7285 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7286 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7287 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7288 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7289 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7290 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7291 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7292 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7293 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7294 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7295 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7296 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7297 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7298 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7299 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7300 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7301 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7302 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7303 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7304 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7305 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7306 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7307 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7308 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7309 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7310 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7311 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7312 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7313 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7314 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7315 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7316 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7317 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7318 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7319 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7320 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7321 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7322 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7323 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7324 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7325 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7326 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7327 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7328 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7329 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7330 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7331 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7332 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7333 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7334 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7335 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7336 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7337 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7338 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7339 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7340 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7341 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7342 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7343 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7344 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7345 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7346 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7347 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7348 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7349 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7350 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7351 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7352 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7353 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7354 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7355 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7356 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7357 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7358 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7359 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7360 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7361 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7362 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7363 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7364 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7365 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7366 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7367 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7368 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7369 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7370 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7371 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7372 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7373 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7374 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7375 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7376 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7377 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7378 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7379 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7380 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7381 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7400
96038623
DE
7401 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7402 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7403 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7404 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7405 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7406 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7407 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7408 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7409 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7410 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7411
a3170dc6
AH
7412 /* Place-holder. Leave as first SPE builtin. */
7413 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7414 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7415 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7416 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7417 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7418 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7419 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7420 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7421 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7422 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7423 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7424 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7425 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7426 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7427 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7428 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7429 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7430 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7431 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7432 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7433 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7434 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7435 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7436 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7437 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7438 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7439 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7440 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7441 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7442 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7443 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7444 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7445 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7446 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7447 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7448 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7449 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7450 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7451 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7452 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7453 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7454 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7455 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7456 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7457 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7458 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7459 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7460 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7461 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7462 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7463 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7464 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7465 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7466 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7467 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7468 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7469 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7470 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7471 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7472 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7473 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7474 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7475 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7476 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7477 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7478 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7479 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7480 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7481 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7482 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7483 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7484 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7485 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7486 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
a3170dc6
AH
7487 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7488 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
a3170dc6
AH
7489 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7490 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7491 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7492 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7493 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7494 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7495 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7496 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7497 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7498 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7499 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7500 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7501 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7502 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7503 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7504 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7505 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7506 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7507 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7508 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7509 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7510 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7511 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7512 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7513 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7514 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7515 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7516 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7517 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7518 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7519 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7520 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7521 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7522
7523 /* SPE binary operations expecting a 5-bit unsigned literal. */
7524 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7525
7526 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7527 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7528 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7529 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7530 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7531 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7532 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7533 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7534 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7535 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7536 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7537 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7538 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7539 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7540 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7541 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7542 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7543 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7544 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7545 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7546 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7547 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7548 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7549 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7550 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7551 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7552
7553 /* Place-holder. Leave as last binary SPE builtin. */
58646b77 7554 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
ae4b4a02
AH
7555};
7556
7557/* AltiVec predicates. */
7558
7559struct builtin_description_predicates
7560{
7561 const unsigned int mask;
7562 const enum insn_code icode;
7563 const char *opcode;
7564 const char *const name;
7565 const enum rs6000_builtins code;
7566};
7567
7568static const struct builtin_description_predicates bdesc_altivec_preds[] =
7569{
7570 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7571 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7572 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7573 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7574 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7575 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7576 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7577 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7578 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7579 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7580 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7581 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
58646b77
PB
7582 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7583
7584 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7585 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7586 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
0ac081f6 7587};
24408032 7588
a3170dc6
AH
7589/* SPE predicates. */
7590static struct builtin_description bdesc_spe_predicates[] =
7591{
7592 /* Place-holder. Leave as first. */
7593 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7594 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7595 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7596 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7597 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7598 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7599 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7600 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7601 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7602 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7603 /* Place-holder. Leave as last. */
7604 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7605};
7606
7607/* SPE evsel predicates. */
7608static struct builtin_description bdesc_spe_evsel[] =
7609{
7610 /* Place-holder. Leave as first. */
7611 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7612 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7613 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7614 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7615 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7616 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7617 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7618 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7619 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7620 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7621 /* Place-holder. Leave as last. */
7622 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7623};
7624
96038623
DE
7625/* PAIRED predicates. */
7626static const struct builtin_description bdesc_paired_preds[] =
7627{
7628 /* Place-holder. Leave as first. */
7629 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7630 /* Place-holder. Leave as last. */
7631 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7632};
7633
b6d08ca1 7634/* ABS* operations. */
100c4561
AH
7635
7636static const struct builtin_description bdesc_abs[] =
7637{
7638 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7639 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7640 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7641 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7642 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7643 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7644 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7645};
7646
617e0e1d
DB
7647/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7648 foo (VECa). */
24408032 7649
a3170dc6 7650static struct builtin_description bdesc_1arg[] =
2212663f 7651{
617e0e1d
DB
7652 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7653 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7654 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7655 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7656 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7657 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7658 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7659 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
2212663f
DB
7660 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7661 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7662 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
20e26713
AH
7663 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7664 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7665 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7666 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7667 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7668 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
a3170dc6 7669
58646b77
PB
7670 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7671 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7672 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7673 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7674 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7675 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7676 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7677 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7678 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7679 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7680 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7681 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7682 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7683 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7684 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7685 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7686 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7687 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7688 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7689
a3170dc6
AH
7690 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7691 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7692 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7693 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7694 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7695 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7696 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7697 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7698 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7699 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7700 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7701 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7702 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7703 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7704 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7705 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7706 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7707 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7708 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7709 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7710 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7711 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7712 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7713 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7714 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6a599451 7715 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
a3170dc6
AH
7716 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7717 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7718 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7719 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
a3170dc6
AH
7720
7721 /* Place-holder. Leave as last unary SPE builtin. */
96038623
DE
7722 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7723
7724 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7725 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7726 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7727 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7728 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
2212663f
DB
7729};
7730
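/* Expand a unary builtin: load the single call argument, enforce any
   literal-operand restrictions, and emit the insn given by ICODE,
   returning the result in TARGET.  */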
7731static rtx
5039610b 7732rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
7733{
7734 rtx pat;
5039610b 7735 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7736 rtx op0 = expand_normal (arg0);
2212663f
DB
7737 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7738 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7739
0559cc77
DE
7740 if (icode == CODE_FOR_nothing)
7741 /* Builtin not supported on this processor. */
7742 return 0;
7743
20e26713
AH
7744 /* If we got invalid arguments, bail out before generating bad rtl. */
7745 if (arg0 == error_mark_node)
9a171fcd 7746 return const0_rtx;
20e26713 7747
0559cc77
DE
7748 if (icode == CODE_FOR_altivec_vspltisb
7749 || icode == CODE_FOR_altivec_vspltish
7750 || icode == CODE_FOR_altivec_vspltisw
7751 || icode == CODE_FOR_spe_evsplatfi
7752 || icode == CODE_FOR_spe_evsplati)
b44140e7
AH
7753 {
7754 /* Only allow 5-bit *signed* literals. */
b44140e7 7755 if (GET_CODE (op0) != CONST_INT
afca671b
DP
7756 || INTVAL (op0) > 15
7757 || INTVAL (op0) < -16)
b44140e7
AH
7758 {
7759 error ("argument 1 must be a 5-bit signed literal");
9a171fcd 7760 return const0_rtx;
b44140e7 7761 }
b44140e7
AH
7762 }
7763
c62f2db5 7764 if (target == 0
2212663f
DB
7765 || GET_MODE (target) != tmode
7766 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7767 target = gen_reg_rtx (tmode);
7768
7769 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7770 op0 = copy_to_mode_reg (mode0, op0);
7771
7772 pat = GEN_FCN (icode) (target, op0);
7773 if (! pat)
7774 return 0;
7775 emit_insn (pat);
0ac081f6 7776
2212663f
DB
7777 return target;
7778}
ae4b4a02 7779
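/* Expand an AltiVec absolute-value builtin.  These patterns need two
   scratch registers of the operand mode in addition to the target.  */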
100c4561 7780static rtx
5039610b 7781altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
100c4561
AH
7782{
7783 rtx pat, scratch1, scratch2;
5039610b 7784 tree arg0 = CALL_EXPR_ARG (exp, 0);
84217346 7785 rtx op0 = expand_normal (arg0);
100c4561
AH
7786 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7787 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7788
7789 /* If we have invalid arguments, bail out before generating bad rtl. */
7790 if (arg0 == error_mark_node)
9a171fcd 7791 return const0_rtx;
100c4561
AH
7792
7793 if (target == 0
7794 || GET_MODE (target) != tmode
7795 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7796 target = gen_reg_rtx (tmode);
7797
7798 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7799 op0 = copy_to_mode_reg (mode0, op0);
7800
7801 scratch1 = gen_reg_rtx (mode0);
7802 scratch2 = gen_reg_rtx (mode0);
7803
7804 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7805 if (! pat)
7806 return 0;
7807 emit_insn (pat);
7808
7809 return target;
7810}
7811
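/* Expand a binary builtin: load both call arguments, check the
   unsigned-literal restriction of the immediate forms, and emit the
   insn given by ICODE.  */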
0ac081f6 7812static rtx
5039610b 7813rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
0ac081f6
AH
7814{
7815 rtx pat;
5039610b
SL
7816 tree arg0 = CALL_EXPR_ARG (exp, 0);
7817 tree arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
7818 rtx op0 = expand_normal (arg0);
7819 rtx op1 = expand_normal (arg1);
0ac081f6
AH
7820 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7821 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7822 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7823
0559cc77
DE
7824 if (icode == CODE_FOR_nothing)
7825 /* Builtin not supported on this processor. */
7826 return 0;
7827
20e26713
AH
7828 /* If we got invalid arguments, bail out before generating bad rtl. */
7829 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7830 return const0_rtx;
20e26713 7831
0559cc77
DE
7832 if (icode == CODE_FOR_altivec_vcfux
7833 || icode == CODE_FOR_altivec_vcfsx
7834 || icode == CODE_FOR_altivec_vctsxs
7835 || icode == CODE_FOR_altivec_vctuxs
7836 || icode == CODE_FOR_altivec_vspltb
7837 || icode == CODE_FOR_altivec_vsplth
7838 || icode == CODE_FOR_altivec_vspltw
7839 || icode == CODE_FOR_spe_evaddiw
7840 || icode == CODE_FOR_spe_evldd
7841 || icode == CODE_FOR_spe_evldh
7842 || icode == CODE_FOR_spe_evldw
7843 || icode == CODE_FOR_spe_evlhhesplat
7844 || icode == CODE_FOR_spe_evlhhossplat
7845 || icode == CODE_FOR_spe_evlhhousplat
7846 || icode == CODE_FOR_spe_evlwhe
7847 || icode == CODE_FOR_spe_evlwhos
7848 || icode == CODE_FOR_spe_evlwhou
7849 || icode == CODE_FOR_spe_evlwhsplat
7850 || icode == CODE_FOR_spe_evlwwsplat
7851 || icode == CODE_FOR_spe_evrlwi
7852 || icode == CODE_FOR_spe_evslwi
7853 || icode == CODE_FOR_spe_evsrwis
f5119d10 7854 || icode == CODE_FOR_spe_evsubifw
0559cc77 7855 || icode == CODE_FOR_spe_evsrwiu)
b44140e7
AH
7856 {
7857 /* Only allow 5-bit unsigned literals. */
8bb418a3 7858 STRIP_NOPS (arg1);
b44140e7
AH
7859 if (TREE_CODE (arg1) != INTEGER_CST
7860 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7861 {
7862 error ("argument 2 must be a 5-bit unsigned literal");
9a171fcd 7863 return const0_rtx;
b44140e7 7864 }
b44140e7
AH
7865 }
7866
c62f2db5 7867 if (target == 0
0ac081f6
AH
7868 || GET_MODE (target) != tmode
7869 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7870 target = gen_reg_rtx (tmode);
7871
7872 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7873 op0 = copy_to_mode_reg (mode0, op0);
7874 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7875 op1 = copy_to_mode_reg (mode1, op1);
7876
7877 pat = GEN_FCN (icode) (target, op0, op1);
7878 if (! pat)
7879 return 0;
7880 emit_insn (pat);
7881
7882 return target;
7883}
6525c0e7 7884
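/* Expand an AltiVec predicate builtin (the vec_all_ and vec_any_
   forms): emit the comparison insn given by ICODE/OPCODE and then read
   the requested bit of CR6 into TARGET.  */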
ae4b4a02 7885static rtx
f676971a 7886altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5039610b 7887 tree exp, rtx target)
ae4b4a02
AH
7888{
7889 rtx pat, scratch;
5039610b
SL
7890 tree cr6_form = CALL_EXPR_ARG (exp, 0);
7891 tree arg0 = CALL_EXPR_ARG (exp, 1);
7892 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
7893 rtx op0 = expand_normal (arg0);
7894 rtx op1 = expand_normal (arg1);
ae4b4a02
AH
7895 enum machine_mode tmode = SImode;
7896 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7897 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7898 int cr6_form_int;
7899
7900 if (TREE_CODE (cr6_form) != INTEGER_CST)
7901 {
7902 error ("argument 1 of __builtin_altivec_predicate must be a constant");
9a171fcd 7903 return const0_rtx;
ae4b4a02
AH
7904 }
7905 else
7906 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
7907
37409796 7908 gcc_assert (mode0 == mode1);
ae4b4a02
AH
7909
7910 /* If we have invalid arguments, bail out before generating bad rtl. */
7911 if (arg0 == error_mark_node || arg1 == error_mark_node)
9a171fcd 7912 return const0_rtx;
ae4b4a02
AH
7913
7914 if (target == 0
7915 || GET_MODE (target) != tmode
7916 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7917 target = gen_reg_rtx (tmode);
7918
7919 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7920 op0 = copy_to_mode_reg (mode0, op0);
7921 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7922 op1 = copy_to_mode_reg (mode1, op1);
7923
7924 scratch = gen_reg_rtx (mode0);
7925
7926 pat = GEN_FCN (icode) (scratch, op0, op1,
f1c25d3b 7927 gen_rtx_SYMBOL_REF (Pmode, opcode));
ae4b4a02
AH
7928 if (! pat)
7929 return 0;
7930 emit_insn (pat);
7931
7932 /* The vec_any* and vec_all* predicates use the same opcodes for two
7933 different operations, but the bits in CR6 will be different
7934 depending on what information we want. So we have to play tricks
7935 with CR6 to get the right bits out.
7936
7937 If you think this is disgusting, look at the specs for the
7938 AltiVec predicates. */
7939
c4ad648e
AM
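  /* cr6_form_int selects which CR6 test to emit: 0 and 1 use the
     test-for-zero patterns (direct and reversed), 2 and 3 use the
     test-for-lt patterns (direct and reversed).  */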
7940 switch (cr6_form_int)
7941 {
7942 case 0:
7943 emit_insn (gen_cr6_test_for_zero (target));
7944 break;
7945 case 1:
7946 emit_insn (gen_cr6_test_for_zero_reverse (target));
7947 break;
7948 case 2:
7949 emit_insn (gen_cr6_test_for_lt (target));
7950 break;
7951 case 3:
7952 emit_insn (gen_cr6_test_for_lt_reverse (target));
7953 break;
7954 default:
7955 error ("argument 1 of __builtin_altivec_predicate is out of range");
7956 break;
7957 }
ae4b4a02
AH
7958
7959 return target;
7960}
7961
96038623
DE
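/* Expand a PAIRED load builtin: build the memory address from the
   offset and base arguments (reg+reg, or reg-indirect when the offset
   is zero) and emit the load insn given by ICODE.  */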
7962static rtx
7963paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
7964{
7965 rtx pat, addr;
7966 tree arg0 = CALL_EXPR_ARG (exp, 0);
7967 tree arg1 = CALL_EXPR_ARG (exp, 1);
7968 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7969 enum machine_mode mode0 = Pmode;
7970 enum machine_mode mode1 = Pmode;
7971 rtx op0 = expand_normal (arg0);
7972 rtx op1 = expand_normal (arg1);
7973
7974 if (icode == CODE_FOR_nothing)
7975 /* Builtin not supported on this processor. */
7976 return 0;
7977
7978 /* If we got invalid arguments, bail out before generating bad rtl. */
7979 if (arg0 == error_mark_node || arg1 == error_mark_node)
7980 return const0_rtx;
7981
7982 if (target == 0
7983 || GET_MODE (target) != tmode
7984 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7985 target = gen_reg_rtx (tmode);
7986
7987 op1 = copy_to_mode_reg (mode1, op1);
7988
7989 if (op0 == const0_rtx)
7990 {
7991 addr = gen_rtx_MEM (tmode, op1);
7992 }
7993 else
7994 {
7995 op0 = copy_to_mode_reg (mode0, op0);
7996 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
7997 }
7998
7999 pat = GEN_FCN (icode) (target, addr);
8000
8001 if (! pat)
8002 return 0;
8003 emit_insn (pat);
8004
8005 return target;
8006}
8007
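/* Expand an AltiVec load builtin (lvx, lvxl, lvsl, lvsr, lvebx and
   friends): same address formation as the PAIRED case above.  */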
b4a62fa0 8008static rtx
5039610b 8009altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
b4a62fa0
SB
8010{
8011 rtx pat, addr;
5039610b
SL
8012 tree arg0 = CALL_EXPR_ARG (exp, 0);
8013 tree arg1 = CALL_EXPR_ARG (exp, 1);
b4a62fa0
SB
8014 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8015 enum machine_mode mode0 = Pmode;
8016 enum machine_mode mode1 = Pmode;
84217346
MD
8017 rtx op0 = expand_normal (arg0);
8018 rtx op1 = expand_normal (arg1);
b4a62fa0
SB
8019
8020 if (icode == CODE_FOR_nothing)
8021 /* Builtin not supported on this processor. */
8022 return 0;
8023
8024 /* If we got invalid arguments, bail out before generating bad rtl. */
8025 if (arg0 == error_mark_node || arg1 == error_mark_node)
8026 return const0_rtx;
8027
8028 if (target == 0
8029 || GET_MODE (target) != tmode
8030 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8031 target = gen_reg_rtx (tmode);
8032
f676971a 8033 op1 = copy_to_mode_reg (mode1, op1);
b4a62fa0
SB
8034
8035 if (op0 == const0_rtx)
8036 {
8037 addr = gen_rtx_MEM (tmode, op1);
8038 }
8039 else
8040 {
8041 op0 = copy_to_mode_reg (mode0, op0);
8042 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8043 }
8044
8045 pat = GEN_FCN (icode) (target, addr);
8046
8047 if (! pat)
8048 return 0;
8049 emit_insn (pat);
8050
8051 return target;
8052}
8053
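/* Expand an SPE store builtin.  The insn given by ICODE consumes the
   three call arguments and produces no result, so NULL_RTX is
   returned.  */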
61bea3b0 8054static rtx
5039610b 8055spe_expand_stv_builtin (enum insn_code icode, tree exp)
61bea3b0 8056{
5039610b
SL
8057 tree arg0 = CALL_EXPR_ARG (exp, 0);
8058 tree arg1 = CALL_EXPR_ARG (exp, 1);
8059 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8060 rtx op0 = expand_normal (arg0);
8061 rtx op1 = expand_normal (arg1);
8062 rtx op2 = expand_normal (arg2);
61bea3b0
AH
8063 rtx pat;
8064 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8065 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8066 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8067
8068 /* Invalid arguments; bail out before generating bad rtl. */
8069 if (arg0 == error_mark_node
8070 || arg1 == error_mark_node
8071 || arg2 == error_mark_node)
8072 return const0_rtx;
8073
8074 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8075 op0 = copy_to_mode_reg (mode2, op0);
8076 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8077 op1 = copy_to_mode_reg (mode0, op1);
8078 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8079 op2 = copy_to_mode_reg (mode1, op2);
8080
8081 pat = GEN_FCN (icode) (op1, op2, op0);
8082 if (pat)
8083 emit_insn (pat);
8084 return NULL_RTX;
8085}
8086
96038623
DE
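/* Expand a PAIRED store builtin: the first argument is the value to
   store, the remaining two form the effective address.  */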
8087static rtx
8088paired_expand_stv_builtin (enum insn_code icode, tree exp)
8089{
8090 tree arg0 = CALL_EXPR_ARG (exp, 0);
8091 tree arg1 = CALL_EXPR_ARG (exp, 1);
8092 tree arg2 = CALL_EXPR_ARG (exp, 2);
8093 rtx op0 = expand_normal (arg0);
8094 rtx op1 = expand_normal (arg1);
8095 rtx op2 = expand_normal (arg2);
8096 rtx pat, addr;
8097 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8098 enum machine_mode mode1 = Pmode;
8099 enum machine_mode mode2 = Pmode;
8100
8101 /* Invalid arguments; bail out before generating bad rtl. */
8102 if (arg0 == error_mark_node
8103 || arg1 == error_mark_node
8104 || arg2 == error_mark_node)
8105 return const0_rtx;
8106
8107 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8108 op0 = copy_to_mode_reg (tmode, op0);
8109
8110 op2 = copy_to_mode_reg (mode2, op2);
8111
8112 if (op1 == const0_rtx)
8113 {
8114 addr = gen_rtx_MEM (tmode, op2);
8115 }
8116 else
8117 {
8118 op1 = copy_to_mode_reg (mode1, op1);
8119 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8120 }
8121
8122 pat = GEN_FCN (icode) (addr, op0);
8123 if (pat)
8124 emit_insn (pat);
8125 return NULL_RTX;
8126}
8127
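/* Expand an AltiVec store builtin (stvx, stvebx and friends): the
   first argument is the value to store, the other two form the
   effective address.  */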
6525c0e7 8128static rtx
5039610b 8129altivec_expand_stv_builtin (enum insn_code icode, tree exp)
6525c0e7 8130{
5039610b
SL
8131 tree arg0 = CALL_EXPR_ARG (exp, 0);
8132 tree arg1 = CALL_EXPR_ARG (exp, 1);
8133 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8134 rtx op0 = expand_normal (arg0);
8135 rtx op1 = expand_normal (arg1);
8136 rtx op2 = expand_normal (arg2);
b4a62fa0
SB
8137 rtx pat, addr;
8138 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8139 enum machine_mode mode1 = Pmode;
8140 enum machine_mode mode2 = Pmode;
6525c0e7
AH
8141
8142 /* Invalid arguments; bail out before generating bad rtl. */
8143 if (arg0 == error_mark_node
8144 || arg1 == error_mark_node
8145 || arg2 == error_mark_node)
9a171fcd 8146 return const0_rtx;
6525c0e7 8147
b4a62fa0
SB
8148 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8149 op0 = copy_to_mode_reg (tmode, op0);
8150
f676971a 8151 op2 = copy_to_mode_reg (mode2, op2);
b4a62fa0
SB
8152
8153 if (op1 == const0_rtx)
8154 {
8155 addr = gen_rtx_MEM (tmode, op2);
8156 }
8157 else
8158 {
8159 op1 = copy_to_mode_reg (mode1, op1);
8160 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8161 }
6525c0e7 8162
b4a62fa0 8163 pat = GEN_FCN (icode) (addr, op0);
6525c0e7
AH
8164 if (pat)
8165 emit_insn (pat);
8166 return NULL_RTX;
8167}
8168
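/* Expand a ternary builtin: load the three call arguments, check the
   4-bit literal restriction of the vsldoi forms, and emit the insn
   given by ICODE.  */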
2212663f 8169static rtx
5039610b 8170rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
2212663f
DB
8171{
8172 rtx pat;
5039610b
SL
8173 tree arg0 = CALL_EXPR_ARG (exp, 0);
8174 tree arg1 = CALL_EXPR_ARG (exp, 1);
8175 tree arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8176 rtx op0 = expand_normal (arg0);
8177 rtx op1 = expand_normal (arg1);
8178 rtx op2 = expand_normal (arg2);
2212663f
DB
8179 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8180 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8181 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8182 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
0ac081f6 8183
774b5662
DE
8184 if (icode == CODE_FOR_nothing)
8185 /* Builtin not supported on this processor. */
8186 return 0;
8187
20e26713
AH
8188 /* If we got invalid arguments, bail out before generating bad rtl. */
8189 if (arg0 == error_mark_node
8190 || arg1 == error_mark_node
8191 || arg2 == error_mark_node)
9a171fcd 8192 return const0_rtx;
20e26713 8193
aba5fb01
NS
8194 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8195 || icode == CODE_FOR_altivec_vsldoi_v4si
8196 || icode == CODE_FOR_altivec_vsldoi_v8hi
8197 || icode == CODE_FOR_altivec_vsldoi_v16qi)
b44140e7
AH
8198 {
8199 /* Only allow 4-bit unsigned literals. */
8bb418a3 8200 STRIP_NOPS (arg2);
b44140e7
AH
8201 if (TREE_CODE (arg2) != INTEGER_CST
8202 || TREE_INT_CST_LOW (arg2) & ~0xf)
8203 {
8204 error ("argument 3 must be a 4-bit unsigned literal");
e3277ffb 8205 return const0_rtx;
b44140e7 8206 }
b44140e7
AH
8207 }
8208
c62f2db5 8209 if (target == 0
2212663f
DB
8210 || GET_MODE (target) != tmode
8211 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8212 target = gen_reg_rtx (tmode);
8213
8214 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8215 op0 = copy_to_mode_reg (mode0, op0);
8216 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8217 op1 = copy_to_mode_reg (mode1, op1);
8218 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8219 op2 = copy_to_mode_reg (mode2, op2);
8220
49e39588
RE
8221 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8222 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8223 else
8224 pat = GEN_FCN (icode) (target, op0, op1, op2);
2212663f
DB
8225 if (! pat)
8226 return 0;
8227 emit_insn (pat);
8228
8229 return target;
8230}
92898235 8231
3a9b8c7e 8232/* Expand the lvx builtins. */
0ac081f6 8233static rtx
a2369ed3 8234altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
0ac081f6 8235{
5039610b 8236 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
0ac081f6 8237 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3a9b8c7e
AH
8238 tree arg0;
8239 enum machine_mode tmode, mode0;
7c3abc73 8240 rtx pat, op0;
3a9b8c7e 8241 enum insn_code icode;
92898235 8242
0ac081f6
AH
8243 switch (fcode)
8244 {
f18c054f 8245 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
81466555 8246 icode = CODE_FOR_altivec_lvx_v16qi;
3a9b8c7e 8247 break;
f18c054f 8248 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
81466555 8249 icode = CODE_FOR_altivec_lvx_v8hi;
3a9b8c7e
AH
8250 break;
8251 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
81466555 8252 icode = CODE_FOR_altivec_lvx_v4si;
3a9b8c7e
AH
8253 break;
8254 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
81466555 8255 icode = CODE_FOR_altivec_lvx_v4sf;
3a9b8c7e
AH
8256 break;
8257 default:
8258 *expandedp = false;
8259 return NULL_RTX;
8260 }
0ac081f6 8261
3a9b8c7e 8262 *expandedp = true;
f18c054f 8263
5039610b 8264 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8265 op0 = expand_normal (arg0);
3a9b8c7e
AH
8266 tmode = insn_data[icode].operand[0].mode;
8267 mode0 = insn_data[icode].operand[1].mode;
f18c054f 8268
3a9b8c7e
AH
8269 if (target == 0
8270 || GET_MODE (target) != tmode
8271 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8272 target = gen_reg_rtx (tmode);
24408032 8273
3a9b8c7e
AH
8274 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8275 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
f18c054f 8276
3a9b8c7e
AH
8277 pat = GEN_FCN (icode) (target, op0);
8278 if (! pat)
8279 return 0;
8280 emit_insn (pat);
8281 return target;
8282}
f18c054f 8283
3a9b8c7e
AH
8284/* Expand the stvx builtins. */
8285static rtx
f676971a 8286altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8287 bool *expandedp)
3a9b8c7e 8288{
5039610b 8289 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8290 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8291 tree arg0, arg1;
8292 enum machine_mode mode0, mode1;
7c3abc73 8293 rtx pat, op0, op1;
3a9b8c7e 8294 enum insn_code icode;
f18c054f 8295
3a9b8c7e
AH
8296 switch (fcode)
8297 {
8298 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
81466555 8299 icode = CODE_FOR_altivec_stvx_v16qi;
3a9b8c7e
AH
8300 break;
8301 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
81466555 8302 icode = CODE_FOR_altivec_stvx_v8hi;
3a9b8c7e
AH
8303 break;
8304 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
81466555 8305 icode = CODE_FOR_altivec_stvx_v4si;
3a9b8c7e
AH
8306 break;
8307 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
81466555 8308 icode = CODE_FOR_altivec_stvx_v4sf;
3a9b8c7e
AH
8309 break;
8310 default:
8311 *expandedp = false;
8312 return NULL_RTX;
8313 }
24408032 8314
5039610b
SL
8315 arg0 = CALL_EXPR_ARG (exp, 0);
8316 arg1 = CALL_EXPR_ARG (exp, 1);
84217346
MD
8317 op0 = expand_normal (arg0);
8318 op1 = expand_normal (arg1);
3a9b8c7e
AH
8319 mode0 = insn_data[icode].operand[0].mode;
8320 mode1 = insn_data[icode].operand[1].mode;
f18c054f 8321
3a9b8c7e
AH
8322 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8323 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8324 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8325 op1 = copy_to_mode_reg (mode1, op1);
f18c054f 8326
3a9b8c7e
AH
8327 pat = GEN_FCN (icode) (op0, op1);
8328 if (pat)
8329 emit_insn (pat);
f18c054f 8330
3a9b8c7e
AH
8331 *expandedp = true;
8332 return NULL_RTX;
8333}
f18c054f 8334
3a9b8c7e
AH
8335/* Expand the dst builtins. */
8336static rtx
f676971a 8337altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
a2369ed3 8338 bool *expandedp)
3a9b8c7e 8339{
5039610b 8340 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3a9b8c7e
AH
8341 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8342 tree arg0, arg1, arg2;
8343 enum machine_mode mode0, mode1, mode2;
7c3abc73 8344 rtx pat, op0, op1, op2;
586de218 8345 const struct builtin_description *d;
a3170dc6 8346 size_t i;
f18c054f 8347
3a9b8c7e 8348 *expandedp = false;
f18c054f 8349
3a9b8c7e 8350 /* Handle DST variants. */
586de218 8351 d = bdesc_dst;
3a9b8c7e
AH
8352 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8353 if (d->code == fcode)
8354 {
5039610b
SL
8355 arg0 = CALL_EXPR_ARG (exp, 0);
8356 arg1 = CALL_EXPR_ARG (exp, 1);
8357 arg2 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8358 op0 = expand_normal (arg0);
8359 op1 = expand_normal (arg1);
8360 op2 = expand_normal (arg2);
3a9b8c7e
AH
8361 mode0 = insn_data[d->icode].operand[0].mode;
8362 mode1 = insn_data[d->icode].operand[1].mode;
8363 mode2 = insn_data[d->icode].operand[2].mode;
24408032 8364
3a9b8c7e
AH
8365 /* Invalid arguments, bail out before generating bad rtl. */
8366 if (arg0 == error_mark_node
8367 || arg1 == error_mark_node
8368 || arg2 == error_mark_node)
8369 return const0_rtx;
f18c054f 8370
86e7df90 8371 *expandedp = true;
8bb418a3 8372 STRIP_NOPS (arg2);
3a9b8c7e
AH
8373 if (TREE_CODE (arg2) != INTEGER_CST
8374 || TREE_INT_CST_LOW (arg2) & ~0x3)
8375 {
9e637a26 8376 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
3a9b8c7e
AH
8377 return const0_rtx;
8378 }
f18c054f 8379
3a9b8c7e 8380 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
666158b9 8381 op0 = copy_to_mode_reg (Pmode, op0);
3a9b8c7e
AH
8382 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8383 op1 = copy_to_mode_reg (mode1, op1);
24408032 8384
3a9b8c7e
AH
8385 pat = GEN_FCN (d->icode) (op0, op1, op2);
8386 if (pat != 0)
8387 emit_insn (pat);
f18c054f 8388
3a9b8c7e
AH
8389 return NULL_RTX;
8390 }
f18c054f 8391
3a9b8c7e
AH
8392 return NULL_RTX;
8393}
24408032 8394
7a4eca66
DE
8395/* Expand vec_init builtin. */
8396static rtx
5039610b 8397altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
7a4eca66
DE
8398{
8399 enum machine_mode tmode = TYPE_MODE (type);
8400 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8401 int i, n_elt = GET_MODE_NUNITS (tmode);
8402 rtvec v = rtvec_alloc (n_elt);
8403
8404 gcc_assert (VECTOR_MODE_P (tmode));
5039610b 8405 gcc_assert (n_elt == call_expr_nargs (exp));
982afe02 8406
5039610b 8407 for (i = 0; i < n_elt; ++i)
7a4eca66 8408 {
5039610b 8409 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
7a4eca66
DE
8410 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8411 }
8412
7a4eca66
DE
8413 if (!target || !register_operand (target, tmode))
8414 target = gen_reg_rtx (tmode);
8415
8416 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8417 return target;
8418}
8419
8420/* Return the integer constant in ARG. Constrain it to be in the range
8421 of the subparts of VEC_TYPE; issue an error if not. */
8422
8423static int
8424get_element_number (tree vec_type, tree arg)
8425{
8426 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8427
8428 if (!host_integerp (arg, 1)
8429 || (elt = tree_low_cst (arg, 1), elt > max))
8430 {
8431 error ("selector must be an integer constant in the range 0..%wi", max);
8432 return 0;
8433 }
8434
8435 return elt;
8436}
8437
8438/* Expand vec_set builtin. */
8439static rtx
5039610b 8440altivec_expand_vec_set_builtin (tree exp)
7a4eca66
DE
8441{
8442 enum machine_mode tmode, mode1;
8443 tree arg0, arg1, arg2;
8444 int elt;
8445 rtx op0, op1;
8446
5039610b
SL
8447 arg0 = CALL_EXPR_ARG (exp, 0);
8448 arg1 = CALL_EXPR_ARG (exp, 1);
8449 arg2 = CALL_EXPR_ARG (exp, 2);
7a4eca66
DE
8450
8451 tmode = TYPE_MODE (TREE_TYPE (arg0));
8452 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8453 gcc_assert (VECTOR_MODE_P (tmode));
8454
8455 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8456 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8457 elt = get_element_number (TREE_TYPE (arg0), arg2);
8458
8459 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8460 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8461
8462 op0 = force_reg (tmode, op0);
8463 op1 = force_reg (mode1, op1);
8464
8465 rs6000_expand_vector_set (op0, op1, elt);
8466
8467 return op0;
8468}
8469
8470/* Expand vec_ext builtin. */
8471static rtx
5039610b 8472altivec_expand_vec_ext_builtin (tree exp, rtx target)
7a4eca66
DE
8473{
8474 enum machine_mode tmode, mode0;
8475 tree arg0, arg1;
8476 int elt;
8477 rtx op0;
8478
5039610b
SL
8479 arg0 = CALL_EXPR_ARG (exp, 0);
8480 arg1 = CALL_EXPR_ARG (exp, 1);
7a4eca66 8481
84217346 8482 op0 = expand_normal (arg0);
7a4eca66
DE
8483 elt = get_element_number (TREE_TYPE (arg0), arg1);
8484
8485 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8486 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8487 gcc_assert (VECTOR_MODE_P (mode0));
8488
8489 op0 = force_reg (mode0, op0);
8490
8491 if (optimize || !target || !register_operand (target, tmode))
8492 target = gen_reg_rtx (tmode);
8493
8494 rs6000_expand_vector_extract (target, op0, elt);
8495
8496 return target;
8497}
8498
3a9b8c7e
AH
8499/* Expand the builtin in EXP and store the result in TARGET. Store
8500 true in *EXPANDEDP if we found a builtin to expand. */
8501static rtx
a2369ed3 8502altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
3a9b8c7e 8503{
586de218
KG
8504 const struct builtin_description *d;
8505 const struct builtin_description_predicates *dp;
3a9b8c7e
AH
8506 size_t i;
8507 enum insn_code icode;
5039610b 8508 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7c3abc73
AH
8509 tree arg0;
8510 rtx op0, pat;
8511 enum machine_mode tmode, mode0;
3a9b8c7e 8512 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
0ac081f6 8513
58646b77
PB
8514 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8515 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8516 {
8517 *expandedp = true;
ea40ba9c 8518 error ("unresolved overload for Altivec builtin %qF", fndecl);
58646b77
PB
8519 return const0_rtx;
8520 }
8521
3a9b8c7e
AH
8522 target = altivec_expand_ld_builtin (exp, target, expandedp);
8523 if (*expandedp)
8524 return target;
0ac081f6 8525
3a9b8c7e
AH
8526 target = altivec_expand_st_builtin (exp, target, expandedp);
8527 if (*expandedp)
8528 return target;
8529
8530 target = altivec_expand_dst_builtin (exp, target, expandedp);
8531 if (*expandedp)
8532 return target;
8533
8534 *expandedp = true;
95385cbb 8535
3a9b8c7e
AH
8536 switch (fcode)
8537 {
6525c0e7 8538 case ALTIVEC_BUILTIN_STVX:
5039610b 8539 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
6525c0e7 8540 case ALTIVEC_BUILTIN_STVEBX:
5039610b 8541 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
6525c0e7 8542 case ALTIVEC_BUILTIN_STVEHX:
5039610b 8543 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
6525c0e7 8544 case ALTIVEC_BUILTIN_STVEWX:
5039610b 8545 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
6525c0e7 8546 case ALTIVEC_BUILTIN_STVXL:
5039610b 8547 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
3a9b8c7e 8548
95385cbb
AH
8549 case ALTIVEC_BUILTIN_MFVSCR:
8550 icode = CODE_FOR_altivec_mfvscr;
8551 tmode = insn_data[icode].operand[0].mode;
8552
8553 if (target == 0
8554 || GET_MODE (target) != tmode
8555 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8556 target = gen_reg_rtx (tmode);
f676971a 8557
95385cbb 8558 pat = GEN_FCN (icode) (target);
0ac081f6
AH
8559 if (! pat)
8560 return 0;
8561 emit_insn (pat);
95385cbb
AH
8562 return target;
8563
8564 case ALTIVEC_BUILTIN_MTVSCR:
8565 icode = CODE_FOR_altivec_mtvscr;
5039610b 8566 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8567 op0 = expand_normal (arg0);
95385cbb
AH
8568 mode0 = insn_data[icode].operand[0].mode;
8569
8570 /* If we got invalid arguments, bail out before generating bad rtl. */
8571 if (arg0 == error_mark_node)
9a171fcd 8572 return const0_rtx;
95385cbb
AH
8573
8574 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8575 op0 = copy_to_mode_reg (mode0, op0);
8576
8577 pat = GEN_FCN (icode) (op0);
8578 if (pat)
8579 emit_insn (pat);
8580 return NULL_RTX;
3a9b8c7e 8581
95385cbb
AH
8582 case ALTIVEC_BUILTIN_DSSALL:
8583 emit_insn (gen_altivec_dssall ());
8584 return NULL_RTX;
8585
8586 case ALTIVEC_BUILTIN_DSS:
8587 icode = CODE_FOR_altivec_dss;
5039610b 8588 arg0 = CALL_EXPR_ARG (exp, 0);
8bb418a3 8589 STRIP_NOPS (arg0);
84217346 8590 op0 = expand_normal (arg0);
95385cbb
AH
8591 mode0 = insn_data[icode].operand[0].mode;
8592
8593 /* If we got invalid arguments, bail out before generating bad rtl. */
8594 if (arg0 == error_mark_node)
9a171fcd 8595 return const0_rtx;
95385cbb 8596
b44140e7
AH
8597 if (TREE_CODE (arg0) != INTEGER_CST
8598 || TREE_INT_CST_LOW (arg0) & ~0x3)
8599 {
8600 error ("argument to dss must be a 2-bit unsigned literal");
9a171fcd 8601 return const0_rtx;
b44140e7
AH
8602 }
8603
95385cbb
AH
8604 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8605 op0 = copy_to_mode_reg (mode0, op0);
8606
8607 emit_insn (gen_altivec_dss (op0));
0ac081f6 8608 return NULL_RTX;
7a4eca66
DE
8609
8610 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8611 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8612 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8613 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
5039610b 8614 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
7a4eca66
DE
8615
8616 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8617 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8618 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8619 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
5039610b 8620 return altivec_expand_vec_set_builtin (exp);
7a4eca66
DE
8621
8622 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8623 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8624 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8625 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
5039610b 8626 return altivec_expand_vec_ext_builtin (exp, target);
7a4eca66
DE
8627
8628 default:
8629 break;
8630 /* Fall through to the table-driven expanders below. */
0ac081f6 8631 }
24408032 8632
100c4561 8633 /* Expand abs* operations. */
586de218 8634 d = bdesc_abs;
ca7558fc 8635 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
100c4561 8636 if (d->code == fcode)
5039610b 8637 return altivec_expand_abs_builtin (d->icode, exp, target);
100c4561 8638
ae4b4a02 8639 /* Expand the AltiVec predicates. */
586de218 8640 dp = bdesc_altivec_preds;
ca7558fc 8641 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
ae4b4a02 8642 if (dp->code == fcode)
c4ad648e 8643 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
5039610b 8644 exp, target);
ae4b4a02 8645
6525c0e7
AH
8646 /* The LV* builtins take address operands and were initialized differently, so expand them here rather than through the generic tables. */
8647 switch (fcode)
8648 {
8649 case ALTIVEC_BUILTIN_LVSL:
b4a62fa0 8650 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5039610b 8651 exp, target);
6525c0e7 8652 case ALTIVEC_BUILTIN_LVSR:
b4a62fa0 8653 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5039610b 8654 exp, target);
6525c0e7 8655 case ALTIVEC_BUILTIN_LVEBX:
b4a62fa0 8656 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5039610b 8657 exp, target);
6525c0e7 8658 case ALTIVEC_BUILTIN_LVEHX:
b4a62fa0 8659 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5039610b 8660 exp, target);
6525c0e7 8661 case ALTIVEC_BUILTIN_LVEWX:
b4a62fa0 8662 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5039610b 8663 exp, target);
6525c0e7 8664 case ALTIVEC_BUILTIN_LVXL:
b4a62fa0 8665 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5039610b 8666 exp, target);
6525c0e7 8667 case ALTIVEC_BUILTIN_LVX:
b4a62fa0 8668 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5039610b 8669 exp, target);
6525c0e7
AH
8670 default:
8671 break;
8672 /* Fall through to the not-expanded path below. */
8673 }
95385cbb 8674
92898235 8675 *expandedp = false;
0ac081f6
AH
8676 return NULL_RTX;
8677}
8678
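/* A minimal usage sketch for the data-stream-stop cases expanded above,
   assuming user code compiled with -maltivec.  __builtin_altivec_dss and
   __builtin_altivec_dssall are registered in altivec_init_builtins below;
   the 2-bit literal check above means only stream tags 0-3 are accepted.  */
#if 0
void
example_stop_streams (void)
{
  __builtin_altivec_dss (0);     /* stop data stream 0; tags 0-3 only */
  __builtin_altivec_dssall ();   /* stop every active data stream */
}
#endif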
96038623
DE
8679/* Expand the builtin in EXP and store the result in TARGET. Store
8680 true in *EXPANDEDP if we found a builtin to expand. */
8681static rtx
8682paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8683{
8684 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8685 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
23a651fc 8686 const struct builtin_description *d;
96038623
DE
8687 size_t i;
8688
8689 *expandedp = true;
8690
8691 switch (fcode)
8692 {
8693 case PAIRED_BUILTIN_STX:
8694 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8695 case PAIRED_BUILTIN_LX:
8696 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8697 default:
8698 break;
8699 /* Fall through to the paired predicate table below. */
8700 }
8701
8702 /* Expand the paired predicates. */
23a651fc 8703 d = bdesc_paired_preds;
96038623
DE
8704 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8705 if (d->code == fcode)
8706 return paired_expand_predicate_builtin (d->icode, exp, target);
8707
8708 *expandedp = false;
8709 return NULL_RTX;
8710}
8711
a3170dc6
AH
8712/* Binops that need to be initialized manually, but can be expanded
8713 automagically by rs6000_expand_binop_builtin. */
8714static struct builtin_description bdesc_2arg_spe[] =
8715{
8716 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8717 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8718 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8719 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8720 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8721 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8722 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8723 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8724 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8725 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8726 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8727 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8728 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8729 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8730 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8731 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8732 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8733 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8734 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8735 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8736 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8737 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8738};
8739
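/* A minimal usage sketch for one entry of the table above, assuming user
   code compiled with -mspe.  These loads are plain two-operand builtins,
   so spe_expand_builtin hands them to rs6000_expand_binop_builtin; the
   __ev64_opaque__ type name is registered in spe_init_builtins below.  */
#if 0
__ev64_opaque__
example_indexed_load (__ev64_opaque__ *base, int byte_offset)
{
  /* Indexed 64-bit load: the doubleword at BASE plus BYTE_OFFSET.  */
  return __builtin_spe_evlddx (base, byte_offset);
}
#endif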
8740/* Expand the builtin in EXP and store the result in TARGET. Store
8741 true in *EXPANDEDP if we found a builtin to expand.
8742
8743 This expands the SPE builtins that are not simple unary and binary
8744 operations. */
8745static rtx
a2369ed3 8746spe_expand_builtin (tree exp, rtx target, bool *expandedp)
a3170dc6 8747{
5039610b 8748 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
a3170dc6
AH
8749 tree arg1, arg0;
8750 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8751 enum insn_code icode;
8752 enum machine_mode tmode, mode0;
8753 rtx pat, op0;
8754 struct builtin_description *d;
8755 size_t i;
8756
8757 *expandedp = true;
8758
8759 /* Syntax check for a 5-bit unsigned immediate. */
8760 switch (fcode)
8761 {
8762 case SPE_BUILTIN_EVSTDD:
8763 case SPE_BUILTIN_EVSTDH:
8764 case SPE_BUILTIN_EVSTDW:
8765 case SPE_BUILTIN_EVSTWHE:
8766 case SPE_BUILTIN_EVSTWHO:
8767 case SPE_BUILTIN_EVSTWWE:
8768 case SPE_BUILTIN_EVSTWWO:
5039610b 8769 arg1 = CALL_EXPR_ARG (exp, 2);
a3170dc6
AH
8770 if (TREE_CODE (arg1) != INTEGER_CST
8771 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8772 {
8773 error ("argument 2 must be a 5-bit unsigned literal");
8774 return const0_rtx;
8775 }
8776 break;
8777 default:
8778 break;
8779 }
8780
00332c9f
AH
8781 /* The evsplat*i instructions take a small immediate operand, so expand them specially rather than through the generic tables. */
8782 switch (fcode)
8783 {
8784 case SPE_BUILTIN_EVSPLATFI:
8785 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5039610b 8786 exp, target);
00332c9f
AH
8787 case SPE_BUILTIN_EVSPLATI:
8788 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5039610b 8789 exp, target);
00332c9f
AH
8790 default:
8791 break;
8792 }
8793
a3170dc6
AH
8794 d = (struct builtin_description *) bdesc_2arg_spe;
8795 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8796 if (d->code == fcode)
5039610b 8797 return rs6000_expand_binop_builtin (d->icode, exp, target);
a3170dc6
AH
8798
8799 d = (struct builtin_description *) bdesc_spe_predicates;
8800 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8801 if (d->code == fcode)
5039610b 8802 return spe_expand_predicate_builtin (d->icode, exp, target);
a3170dc6
AH
8803
8804 d = (struct builtin_description *) bdesc_spe_evsel;
8805 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8806 if (d->code == fcode)
5039610b 8807 return spe_expand_evsel_builtin (d->icode, exp, target);
a3170dc6
AH
8808
8809 switch (fcode)
8810 {
8811 case SPE_BUILTIN_EVSTDDX:
5039610b 8812 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
a3170dc6 8813 case SPE_BUILTIN_EVSTDHX:
5039610b 8814 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
a3170dc6 8815 case SPE_BUILTIN_EVSTDWX:
5039610b 8816 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
a3170dc6 8817 case SPE_BUILTIN_EVSTWHEX:
5039610b 8818 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
a3170dc6 8819 case SPE_BUILTIN_EVSTWHOX:
5039610b 8820 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
a3170dc6 8821 case SPE_BUILTIN_EVSTWWEX:
5039610b 8822 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
a3170dc6 8823 case SPE_BUILTIN_EVSTWWOX:
5039610b 8824 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
a3170dc6 8825 case SPE_BUILTIN_EVSTDD:
5039610b 8826 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
a3170dc6 8827 case SPE_BUILTIN_EVSTDH:
5039610b 8828 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
a3170dc6 8829 case SPE_BUILTIN_EVSTDW:
5039610b 8830 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
a3170dc6 8831 case SPE_BUILTIN_EVSTWHE:
5039610b 8832 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
a3170dc6 8833 case SPE_BUILTIN_EVSTWHO:
5039610b 8834 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
a3170dc6 8835 case SPE_BUILTIN_EVSTWWE:
5039610b 8836 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
a3170dc6 8837 case SPE_BUILTIN_EVSTWWO:
5039610b 8838 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
a3170dc6
AH
8839 case SPE_BUILTIN_MFSPEFSCR:
8840 icode = CODE_FOR_spe_mfspefscr;
8841 tmode = insn_data[icode].operand[0].mode;
8842
8843 if (target == 0
8844 || GET_MODE (target) != tmode
8845 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8846 target = gen_reg_rtx (tmode);
f676971a 8847
a3170dc6
AH
8848 pat = GEN_FCN (icode) (target);
8849 if (! pat)
8850 return 0;
8851 emit_insn (pat);
8852 return target;
8853 case SPE_BUILTIN_MTSPEFSCR:
8854 icode = CODE_FOR_spe_mtspefscr;
5039610b 8855 arg0 = CALL_EXPR_ARG (exp, 0);
84217346 8856 op0 = expand_normal (arg0);
a3170dc6
AH
8857 mode0 = insn_data[icode].operand[0].mode;
8858
8859 if (arg0 == error_mark_node)
8860 return const0_rtx;
8861
8862 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8863 op0 = copy_to_mode_reg (mode0, op0);
8864
8865 pat = GEN_FCN (icode) (op0);
8866 if (pat)
8867 emit_insn (pat);
8868 return NULL_RTX;
8869 default:
8870 break;
8871 }
8872
8873 *expandedp = false;
8874 return NULL_RTX;
8875}
8876
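/* A minimal usage sketch of the checks above, assuming user code compiled
   with -mspe.  The third argument of the EVSTD* and EVSTW* store builtins
   must be a literal in the range 0-31 (the 5-bit test above), and the
   evsplat*i builtins take a small constant.  The builtins themselves are
   registered in spe_init_builtins below.  */
#if 0
void
example_spe_store (__ev64_opaque__ value, __ev64_opaque__ *base)
{
  __builtin_spe_evstdd (value, base, 4);   /* offset must be a literal 0-31 */
}

__ev64_opaque__
example_spe_splat (void)
{
  return __builtin_spe_evsplatfi (3);      /* small constant operand */
}
#endif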
96038623
DE
8877static rtx
8878paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
8879{
8880 rtx pat, scratch, tmp;
8881 tree form = CALL_EXPR_ARG (exp, 0);
8882 tree arg0 = CALL_EXPR_ARG (exp, 1);
8883 tree arg1 = CALL_EXPR_ARG (exp, 2);
8884 rtx op0 = expand_normal (arg0);
8885 rtx op1 = expand_normal (arg1);
8886 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8887 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8888 int form_int;
8889 enum rtx_code code;
8890
8891 if (TREE_CODE (form) != INTEGER_CST)
8892 {
8893 error ("argument 1 of __builtin_paired_predicate must be a constant");
8894 return const0_rtx;
8895 }
8896 else
8897 form_int = TREE_INT_CST_LOW (form);
8898
8899 gcc_assert (mode0 == mode1);
8900
8901 if (arg0 == error_mark_node || arg1 == error_mark_node)
8902 return const0_rtx;
8903
8904 if (target == 0
8905 || GET_MODE (target) != SImode
8906 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
8907 target = gen_reg_rtx (SImode);
8908 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
8909 op0 = copy_to_mode_reg (mode0, op0);
8910 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
8911 op1 = copy_to_mode_reg (mode1, op1);
8912
8913 scratch = gen_reg_rtx (CCFPmode);
8914
8915 pat = GEN_FCN (icode) (scratch, op0, op1);
8916 if (!pat)
8917 return const0_rtx;
8918
8919 emit_insn (pat);
8920
8921 switch (form_int)
8922 {
8923 /* LT bit. */
8924 case 0:
8925 code = LT;
8926 break;
8927 /* GT bit. */
8928 case 1:
8929 code = GT;
8930 break;
8931 /* EQ bit. */
8932 case 2:
8933 code = EQ;
8934 break;
8935 /* UN bit. */
8936 case 3:
8937 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
8938 return target;
8939 default:
8940 error ("argument 1 of __builtin_paired_predicate is out of range");
8941 return const0_rtx;
8942 }
8943
8944 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
8945 emit_move_insn (target, tmp);
8946 return target;
8947}
8948
a3170dc6 8949static rtx
5039610b 8950spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
8951{
8952 rtx pat, scratch, tmp;
5039610b
SL
8953 tree form = CALL_EXPR_ARG (exp, 0);
8954 tree arg0 = CALL_EXPR_ARG (exp, 1);
8955 tree arg1 = CALL_EXPR_ARG (exp, 2);
84217346
MD
8956 rtx op0 = expand_normal (arg0);
8957 rtx op1 = expand_normal (arg1);
a3170dc6
AH
8958 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8959 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8960 int form_int;
8961 enum rtx_code code;
8962
8963 if (TREE_CODE (form) != INTEGER_CST)
8964 {
8965 error ("argument 1 of __builtin_spe_predicate must be a constant");
8966 return const0_rtx;
8967 }
8968 else
8969 form_int = TREE_INT_CST_LOW (form);
8970
37409796 8971 gcc_assert (mode0 == mode1);
a3170dc6
AH
8972
8973 if (arg0 == error_mark_node || arg1 == error_mark_node)
8974 return const0_rtx;
8975
8976 if (target == 0
8977 || GET_MODE (target) != SImode
8978 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
8979 target = gen_reg_rtx (SImode);
8980
8981 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8982 op0 = copy_to_mode_reg (mode0, op0);
8983 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8984 op1 = copy_to_mode_reg (mode1, op1);
8985
8986 scratch = gen_reg_rtx (CCmode);
8987
8988 pat = GEN_FCN (icode) (scratch, op0, op1);
8989 if (! pat)
8990 return const0_rtx;
8991 emit_insn (pat);
8992
8993 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
8994 _lower_. We use one compare, but look in different bits of the
8995 CR for each variant.
8996
8997 There are 2 elements in each SPE simd type (upper/lower). The CR
8998 bits are set as follows:
8999
9000 BIT0 | BIT 1 | BIT 2 | BIT 3
9001 U | L | (U | L) | (U & L)
9002
9003 So, for an "all" relationship, BIT 3 would be set.
9004 For an "any" relationship, BIT 2 would be set. Etc.
9005
9006 Following traditional nomenclature, these bits map to:
9007
9008 BIT0 | BIT 1 | BIT 2 | BIT 3
9009 LT | GT | EQ | OV
9010
9011 Later, we will generate rtl to look in the LT/GT/EQ/OV bits.
9012 */
9013
9014 switch (form_int)
9015 {
9016 /* All variant. OV bit. */
9017 case 0:
9018 /* We need to get to the OV bit, which is the ORDERED bit. We
9019 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
992d08b1 9020 that's ugly and will make validate_condition_mode die.
a3170dc6
AH
9021 So let's just use another pattern. */
9022 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9023 return target;
9024 /* Any variant. EQ bit. */
9025 case 1:
9026 code = EQ;
9027 break;
9028 /* Upper variant. LT bit. */
9029 case 2:
9030 code = LT;
9031 break;
9032 /* Lower variant. GT bit. */
9033 case 3:
9034 code = GT;
9035 break;
9036 default:
9037 error ("argument 1 of __builtin_spe_predicate is out of range");
9038 return const0_rtx;
9039 }
9040
9041 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9042 emit_move_insn (target, tmp);
9043
9044 return target;
9045}
9046
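/* A sketch of how the FORM argument above selects a variant.  The
   user-level predicate names live in bdesc_spe_predicates (not shown
   here), so the spelling __builtin_spe_evcmpgts below is only an assumed
   example; each such predicate returns an int, per
   int_ftype_int_v2si_v2si in spe_init_builtins:

     __builtin_spe_evcmpgts (0, a, b)   "all":   both elements compare true   (OV bit)
     __builtin_spe_evcmpgts (1, a, b)   "any":   at least one compares true   (EQ bit)
     __builtin_spe_evcmpgts (2, a, b)   "upper": the upper element compares   (LT bit)
     __builtin_spe_evcmpgts (3, a, b)   "lower": the lower element compares   (GT bit)

   One compare is emitted and the CR bit named above is copied into the
   result, exactly as the switch in spe_expand_predicate_builtin does.  */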
9047/* The evsel builtins look like this:
9048
9049 e = __builtin_spe_evsel_OP (a, b, c, d);
9050
9051 and work like this:
9052
9053 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9054 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9055*/
9056
9057static rtx
5039610b 9058spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
a3170dc6
AH
9059{
9060 rtx pat, scratch;
5039610b
SL
9061 tree arg0 = CALL_EXPR_ARG (exp, 0);
9062 tree arg1 = CALL_EXPR_ARG (exp, 1);
9063 tree arg2 = CALL_EXPR_ARG (exp, 2);
9064 tree arg3 = CALL_EXPR_ARG (exp, 3);
84217346
MD
9065 rtx op0 = expand_normal (arg0);
9066 rtx op1 = expand_normal (arg1);
9067 rtx op2 = expand_normal (arg2);
9068 rtx op3 = expand_normal (arg3);
a3170dc6
AH
9069 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9070 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9071
37409796 9072 gcc_assert (mode0 == mode1);
a3170dc6
AH
9073
9074 if (arg0 == error_mark_node || arg1 == error_mark_node
9075 || arg2 == error_mark_node || arg3 == error_mark_node)
9076 return const0_rtx;
9077
9078 if (target == 0
9079 || GET_MODE (target) != mode0
9080 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9081 target = gen_reg_rtx (mode0);
9082
9083 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9084 op0 = copy_to_mode_reg (mode0, op0);
9085 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9086 op1 = copy_to_mode_reg (mode0, op1);
9087 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9088 op2 = copy_to_mode_reg (mode0, op2);
9089 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9090 op3 = copy_to_mode_reg (mode0, op3);
9091
9092 /* Generate the compare. */
9093 scratch = gen_reg_rtx (CCmode);
9094 pat = GEN_FCN (icode) (scratch, op0, op1);
9095 if (! pat)
9096 return const0_rtx;
9097 emit_insn (pat);
9098
9099 if (mode0 == V2SImode)
9100 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9101 else
9102 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9103
9104 return target;
9105}
9106
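/* A worked example of the evsel semantics described above (the exact
   builtin spellings come from bdesc_spe_evsel and are assumptions here).
   With a signed greater-than compare and

     a = { 4, -1 }   b = { 2, 7 }   c = { 10, 20 }   d = { 30, 40 }

   the expansion performs one compare and one select per element:

     e[upper] = (4 > 2)  ? c[upper] : d[upper] = 10
     e[lower] = (-1 > 7) ? c[lower] : d[lower] = 40

   giving e = { 10, 40 }.  */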
0ac081f6
AH
9107/* Expand an expression EXP that calls a built-in function,
9108 with result going to TARGET if that's convenient
9109 (and in mode MODE if that's convenient).
9110 SUBTARGET may be used as the target for computing one of EXP's operands.
9111 IGNORE is nonzero if the value is to be ignored. */
9112
9113static rtx
a2369ed3 9114rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
c4ad648e
AM
9115 enum machine_mode mode ATTRIBUTE_UNUSED,
9116 int ignore ATTRIBUTE_UNUSED)
0ac081f6 9117{
5039610b 9118 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
92898235 9119 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
586de218 9120 const struct builtin_description *d;
92898235
AH
9121 size_t i;
9122 rtx ret;
9123 bool success;
f676971a 9124
9c78b944
DE
9125 if (fcode == RS6000_BUILTIN_RECIP)
9126 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9127
9128 if (fcode == RS6000_BUILTIN_RECIPF)
9129 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9130
9131 if (fcode == RS6000_BUILTIN_RSQRTF)
9132 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9133
7ccf35ed
DN
9134 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9135 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9136 {
9137 int icode = (int) CODE_FOR_altivec_lvsr;
9138 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9139 enum machine_mode mode = insn_data[icode].operand[1].mode;
9140 tree arg;
9141 rtx op, addr, pat;
9142
37409796 9143 gcc_assert (TARGET_ALTIVEC);
7ccf35ed 9144
5039610b 9145 arg = CALL_EXPR_ARG (exp, 0);
37409796 9146 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
7ccf35ed
DN
9147 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9148 addr = memory_address (mode, op);
9149 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9150 op = addr;
9151 else
9152 {
9153 /* For the load case, we need to negate the address. */
9154 op = gen_reg_rtx (GET_MODE (addr));
9155 emit_insn (gen_rtx_SET (VOIDmode, op,
9156 gen_rtx_NEG (GET_MODE (addr), addr)));
c4ad648e 9157 }
7ccf35ed
DN
9158 op = gen_rtx_MEM (mode, op);
9159
9160 if (target == 0
9161 || GET_MODE (target) != tmode
9162 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9163 target = gen_reg_rtx (tmode);
9164
9165 /*pat = gen_altivec_lvsr (target, op);*/
9166 pat = GEN_FCN (icode) (target, op);
9167 if (!pat)
9168 return 0;
9169 emit_insn (pat);
9170
9171 return target;
9172 }
5039610b
SL
9173
9174 /* FIXME: There's got to be a nicer way to handle this case than
9175 constructing a new CALL_EXPR. */
f57d17f1 9176 if (fcode == ALTIVEC_BUILTIN_VCFUX
7910ae0c
DN
9177 || fcode == ALTIVEC_BUILTIN_VCFSX
9178 || fcode == ALTIVEC_BUILTIN_VCTUXS
9179 || fcode == ALTIVEC_BUILTIN_VCTSXS)
f57d17f1 9180 {
5039610b
SL
9181 if (call_expr_nargs (exp) == 1)
9182 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9183 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
982afe02 9184 }
7ccf35ed 9185
0ac081f6 9186 if (TARGET_ALTIVEC)
92898235
AH
9187 {
9188 ret = altivec_expand_builtin (exp, target, &success);
9189
a3170dc6
AH
9190 if (success)
9191 return ret;
9192 }
9193 if (TARGET_SPE)
9194 {
9195 ret = spe_expand_builtin (exp, target, &success);
9196
92898235
AH
9197 if (success)
9198 return ret;
9199 }
96038623
DE
9200 if (TARGET_PAIRED_FLOAT)
9201 {
9202 ret = paired_expand_builtin (exp, target, &success);
9203
9204 if (success)
9205 return ret;
9206 }
92898235 9207
96038623 9208 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
bb8df8a6 9209
37409796
NS
9210 /* Handle simple unary operations. */
9211 d = (struct builtin_description *) bdesc_1arg;
9212 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9213 if (d->code == fcode)
5039610b 9214 return rs6000_expand_unop_builtin (d->icode, exp, target);
bb8df8a6 9215
37409796
NS
9216 /* Handle simple binary operations. */
9217 d = (struct builtin_description *) bdesc_2arg;
9218 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9219 if (d->code == fcode)
5039610b 9220 return rs6000_expand_binop_builtin (d->icode, exp, target);
0ac081f6 9221
37409796 9222 /* Handle simple ternary operations. */
586de218 9223 d = bdesc_3arg;
37409796
NS
9224 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9225 if (d->code == fcode)
5039610b 9226 return rs6000_expand_ternop_builtin (d->icode, exp, target);
bb8df8a6 9227
37409796 9228 gcc_unreachable ();
0ac081f6
AH
9229}
9230
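/* A minimal usage sketch for the RS6000_BUILTIN_RECIP* and
   RS6000_BUILTIN_RSQRTF cases handled at the top of rs6000_expand_builtin,
   assuming a target where rs6000_init_builtins registers them (the
   MASK_PPC_GFXOPT block below).  */
#if 0
float
example_fast_divide (float num, float den)
{
  /* Expanded directly through CODE_FOR_recipsf3 instead of a libcall.  */
  return __builtin_recipdivf (num, den);
}

float
example_fast_rsqrt (float x)
{
  /* Expanded through CODE_FOR_rsqrtsf2.  */
  return __builtin_rsqrtf (x);
}
#endif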
7c62e993
PB
9231static tree
9232build_opaque_vector_type (tree node, int nunits)
9233{
9234 node = copy_node (node);
9235 TYPE_MAIN_VARIANT (node) = node;
0627bfb4 9236 TYPE_CANONICAL (node) = node;
7c62e993
PB
9237 return build_vector_type (node, nunits);
9238}
9239
0ac081f6 9240static void
863d938c 9241rs6000_init_builtins (void)
0ac081f6 9242{
4a5eab38
PB
9243 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9244 V2SF_type_node = build_vector_type (float_type_node, 2);
9245 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9246 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9247 V4SF_type_node = build_vector_type (float_type_node, 4);
7e463bda 9248 V8HI_type_node = build_vector_type (intHI_type_node, 8);
4a5eab38
PB
9249 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9250
9251 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9252 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9253 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9254
7c62e993
PB
9255 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9256 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6035d635 9257 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
58646b77 9258 opaque_V4SI_type_node = copy_node (V4SI_type_node);
3fdaa45a 9259
8bb418a3
ZL
9260 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9261 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9262 'vector unsigned short'. */
9263
8dd16ecc
NS
9264 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9265 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9266 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9267 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
8bb418a3 9268
58646b77
PB
9269 long_integer_type_internal_node = long_integer_type_node;
9270 long_unsigned_type_internal_node = long_unsigned_type_node;
9271 intQI_type_internal_node = intQI_type_node;
9272 uintQI_type_internal_node = unsigned_intQI_type_node;
9273 intHI_type_internal_node = intHI_type_node;
9274 uintHI_type_internal_node = unsigned_intHI_type_node;
9275 intSI_type_internal_node = intSI_type_node;
9276 uintSI_type_internal_node = unsigned_intSI_type_node;
9277 float_type_internal_node = float_type_node;
9278 void_type_internal_node = void_type_node;
9279
8bb418a3
ZL
9280 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9281 get_identifier ("__bool char"),
9282 bool_char_type_node));
9283 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9284 get_identifier ("__bool short"),
9285 bool_short_type_node));
9286 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9287 get_identifier ("__bool int"),
9288 bool_int_type_node));
9289 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9290 get_identifier ("__pixel"),
9291 pixel_type_node));
9292
4a5eab38
PB
9293 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9294 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9295 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9296 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
8bb418a3
ZL
9297
9298 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9299 get_identifier ("__vector unsigned char"),
9300 unsigned_V16QI_type_node));
9301 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9302 get_identifier ("__vector signed char"),
9303 V16QI_type_node));
9304 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9305 get_identifier ("__vector __bool char"),
9306 bool_V16QI_type_node));
9307
9308 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9309 get_identifier ("__vector unsigned short"),
9310 unsigned_V8HI_type_node));
9311 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9312 get_identifier ("__vector signed short"),
9313 V8HI_type_node));
9314 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9315 get_identifier ("__vector __bool short"),
9316 bool_V8HI_type_node));
9317
9318 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9319 get_identifier ("__vector unsigned int"),
9320 unsigned_V4SI_type_node));
9321 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9322 get_identifier ("__vector signed int"),
9323 V4SI_type_node));
9324 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9325 get_identifier ("__vector __bool int"),
9326 bool_V4SI_type_node));
9327
9328 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9329 get_identifier ("__vector float"),
9330 V4SF_type_node));
9331 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9332 get_identifier ("__vector __pixel"),
9333 pixel_V8HI_type_node));
9334
96038623
DE
9335 if (TARGET_PAIRED_FLOAT)
9336 paired_init_builtins ();
a3170dc6 9337 if (TARGET_SPE)
3fdaa45a 9338 spe_init_builtins ();
0ac081f6
AH
9339 if (TARGET_ALTIVEC)
9340 altivec_init_builtins ();
96038623 9341 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
0559cc77 9342 rs6000_common_init_builtins ();
9c78b944
DE
9343 if (TARGET_PPC_GFXOPT)
9344 {
9345 tree ftype = build_function_type_list (float_type_node,
9346 float_type_node,
9347 float_type_node,
9348 NULL_TREE);
9349 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9350 RS6000_BUILTIN_RECIPF);
9351
9352 ftype = build_function_type_list (float_type_node,
9353 float_type_node,
9354 NULL_TREE);
9355 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9356 RS6000_BUILTIN_RSQRTF);
9357 }
9358 if (TARGET_POPCNTB)
9359 {
9360 tree ftype = build_function_type_list (double_type_node,
9361 double_type_node,
9362 double_type_node,
9363 NULL_TREE);
9364 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9365 RS6000_BUILTIN_RECIP);
9366
9367 }
69ca3549
DE
9368
9369#if TARGET_XCOFF
9370 /* AIX libm provides clog as __clog. */
9371 if (built_in_decls [BUILT_IN_CLOG])
9372 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9373#endif
fb220235
FXC
9374
9375#ifdef SUBTARGET_INIT_BUILTINS
9376 SUBTARGET_INIT_BUILTINS;
9377#endif
0ac081f6
AH
9378}
9379
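/* A sketch of the user-visible effect of the type declarations pushed
   above, assuming user code compiled with -maltivec.  Because the
   bool_*_type_node and pixel_type_node copies are distinct types, the
   declarations below all have different types, which is what lets the
   overloaded vec_* interface distinguish them (especially in C++).  */
#if 0
__vector unsigned int  example_mask_source;
__vector __bool int    example_bool_mask;   /* distinct from the line above */
__vector __pixel       example_pixels;      /* distinct from __vector unsigned short */
#endif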
a3170dc6
AH
9380/* Search through a set of builtins and enable the mask bits.
9381 DESC is an array of builtins.
b6d08ca1 9382 SIZE is the total number of builtins.
a3170dc6
AH
9383 START is the builtin enum at which to start.
9384 END is the builtin enum at which to end. */
0ac081f6 9385static void
a2369ed3 9386enable_mask_for_builtins (struct builtin_description *desc, int size,
f676971a 9387 enum rs6000_builtins start,
a2369ed3 9388 enum rs6000_builtins end)
a3170dc6
AH
9389{
9390 int i;
9391
9392 for (i = 0; i < size; ++i)
9393 if (desc[i].code == start)
9394 break;
9395
9396 if (i == size)
9397 return;
9398
9399 for (; i < size; ++i)
9400 {
9401 /* Flip all the bits on. */
9402 desc[i].mask = target_flags;
9403 if (desc[i].code == end)
9404 break;
9405 }
9406}
9407
9408static void
863d938c 9409spe_init_builtins (void)
0ac081f6 9410{
a3170dc6
AH
9411 tree endlink = void_list_node;
9412 tree puint_type_node = build_pointer_type (unsigned_type_node);
9413 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
ae4b4a02 9414 struct builtin_description *d;
0ac081f6
AH
9415 size_t i;
9416
a3170dc6
AH
9417 tree v2si_ftype_4_v2si
9418 = build_function_type
3fdaa45a
AH
9419 (opaque_V2SI_type_node,
9420 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9421 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9422 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9423 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9424 endlink)))));
9425
9426 tree v2sf_ftype_4_v2sf
9427 = build_function_type
3fdaa45a
AH
9428 (opaque_V2SF_type_node,
9429 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9430 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9431 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9432 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9433 endlink)))));
9434
9435 tree int_ftype_int_v2si_v2si
9436 = build_function_type
9437 (integer_type_node,
9438 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9439 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9440 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9441 endlink))));
9442
9443 tree int_ftype_int_v2sf_v2sf
9444 = build_function_type
9445 (integer_type_node,
9446 tree_cons (NULL_TREE, integer_type_node,
3fdaa45a
AH
9447 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9448 tree_cons (NULL_TREE, opaque_V2SF_type_node,
a3170dc6
AH
9449 endlink))));
9450
9451 tree void_ftype_v2si_puint_int
9452 = build_function_type (void_type_node,
3fdaa45a 9453 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9454 tree_cons (NULL_TREE, puint_type_node,
9455 tree_cons (NULL_TREE,
9456 integer_type_node,
9457 endlink))));
9458
9459 tree void_ftype_v2si_puint_char
9460 = build_function_type (void_type_node,
3fdaa45a 9461 tree_cons (NULL_TREE, opaque_V2SI_type_node,
a3170dc6
AH
9462 tree_cons (NULL_TREE, puint_type_node,
9463 tree_cons (NULL_TREE,
9464 char_type_node,
9465 endlink))));
9466
9467 tree void_ftype_v2si_pv2si_int
9468 = build_function_type (void_type_node,
3fdaa45a 9469 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9470 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9471 tree_cons (NULL_TREE,
9472 integer_type_node,
9473 endlink))));
9474
9475 tree void_ftype_v2si_pv2si_char
9476 = build_function_type (void_type_node,
3fdaa45a 9477 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6035d635 9478 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9479 tree_cons (NULL_TREE,
9480 char_type_node,
9481 endlink))));
9482
9483 tree void_ftype_int
9484 = build_function_type (void_type_node,
9485 tree_cons (NULL_TREE, integer_type_node, endlink));
9486
9487 tree int_ftype_void
36e8d515 9488 = build_function_type (integer_type_node, endlink);
a3170dc6
AH
9489
9490 tree v2si_ftype_pv2si_int
3fdaa45a 9491 = build_function_type (opaque_V2SI_type_node,
6035d635 9492 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
a3170dc6
AH
9493 tree_cons (NULL_TREE, integer_type_node,
9494 endlink)));
9495
9496 tree v2si_ftype_puint_int
3fdaa45a 9497 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9498 tree_cons (NULL_TREE, puint_type_node,
9499 tree_cons (NULL_TREE, integer_type_node,
9500 endlink)));
9501
9502 tree v2si_ftype_pushort_int
3fdaa45a 9503 = build_function_type (opaque_V2SI_type_node,
a3170dc6
AH
9504 tree_cons (NULL_TREE, pushort_type_node,
9505 tree_cons (NULL_TREE, integer_type_node,
9506 endlink)));
9507
00332c9f
AH
9508 tree v2si_ftype_signed_char
9509 = build_function_type (opaque_V2SI_type_node,
9510 tree_cons (NULL_TREE, signed_char_type_node,
9511 endlink));
9512
a3170dc6
AH
9513 /* The initialization of the simple binary and unary builtins is
9514 done in rs6000_common_init_builtins, but we have to enable the
9515 mask bits here manually because we have run out of `target_flags'
9516 bits. We really need to redesign this mask business. */
9517
9518 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9519 ARRAY_SIZE (bdesc_2arg),
9520 SPE_BUILTIN_EVADDW,
9521 SPE_BUILTIN_EVXOR);
9522 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9523 ARRAY_SIZE (bdesc_1arg),
9524 SPE_BUILTIN_EVABS,
9525 SPE_BUILTIN_EVSUBFUSIAAW);
9526 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9527 ARRAY_SIZE (bdesc_spe_predicates),
9528 SPE_BUILTIN_EVCMPEQ,
9529 SPE_BUILTIN_EVFSTSTLT);
9530 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9531 ARRAY_SIZE (bdesc_spe_evsel),
9532 SPE_BUILTIN_EVSEL_CMPGTS,
9533 SPE_BUILTIN_EVSEL_FSTSTEQ);
9534
36252949
AH
9535 (*lang_hooks.decls.pushdecl)
9536 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9537 opaque_V2SI_type_node));
9538
a3170dc6 9539 /* Initialize irregular SPE builtins. */
f676971a 9540
a3170dc6
AH
9541 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9542 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9543 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9544 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9545 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9546 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9547 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9548 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9549 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9550 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9551 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9552 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9553 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9554 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9555 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9556 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
00332c9f
AH
9557 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9558 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
a3170dc6
AH
9559
9560 /* Loads. */
9561 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9562 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9563 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9564 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9565 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9566 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9567 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9568 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9569 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9570 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9571 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9572 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9573 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9574 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9575 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9576 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9577 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9578 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9579 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9580 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9581 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9582 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9583
9584 /* Predicates. */
9585 d = (struct builtin_description *) bdesc_spe_predicates;
9586 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9587 {
9588 tree type;
9589
9590 switch (insn_data[d->icode].operand[1].mode)
9591 {
9592 case V2SImode:
9593 type = int_ftype_int_v2si_v2si;
9594 break;
9595 case V2SFmode:
9596 type = int_ftype_int_v2sf_v2sf;
9597 break;
9598 default:
37409796 9599 gcc_unreachable ();
a3170dc6
AH
9600 }
9601
9602 def_builtin (d->mask, d->name, type, d->code);
9603 }
9604
9605 /* Evsel predicates. */
9606 d = (struct builtin_description *) bdesc_spe_evsel;
9607 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9608 {
9609 tree type;
9610
9611 switch (insn_data[d->icode].operand[1].mode)
9612 {
9613 case V2SImode:
9614 type = v2si_ftype_4_v2si;
9615 break;
9616 case V2SFmode:
9617 type = v2sf_ftype_4_v2sf;
9618 break;
9619 default:
37409796 9620 gcc_unreachable ();
a3170dc6
AH
9621 }
9622
9623 def_builtin (d->mask, d->name, type, d->code);
9624 }
9625}
9626
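/* A minimal usage sketch for the two SPEFSCR builtins registered first
   among the irregular SPE builtins above, assuming user code compiled
   with -mspe.  */
#if 0
int
example_read_spefscr (void)
{
  return __builtin_spe_mfspefscr ();       /* int_ftype_void */
}

void
example_write_spefscr (int new_value)
{
  __builtin_spe_mtspefscr (new_value);     /* void_ftype_int */
}
#endif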
96038623
DE
9627static void
9628paired_init_builtins (void)
9629{
23a651fc 9630 const struct builtin_description *d;
96038623
DE
9631 size_t i;
9632 tree endlink = void_list_node;
9633
9634 tree int_ftype_int_v2sf_v2sf
9635 = build_function_type
9636 (integer_type_node,
9637 tree_cons (NULL_TREE, integer_type_node,
9638 tree_cons (NULL_TREE, V2SF_type_node,
9639 tree_cons (NULL_TREE, V2SF_type_node,
9640 endlink))));
9641 tree pcfloat_type_node =
9642 build_pointer_type (build_qualified_type
9643 (float_type_node, TYPE_QUAL_CONST));
9644
9645 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9646 long_integer_type_node,
9647 pcfloat_type_node,
9648 NULL_TREE);
9649 tree void_ftype_v2sf_long_pcfloat =
9650 build_function_type_list (void_type_node,
9651 V2SF_type_node,
9652 long_integer_type_node,
9653 pcfloat_type_node,
9654 NULL_TREE);
9655
9656
9657 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9658 PAIRED_BUILTIN_LX);
9659
9660
9661 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9662 PAIRED_BUILTIN_STX);
9663
9664 /* Predicates. */
23a651fc 9665 d = bdesc_paired_preds;
96038623
DE
9666 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9667 {
9668 tree type;
9669
9670 switch (insn_data[d->icode].operand[1].mode)
9671 {
9672 case V2SFmode:
9673 type = int_ftype_int_v2sf_v2sf;
9674 break;
9675 default:
9676 gcc_unreachable ();
9677 }
9678
9679 def_builtin (d->mask, d->name, type, d->code);
9680 }
9681}
9682
a3170dc6 9683static void
863d938c 9684altivec_init_builtins (void)
a3170dc6 9685{
586de218
KG
9686 const struct builtin_description *d;
9687 const struct builtin_description_predicates *dp;
a3170dc6 9688 size_t i;
7a4eca66
DE
9689 tree ftype;
9690
a3170dc6
AH
9691 tree pfloat_type_node = build_pointer_type (float_type_node);
9692 tree pint_type_node = build_pointer_type (integer_type_node);
9693 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9694 tree pchar_type_node = build_pointer_type (char_type_node);
9695
9696 tree pvoid_type_node = build_pointer_type (void_type_node);
9697
0dbc3651
ZW
9698 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9699 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9700 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9701 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9702
9703 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
9704
58646b77
PB
9705 tree int_ftype_opaque
9706 = build_function_type_list (integer_type_node,
9707 opaque_V4SI_type_node, NULL_TREE);
9708
9709 tree opaque_ftype_opaque_int
9710 = build_function_type_list (opaque_V4SI_type_node,
9711 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9712 tree opaque_ftype_opaque_opaque_int
9713 = build_function_type_list (opaque_V4SI_type_node,
9714 opaque_V4SI_type_node, opaque_V4SI_type_node,
9715 integer_type_node, NULL_TREE);
9716 tree int_ftype_int_opaque_opaque
9717 = build_function_type_list (integer_type_node,
9718 integer_type_node, opaque_V4SI_type_node,
9719 opaque_V4SI_type_node, NULL_TREE);
a3170dc6
AH
9720 tree int_ftype_int_v4si_v4si
9721 = build_function_type_list (integer_type_node,
9722 integer_type_node, V4SI_type_node,
9723 V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9724 tree v4sf_ftype_pcfloat
9725 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
a3170dc6 9726 tree void_ftype_pfloat_v4sf
b4de2f7d 9727 = build_function_type_list (void_type_node,
a3170dc6 9728 pfloat_type_node, V4SF_type_node, NULL_TREE);
0dbc3651
ZW
9729 tree v4si_ftype_pcint
9730 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9731 tree void_ftype_pint_v4si
b4de2f7d
AH
9732 = build_function_type_list (void_type_node,
9733 pint_type_node, V4SI_type_node, NULL_TREE);
0dbc3651
ZW
9734 tree v8hi_ftype_pcshort
9735 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
f18c054f 9736 tree void_ftype_pshort_v8hi
b4de2f7d
AH
9737 = build_function_type_list (void_type_node,
9738 pshort_type_node, V8HI_type_node, NULL_TREE);
0dbc3651
ZW
9739 tree v16qi_ftype_pcchar
9740 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
f18c054f 9741 tree void_ftype_pchar_v16qi
b4de2f7d
AH
9742 = build_function_type_list (void_type_node,
9743 pchar_type_node, V16QI_type_node, NULL_TREE);
95385cbb 9744 tree void_ftype_v4si
b4de2f7d 9745 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
9746 tree v8hi_ftype_void
9747 = build_function_type (V8HI_type_node, void_list_node);
9748 tree void_ftype_void
9749 = build_function_type (void_type_node, void_list_node);
e34b6648
JJ
9750 tree void_ftype_int
9751 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
0dbc3651 9752
58646b77
PB
9753 tree opaque_ftype_long_pcvoid
9754 = build_function_type_list (opaque_V4SI_type_node,
9755 long_integer_type_node, pcvoid_type_node, NULL_TREE);
b4a62fa0 9756 tree v16qi_ftype_long_pcvoid
a3170dc6 9757 = build_function_type_list (V16QI_type_node,
b4a62fa0
SB
9758 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9759 tree v8hi_ftype_long_pcvoid
a3170dc6 9760 = build_function_type_list (V8HI_type_node,
b4a62fa0
SB
9761 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9762 tree v4si_ftype_long_pcvoid
a3170dc6 9763 = build_function_type_list (V4SI_type_node,
b4a62fa0 9764 long_integer_type_node, pcvoid_type_node, NULL_TREE);
0dbc3651 9765
58646b77
PB
9766 tree void_ftype_opaque_long_pvoid
9767 = build_function_type_list (void_type_node,
9768 opaque_V4SI_type_node, long_integer_type_node,
9769 pvoid_type_node, NULL_TREE);
b4a62fa0 9770 tree void_ftype_v4si_long_pvoid
b4de2f7d 9771 = build_function_type_list (void_type_node,
b4a62fa0 9772 V4SI_type_node, long_integer_type_node,
b4de2f7d 9773 pvoid_type_node, NULL_TREE);
b4a62fa0 9774 tree void_ftype_v16qi_long_pvoid
b4de2f7d 9775 = build_function_type_list (void_type_node,
b4a62fa0 9776 V16QI_type_node, long_integer_type_node,
b4de2f7d 9777 pvoid_type_node, NULL_TREE);
b4a62fa0 9778 tree void_ftype_v8hi_long_pvoid
b4de2f7d 9779 = build_function_type_list (void_type_node,
b4a62fa0 9780 V8HI_type_node, long_integer_type_node,
b4de2f7d 9781 pvoid_type_node, NULL_TREE);
a3170dc6
AH
9782 tree int_ftype_int_v8hi_v8hi
9783 = build_function_type_list (integer_type_node,
9784 integer_type_node, V8HI_type_node,
9785 V8HI_type_node, NULL_TREE);
9786 tree int_ftype_int_v16qi_v16qi
9787 = build_function_type_list (integer_type_node,
9788 integer_type_node, V16QI_type_node,
9789 V16QI_type_node, NULL_TREE);
9790 tree int_ftype_int_v4sf_v4sf
9791 = build_function_type_list (integer_type_node,
9792 integer_type_node, V4SF_type_node,
9793 V4SF_type_node, NULL_TREE);
9794 tree v4si_ftype_v4si
9795 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9796 tree v8hi_ftype_v8hi
9797 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9798 tree v16qi_ftype_v16qi
9799 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9800 tree v4sf_ftype_v4sf
9801 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
8bb418a3 9802 tree void_ftype_pcvoid_int_int
a3170dc6 9803 = build_function_type_list (void_type_node,
0dbc3651 9804 pcvoid_type_node, integer_type_node,
8bb418a3 9805 integer_type_node, NULL_TREE);
8bb418a3 9806
0dbc3651
ZW
9807 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9808 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9809 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9810 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9811 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9812 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9813 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9814 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9815 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9816 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9818 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9820 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9822 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
a3170dc6
AH
9823 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9824 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
e34b6648 9826 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
b4a62fa0
SB
9827 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9828 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9829 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9830 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9831 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9832 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9833 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9834 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9835 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9836 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9837 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9838 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
58646b77
PB
9839 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9840 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9841 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9842 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9843 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9844 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9845 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9846 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9847 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9848 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9849 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9850 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9851 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9852 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9853
9854 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
9855
9856 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
9857 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
9858 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
9859 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
9860 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
9861 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
9862 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
9863 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
9864 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
9865 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
8bb418a3 9866
a3170dc6 9867 /* Add the DST variants. */
586de218 9868 d = bdesc_dst;
a3170dc6 9869 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8bb418a3 9870 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
a3170dc6
AH
9871
9872 /* Initialize the predicates. */
586de218 9873 dp = bdesc_altivec_preds;
a3170dc6
AH
9874 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
9875 {
9876 enum machine_mode mode1;
9877 tree type;
58646b77
PB
9878 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
9879 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
a3170dc6 9880
58646b77
PB
9881 if (is_overloaded)
9882 mode1 = VOIDmode;
9883 else
9884 mode1 = insn_data[dp->icode].operand[1].mode;
a3170dc6
AH
9885
9886 switch (mode1)
9887 {
58646b77
PB
9888 case VOIDmode:
9889 type = int_ftype_int_opaque_opaque;
9890 break;
a3170dc6
AH
9891 case V4SImode:
9892 type = int_ftype_int_v4si_v4si;
9893 break;
9894 case V8HImode:
9895 type = int_ftype_int_v8hi_v8hi;
9896 break;
9897 case V16QImode:
9898 type = int_ftype_int_v16qi_v16qi;
9899 break;
9900 case V4SFmode:
9901 type = int_ftype_int_v4sf_v4sf;
9902 break;
9903 default:
37409796 9904 gcc_unreachable ();
a3170dc6 9905 }
f676971a 9906
a3170dc6
AH
9907 def_builtin (dp->mask, dp->name, type, dp->code);
9908 }
9909
9910 /* Initialize the abs* operators. */
586de218 9911 d = bdesc_abs;
a3170dc6
AH
9912 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
9913 {
9914 enum machine_mode mode0;
9915 tree type;
9916
9917 mode0 = insn_data[d->icode].operand[0].mode;
9918
9919 switch (mode0)
9920 {
9921 case V4SImode:
9922 type = v4si_ftype_v4si;
9923 break;
9924 case V8HImode:
9925 type = v8hi_ftype_v8hi;
9926 break;
9927 case V16QImode:
9928 type = v16qi_ftype_v16qi;
9929 break;
9930 case V4SFmode:
9931 type = v4sf_ftype_v4sf;
9932 break;
9933 default:
37409796 9934 gcc_unreachable ();
a3170dc6 9935 }
f676971a 9936
a3170dc6
AH
9937 def_builtin (d->mask, d->name, type, d->code);
9938 }
7ccf35ed 9939
13c62176
DN
9940 if (TARGET_ALTIVEC)
9941 {
9942 tree decl;
9943
9944 /* Initialize target builtin that implements
9945 targetm.vectorize.builtin_mask_for_load. */
9946
c79efc4d
RÁE
9947 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
9948 v16qi_ftype_long_pcvoid,
9949 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
61210b72
AP
9950 BUILT_IN_MD, NULL, NULL_TREE);
9951 TREE_READONLY (decl) = 1;
13c62176
DN
9952 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
9953 altivec_builtin_mask_for_load = decl;
13c62176 9954 }
7a4eca66
DE
9955
9956 /* Access to the vec_init patterns. */
9957 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
9958 integer_type_node, integer_type_node,
9959 integer_type_node, NULL_TREE);
9960 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
9961 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
9962
9963 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
9964 short_integer_type_node,
9965 short_integer_type_node,
9966 short_integer_type_node,
9967 short_integer_type_node,
9968 short_integer_type_node,
9969 short_integer_type_node,
9970 short_integer_type_node, NULL_TREE);
9971 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
9972 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
9973
9974 ftype = build_function_type_list (V16QI_type_node, char_type_node,
9975 char_type_node, char_type_node,
9976 char_type_node, char_type_node,
9977 char_type_node, char_type_node,
9978 char_type_node, char_type_node,
9979 char_type_node, char_type_node,
9980 char_type_node, char_type_node,
9981 char_type_node, char_type_node,
9982 char_type_node, NULL_TREE);
9983 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
9984 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
9985
9986 ftype = build_function_type_list (V4SF_type_node, float_type_node,
9987 float_type_node, float_type_node,
9988 float_type_node, NULL_TREE);
9989 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
9990 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
9991
9992 /* Access to the vec_set patterns. */
9993 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
9994 intSI_type_node,
9995 integer_type_node, NULL_TREE);
9996 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
9997 ALTIVEC_BUILTIN_VEC_SET_V4SI);
9998
9999 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10000 intHI_type_node,
10001 integer_type_node, NULL_TREE);
10002 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10003 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10004
10005 ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
10006 intQI_type_node,
10007 integer_type_node, NULL_TREE);
10008 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10009 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10010
10011 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10012 float_type_node,
10013 integer_type_node, NULL_TREE);
10014 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10015 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10016
10017 /* Access to the vec_extract patterns. */
10018 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10019 integer_type_node, NULL_TREE);
10020 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10021 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10022
10023 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10024 integer_type_node, NULL_TREE);
10025 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10026 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10027
10028 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10029 integer_type_node, NULL_TREE);
10030 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10031 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10032
10033 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10034 integer_type_node, NULL_TREE);
10035 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10036 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
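  /* Illustrative usage sketch, not part of the original sources: the
     builtins defined above expose the vec_init/vec_set/vec_extract
     expander patterns directly, e.g.

	vector int v = __builtin_vec_init_v4si (a, b, c, d);
	v = __builtin_vec_set_v4si (v, x, 2);
	int e = __builtin_vec_ext_v4si (v, 3);

     where the trailing integer operand of the set/ext forms selects the
     element.  */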
a3170dc6
AH
10037}
10038
10039static void
863d938c 10040rs6000_common_init_builtins (void)
a3170dc6 10041{
586de218 10042 const struct builtin_description *d;
a3170dc6
AH
10043 size_t i;
10044
96038623
DE
10045 tree v2sf_ftype_v2sf_v2sf_v2sf
10046 = build_function_type_list (V2SF_type_node,
10047 V2SF_type_node, V2SF_type_node,
10048 V2SF_type_node, NULL_TREE);
10049
a3170dc6
AH
10050 tree v4sf_ftype_v4sf_v4sf_v16qi
10051 = build_function_type_list (V4SF_type_node,
10052 V4SF_type_node, V4SF_type_node,
10053 V16QI_type_node, NULL_TREE);
10054 tree v4si_ftype_v4si_v4si_v16qi
10055 = build_function_type_list (V4SI_type_node,
10056 V4SI_type_node, V4SI_type_node,
10057 V16QI_type_node, NULL_TREE);
10058 tree v8hi_ftype_v8hi_v8hi_v16qi
10059 = build_function_type_list (V8HI_type_node,
10060 V8HI_type_node, V8HI_type_node,
10061 V16QI_type_node, NULL_TREE);
10062 tree v16qi_ftype_v16qi_v16qi_v16qi
10063 = build_function_type_list (V16QI_type_node,
10064 V16QI_type_node, V16QI_type_node,
10065 V16QI_type_node, NULL_TREE);
b9e4e5d1
ZL
10066 tree v4si_ftype_int
10067 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10068 tree v8hi_ftype_int
10069 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10070 tree v16qi_ftype_int
10071 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
a3170dc6
AH
10072 tree v8hi_ftype_v16qi
10073 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10074 tree v4sf_ftype_v4sf
10075 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10076
10077 tree v2si_ftype_v2si_v2si
2abe3e28
AH
10078 = build_function_type_list (opaque_V2SI_type_node,
10079 opaque_V2SI_type_node,
10080 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10081
96038623 10082 tree v2sf_ftype_v2sf_v2sf_spe
2abe3e28
AH
10083 = build_function_type_list (opaque_V2SF_type_node,
10084 opaque_V2SF_type_node,
10085 opaque_V2SF_type_node, NULL_TREE);
a3170dc6 10086
96038623
DE
10087 tree v2sf_ftype_v2sf_v2sf
10088 = build_function_type_list (V2SF_type_node,
10089 V2SF_type_node,
10090 V2SF_type_node, NULL_TREE);
10091
10092
a3170dc6 10093 tree v2si_ftype_int_int
2abe3e28 10094 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10095 integer_type_node, integer_type_node,
10096 NULL_TREE);
10097
58646b77
PB
10098 tree opaque_ftype_opaque
10099 = build_function_type_list (opaque_V4SI_type_node,
10100 opaque_V4SI_type_node, NULL_TREE);
10101
a3170dc6 10102 tree v2si_ftype_v2si
2abe3e28
AH
10103 = build_function_type_list (opaque_V2SI_type_node,
10104 opaque_V2SI_type_node, NULL_TREE);
a3170dc6 10105
96038623 10106 tree v2sf_ftype_v2sf_spe
2abe3e28
AH
10107 = build_function_type_list (opaque_V2SF_type_node,
10108 opaque_V2SF_type_node, NULL_TREE);
f676971a 10109
96038623
DE
10110 tree v2sf_ftype_v2sf
10111 = build_function_type_list (V2SF_type_node,
10112 V2SF_type_node, NULL_TREE);
10113
a3170dc6 10114 tree v2sf_ftype_v2si
2abe3e28
AH
10115 = build_function_type_list (opaque_V2SF_type_node,
10116 opaque_V2SI_type_node, NULL_TREE);
a3170dc6
AH
10117
10118 tree v2si_ftype_v2sf
2abe3e28
AH
10119 = build_function_type_list (opaque_V2SI_type_node,
10120 opaque_V2SF_type_node, NULL_TREE);
a3170dc6
AH
10121
10122 tree v2si_ftype_v2si_char
2abe3e28
AH
10123 = build_function_type_list (opaque_V2SI_type_node,
10124 opaque_V2SI_type_node,
10125 char_type_node, NULL_TREE);
a3170dc6
AH
10126
10127 tree v2si_ftype_int_char
2abe3e28 10128 = build_function_type_list (opaque_V2SI_type_node,
a3170dc6
AH
10129 integer_type_node, char_type_node, NULL_TREE);
10130
10131 tree v2si_ftype_char
2abe3e28
AH
10132 = build_function_type_list (opaque_V2SI_type_node,
10133 char_type_node, NULL_TREE);
a3170dc6
AH
10134
10135 tree int_ftype_int_int
10136 = build_function_type_list (integer_type_node,
10137 integer_type_node, integer_type_node,
10138 NULL_TREE);
95385cbb 10139
58646b77
PB
10140 tree opaque_ftype_opaque_opaque
10141 = build_function_type_list (opaque_V4SI_type_node,
10142 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
0ac081f6 10143 tree v4si_ftype_v4si_v4si
b4de2f7d
AH
10144 = build_function_type_list (V4SI_type_node,
10145 V4SI_type_node, V4SI_type_node, NULL_TREE);
b9e4e5d1 10146 tree v4sf_ftype_v4si_int
b4de2f7d 10147 = build_function_type_list (V4SF_type_node,
b9e4e5d1
ZL
10148 V4SI_type_node, integer_type_node, NULL_TREE);
10149 tree v4si_ftype_v4sf_int
b4de2f7d 10150 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10151 V4SF_type_node, integer_type_node, NULL_TREE);
10152 tree v4si_ftype_v4si_int
b4de2f7d 10153 = build_function_type_list (V4SI_type_node,
b9e4e5d1
ZL
10154 V4SI_type_node, integer_type_node, NULL_TREE);
10155 tree v8hi_ftype_v8hi_int
b4de2f7d 10156 = build_function_type_list (V8HI_type_node,
b9e4e5d1
ZL
10157 V8HI_type_node, integer_type_node, NULL_TREE);
10158 tree v16qi_ftype_v16qi_int
b4de2f7d 10159 = build_function_type_list (V16QI_type_node,
b9e4e5d1
ZL
10160 V16QI_type_node, integer_type_node, NULL_TREE);
10161 tree v16qi_ftype_v16qi_v16qi_int
b4de2f7d
AH
10162 = build_function_type_list (V16QI_type_node,
10163 V16QI_type_node, V16QI_type_node,
b9e4e5d1
ZL
10164 integer_type_node, NULL_TREE);
10165 tree v8hi_ftype_v8hi_v8hi_int
b4de2f7d
AH
10166 = build_function_type_list (V8HI_type_node,
10167 V8HI_type_node, V8HI_type_node,
b9e4e5d1
ZL
10168 integer_type_node, NULL_TREE);
10169 tree v4si_ftype_v4si_v4si_int
b4de2f7d
AH
10170 = build_function_type_list (V4SI_type_node,
10171 V4SI_type_node, V4SI_type_node,
b9e4e5d1
ZL
10172 integer_type_node, NULL_TREE);
10173 tree v4sf_ftype_v4sf_v4sf_int
b4de2f7d
AH
10174 = build_function_type_list (V4SF_type_node,
10175 V4SF_type_node, V4SF_type_node,
b9e4e5d1 10176 integer_type_node, NULL_TREE);
0ac081f6 10177 tree v4sf_ftype_v4sf_v4sf
b4de2f7d
AH
10178 = build_function_type_list (V4SF_type_node,
10179 V4SF_type_node, V4SF_type_node, NULL_TREE);
58646b77
PB
10180 tree opaque_ftype_opaque_opaque_opaque
10181 = build_function_type_list (opaque_V4SI_type_node,
10182 opaque_V4SI_type_node, opaque_V4SI_type_node,
10183 opaque_V4SI_type_node, NULL_TREE);
617e0e1d 10184 tree v4sf_ftype_v4sf_v4sf_v4si
b4de2f7d
AH
10185 = build_function_type_list (V4SF_type_node,
10186 V4SF_type_node, V4SF_type_node,
10187 V4SI_type_node, NULL_TREE);
2212663f 10188 tree v4sf_ftype_v4sf_v4sf_v4sf
b4de2f7d
AH
10189 = build_function_type_list (V4SF_type_node,
10190 V4SF_type_node, V4SF_type_node,
10191 V4SF_type_node, NULL_TREE);
f676971a 10192 tree v4si_ftype_v4si_v4si_v4si
b4de2f7d
AH
10193 = build_function_type_list (V4SI_type_node,
10194 V4SI_type_node, V4SI_type_node,
10195 V4SI_type_node, NULL_TREE);
0ac081f6 10196 tree v8hi_ftype_v8hi_v8hi
b4de2f7d
AH
10197 = build_function_type_list (V8HI_type_node,
10198 V8HI_type_node, V8HI_type_node, NULL_TREE);
2212663f 10199 tree v8hi_ftype_v8hi_v8hi_v8hi
b4de2f7d
AH
10200 = build_function_type_list (V8HI_type_node,
10201 V8HI_type_node, V8HI_type_node,
10202 V8HI_type_node, NULL_TREE);
c4ad648e 10203 tree v4si_ftype_v8hi_v8hi_v4si
b4de2f7d
AH
10204 = build_function_type_list (V4SI_type_node,
10205 V8HI_type_node, V8HI_type_node,
10206 V4SI_type_node, NULL_TREE);
c4ad648e 10207 tree v4si_ftype_v16qi_v16qi_v4si
b4de2f7d
AH
10208 = build_function_type_list (V4SI_type_node,
10209 V16QI_type_node, V16QI_type_node,
10210 V4SI_type_node, NULL_TREE);
0ac081f6 10211 tree v16qi_ftype_v16qi_v16qi
b4de2f7d
AH
10212 = build_function_type_list (V16QI_type_node,
10213 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10214 tree v4si_ftype_v4sf_v4sf
b4de2f7d
AH
10215 = build_function_type_list (V4SI_type_node,
10216 V4SF_type_node, V4SF_type_node, NULL_TREE);
0ac081f6 10217 tree v8hi_ftype_v16qi_v16qi
b4de2f7d
AH
10218 = build_function_type_list (V8HI_type_node,
10219 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10220 tree v4si_ftype_v8hi_v8hi
b4de2f7d
AH
10221 = build_function_type_list (V4SI_type_node,
10222 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10223 tree v8hi_ftype_v4si_v4si
b4de2f7d
AH
10224 = build_function_type_list (V8HI_type_node,
10225 V4SI_type_node, V4SI_type_node, NULL_TREE);
0ac081f6 10226 tree v16qi_ftype_v8hi_v8hi
b4de2f7d
AH
10227 = build_function_type_list (V16QI_type_node,
10228 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10229 tree v4si_ftype_v16qi_v4si
b4de2f7d
AH
10230 = build_function_type_list (V4SI_type_node,
10231 V16QI_type_node, V4SI_type_node, NULL_TREE);
fa066a23 10232 tree v4si_ftype_v16qi_v16qi
b4de2f7d
AH
10233 = build_function_type_list (V4SI_type_node,
10234 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10235 tree v4si_ftype_v8hi_v4si
b4de2f7d
AH
10236 = build_function_type_list (V4SI_type_node,
10237 V8HI_type_node, V4SI_type_node, NULL_TREE);
a3170dc6
AH
10238 tree v4si_ftype_v8hi
10239 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10240 tree int_ftype_v4si_v4si
10241 = build_function_type_list (integer_type_node,
10242 V4SI_type_node, V4SI_type_node, NULL_TREE);
10243 tree int_ftype_v4sf_v4sf
10244 = build_function_type_list (integer_type_node,
10245 V4SF_type_node, V4SF_type_node, NULL_TREE);
10246 tree int_ftype_v16qi_v16qi
10247 = build_function_type_list (integer_type_node,
10248 V16QI_type_node, V16QI_type_node, NULL_TREE);
0ac081f6 10249 tree int_ftype_v8hi_v8hi
b4de2f7d
AH
10250 = build_function_type_list (integer_type_node,
10251 V8HI_type_node, V8HI_type_node, NULL_TREE);
0ac081f6 10252
6f317ef3 10253 /* Add the simple ternary operators. */
586de218 10254 d = bdesc_3arg;
ca7558fc 10255 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
2212663f 10256 {
2212663f
DB
10257 enum machine_mode mode0, mode1, mode2, mode3;
10258 tree type;
58646b77
PB
10259 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10260 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
2212663f 10261
58646b77
PB
10262 if (is_overloaded)
10263 {
10264 mode0 = VOIDmode;
10265 mode1 = VOIDmode;
10266 mode2 = VOIDmode;
10267 mode3 = VOIDmode;
10268 }
10269 else
10270 {
10271 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10272 continue;
f676971a 10273
58646b77
PB
10274 mode0 = insn_data[d->icode].operand[0].mode;
10275 mode1 = insn_data[d->icode].operand[1].mode;
10276 mode2 = insn_data[d->icode].operand[2].mode;
10277 mode3 = insn_data[d->icode].operand[3].mode;
10278 }
bb8df8a6 10279
2212663f
DB
10280 /* When all four are of the same mode. */
10281 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10282 {
10283 switch (mode0)
10284 {
58646b77
PB
10285 case VOIDmode:
10286 type = opaque_ftype_opaque_opaque_opaque;
10287 break;
617e0e1d
DB
10288 case V4SImode:
10289 type = v4si_ftype_v4si_v4si_v4si;
10290 break;
2212663f
DB
10291 case V4SFmode:
10292 type = v4sf_ftype_v4sf_v4sf_v4sf;
10293 break;
10294 case V8HImode:
10295 type = v8hi_ftype_v8hi_v8hi_v8hi;
f676971a 10296 break;
2212663f
DB
10297 case V16QImode:
10298 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10299 break;
96038623
DE
10300 case V2SFmode:
10301 type = v2sf_ftype_v2sf_v2sf_v2sf;
10302 break;
2212663f 10303 default:
37409796 10304 gcc_unreachable ();
2212663f
DB
10305 }
10306 }
10307 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
c4ad648e 10308 {
2212663f
DB
10309 switch (mode0)
10310 {
10311 case V4SImode:
10312 type = v4si_ftype_v4si_v4si_v16qi;
10313 break;
10314 case V4SFmode:
10315 type = v4sf_ftype_v4sf_v4sf_v16qi;
10316 break;
10317 case V8HImode:
10318 type = v8hi_ftype_v8hi_v8hi_v16qi;
f676971a 10319 break;
2212663f
DB
10320 case V16QImode:
10321 type = v16qi_ftype_v16qi_v16qi_v16qi;
f676971a 10322 break;
2212663f 10323 default:
37409796 10324 gcc_unreachable ();
2212663f
DB
10325 }
10326 }
f676971a 10327 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
2212663f 10328 && mode3 == V4SImode)
24408032 10329 type = v4si_ftype_v16qi_v16qi_v4si;
f676971a 10330 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
2212663f 10331 && mode3 == V4SImode)
24408032 10332 type = v4si_ftype_v8hi_v8hi_v4si;
f676971a 10333 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
617e0e1d 10334 && mode3 == V4SImode)
24408032
AH
10335 type = v4sf_ftype_v4sf_v4sf_v4si;
10336
a7b376ee 10337 /* vchar, vchar, vchar, 4-bit literal. */
24408032
AH
10338 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10339 && mode3 == QImode)
b9e4e5d1 10340 type = v16qi_ftype_v16qi_v16qi_int;
24408032 10341
a7b376ee 10342 /* vshort, vshort, vshort, 4-bit literal. */
24408032
AH
10343 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10344 && mode3 == QImode)
b9e4e5d1 10345 type = v8hi_ftype_v8hi_v8hi_int;
24408032 10346
a7b376ee 10347 /* vint, vint, vint, 4-bit literal. */
24408032
AH
10348 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10349 && mode3 == QImode)
b9e4e5d1 10350 type = v4si_ftype_v4si_v4si_int;
24408032 10351
a7b376ee 10352 /* vfloat, vfloat, vfloat, 4-bit literal. */
24408032
AH
10353 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10354 && mode3 == QImode)
b9e4e5d1 10355 type = v4sf_ftype_v4sf_v4sf_int;
24408032 10356
2212663f 10357 else
37409796 10358 gcc_unreachable ();
2212663f
DB
10359
10360 def_builtin (d->mask, d->name, type, d->code);
10361 }
10362
0ac081f6 10363 /* Add the simple binary operators. */
00b960c7 10364 d = (struct builtin_description *) bdesc_2arg;
ca7558fc 10365 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
0ac081f6
AH
10366 {
10367 enum machine_mode mode0, mode1, mode2;
10368 tree type;
58646b77
PB
10369 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10370 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
0ac081f6 10371
58646b77
PB
10372 if (is_overloaded)
10373 {
10374 mode0 = VOIDmode;
10375 mode1 = VOIDmode;
10376 mode2 = VOIDmode;
10377 }
10378 else
bb8df8a6 10379 {
58646b77
PB
10380 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10381 continue;
f676971a 10382
58646b77
PB
10383 mode0 = insn_data[d->icode].operand[0].mode;
10384 mode1 = insn_data[d->icode].operand[1].mode;
10385 mode2 = insn_data[d->icode].operand[2].mode;
10386 }
0ac081f6
AH
10387
10388 /* When all three operands are of the same mode. */
10389 if (mode0 == mode1 && mode1 == mode2)
10390 {
10391 switch (mode0)
10392 {
58646b77
PB
10393 case VOIDmode:
10394 type = opaque_ftype_opaque_opaque;
10395 break;
0ac081f6
AH
10396 case V4SFmode:
10397 type = v4sf_ftype_v4sf_v4sf;
10398 break;
10399 case V4SImode:
10400 type = v4si_ftype_v4si_v4si;
10401 break;
10402 case V16QImode:
10403 type = v16qi_ftype_v16qi_v16qi;
10404 break;
10405 case V8HImode:
10406 type = v8hi_ftype_v8hi_v8hi;
10407 break;
a3170dc6
AH
10408 case V2SImode:
10409 type = v2si_ftype_v2si_v2si;
10410 break;
96038623
DE
10411 case V2SFmode:
10412 if (TARGET_PAIRED_FLOAT)
10413 type = v2sf_ftype_v2sf_v2sf;
10414 else
10415 type = v2sf_ftype_v2sf_v2sf_spe;
a3170dc6
AH
10416 break;
10417 case SImode:
10418 type = int_ftype_int_int;
10419 break;
0ac081f6 10420 default:
37409796 10421 gcc_unreachable ();
0ac081f6
AH
10422 }
10423 }
10424
10425 /* A few other combos we really don't want to do manually. */
10426
10427 /* vint, vfloat, vfloat. */
10428 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10429 type = v4si_ftype_v4sf_v4sf;
10430
10431 /* vshort, vchar, vchar. */
10432 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10433 type = v8hi_ftype_v16qi_v16qi;
10434
10435 /* vint, vshort, vshort. */
10436 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10437 type = v4si_ftype_v8hi_v8hi;
10438
10439 /* vshort, vint, vint. */
10440 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10441 type = v8hi_ftype_v4si_v4si;
10442
10443 /* vchar, vshort, vshort. */
10444 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10445 type = v16qi_ftype_v8hi_v8hi;
10446
10447 /* vint, vchar, vint. */
10448 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10449 type = v4si_ftype_v16qi_v4si;
10450
fa066a23
AH
10451 /* vint, vchar, vchar. */
10452 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10453 type = v4si_ftype_v16qi_v16qi;
10454
0ac081f6
AH
10455 /* vint, vshort, vint. */
10456 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10457 type = v4si_ftype_v8hi_v4si;
f676971a 10458
a7b376ee 10459 /* vint, vint, 5-bit literal. */
2212663f 10460 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10461 type = v4si_ftype_v4si_int;
f676971a 10462
a7b376ee 10463 /* vshort, vshort, 5-bit literal. */
2212663f 10464 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
b9e4e5d1 10465 type = v8hi_ftype_v8hi_int;
f676971a 10466
a7b376ee 10467 /* vchar, vchar, 5-bit literal. */
2212663f 10468 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
b9e4e5d1 10469 type = v16qi_ftype_v16qi_int;
0ac081f6 10470
a7b376ee 10471 /* vfloat, vint, 5-bit literal. */
617e0e1d 10472 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
b9e4e5d1 10473 type = v4sf_ftype_v4si_int;
f676971a 10474
a7b376ee 10475 /* vint, vfloat, 5-bit literal. */
617e0e1d 10476 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
b9e4e5d1 10477 type = v4si_ftype_v4sf_int;
617e0e1d 10478
a3170dc6
AH
10479 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10480 type = v2si_ftype_int_int;
10481
10482 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10483 type = v2si_ftype_v2si_char;
10484
10485 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10486 type = v2si_ftype_int_char;
10487
37409796 10488 else
0ac081f6 10489 {
37409796
NS
10490 /* int, x, x. */
10491 gcc_assert (mode0 == SImode);
0ac081f6
AH
10492 switch (mode1)
10493 {
10494 case V4SImode:
10495 type = int_ftype_v4si_v4si;
10496 break;
10497 case V4SFmode:
10498 type = int_ftype_v4sf_v4sf;
10499 break;
10500 case V16QImode:
10501 type = int_ftype_v16qi_v16qi;
10502 break;
10503 case V8HImode:
10504 type = int_ftype_v8hi_v8hi;
10505 break;
10506 default:
37409796 10507 gcc_unreachable ();
0ac081f6
AH
10508 }
10509 }
10510
2212663f
DB
10511 def_builtin (d->mask, d->name, type, d->code);
10512 }
24408032 10513
2212663f
DB
10514 /* Add the simple unary operators. */
10515 d = (struct builtin_description *) bdesc_1arg;
ca7558fc 10516 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
2212663f
DB
10517 {
10518 enum machine_mode mode0, mode1;
10519 tree type;
58646b77
PB
10520 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10521 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10522
10523 if (is_overloaded)
10524 {
10525 mode0 = VOIDmode;
10526 mode1 = VOIDmode;
10527 }
10528 else
10529 {
10530 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10531 continue;
bb8df8a6 10532
58646b77
PB
10533 mode0 = insn_data[d->icode].operand[0].mode;
10534 mode1 = insn_data[d->icode].operand[1].mode;
10535 }
2212663f
DB
10536
10537 if (mode0 == V4SImode && mode1 == QImode)
c4ad648e 10538 type = v4si_ftype_int;
2212663f 10539 else if (mode0 == V8HImode && mode1 == QImode)
c4ad648e 10540 type = v8hi_ftype_int;
2212663f 10541 else if (mode0 == V16QImode && mode1 == QImode)
c4ad648e 10542 type = v16qi_ftype_int;
58646b77
PB
10543 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10544 type = opaque_ftype_opaque;
617e0e1d
DB
10545 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10546 type = v4sf_ftype_v4sf;
20e26713
AH
10547 else if (mode0 == V8HImode && mode1 == V16QImode)
10548 type = v8hi_ftype_v16qi;
10549 else if (mode0 == V4SImode && mode1 == V8HImode)
10550 type = v4si_ftype_v8hi;
a3170dc6
AH
10551 else if (mode0 == V2SImode && mode1 == V2SImode)
10552 type = v2si_ftype_v2si;
10553 else if (mode0 == V2SFmode && mode1 == V2SFmode)
96038623
DE
10554 {
10555 if (TARGET_PAIRED_FLOAT)
10556 type = v2sf_ftype_v2sf;
10557 else
10558 type = v2sf_ftype_v2sf_spe;
10559 }
a3170dc6
AH
10560 else if (mode0 == V2SFmode && mode1 == V2SImode)
10561 type = v2sf_ftype_v2si;
10562 else if (mode0 == V2SImode && mode1 == V2SFmode)
10563 type = v2si_ftype_v2sf;
10564 else if (mode0 == V2SImode && mode1 == QImode)
10565 type = v2si_ftype_char;
2212663f 10566 else
37409796 10567 gcc_unreachable ();
2212663f 10568
0ac081f6
AH
10569 def_builtin (d->mask, d->name, type, d->code);
10570 }
10571}
10572
c15c90bb
ZW
10573static void
10574rs6000_init_libfuncs (void)
10575{
602ea4d3
JJ
10576 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10577 && !TARGET_POWER2 && !TARGET_POWERPC)
c15c90bb 10578 {
602ea4d3
JJ
10579 /* AIX library routines for float->int conversion. */
10580 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10581 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10582 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10583 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10584 }
c15c90bb 10585
602ea4d3 10586 if (!TARGET_IEEEQUAD)
98c41d98 10587 /* AIX/Darwin/64-bit Linux quad floating point routines. */
602ea4d3
JJ
10588 if (!TARGET_XL_COMPAT)
10589 {
10590 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10591 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10592 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10593 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
d0768f19 10594
17caeff2 10595 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
d0768f19
DE
10596 {
10597 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10598 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10599 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10600 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10601 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10602 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10603 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
d0768f19
DE
10604
10605 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10606 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10607 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10608 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10609 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10610 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10611 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10612 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10613 }
b26941b4
JM
10614
10615 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10616 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
602ea4d3
JJ
10617 }
10618 else
10619 {
10620 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10621 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10622 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10623 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10624 }
c9034561 10625 else
c15c90bb 10626 {
c9034561 10627 /* 32-bit SVR4 quad floating point routines. */
c15c90bb
ZW
10628
10629 set_optab_libfunc (add_optab, TFmode, "_q_add");
10630 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10631 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10632 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10633 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10634 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10635 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10636
c9034561
ZW
10637 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10638 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10639 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10640 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10641 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10642 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10643
85363ca0
ZW
10644 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10645 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10646 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10647 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10648 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10649 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10650 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
57904aa7 10651 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
c15c90bb
ZW
10652 }
10653}
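/* Illustrative example, not part of the original sources, of the libcall
   names chosen above: with the IBM extended 128-bit long double
   (!TARGET_IEEEQUAD) and without -mxl-compat, a simple

	long double
	qadd (long double a, long double b)
	{
	  return a + b;
	}

   compiles to a call to __gcc_qadd, whereas the TARGET_IEEEQUAD (32-bit
   SVR4) configuration calls _q_add instead.  */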
fba73eb1
DE
10654
10655\f
10656/* Expand a block clear operation, and return 1 if successful. Return 0
10657 if we should let the compiler generate normal code.
10658
10659 operands[0] is the destination
10660 operands[1] is the length
57e84f18 10661 operands[3] is the alignment */
fba73eb1
DE
10662
10663int
10664expand_block_clear (rtx operands[])
10665{
10666 rtx orig_dest = operands[0];
10667 rtx bytes_rtx = operands[1];
57e84f18 10668 rtx align_rtx = operands[3];
5514620a
GK
10669 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10670 HOST_WIDE_INT align;
10671 HOST_WIDE_INT bytes;
fba73eb1
DE
10672 int offset;
10673 int clear_bytes;
5514620a 10674 int clear_step;
fba73eb1
DE
10675
10676 /* If this is not a fixed size clear, just call memset. */
10677 if (! constp)
10678 return 0;
10679
37409796
NS
10680 /* This must be a fixed size alignment */
10681 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1
DE
10682 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10683
10684 /* Anything to clear? */
10685 bytes = INTVAL (bytes_rtx);
10686 if (bytes <= 0)
10687 return 1;
10688
5514620a
GK
10689 /* Use the builtin memset after a point, to avoid huge code bloat.
10690 When optimize_size, avoid any significant code bloat; calling
10691 memset is about 4 instructions, so allow for one instruction to
10692 load zero and three to do clearing. */
10693 if (TARGET_ALTIVEC && align >= 128)
10694 clear_step = 16;
10695 else if (TARGET_POWERPC64 && align >= 32)
10696 clear_step = 8;
21d818ff
NF
10697 else if (TARGET_SPE && align >= 64)
10698 clear_step = 8;
5514620a
GK
10699 else
10700 clear_step = 4;
fba73eb1 10701
5514620a
GK
10702 if (optimize_size && bytes > 3 * clear_step)
10703 return 0;
10704 if (! optimize_size && bytes > 8 * clear_step)
fba73eb1
DE
10705 return 0;
10706
10707 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10708 {
fba73eb1
DE
10709 enum machine_mode mode = BLKmode;
10710 rtx dest;
f676971a 10711
5514620a
GK
10712 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10713 {
10714 clear_bytes = 16;
10715 mode = V4SImode;
10716 }
21d818ff
NF
10717 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10718 {
10719 clear_bytes = 8;
10720 mode = V2SImode;
10721 }
5514620a 10722 else if (bytes >= 8 && TARGET_POWERPC64
21d818ff
NF
10723 /* 64-bit loads and stores require word-aligned
10724 displacements. */
10725 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
fba73eb1
DE
10726 {
10727 clear_bytes = 8;
10728 mode = DImode;
fba73eb1 10729 }
5514620a 10730 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
fba73eb1
DE
10731 { /* move 4 bytes */
10732 clear_bytes = 4;
10733 mode = SImode;
fba73eb1 10734 }
ec53fc93 10735 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
fba73eb1
DE
10736 { /* move 2 bytes */
10737 clear_bytes = 2;
10738 mode = HImode;
fba73eb1
DE
10739 }
10740 else /* move 1 byte at a time */
10741 {
10742 clear_bytes = 1;
10743 mode = QImode;
fba73eb1 10744 }
f676971a 10745
fba73eb1 10746 dest = adjust_address (orig_dest, mode, offset);
f676971a 10747
5514620a 10748 emit_move_insn (dest, CONST0_RTX (mode));
fba73eb1
DE
10749 }
10750
10751 return 1;
10752}
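/* Worked example, illustrative and not part of the original sources: with
   AltiVec enabled, clearing a 16-byte-aligned 24-byte buffer, e.g.

	char buf[24] __attribute__ ((aligned (16)));
	memset (buf, 0, 24);

   can be expanded inline by the code above as one V4SImode store of zero
   for the first 16 bytes followed by a DImode store on 64-bit targets (or
   two SImode stores on 32-bit targets) for the remaining 8 bytes, instead
   of a call to memset.  */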
10753
35aff10b 10754\f
7e69e155
MM
10755/* Expand a block move operation, and return 1 if successful. Return 0
10756 if we should let the compiler generate normal code.
10757
10758 operands[0] is the destination
10759 operands[1] is the source
10760 operands[2] is the length
10761 operands[3] is the alignment */
10762
3933e0e1
MM
10763#define MAX_MOVE_REG 4
10764
7e69e155 10765int
a2369ed3 10766expand_block_move (rtx operands[])
7e69e155 10767{
b6c9286a
MM
10768 rtx orig_dest = operands[0];
10769 rtx orig_src = operands[1];
7e69e155 10770 rtx bytes_rtx = operands[2];
7e69e155 10771 rtx align_rtx = operands[3];
3933e0e1 10772 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5ee95df6 10773 int align;
3933e0e1
MM
10774 int bytes;
10775 int offset;
7e69e155 10776 int move_bytes;
cabfd258
GK
10777 rtx stores[MAX_MOVE_REG];
10778 int num_reg = 0;
7e69e155 10779
3933e0e1 10780 /* If this is not a fixed size move, just call memcpy */
cc0d9ba8 10781 if (! constp)
3933e0e1
MM
10782 return 0;
10783
37409796
NS
10784 /* This must be a fixed size alignment */
10785 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
fba73eb1 10786 align = INTVAL (align_rtx) * BITS_PER_UNIT;
5ee95df6 10787
7e69e155 10788 /* Anything to move? */
3933e0e1
MM
10789 bytes = INTVAL (bytes_rtx);
10790 if (bytes <= 0)
7e69e155
MM
10791 return 1;
10792
ea9982a8 10793 /* store_one_arg depends on expand_block_move to handle at least the size of
f676971a 10794 reg_parm_stack_space. */
ea9982a8 10795 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7e69e155
MM
10796 return 0;
10797
cabfd258 10798 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7e69e155 10799 {
cabfd258 10800 union {
70128ad9 10801 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
a2369ed3 10802 rtx (*mov) (rtx, rtx);
cabfd258
GK
10803 } gen_func;
10804 enum machine_mode mode = BLKmode;
10805 rtx src, dest;
f676971a 10806
5514620a
GK
10807 /* Altivec first, since it will be faster than a string move
10808 when it applies, and usually not significantly larger. */
10809 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10810 {
10811 move_bytes = 16;
10812 mode = V4SImode;
10813 gen_func.mov = gen_movv4si;
10814 }
21d818ff
NF
10815 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10816 {
10817 move_bytes = 8;
10818 mode = V2SImode;
10819 gen_func.mov = gen_movv2si;
10820 }
5514620a 10821 else if (TARGET_STRING
cabfd258
GK
10822 && bytes > 24 /* move up to 32 bytes at a time */
10823 && ! fixed_regs[5]
10824 && ! fixed_regs[6]
10825 && ! fixed_regs[7]
10826 && ! fixed_regs[8]
10827 && ! fixed_regs[9]
10828 && ! fixed_regs[10]
10829 && ! fixed_regs[11]
10830 && ! fixed_regs[12])
7e69e155 10831 {
cabfd258 10832 move_bytes = (bytes > 32) ? 32 : bytes;
70128ad9 10833 gen_func.movmemsi = gen_movmemsi_8reg;
cabfd258
GK
10834 }
10835 else if (TARGET_STRING
10836 && bytes > 16 /* move up to 24 bytes at a time */
10837 && ! fixed_regs[5]
10838 && ! fixed_regs[6]
10839 && ! fixed_regs[7]
10840 && ! fixed_regs[8]
10841 && ! fixed_regs[9]
10842 && ! fixed_regs[10])
10843 {
10844 move_bytes = (bytes > 24) ? 24 : bytes;
70128ad9 10845 gen_func.movmemsi = gen_movmemsi_6reg;
cabfd258
GK
10846 }
10847 else if (TARGET_STRING
10848 && bytes > 8 /* move up to 16 bytes at a time */
10849 && ! fixed_regs[5]
10850 && ! fixed_regs[6]
10851 && ! fixed_regs[7]
10852 && ! fixed_regs[8])
10853 {
10854 move_bytes = (bytes > 16) ? 16 : bytes;
70128ad9 10855 gen_func.movmemsi = gen_movmemsi_4reg;
cabfd258
GK
10856 }
10857 else if (bytes >= 8 && TARGET_POWERPC64
10858 /* 64-bit loads and stores require word-aligned
10859 displacements. */
fba73eb1 10860 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
cabfd258
GK
10861 {
10862 move_bytes = 8;
10863 mode = DImode;
10864 gen_func.mov = gen_movdi;
10865 }
10866 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
10867 { /* move up to 8 bytes at a time */
10868 move_bytes = (bytes > 8) ? 8 : bytes;
70128ad9 10869 gen_func.movmemsi = gen_movmemsi_2reg;
cabfd258 10870 }
cd7d9ca4 10871 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
cabfd258
GK
10872 { /* move 4 bytes */
10873 move_bytes = 4;
10874 mode = SImode;
10875 gen_func.mov = gen_movsi;
10876 }
ec53fc93 10877 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
cabfd258
GK
10878 { /* move 2 bytes */
10879 move_bytes = 2;
10880 mode = HImode;
10881 gen_func.mov = gen_movhi;
10882 }
10883 else if (TARGET_STRING && bytes > 1)
10884 { /* move up to 4 bytes at a time */
10885 move_bytes = (bytes > 4) ? 4 : bytes;
70128ad9 10886 gen_func.movmemsi = gen_movmemsi_1reg;
cabfd258
GK
10887 }
10888 else /* move 1 byte at a time */
10889 {
10890 move_bytes = 1;
10891 mode = QImode;
10892 gen_func.mov = gen_movqi;
10893 }
f676971a 10894
cabfd258
GK
10895 src = adjust_address (orig_src, mode, offset);
10896 dest = adjust_address (orig_dest, mode, offset);
f676971a
EC
10897
10898 if (mode != BLKmode)
cabfd258
GK
10899 {
10900 rtx tmp_reg = gen_reg_rtx (mode);
f676971a 10901
cabfd258
GK
10902 emit_insn ((*gen_func.mov) (tmp_reg, src));
10903 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
4c64a852 10904 }
3933e0e1 10905
cabfd258
GK
10906 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
10907 {
10908 int i;
10909 for (i = 0; i < num_reg; i++)
10910 emit_insn (stores[i]);
10911 num_reg = 0;
10912 }
35aff10b 10913
cabfd258 10914 if (mode == BLKmode)
7e69e155 10915 {
70128ad9 10916 /* Move the address into scratch registers. The movmemsi
cabfd258
GK
10917 patterns require zero offset. */
10918 if (!REG_P (XEXP (src, 0)))
b6c9286a 10919 {
cabfd258
GK
10920 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
10921 src = replace_equiv_address (src, src_reg);
b6c9286a 10922 }
cabfd258 10923 set_mem_size (src, GEN_INT (move_bytes));
f676971a 10924
cabfd258 10925 if (!REG_P (XEXP (dest, 0)))
3933e0e1 10926 {
cabfd258
GK
10927 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
10928 dest = replace_equiv_address (dest, dest_reg);
7e69e155 10929 }
cabfd258 10930 set_mem_size (dest, GEN_INT (move_bytes));
f676971a 10931
70128ad9 10932 emit_insn ((*gen_func.movmemsi) (dest, src,
cabfd258
GK
10933 GEN_INT (move_bytes & 31),
10934 align_rtx));
7e69e155 10935 }
7e69e155
MM
10936 }
10937
10938 return 1;
10939}
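/* Worked example, illustrative and not part of the original sources: a
   32-byte copy with only word alignment on a -mstring target takes one of
   the TARGET_STRING branches above and emits a single movmemsi_8reg
   (a load-string/store-string pair using eight registers), while the same
   copy with 16-byte alignment and AltiVec becomes two V4SImode register
   moves; the stores[] array batches up to MAX_MOVE_REG stores so that the
   loads are emitted before the corresponding stores.  */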
10940
d62294f5 10941\f
9caa3eb2
DE
10942/* Return a string to perform a load_multiple operation.
10943 operands[0] is the vector.
10944 operands[1] is the source address.
10945 operands[2] is the first destination register. */
10946
10947const char *
a2369ed3 10948rs6000_output_load_multiple (rtx operands[3])
9caa3eb2
DE
10949{
10950 /* We have to handle the case where the pseudo used to contain the address
10951 is assigned to one of the output registers. */
10952 int i, j;
10953 int words = XVECLEN (operands[0], 0);
10954 rtx xop[10];
10955
10956 if (XVECLEN (operands[0], 0) == 1)
10957 return "{l|lwz} %2,0(%1)";
10958
10959 for (i = 0; i < words; i++)
10960 if (refers_to_regno_p (REGNO (operands[2]) + i,
10961 REGNO (operands[2]) + i + 1, operands[1], 0))
10962 {
10963 if (i == words-1)
10964 {
10965 xop[0] = GEN_INT (4 * (words-1));
10966 xop[1] = operands[1];
10967 xop[2] = operands[2];
10968 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
10969 return "";
10970 }
10971 else if (i == 0)
10972 {
10973 xop[0] = GEN_INT (4 * (words-1));
10974 xop[1] = operands[1];
10975 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
10976 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
10977 return "";
10978 }
10979 else
10980 {
10981 for (j = 0; j < words; j++)
10982 if (j != i)
10983 {
10984 xop[0] = GEN_INT (j * 4);
10985 xop[1] = operands[1];
10986 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
10987 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
10988 }
10989 xop[0] = GEN_INT (i * 4);
10990 xop[1] = operands[1];
10991 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
10992 return "";
10993 }
10994 }
10995
10996 return "{lsi|lswi} %2,%1,%N0";
10997}
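/* Worked example, illustrative and not part of the original sources: for a
   three-word load_multiple whose address register is not also one of the
   destinations, the final return emits a single string load such as

	lswi 5,9,12

   loading 12 bytes from (r9) into r5..r7.  When the address register is one
   of the destinations, the special cases above load the other words with
   lwz first and load the overlapping word last, so the address is not
   clobbered before it has been fully used.  */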
10998
9878760c 10999\f
a4f6c312
SS
11000/* A validation routine: say whether CODE, a condition code, and MODE
11001 match. The other alternatives either don't make sense or should
11002 never be generated. */
39a10a29 11003
48d72335 11004void
a2369ed3 11005validate_condition_mode (enum rtx_code code, enum machine_mode mode)
39a10a29 11006{
37409796
NS
11007 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11008 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11009 && GET_MODE_CLASS (mode) == MODE_CC);
39a10a29
GK
11010
11011 /* These don't make sense. */
37409796
NS
11012 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11013 || mode != CCUNSmode);
39a10a29 11014
37409796
NS
11015 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11016 || mode == CCUNSmode);
39a10a29 11017
37409796
NS
11018 gcc_assert (mode == CCFPmode
11019 || (code != ORDERED && code != UNORDERED
11020 && code != UNEQ && code != LTGT
11021 && code != UNGT && code != UNLT
11022 && code != UNGE && code != UNLE));
f676971a
EC
11023
11024 /* These should never be generated except for
bc9ec0e0 11025 flag_finite_math_only. */
37409796
NS
11026 gcc_assert (mode != CCFPmode
11027 || flag_finite_math_only
11028 || (code != LE && code != GE
11029 && code != UNEQ && code != LTGT
11030 && code != UNGT && code != UNLT));
39a10a29
GK
11031
11032 /* These are invalid; the information is not there. */
37409796 11033 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
39a10a29
GK
11034}
11035
9878760c
RK
11036\f
11037/* Return 1 if ANDOP is a mask with no bits set that are outside the
11038 mask required to convert the result of a rotate insn into a shift
11039 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9878760c
RK
11040
11041int
a2369ed3 11042includes_lshift_p (rtx shiftop, rtx andop)
9878760c 11043{
e2c953b6
DE
11044 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11045
11046 shift_mask <<= INTVAL (shiftop);
9878760c 11047
b1765bde 11048 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9878760c
RK
11049}
11050
11051/* Similar, but for right shift. */
11052
11053int
a2369ed3 11054includes_rshift_p (rtx shiftop, rtx andop)
9878760c 11055{
a7653a2c 11056 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9878760c
RK
11057
11058 shift_mask >>= INTVAL (shiftop);
11059
b1765bde 11060 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
e2c953b6
DE
11061}
11062
c5059423
AM
11063/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11064 to perform a left shift. It must have exactly SHIFTOP least
b6d08ca1 11065 significant 0's, then one or more 1's, then zero or more 0's. */
e2c953b6
DE
11066
11067int
a2369ed3 11068includes_rldic_lshift_p (rtx shiftop, rtx andop)
e2c953b6 11069{
c5059423
AM
11070 if (GET_CODE (andop) == CONST_INT)
11071 {
02071907 11072 HOST_WIDE_INT c, lsb, shift_mask;
e2c953b6 11073
c5059423 11074 c = INTVAL (andop);
02071907 11075 if (c == 0 || c == ~0)
c5059423 11076 return 0;
e2c953b6 11077
02071907 11078 shift_mask = ~0;
c5059423
AM
11079 shift_mask <<= INTVAL (shiftop);
11080
b6d08ca1 11081 /* Find the least significant one bit. */
c5059423
AM
11082 lsb = c & -c;
11083
11084 /* It must coincide with the LSB of the shift mask. */
11085 if (-lsb != shift_mask)
11086 return 0;
e2c953b6 11087
c5059423
AM
11088 /* Invert to look for the next transition (if any). */
11089 c = ~c;
11090
11091 /* Remove the low group of ones (originally low group of zeros). */
11092 c &= -lsb;
11093
11094 /* Again find the lsb, and check we have all 1's above. */
11095 lsb = c & -c;
11096 return c == -lsb;
11097 }
11098 else if (GET_CODE (andop) == CONST_DOUBLE
11099 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11100 {
02071907
AM
11101 HOST_WIDE_INT low, high, lsb;
11102 HOST_WIDE_INT shift_mask_low, shift_mask_high;
c5059423
AM
11103
11104 low = CONST_DOUBLE_LOW (andop);
11105 if (HOST_BITS_PER_WIDE_INT < 64)
11106 high = CONST_DOUBLE_HIGH (andop);
11107
11108 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
02071907 11109 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
c5059423
AM
11110 return 0;
11111
11112 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11113 {
02071907 11114 shift_mask_high = ~0;
c5059423
AM
11115 if (INTVAL (shiftop) > 32)
11116 shift_mask_high <<= INTVAL (shiftop) - 32;
11117
11118 lsb = high & -high;
11119
11120 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11121 return 0;
11122
11123 high = ~high;
11124 high &= -lsb;
11125
11126 lsb = high & -high;
11127 return high == -lsb;
11128 }
11129
02071907 11130 shift_mask_low = ~0;
c5059423
AM
11131 shift_mask_low <<= INTVAL (shiftop);
11132
11133 lsb = low & -low;
11134
11135 if (-lsb != shift_mask_low)
11136 return 0;
11137
11138 if (HOST_BITS_PER_WIDE_INT < 64)
11139 high = ~high;
11140 low = ~low;
11141 low &= -lsb;
11142
11143 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11144 {
11145 lsb = high & -high;
11146 return high == -lsb;
11147 }
11148
11149 lsb = low & -low;
11150 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11151 }
11152 else
11153 return 0;
11154}
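/* Worked example, illustrative and not part of the original sources: with
   SHIFTOP == 8, the mask 0x0000ff00 has exactly eight low zeros, then a
   block of ones, then zeros, so it is accepted; 0x0000ff01 is rejected
   because bit 0 is set below the shift amount, and 0x00ffff00 with
   SHIFTOP == 16 is rejected because it has only eight low zeros rather
   than sixteen.  */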
e2c953b6 11155
c5059423
AM
11156/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11157 to perform a left shift. It must have SHIFTOP or more least
c1207243 11158 significant 0's, with the remainder of the word 1's. */
e2c953b6 11159
c5059423 11160int
a2369ed3 11161includes_rldicr_lshift_p (rtx shiftop, rtx andop)
c5059423 11162{
e2c953b6 11163 if (GET_CODE (andop) == CONST_INT)
c5059423 11164 {
02071907 11165 HOST_WIDE_INT c, lsb, shift_mask;
c5059423 11166
02071907 11167 shift_mask = ~0;
c5059423
AM
11168 shift_mask <<= INTVAL (shiftop);
11169 c = INTVAL (andop);
11170
c1207243 11171 /* Find the least significant one bit. */
c5059423
AM
11172 lsb = c & -c;
11173
11174 /* It must be covered by the shift mask.
a4f6c312 11175 This test also rejects c == 0. */
c5059423
AM
11176 if ((lsb & shift_mask) == 0)
11177 return 0;
11178
11179 /* Check we have all 1's above the transition, and reject all 1's. */
11180 return c == -lsb && lsb != 1;
11181 }
11182 else if (GET_CODE (andop) == CONST_DOUBLE
11183 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11184 {
02071907 11185 HOST_WIDE_INT low, lsb, shift_mask_low;
c5059423
AM
11186
11187 low = CONST_DOUBLE_LOW (andop);
11188
11189 if (HOST_BITS_PER_WIDE_INT < 64)
11190 {
02071907 11191 HOST_WIDE_INT high, shift_mask_high;
c5059423
AM
11192
11193 high = CONST_DOUBLE_HIGH (andop);
11194
11195 if (low == 0)
11196 {
02071907 11197 shift_mask_high = ~0;
c5059423
AM
11198 if (INTVAL (shiftop) > 32)
11199 shift_mask_high <<= INTVAL (shiftop) - 32;
11200
11201 lsb = high & -high;
11202
11203 if ((lsb & shift_mask_high) == 0)
11204 return 0;
11205
11206 return high == -lsb;
11207 }
11208 if (high != ~0)
11209 return 0;
11210 }
11211
02071907 11212 shift_mask_low = ~0;
c5059423
AM
11213 shift_mask_low <<= INTVAL (shiftop);
11214
11215 lsb = low & -low;
11216
11217 if ((lsb & shift_mask_low) == 0)
11218 return 0;
11219
11220 return low == -lsb && lsb != 1;
11221 }
e2c953b6 11222 else
c5059423 11223 return 0;
9878760c 11224}
35068b43 11225
11ac38b2
DE
11226/* Return 1 if the operands will generate valid arguments to an rlwimi
11227instruction for an insert with right shift in 64-bit mode. The mask may
11228not start on the first bit or stop on the last bit because the wrap-around
11229effects of the instruction do not correspond to the semantics of the RTL insn. */
11230
11231int
11232insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11233{
429ec7dc
DE
11234 if (INTVAL (startop) > 32
11235 && INTVAL (startop) < 64
11236 && INTVAL (sizeop) > 1
11237 && INTVAL (sizeop) + INTVAL (startop) < 64
11238 && INTVAL (shiftop) > 0
11239 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11ac38b2
DE
11240 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11241 return 1;
11242
11243 return 0;
11244}
11245
35068b43 11246/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
90f81f99 11247 for lfq and stfq insns, provided that both registers are hard registers. */
35068b43
RK
11248
11249int
a2369ed3 11250registers_ok_for_quad_peep (rtx reg1, rtx reg2)
35068b43
RK
11251{
11252 /* We might have been passed a SUBREG. */
f676971a 11253 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
35068b43 11254 return 0;
f676971a 11255
90f81f99
AP
11256 /* We might have been passed non-floating-point registers. */
11257 if (!FP_REGNO_P (REGNO (reg1))
11258 || !FP_REGNO_P (REGNO (reg2)))
11259 return 0;
35068b43
RK
11260
11261 return (REGNO (reg1) == REGNO (reg2) - 1);
11262}
11263
a4f6c312
SS
11264/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11265 addr1 and addr2 must be in consecutive memory locations
11266 (addr2 == addr1 + 8). */
35068b43
RK
11267
11268int
90f81f99 11269mems_ok_for_quad_peep (rtx mem1, rtx mem2)
35068b43 11270{
90f81f99 11271 rtx addr1, addr2;
bb8df8a6
EC
11272 unsigned int reg1, reg2;
11273 int offset1, offset2;
35068b43 11274
90f81f99
AP
11275 /* The mems cannot be volatile. */
11276 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11277 return 0;
f676971a 11278
90f81f99
AP
11279 addr1 = XEXP (mem1, 0);
11280 addr2 = XEXP (mem2, 0);
11281
35068b43
RK
11282 /* Extract an offset (if used) from the first addr. */
11283 if (GET_CODE (addr1) == PLUS)
11284 {
11285 /* If not a REG, return zero. */
11286 if (GET_CODE (XEXP (addr1, 0)) != REG)
11287 return 0;
11288 else
11289 {
c4ad648e 11290 reg1 = REGNO (XEXP (addr1, 0));
35068b43
RK
11291 /* The offset must be constant! */
11292 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
c4ad648e
AM
11293 return 0;
11294 offset1 = INTVAL (XEXP (addr1, 1));
35068b43
RK
11295 }
11296 }
11297 else if (GET_CODE (addr1) != REG)
11298 return 0;
11299 else
11300 {
11301 reg1 = REGNO (addr1);
11302 /* This was a simple (mem (reg)) expression. Offset is 0. */
11303 offset1 = 0;
11304 }
11305
bb8df8a6
EC
11306 /* And now for the second addr. */
11307 if (GET_CODE (addr2) == PLUS)
11308 {
11309 /* If not a REG, return zero. */
11310 if (GET_CODE (XEXP (addr2, 0)) != REG)
11311 return 0;
11312 else
11313 {
11314 reg2 = REGNO (XEXP (addr2, 0));
11315 /* The offset must be constant. */
11316 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11317 return 0;
11318 offset2 = INTVAL (XEXP (addr2, 1));
11319 }
11320 }
11321 else if (GET_CODE (addr2) != REG)
35068b43 11322 return 0;
bb8df8a6
EC
11323 else
11324 {
11325 reg2 = REGNO (addr2);
11326 /* This was a simple (mem (reg)) expression. Offset is 0. */
11327 offset2 = 0;
11328 }
35068b43 11329
bb8df8a6
EC
11330 /* Both of these must have the same base register. */
11331 if (reg1 != reg2)
35068b43
RK
11332 return 0;
11333
11334 /* The offset for the second addr must be 8 more than the first addr. */
bb8df8a6 11335 if (offset2 != offset1 + 8)
35068b43
RK
11336 return 0;
11337
11338 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11339 instructions. */
11340 return 1;
11341}
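/* Illustrative example, not part of the original sources, of the peephole
   these predicates enable: an adjacent pair of double-precision loads

	lfd 10,8(9)
	lfd 11,16(9)

   where f10/f11 are consecutive hard FP registers and the second address
   is exactly 8 bytes past the first can be combined into a single quad
   load (lfq), and likewise a pair of stfd into stfq, on processors that
   provide those instructions.  */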
9878760c 11342\f
e41b2a33
PB
11343
11344rtx
11345rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11346{
11347 static bool eliminated = false;
11348 if (mode != SDmode)
11349 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11350 else
11351 {
11352 rtx mem = cfun->machine->sdmode_stack_slot;
11353 gcc_assert (mem != NULL_RTX);
11354
11355 if (!eliminated)
11356 {
11357 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11358 cfun->machine->sdmode_stack_slot = mem;
11359 eliminated = true;
11360 }
11361 return mem;
11362 }
11363}
11364
11365static tree
11366rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11367{
11368 /* Don't walk into types. */
11369 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11370 {
11371 *walk_subtrees = 0;
11372 return NULL_TREE;
11373 }
11374
11375 switch (TREE_CODE (*tp))
11376 {
11377 case VAR_DECL:
11378 case PARM_DECL:
11379 case FIELD_DECL:
11380 case RESULT_DECL:
11381 case REAL_CST:
fdf4f148 11382 case INDIRECT_REF:
a0f39282
JJ
11383 case ALIGN_INDIRECT_REF:
11384 case MISALIGNED_INDIRECT_REF:
fdf4f148 11385 case VIEW_CONVERT_EXPR:
e41b2a33
PB
11386 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11387 return *tp;
11388 break;
11389 default:
11390 break;
11391 }
11392
11393 return NULL_TREE;
11394}
11395
11396
11397/* Allocate a 64-bit stack slot to be used for copying SDmode
11398 values through if this function has any SDmode references. */
11399
11400static void
11401rs6000_alloc_sdmode_stack_slot (void)
11402{
11403 tree t;
11404 basic_block bb;
726a989a 11405 gimple_stmt_iterator gsi;
e41b2a33
PB
11406
11407 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11408
11409 FOR_EACH_BB (bb)
726a989a 11410 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e41b2a33 11411 {
726a989a 11412 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
e41b2a33
PB
11413 if (ret)
11414 {
11415 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11416 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11417 SDmode, 0);
11418 return;
11419 }
11420 }
11421
11422 /* Check for any SDmode parameters of the function. */
11423 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11424 {
11425 if (TREE_TYPE (t) == error_mark_node)
11426 continue;
11427
11428 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11429 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11430 {
11431 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11432 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11433 SDmode, 0);
11434 return;
11435 }
11436 }
11437}
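/* Illustrative example, not part of the original sources: a function that
   uses the decimal float type _Decimal32, e.g.

	_Decimal32 scale (_Decimal32 x) { return x; }

   contains SDmode references, so the walk above allocates the DDmode-sized
   stack slot that rs6000_secondary_memory_needed_rtx uses to bounce SDmode
   values between register classes.  */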
11438
11439static void
11440rs6000_instantiate_decls (void)
11441{
11442 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11443 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11444}
11445
9878760c 11446/* Return the register class of a scratch register needed to copy IN into
0a2aaacc 11447 or out of a register in RCLASS in MODE. If it can be done directly,
9878760c
RK
11448 NO_REGS is returned. */
11449
11450enum reg_class
0a2aaacc 11451rs6000_secondary_reload_class (enum reg_class rclass,
3c4774e0
R
11452 enum machine_mode mode ATTRIBUTE_UNUSED,
11453 rtx in)
9878760c 11454{
5accd822 11455 int regno;
9878760c 11456
ab82a49f
AP
11457 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11458#if TARGET_MACHO
c4ad648e 11459 && MACHOPIC_INDIRECT
ab82a49f 11460#endif
c4ad648e 11461 ))
46fad5b7
DJ
11462 {
11463 /* We cannot copy a symbolic operand directly into anything
c4ad648e
AM
11464 other than BASE_REGS for TARGET_ELF. So indicate that a
11465 register from BASE_REGS is needed as an intermediate
11466 register.
f676971a 11467
46fad5b7
DJ
11468 On Darwin, pic addresses require a load from memory, which
11469 needs a base register. */
0a2aaacc 11470 if (rclass != BASE_REGS
c4ad648e
AM
11471 && (GET_CODE (in) == SYMBOL_REF
11472 || GET_CODE (in) == HIGH
11473 || GET_CODE (in) == LABEL_REF
11474 || GET_CODE (in) == CONST))
11475 return BASE_REGS;
46fad5b7 11476 }
e7b7998a 11477
5accd822
DE
11478 if (GET_CODE (in) == REG)
11479 {
11480 regno = REGNO (in);
11481 if (regno >= FIRST_PSEUDO_REGISTER)
11482 {
11483 regno = true_regnum (in);
11484 if (regno >= FIRST_PSEUDO_REGISTER)
11485 regno = -1;
11486 }
11487 }
11488 else if (GET_CODE (in) == SUBREG)
11489 {
11490 regno = true_regnum (in);
11491 if (regno >= FIRST_PSEUDO_REGISTER)
11492 regno = -1;
11493 }
11494 else
11495 regno = -1;
11496
9878760c
RK
11497 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11498 into anything. */
0a2aaacc 11499 if (rclass == GENERAL_REGS || rclass == BASE_REGS
9878760c
RK
11500 || (regno >= 0 && INT_REGNO_P (regno)))
11501 return NO_REGS;
11502
11503 /* Constants, memory, and FP registers can go into FP registers. */
11504 if ((regno == -1 || FP_REGNO_P (regno))
0a2aaacc 11505 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
e41b2a33 11506 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
9878760c 11507
0ac081f6
AH
11508 /* Memory, and AltiVec registers can go into AltiVec registers. */
11509 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
0a2aaacc 11510 && rclass == ALTIVEC_REGS)
0ac081f6
AH
11511 return NO_REGS;
11512
9878760c 11513 /* We can copy among the CR registers. */
0a2aaacc 11514 if ((rclass == CR_REGS || rclass == CR0_REGS)
9878760c
RK
11515 && regno >= 0 && CR_REGNO_P (regno))
11516 return NO_REGS;
11517
11518 /* Otherwise, we need GENERAL_REGS. */
11519 return GENERAL_REGS;
11520}
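/* Illustrative examples, not part of the original sources, of the choices
   made above: under TARGET_ELF, copying a SYMBOL_REF into anything but a
   base register asks for a BASE_REGS scratch; a constant or memory operand
   destined for FLOAT_REGS needs no scratch (NO_REGS) except in SDmode,
   where it is bounced through GENERAL_REGS; and a copy between two CR
   fields likewise needs no scratch register.  */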
11521\f
11522/* Given a comparison operation, return the bit number in CCR to test. We
f676971a 11523 know this is a valid comparison.
9878760c
RK
11524
11525 SCC_P is 1 if this is for an scc. That means that %D will have been
11526 used instead of %C, so the bits will be in different places.
11527
b4ac57ab 11528 Return -1 if OP isn't a valid comparison for some reason. */
9878760c
RK
11529
11530int
a2369ed3 11531ccr_bit (rtx op, int scc_p)
9878760c
RK
11532{
11533 enum rtx_code code = GET_CODE (op);
11534 enum machine_mode cc_mode;
11535 int cc_regnum;
11536 int base_bit;
9ebbca7d 11537 rtx reg;
9878760c 11538
ec8e098d 11539 if (!COMPARISON_P (op))
9878760c
RK
11540 return -1;
11541
9ebbca7d
GK
11542 reg = XEXP (op, 0);
11543
37409796 11544 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
9ebbca7d
GK
11545
11546 cc_mode = GET_MODE (reg);
11547 cc_regnum = REGNO (reg);
11548 base_bit = 4 * (cc_regnum - CR0_REGNO);
9878760c 11549
39a10a29 11550 validate_condition_mode (code, cc_mode);
c5defebb 11551
b7053a3f
GK
11552 /* When generating a sCOND operation, only positive conditions are
11553 allowed. */
37409796
NS
11554 gcc_assert (!scc_p
11555 || code == EQ || code == GT || code == LT || code == UNORDERED
11556 || code == GTU || code == LTU);
f676971a 11557
9878760c
RK
11558 switch (code)
11559 {
11560 case NE:
11561 return scc_p ? base_bit + 3 : base_bit + 2;
11562 case EQ:
11563 return base_bit + 2;
1c882ea4 11564 case GT: case GTU: case UNLE:
9878760c 11565 return base_bit + 1;
1c882ea4 11566 case LT: case LTU: case UNGE:
9878760c 11567 return base_bit;
1c882ea4
GK
11568 case ORDERED: case UNORDERED:
11569 return base_bit + 3;
9878760c
RK
11570
11571 case GE: case GEU:
39a10a29 11572 /* If scc, we will have done a cror to put the bit in the
9878760c
RK
11573 unordered position. So test that bit. For integer, this is ! LT
11574 unless this is an scc insn. */
39a10a29 11575 return scc_p ? base_bit + 3 : base_bit;
9878760c
RK
11576
11577 case LE: case LEU:
39a10a29 11578 return scc_p ? base_bit + 3 : base_bit + 1;
1c882ea4 11579
9878760c 11580 default:
37409796 11581 gcc_unreachable ();
9878760c
RK
11582 }
11583}
1ff7789b 11584\f
8d30c4ee 11585/* Return the GOT register. */
1ff7789b 11586
9390387d 11587rtx
a2369ed3 11588rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
1ff7789b 11589{
a4f6c312
SS
11590 /* The second flow pass currently (June 1999) can't update
11591 regs_ever_live without disturbing other parts of the compiler, so
11592 update it here to make the prolog/epilogue code happy. */
b3a13419
ILT
11593 if (!can_create_pseudo_p ()
11594 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
6fb5fa3c 11595 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
1ff7789b 11596
e3b5732b 11597 crtl->uses_pic_offset_table = 1;
3cb999d8 11598
11599 return pic_offset_table_rtx;
11600}
a7df97e6 11601\f
11602/* Function to init struct machine_function.
11603 This will be called, via a pointer variable,
11604 from push_function_context. */
a7df97e6 11605
e2500fed 11606static struct machine_function *
863d938c 11607rs6000_init_machine_status (void)
a7df97e6 11608{
5ead67f6 11609 return GGC_CNEW (machine_function);
a7df97e6 11610}
9878760c 11611\f
11612/* These macros test for integers and extract the low-order bits. */
11613#define INT_P(X) \
11614((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11615 && GET_MODE (X) == VOIDmode)
11616
11617#define INT_LOWPART(X) \
11618 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11619
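/* extract_MB and extract_ME below compute the MB and ME fields of an
   rlwinm-style mask, in IBM bit numbering (bit 0 is the MSB).  For a
   simple mask such as 0x00000ff0 they return MB = 20 and ME = 27;
   wrap-around masks are handled by scanning from the opposite end.  */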
11620int
a2369ed3 11621extract_MB (rtx op)
11622{
11623 int i;
11624 unsigned long val = INT_LOWPART (op);
11625
11626 /* If the high bit is zero, the value is the first 1 bit we find
11627 from the left. */
11628 if ((val & 0x80000000) == 0)
11629 {
37409796 11630 gcc_assert (val & 0xffffffff);
11631
11632 i = 1;
11633 while (((val <<= 1) & 0x80000000) == 0)
11634 ++i;
11635 return i;
11636 }
11637
11638 /* If the high bit is set and the low bit is not, or the mask is all
11639 1's, the value is zero. */
11640 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11641 return 0;
11642
11643 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11644 from the right. */
11645 i = 31;
11646 while (((val >>= 1) & 1) != 0)
11647 --i;
11648
11649 return i;
11650}
11651
11652int
a2369ed3 11653extract_ME (rtx op)
11654{
11655 int i;
11656 unsigned long val = INT_LOWPART (op);
11657
11658 /* If the low bit is zero, the value is the first 1 bit we find from
11659 the right. */
11660 if ((val & 1) == 0)
11661 {
37409796 11662 gcc_assert (val & 0xffffffff);
11663
11664 i = 30;
11665 while (((val >>= 1) & 1) == 0)
11666 --i;
11667
11668 return i;
11669 }
11670
11671 /* If the low bit is set and the high bit is not, or the mask is all
11672 1's, the value is 31. */
11673 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11674 return 31;
11675
11676 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11677 from the left. */
11678 i = 0;
11679 while (((val <<= 1) & 0x80000000) != 0)
11680 ++i;
11681
11682 return i;
11683}
11684
11685/* Locate some local-dynamic symbol still in use by this function
11686 so that we can print its name in some tls_ld pattern. */
11687
11688static const char *
863d938c 11689rs6000_get_some_local_dynamic_name (void)
11690{
11691 rtx insn;
11692
11693 if (cfun->machine->some_ld_name)
11694 return cfun->machine->some_ld_name;
11695
11696 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11697 if (INSN_P (insn)
11698 && for_each_rtx (&PATTERN (insn),
11699 rs6000_get_some_local_dynamic_name_1, 0))
11700 return cfun->machine->some_ld_name;
11701
37409796 11702 gcc_unreachable ();
11703}
11704
11705/* Helper function for rs6000_get_some_local_dynamic_name. */
11706
11707static int
a2369ed3 11708rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
11709{
11710 rtx x = *px;
11711
11712 if (GET_CODE (x) == SYMBOL_REF)
11713 {
11714 const char *str = XSTR (x, 0);
11715 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11716 {
11717 cfun->machine->some_ld_name = str;
11718 return 1;
11719 }
11720 }
11721
11722 return 0;
11723}
11724
11725/* Write out a function code label. */
11726
11727void
11728rs6000_output_function_entry (FILE *file, const char *fname)
11729{
11730 if (fname[0] != '.')
11731 {
11732 switch (DEFAULT_ABI)
11733 {
11734 default:
37409796 11735 gcc_unreachable ();
11736
11737 case ABI_AIX:
11738 if (DOT_SYMBOLS)
11739 putc ('.', file);
11740 else
11741 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11742 break;
11743
11744 case ABI_V4:
11745 case ABI_DARWIN:
11746 break;
11747 }
11748 }
11749 if (TARGET_AIX)
11750 RS6000_OUTPUT_BASENAME (file, fname);
11751 else
11752 assemble_name (file, fname);
11753}
11754
11755/* Print an operand. Recognize special options, documented below. */
11756
38c1f2d7 11757#if TARGET_ELF
d9407988 11758#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8fbd2dc7 11759#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
11760#else
11761#define SMALL_DATA_RELOC "sda21"
8fbd2dc7 11762#define SMALL_DATA_REG 0
11763#endif
11764
9878760c 11765void
a2369ed3 11766print_operand (FILE *file, rtx x, int code)
11767{
11768 int i;
a260abc9 11769 HOST_WIDE_INT val;
0ba1b2ff 11770 unsigned HOST_WIDE_INT uval;
11771
11772 switch (code)
11773 {
a8b3aeda 11774 case '.':
11775 /* Write out an instruction after the call which may be replaced
11776 with glue code by the loader. This depends on the AIX version. */
11777 asm_fprintf (file, RS6000_CALL_GLUE);
11778 return;
11779
11780 /* %a is output_address. */
11781
11782 case 'A':
11783 /* If X is a constant integer whose low-order 5 bits are zero,
11784 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
76229ac8 11785 in the AIX assembler where "sri" with a zero shift count
20e26713 11786 writes a trash instruction. */
9854d9ed 11787 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
76229ac8 11788 putc ('l', file);
9854d9ed 11789 else
76229ac8 11790 putc ('r', file);
11791 return;
11792
11793 case 'b':
11794 /* If constant, low-order 16 bits of constant, unsigned.
11795 Otherwise, write normally. */
11796 if (INT_P (x))
11797 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11798 else
11799 print_operand (file, x, 0);
11800 return;
11801
11802 case 'B':
11803 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11804 for 64-bit mask direction. */
9390387d 11805 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
a238cd8b 11806 return;
a260abc9 11807
11808 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11809 output_operand. */
11810
11811 case 'c':
11812 /* X is a CR register. Print the number of the GT bit of the CR. */
11813 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
 11814 output_operand_lossage ("invalid %%c value");
11815 else
11816 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11817 return;
11818
11819 case 'D':
cef6b86c 11820 /* Like 'J' but get to the GT bit only. */
37409796 11821 gcc_assert (GET_CODE (x) == REG);
423c1189 11822
11823 /* Bit 1 is GT bit. */
11824 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
423c1189 11825
11826 /* Add one for shift count in rlinm for scc. */
11827 fprintf (file, "%d", i + 1);
11828 return;
11829
9854d9ed 11830 case 'E':
39a10a29 11831 /* X is a CR register. Print the number of the EQ bit of the CR */
11832 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11833 output_operand_lossage ("invalid %%E value");
78fbdbf7 11834 else
39a10a29 11835 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
a85d226b 11836 return;
11837
11838 case 'f':
11839 /* X is a CR register. Print the shift count needed to move it
11840 to the high-order four bits. */
11841 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11842 output_operand_lossage ("invalid %%f value");
11843 else
9ebbca7d 11844 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
11845 return;
11846
11847 case 'F':
11848 /* Similar, but print the count for the rotate in the opposite
11849 direction. */
11850 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11851 output_operand_lossage ("invalid %%F value");
11852 else
9ebbca7d 11853 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
11854 return;
11855
11856 case 'G':
11857 /* X is a constant integer. If it is negative, print "m",
43aa4e05 11858 otherwise print "z". This is to make an aze or ame insn. */
11859 if (GET_CODE (x) != CONST_INT)
11860 output_operand_lossage ("invalid %%G value");
11861 else if (INTVAL (x) >= 0)
76229ac8 11862 putc ('z', file);
9854d9ed 11863 else
76229ac8 11864 putc ('m', file);
9854d9ed 11865 return;
e2c953b6 11866
9878760c 11867 case 'h':
11868 /* If constant, output low-order five bits. Otherwise, write
11869 normally. */
9878760c 11870 if (INT_P (x))
5f59ecb7 11871 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
11872 else
11873 print_operand (file, x, 0);
11874 return;
11875
64305719 11876 case 'H':
11877 /* If constant, output low-order six bits. Otherwise, write
11878 normally. */
64305719 11879 if (INT_P (x))
5f59ecb7 11880 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
11881 else
11882 print_operand (file, x, 0);
11883 return;
11884
11885 case 'I':
11886 /* Print `i' if this is a constant, else nothing. */
9878760c 11887 if (INT_P (x))
76229ac8 11888 putc ('i', file);
11889 return;
11890
11891 case 'j':
11892 /* Write the bit number in CCR for jump. */
11893 i = ccr_bit (x, 0);
11894 if (i == -1)
11895 output_operand_lossage ("invalid %%j code");
9878760c 11896 else
9854d9ed 11897 fprintf (file, "%d", i);
11898 return;
11899
11900 case 'J':
11901 /* Similar, but add one for shift count in rlinm for scc and pass
11902 scc flag to `ccr_bit'. */
11903 i = ccr_bit (x, 1);
11904 if (i == -1)
11905 output_operand_lossage ("invalid %%J code");
11906 else
11907 /* If we want bit 31, write a shift count of zero, not 32. */
11908 fprintf (file, "%d", i == 31 ? 0 : i + 1);
11909 return;
11910
11911 case 'k':
11912 /* X must be a constant. Write the 1's complement of the
11913 constant. */
9878760c 11914 if (! INT_P (x))
9854d9ed 11915 output_operand_lossage ("invalid %%k value");
11916 else
11917 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
11918 return;
11919
81eace42 11920 case 'K':
11921 /* X must be a symbolic constant on ELF. Write an
11922 expression suitable for an 'addi' that adds in the low 16
11923 bits of the MEM. */
11924 if (GET_CODE (x) != CONST)
11925 {
11926 print_operand_address (file, x);
11927 fputs ("@l", file);
11928 }
11929 else
11930 {
11931 if (GET_CODE (XEXP (x, 0)) != PLUS
11932 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
11933 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
11934 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
53cd5d6c 11935 output_operand_lossage ("invalid %%K value");
11936 print_operand_address (file, XEXP (XEXP (x, 0), 0));
11937 fputs ("@l", file);
11938 /* For GNU as, there must be a non-alphanumeric character
11939 between 'l' and the number. The '-' is added by
11940 print_operand() already. */
11941 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
11942 fputs ("+", file);
11943 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
11944 }
11945 return;
11946
11947 /* %l is output_asm_label. */
9ebbca7d 11948
11949 case 'L':
11950 /* Write second word of DImode or DFmode reference. Works on register
11951 or non-indexed memory only. */
11952 if (GET_CODE (x) == REG)
fb5c67a7 11953 fputs (reg_names[REGNO (x) + 1], file);
11954 else if (GET_CODE (x) == MEM)
11955 {
11956 /* Handle possible auto-increment. Since it is pre-increment and
1427100a 11957 we have already done it, we can just use an offset of word. */
11958 if (GET_CODE (XEXP (x, 0)) == PRE_INC
11959 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
11960 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11961 UNITS_PER_WORD));
11962 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
11963 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
11964 UNITS_PER_WORD));
9854d9ed 11965 else
11966 output_address (XEXP (adjust_address_nv (x, SImode,
11967 UNITS_PER_WORD),
11968 0));
ed8908e7 11969
ba5e43aa 11970 if (small_data_operand (x, GET_MODE (x)))
11971 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
11972 reg_names[SMALL_DATA_REG]);
9854d9ed 11973 }
9878760c 11974 return;
f676971a 11975
11976 case 'm':
11977 /* MB value for a mask operand. */
b1765bde 11978 if (! mask_operand (x, SImode))
11979 output_operand_lossage ("invalid %%m value");
11980
0ba1b2ff 11981 fprintf (file, "%d", extract_MB (x));
11982 return;
11983
11984 case 'M':
11985 /* ME value for a mask operand. */
b1765bde 11986 if (! mask_operand (x, SImode))
a260abc9 11987 output_operand_lossage ("invalid %%M value");
9878760c 11988
0ba1b2ff 11989 fprintf (file, "%d", extract_ME (x));
11990 return;
11991
11992 /* %n outputs the negative of its operand. */
11993
11994 case 'N':
11995 /* Write the number of elements in the vector times 4. */
11996 if (GET_CODE (x) != PARALLEL)
11997 output_operand_lossage ("invalid %%N value");
11998 else
11999 fprintf (file, "%d", XVECLEN (x, 0) * 4);
12000 return;
12001
12002 case 'O':
12003 /* Similar, but subtract 1 first. */
12004 if (GET_CODE (x) != PARALLEL)
1427100a 12005 output_operand_lossage ("invalid %%O value");
12006 else
12007 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
12008 return;
12009
12010 case 'p':
12011 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12012 if (! INT_P (x)
2bfcf297 12013 || INT_LOWPART (x) < 0
12014 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12015 output_operand_lossage ("invalid %%p value");
12016 else
12017 fprintf (file, "%d", i);
12018 return;
12019
12020 case 'P':
12021 /* The operand must be an indirect memory reference. The result
8bb418a3 12022 is the register name. */
9878760c
RK
12023 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12024 || REGNO (XEXP (x, 0)) >= 32)
12025 output_operand_lossage ("invalid %%P value");
e2c953b6 12026 else
fb5c67a7 12027 fputs (reg_names[REGNO (XEXP (x, 0))], file);
12028 return;
12029
12030 case 'q':
12031 /* This outputs the logical code corresponding to a boolean
12032 expression. The expression may have one or both operands
39a10a29 12033 negated (if one, only the first one). For condition register
c4ad648e
AM
12034 logical operations, it will also treat the negated
12035 CR codes as NOTs, but not handle NOTs of them. */
dfbdccdb 12036 {
63bc1d05 12037 const char *const *t = 0;
12038 const char *s;
12039 enum rtx_code code = GET_CODE (x);
12040 static const char * const tbl[3][3] = {
12041 { "and", "andc", "nor" },
12042 { "or", "orc", "nand" },
12043 { "xor", "eqv", "xor" } };
12044
12045 if (code == AND)
12046 t = tbl[0];
12047 else if (code == IOR)
12048 t = tbl[1];
12049 else if (code == XOR)
12050 t = tbl[2];
12051 else
12052 output_operand_lossage ("invalid %%q value");
12053
12054 if (GET_CODE (XEXP (x, 0)) != NOT)
12055 s = t[0];
12056 else
12057 {
12058 if (GET_CODE (XEXP (x, 1)) == NOT)
12059 s = t[2];
12060 else
12061 s = t[1];
12062 }
f676971a 12063
12064 fputs (s, file);
12065 }
12066 return;
12067
12068 case 'Q':
12069 if (TARGET_MFCRF)
3b6ce0af 12070 fputc (',', file);
5efb1046 12071 /* FALLTHRU */
12072 else
12073 return;
12074
12075 case 'R':
12076 /* X is a CR register. Print the mask for `mtcrf'. */
12077 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12078 output_operand_lossage ("invalid %%R value");
12079 else
9ebbca7d 12080 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9878760c 12081 return;
12082
12083 case 's':
12084 /* Low 5 bits of 32 - value */
12085 if (! INT_P (x))
12086 output_operand_lossage ("invalid %%s value");
12087 else
12088 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9878760c 12089 return;
9854d9ed 12090
a260abc9 12091 case 'S':
0ba1b2ff 12092 /* PowerPC64 mask position. All 0's is excluded.
12093 CONST_INT 32-bit mask is considered sign-extended so any
12094 transition must occur within the CONST_INT, not on the boundary. */
1990cd79 12095 if (! mask64_operand (x, DImode))
12096 output_operand_lossage ("invalid %%S value");
12097
0ba1b2ff 12098 uval = INT_LOWPART (x);
a260abc9 12099
0ba1b2ff 12100 if (uval & 1) /* Clear Left */
a260abc9 12101 {
12102#if HOST_BITS_PER_WIDE_INT > 64
12103 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12104#endif
0ba1b2ff 12105 i = 64;
a260abc9 12106 }
0ba1b2ff 12107 else /* Clear Right */
a260abc9 12108 {
0ba1b2ff 12109 uval = ~uval;
12110#if HOST_BITS_PER_WIDE_INT > 64
12111 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12112#endif
0ba1b2ff 12113 i = 63;
a260abc9 12114 }
12115 while (uval != 0)
12116 --i, uval >>= 1;
37409796 12117 gcc_assert (i >= 0);
12118 fprintf (file, "%d", i);
12119 return;
a260abc9 12120
12121 case 't':
12122 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
37409796 12123 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
12124
12125 /* Bit 3 is OV bit. */
12126 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12127
12128 /* If we want bit 31, write a shift count of zero, not 32. */
12129 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12130 return;
12131
12132 case 'T':
12133 /* Print the symbolic name of a branch target register. */
12134 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12135 && REGNO (x) != CTR_REGNO))
cccf3bdc 12136 output_operand_lossage ("invalid %%T value");
1de43f85 12137 else if (REGNO (x) == LR_REGNO)
12138 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12139 else
12140 fputs ("ctr", file);
12141 return;
12142
9854d9ed 12143 case 'u':
802a0058 12144 /* High-order 16 bits of constant for use in unsigned operand. */
12145 if (! INT_P (x))
12146 output_operand_lossage ("invalid %%u value");
e2c953b6 12147 else
f676971a 12148 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
e2c953b6 12149 (INT_LOWPART (x) >> 16) & 0xffff);
12150 return;
12151
12152 case 'v':
12153 /* High-order 16 bits of constant for use in signed operand. */
12154 if (! INT_P (x))
12155 output_operand_lossage ("invalid %%v value");
e2c953b6 12156 else
12157 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12158 (INT_LOWPART (x) >> 16) & 0xffff);
12159 return;
802a0058 12160
12161 case 'U':
12162 /* Print `u' if this has an auto-increment or auto-decrement. */
12163 if (GET_CODE (x) == MEM
12164 && (GET_CODE (XEXP (x, 0)) == PRE_INC
12165 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12166 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
76229ac8 12167 putc ('u', file);
9854d9ed 12168 return;
9878760c 12169
12170 case 'V':
12171 /* Print the trap code for this operand. */
12172 switch (GET_CODE (x))
12173 {
12174 case EQ:
12175 fputs ("eq", file); /* 4 */
12176 break;
12177 case NE:
12178 fputs ("ne", file); /* 24 */
12179 break;
12180 case LT:
12181 fputs ("lt", file); /* 16 */
12182 break;
12183 case LE:
12184 fputs ("le", file); /* 20 */
12185 break;
12186 case GT:
12187 fputs ("gt", file); /* 8 */
12188 break;
12189 case GE:
12190 fputs ("ge", file); /* 12 */
12191 break;
12192 case LTU:
12193 fputs ("llt", file); /* 2 */
12194 break;
12195 case LEU:
12196 fputs ("lle", file); /* 6 */
12197 break;
12198 case GTU:
12199 fputs ("lgt", file); /* 1 */
12200 break;
12201 case GEU:
12202 fputs ("lge", file); /* 5 */
12203 break;
12204 default:
37409796 12205 gcc_unreachable ();
12206 }
12207 break;
12208
12209 case 'w':
12210 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12211 normally. */
12212 if (INT_P (x))
f676971a 12213 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5f59ecb7 12214 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
12215 else
12216 print_operand (file, x, 0);
12217 return;
12218
9854d9ed 12219 case 'W':
e2c953b6 12220 /* MB value for a PowerPC64 rldic operand. */
12221 val = (GET_CODE (x) == CONST_INT
12222 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12223
12224 if (val < 0)
12225 i = -1;
9854d9ed 12226 else
12227 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12228 if ((val <<= 1) < 0)
12229 break;
12230
12231#if HOST_BITS_PER_WIDE_INT == 32
12232 if (GET_CODE (x) == CONST_INT && i >= 0)
12233 i += 32; /* zero-extend high-part was all 0's */
12234 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12235 {
12236 val = CONST_DOUBLE_LOW (x);
12237
12238 gcc_assert (val);
12239 if (val < 0)
12240 --i;
12241 else
12242 for ( ; i < 64; i++)
12243 if ((val <<= 1) < 0)
12244 break;
12245 }
12246#endif
12247
12248 fprintf (file, "%d", i + 1);
9854d9ed 12249 return;
9878760c 12250
12251 case 'X':
12252 if (GET_CODE (x) == MEM
12253 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12254 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12255 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
76229ac8 12256 putc ('x', file);
9854d9ed 12257 return;
9878760c 12258
12259 case 'Y':
12260 /* Like 'L', for third word of TImode */
12261 if (GET_CODE (x) == REG)
fb5c67a7 12262 fputs (reg_names[REGNO (x) + 2], file);
9854d9ed 12263 else if (GET_CODE (x) == MEM)
9878760c 12264 {
12265 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12266 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12267 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
12268 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12269 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9854d9ed 12270 else
d7624dc0 12271 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
ba5e43aa 12272 if (small_data_operand (x, GET_MODE (x)))
12273 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12274 reg_names[SMALL_DATA_REG]);
12275 }
12276 return;
f676971a 12277
9878760c 12278 case 'z':
12279 /* X is a SYMBOL_REF. Write out the name preceded by a
12280 period and without any trailing data in brackets. Used for function
12281 names. If we are configured for System V (or the embedded ABI) on
12282 the PowerPC, do not emit the period, since those systems do not use
12283 TOCs and the like. */
37409796 12284 gcc_assert (GET_CODE (x) == SYMBOL_REF);
9878760c 12285
12286 /* Mark the decl as referenced so that cgraph will output the
12287 function. */
9bf6462a 12288 if (SYMBOL_REF_DECL (x))
c4ad648e 12289 mark_decl_referenced (SYMBOL_REF_DECL (x));
9bf6462a 12290
85b776df 12291 /* For macho, check to see if we need a stub. */
12292 if (TARGET_MACHO)
12293 {
12294 const char *name = XSTR (x, 0);
a031e781 12295#if TARGET_MACHO
3b48085e 12296 if (MACHOPIC_INDIRECT
11abc112
MM
12297 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12298 name = machopic_indirection_name (x, /*stub_p=*/true);
12299#endif
12300 assemble_name (file, name);
12301 }
85b776df 12302 else if (!DOT_SYMBOLS)
9739c90c 12303 assemble_name (file, XSTR (x, 0));
12304 else
12305 rs6000_output_function_entry (file, XSTR (x, 0));
12306 return;
12307
12308 case 'Z':
12309 /* Like 'L', for last word of TImode. */
12310 if (GET_CODE (x) == REG)
fb5c67a7 12311 fputs (reg_names[REGNO (x) + 3], file);
12312 else if (GET_CODE (x) == MEM)
12313 {
12314 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12315 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
a54d04b7 12316 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
12317 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12318 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9854d9ed 12319 else
d7624dc0 12320 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
ba5e43aa 12321 if (small_data_operand (x, GET_MODE (x)))
12322 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12323 reg_names[SMALL_DATA_REG]);
9854d9ed 12324 }
5c23c401 12325 return;
0ac081f6 12326
a3170dc6 12327 /* Print AltiVec or SPE memory operand. */
12328 case 'y':
12329 {
12330 rtx tmp;
12331
37409796 12332 gcc_assert (GET_CODE (x) == MEM);
12333
12334 tmp = XEXP (x, 0);
12335
90d3ff1c 12336 /* Ugly hack because %y is overloaded. */
8ef65e3d 12337 if ((TARGET_SPE || TARGET_E500_DOUBLE)
12338 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12339 || GET_MODE (x) == TFmode
12340 || GET_MODE (x) == TImode))
12341 {
12342 /* Handle [reg]. */
12343 if (GET_CODE (tmp) == REG)
12344 {
12345 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12346 break;
12347 }
12348 /* Handle [reg+UIMM]. */
12349 else if (GET_CODE (tmp) == PLUS &&
12350 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12351 {
12352 int x;
12353
37409796 12354 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
12355
12356 x = INTVAL (XEXP (tmp, 1));
12357 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12358 break;
12359 }
12360
12361 /* Fall through. Must be [reg+reg]. */
12362 }
12363 if (TARGET_ALTIVEC
12364 && GET_CODE (tmp) == AND
12365 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12366 && INTVAL (XEXP (tmp, 1)) == -16)
12367 tmp = XEXP (tmp, 0);
0ac081f6 12368 if (GET_CODE (tmp) == REG)
c62f2db5 12369 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
37409796 12370 else
0ac081f6 12371 {
 12372 if (GET_CODE (tmp) != PLUS
12373 || !REG_P (XEXP (tmp, 0))
12374 || !REG_P (XEXP (tmp, 1)))
12375 {
12376 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12377 break;
12378 }
bb8df8a6 12379
12380 if (REGNO (XEXP (tmp, 0)) == 0)
12381 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12382 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12383 else
12384 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12385 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12386 }
12387 break;
12388 }
f676971a 12389
12390 case 0:
12391 if (GET_CODE (x) == REG)
12392 fprintf (file, "%s", reg_names[REGNO (x)]);
12393 else if (GET_CODE (x) == MEM)
12394 {
12395 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12396 know the width from the mode. */
12397 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
12398 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12399 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9878760c 12400 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
12401 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12402 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
12403 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12404 output_address (XEXP (XEXP (x, 0), 1));
9878760c 12405 else
a54d04b7 12406 output_address (XEXP (x, 0));
12407 }
12408 else
a54d04b7 12409 output_addr_const (file, x);
a85d226b 12410 return;
9878760c 12411
12412 case '&':
12413 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12414 return;
12415
12416 default:
12417 output_operand_lossage ("invalid %%xn code");
12418 }
12419}
12420\f
12421/* Print the address of an operand. */
12422
12423void
a2369ed3 12424print_operand_address (FILE *file, rtx x)
12425{
12426 if (GET_CODE (x) == REG)
4697a36c 12427 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
12428 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12429 || GET_CODE (x) == LABEL_REF)
12430 {
12431 output_addr_const (file, x);
ba5e43aa 12432 if (small_data_operand (x, GET_MODE (x)))
12433 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12434 reg_names[SMALL_DATA_REG]);
12435 else
12436 gcc_assert (!TARGET_TOC);
12437 }
12438 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12439 {
9024f4b8 12440 gcc_assert (REG_P (XEXP (x, 0)));
9878760c 12441 if (REGNO (XEXP (x, 0)) == 0)
12442 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12443 reg_names[ REGNO (XEXP (x, 0)) ]);
9878760c 12444 else
12445 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12446 reg_names[ REGNO (XEXP (x, 1)) ]);
12447 }
12448 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
12449 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12450 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
12451#if TARGET_ELF
12452 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12453 && CONSTANT_P (XEXP (x, 1)))
12454 {
12455 output_addr_const (file, XEXP (x, 1));
12456 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12457 }
12458#endif
12459#if TARGET_MACHO
12460 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
c4ad648e 12461 && CONSTANT_P (XEXP (x, 1)))
12462 {
12463 fprintf (file, "lo16(");
12464 output_addr_const (file, XEXP (x, 1));
12465 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12466 }
3cb999d8 12467#endif
4d588c14 12468 else if (legitimate_constant_pool_address_p (x))
9ebbca7d 12469 {
2bfcf297 12470 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9ebbca7d 12471 {
12472 rtx contains_minus = XEXP (x, 1);
12473 rtx minus, symref;
12474 const char *name;
f676971a 12475
9ebbca7d 12476 /* Find the (minus (sym) (toc)) buried in X, and temporarily
a4f6c312 12477 turn it into (sym) for output_addr_const. */
12478 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
12479 contains_minus = XEXP (contains_minus, 0);
12480
12481 minus = XEXP (contains_minus, 0);
12482 symref = XEXP (minus, 0);
0cdc04e8 12483 gcc_assert (GET_CODE (XEXP (minus, 1)) == SYMBOL_REF);
12484 XEXP (contains_minus, 0) = symref;
12485 if (TARGET_ELF)
12486 {
12487 char *newname;
12488
12489 name = XSTR (symref, 0);
5ead67f6 12490 newname = XALLOCAVEC (char, strlen (name) + sizeof ("@toc"));
12491 strcpy (newname, name);
12492 strcat (newname, "@toc");
12493 XSTR (symref, 0) = newname;
12494 }
12495 output_addr_const (file, XEXP (x, 1));
12496 if (TARGET_ELF)
12497 XSTR (symref, 0) = name;
12498 XEXP (contains_minus, 0) = minus;
12499 }
12500 else
12501 output_addr_const (file, XEXP (x, 1));
12502
12503 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12504 }
9878760c 12505 else
37409796 12506 gcc_unreachable ();
12507}
12508\f
88cad84b 12509/* Target hook for assembling integer objects. The PowerPC version has
12510 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12511 is defined. It also needs to handle DI-mode objects on 64-bit
12512 targets. */
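/* Sketch of the -mrelocatable path below (the label name is illustrative):
   for a relocatable word X the hook emits roughly

	<LCPn>:	.long	(X)@fixup
		.section ".fixup","aw"
		.align	2
		.long	<LCPn>
		.previous

   so the .fixup table records each word that must be patched at load time.  */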
12513
12514static bool
a2369ed3 12515rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af 12516{
f4f4921e 12517#ifdef RELOCATABLE_NEEDS_FIXUP
301d03af 12518 /* Special handling for SI values. */
84dcde01 12519 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
301d03af 12520 {
301d03af 12521 static int recurse = 0;
f676971a 12522
12523 /* For -mrelocatable, we mark all addresses that need to be fixed up
12524 in the .fixup section. */
12525 if (TARGET_RELOCATABLE
12526 && in_section != toc_section
12527 && in_section != text_section
4325ca90 12528 && !unlikely_text_section_p (in_section)
12529 && !recurse
12530 && GET_CODE (x) != CONST_INT
12531 && GET_CODE (x) != CONST_DOUBLE
12532 && CONSTANT_P (x))
12533 {
12534 char buf[256];
12535
12536 recurse = 1;
12537 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12538 fixuplabelno++;
12539 ASM_OUTPUT_LABEL (asm_out_file, buf);
12540 fprintf (asm_out_file, "\t.long\t(");
12541 output_addr_const (asm_out_file, x);
12542 fprintf (asm_out_file, ")@fixup\n");
12543 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12544 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12545 fprintf (asm_out_file, "\t.long\t");
12546 assemble_name (asm_out_file, buf);
12547 fprintf (asm_out_file, "\n\t.previous\n");
12548 recurse = 0;
12549 return true;
12550 }
12551 /* Remove initial .'s to turn a -mcall-aixdesc function
12552 address into the address of the descriptor, not the function
12553 itself. */
12554 else if (GET_CODE (x) == SYMBOL_REF
12555 && XSTR (x, 0)[0] == '.'
12556 && DEFAULT_ABI == ABI_AIX)
12557 {
12558 const char *name = XSTR (x, 0);
12559 while (*name == '.')
12560 name++;
12561
12562 fprintf (asm_out_file, "\t.long\t%s\n", name);
12563 return true;
12564 }
12565 }
f4f4921e 12566#endif /* RELOCATABLE_NEEDS_FIXUP */
12567 return default_assemble_integer (x, size, aligned_p);
12568}
12569
12570#ifdef HAVE_GAS_HIDDEN
12571/* Emit an assembler directive to set symbol visibility for DECL to
12572 VISIBILITY_TYPE. */
12573
5add3202 12574static void
a2369ed3 12575rs6000_assemble_visibility (tree decl, int vis)
93638d7a 12576{
12577 /* Functions need to have their entry point symbol visibility set as
12578 well as their descriptor symbol visibility. */
12579 if (DEFAULT_ABI == ABI_AIX
12580 && DOT_SYMBOLS
12581 && TREE_CODE (decl) == FUNCTION_DECL)
93638d7a 12582 {
25fdb4dc 12583 static const char * const visibility_types[] = {
c4ad648e 12584 NULL, "internal", "hidden", "protected"
12585 };
12586
12587 const char *name, *type;
12588
12589 name = ((* targetm.strip_name_encoding)
12590 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
25fdb4dc 12591 type = visibility_types[vis];
93638d7a 12592
12593 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12594 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
93638d7a 12595 }
12596 else
12597 default_assemble_visibility (decl, vis);
12598}
12599#endif
301d03af 12600\f
39a10a29 12601enum rtx_code
a2369ed3 12602rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
12603{
12604 /* Reversal of FP compares takes care -- an ordered compare
12605 becomes an unordered compare and vice versa. */
f676971a 12606 if (mode == CCFPmode
12607 && (!flag_finite_math_only
12608 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12609 || code == UNEQ || code == LTGT))
bab6226b 12610 return reverse_condition_maybe_unordered (code);
39a10a29 12611 else
bab6226b 12612 return reverse_condition (code);
12613}
12614
12615/* Generate a compare for CODE. Return a brand-new rtx that
12616 represents the result of the compare. */
a4f6c312 12617
39a10a29 12618static rtx
a2369ed3 12619rs6000_generate_compare (enum rtx_code code)
12620{
12621 enum machine_mode comp_mode;
12622 rtx compare_result;
12623
12624 if (rs6000_compare_fp_p)
12625 comp_mode = CCFPmode;
12626 else if (code == GTU || code == LTU
c4ad648e 12627 || code == GEU || code == LEU)
39a10a29 12628 comp_mode = CCUNSmode;
12629 else if ((code == EQ || code == NE)
12630 && GET_CODE (rs6000_compare_op0) == SUBREG
12631 && GET_CODE (rs6000_compare_op1) == SUBREG
12632 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12633 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12634 /* These are unsigned values, perhaps there will be a later
12635 ordering compare that can be shared with this one.
12636 Unfortunately we cannot detect the signedness of the operands
12637 for non-subregs. */
12638 comp_mode = CCUNSmode;
12639 else
12640 comp_mode = CCmode;
12641
12642 /* First, the compare. */
12643 compare_result = gen_reg_rtx (comp_mode);
a3170dc6 12644
cef6b86c 12645 /* E500 FP compare instructions on the GPRs. Yuck! */
8ef65e3d 12646 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
993f19a8 12647 && rs6000_compare_fp_p)
a3170dc6 12648 {
64022b5d 12649 rtx cmp, or_result, compare_result2;
12650 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12651
12652 if (op_mode == VOIDmode)
12653 op_mode = GET_MODE (rs6000_compare_op1);
a3170dc6 12654
12655 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12656 This explains the following mess. */
423c1189 12657
12658 switch (code)
12659 {
423c1189 12660 case EQ: case UNEQ: case NE: case LTGT:
12661 switch (op_mode)
12662 {
12663 case SFmode:
12664 cmp = flag_unsafe_math_optimizations
12665 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12666 rs6000_compare_op1)
12667 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12668 rs6000_compare_op1);
12669 break;
12670
12671 case DFmode:
12672 cmp = flag_unsafe_math_optimizations
12673 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12674 rs6000_compare_op1)
12675 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12676 rs6000_compare_op1);
12677 break;
12678
17caeff2
JM
12679 case TFmode:
12680 cmp = flag_unsafe_math_optimizations
12681 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12682 rs6000_compare_op1)
12683 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12684 rs6000_compare_op1);
12685 break;
12686
12687 default:
12688 gcc_unreachable ();
12689 }
a3170dc6 12690 break;
bb8df8a6 12691
423c1189 12692 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
12693 switch (op_mode)
12694 {
12695 case SFmode:
12696 cmp = flag_unsafe_math_optimizations
12697 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12698 rs6000_compare_op1)
12699 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12700 rs6000_compare_op1);
12701 break;
bb8df8a6 12702
12703 case DFmode:
12704 cmp = flag_unsafe_math_optimizations
12705 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12706 rs6000_compare_op1)
12707 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12708 rs6000_compare_op1);
12709 break;
12710
12711 case TFmode:
12712 cmp = flag_unsafe_math_optimizations
12713 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12714 rs6000_compare_op1)
12715 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12716 rs6000_compare_op1);
12717 break;
12718
12719 default:
12720 gcc_unreachable ();
12721 }
a3170dc6 12722 break;
bb8df8a6 12723
423c1189 12724 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
12725 switch (op_mode)
12726 {
12727 case SFmode:
12728 cmp = flag_unsafe_math_optimizations
12729 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12730 rs6000_compare_op1)
12731 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12732 rs6000_compare_op1);
12733 break;
bb8df8a6 12734
12735 case DFmode:
12736 cmp = flag_unsafe_math_optimizations
12737 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12738 rs6000_compare_op1)
12739 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12740 rs6000_compare_op1);
12741 break;
12742
12743 case TFmode:
12744 cmp = flag_unsafe_math_optimizations
12745 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12746 rs6000_compare_op1)
12747 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12748 rs6000_compare_op1);
12749 break;
12750
12751 default:
12752 gcc_unreachable ();
12753 }
a3170dc6 12754 break;
4d4cbc0e 12755 default:
37409796 12756 gcc_unreachable ();
12757 }
12758
12759 /* Synthesize LE and GE from LT/GT || EQ. */
12760 if (code == LE || code == GE || code == LEU || code == GEU)
12761 {
12762 emit_insn (cmp);
12763
12764 switch (code)
12765 {
12766 case LE: code = LT; break;
12767 case GE: code = GT; break;
12768 case LEU: code = LT; break;
12769 case GEU: code = GT; break;
37409796 12770 default: gcc_unreachable ();
12771 }
12772
12773 compare_result2 = gen_reg_rtx (CCFPmode);
12774
12775 /* Do the EQ. */
12776 switch (op_mode)
12777 {
12778 case SFmode:
12779 cmp = flag_unsafe_math_optimizations
12780 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12781 rs6000_compare_op1)
12782 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12783 rs6000_compare_op1);
12784 break;
12785
12786 case DFmode:
12787 cmp = flag_unsafe_math_optimizations
12788 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12789 rs6000_compare_op1)
12790 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12791 rs6000_compare_op1);
12792 break;
12793
12794 case TFmode:
12795 cmp = flag_unsafe_math_optimizations
12796 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12797 rs6000_compare_op1)
12798 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12799 rs6000_compare_op1);
12800 break;
12801
12802 default:
12803 gcc_unreachable ();
12804 }
12805 emit_insn (cmp);
12806
a3170dc6 12807 /* OR them together. */
12808 or_result = gen_reg_rtx (CCFPmode);
12809 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12810 compare_result2);
12811 compare_result = or_result;
12812 code = EQ;
12813 }
12814 else
12815 {
a3170dc6 12816 if (code == NE || code == LTGT)
a3170dc6 12817 code = NE;
12818 else
12819 code = EQ;
12820 }
12821
12822 emit_insn (cmp);
12823 }
12824 else
12825 {
12826 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12827 CLOBBERs to match cmptf_internal2 pattern. */
12828 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12829 && GET_MODE (rs6000_compare_op0) == TFmode
602ea4d3 12830 && !TARGET_IEEEQUAD
12831 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12832 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12833 gen_rtvec (9,
12834 gen_rtx_SET (VOIDmode,
12835 compare_result,
12836 gen_rtx_COMPARE (comp_mode,
12837 rs6000_compare_op0,
12838 rs6000_compare_op1)),
12839 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12840 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12841 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12842 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12843 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12844 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12845 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12846 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
12847 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
12848 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
12849 {
12850 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
12851 comp_mode = CCEQmode;
12852 compare_result = gen_reg_rtx (CCEQmode);
12853 if (TARGET_64BIT)
12854 emit_insn (gen_stack_protect_testdi (compare_result,
12855 rs6000_compare_op0, op1));
12856 else
12857 emit_insn (gen_stack_protect_testsi (compare_result,
12858 rs6000_compare_op0, op1));
12859 }
12860 else
12861 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
12862 gen_rtx_COMPARE (comp_mode,
12863 rs6000_compare_op0,
12864 rs6000_compare_op1)));
12865 }
f676971a 12866
ca5adc63 12867 /* Some kinds of FP comparisons need an OR operation;
e7108df9 12868 under flag_finite_math_only we don't bother. */
39a10a29 12869 if (rs6000_compare_fp_p
e7108df9 12870 && !flag_finite_math_only
8ef65e3d 12871 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
12872 && (code == LE || code == GE
12873 || code == UNEQ || code == LTGT
12874 || code == UNGT || code == UNLT))
12875 {
12876 enum rtx_code or1, or2;
12877 rtx or1_rtx, or2_rtx, compare2_rtx;
12878 rtx or_result = gen_reg_rtx (CCEQmode);
f676971a 12879
12880 switch (code)
12881 {
12882 case LE: or1 = LT; or2 = EQ; break;
12883 case GE: or1 = GT; or2 = EQ; break;
12884 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
12885 case LTGT: or1 = LT; or2 = GT; break;
12886 case UNGT: or1 = UNORDERED; or2 = GT; break;
12887 case UNLT: or1 = UNORDERED; or2 = LT; break;
37409796 12888 default: gcc_unreachable ();
12889 }
12890 validate_condition_mode (or1, comp_mode);
12891 validate_condition_mode (or2, comp_mode);
12892 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
12893 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
12894 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
12895 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
12896 const_true_rtx);
12897 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
12898
12899 compare_result = or_result;
12900 code = EQ;
12901 }
12902
12903 validate_condition_mode (code, GET_MODE (compare_result));
f676971a 12904
1c563bed 12905 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
12906}
12907
12908
12909/* Emit the RTL for an sCOND pattern. */
12910
12911void
a2369ed3 12912rs6000_emit_sCOND (enum rtx_code code, rtx result)
12913{
12914 rtx condition_rtx;
12915 enum machine_mode op_mode;
b7053a3f 12916 enum rtx_code cond_code;
12917
12918 condition_rtx = rs6000_generate_compare (code);
12919 cond_code = GET_CODE (condition_rtx);
12920
8ef65e3d 12921 if (rs6000_compare_fp_p
12922 && !TARGET_FPRS && TARGET_HARD_FLOAT)
12923 {
12924 rtx t;
12925
12926 PUT_MODE (condition_rtx, SImode);
12927 t = XEXP (condition_rtx, 0);
12928
37409796 12929 gcc_assert (cond_code == NE || cond_code == EQ);
12930
12931 if (cond_code == NE)
64022b5d 12932 emit_insn (gen_e500_flip_gt_bit (t, t));
423c1189 12933
64022b5d 12934 emit_insn (gen_move_from_CR_gt_bit (result, t));
12935 return;
12936 }
12937
12938 if (cond_code == NE
12939 || cond_code == GE || cond_code == LE
12940 || cond_code == GEU || cond_code == LEU
12941 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
12942 {
12943 rtx not_result = gen_reg_rtx (CCEQmode);
12944 rtx not_op, rev_cond_rtx;
12945 enum machine_mode cc_mode;
f676971a 12946
12947 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
12948
1c563bed 12949 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
0f4c242b 12950 SImode, XEXP (condition_rtx, 0), const0_rtx);
12951 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
12952 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
12953 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
12954 }
12955
12956 op_mode = GET_MODE (rs6000_compare_op0);
12957 if (op_mode == VOIDmode)
12958 op_mode = GET_MODE (rs6000_compare_op1);
12959
12960 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
12961 {
12962 PUT_MODE (condition_rtx, DImode);
12963 convert_move (result, condition_rtx, 0);
12964 }
12965 else
12966 {
12967 PUT_MODE (condition_rtx, SImode);
12968 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
12969 }
12970}
12971
12972/* Emit a branch of kind CODE to location LOC. */
12973
12974void
a2369ed3 12975rs6000_emit_cbranch (enum rtx_code code, rtx loc)
12976{
12977 rtx condition_rtx, loc_ref;
12978
12979 condition_rtx = rs6000_generate_compare (code);
12980 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
12981 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
12982 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
12983 loc_ref, pc_rtx)));
12984}
12985
12986/* Return the string to output a conditional branch to LABEL, which is
12987 the operand number of the label, or -1 if the branch is really a
f676971a 12988 conditional return.
12989
12990 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
12991 condition code register and its mode specifies what kind of
12992 comparison we made.
12993
a0ab749a 12994 REVERSED is nonzero if we should reverse the sense of the comparison.
12995
12996 INSN is the insn. */
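/* For example, a strongly predicted-taken EQ test of CR0 against a nearby
   label comes out as "beq+ 0,<label>" with the new mnemonics; when the
   target is out of conditional-branch range, the reversed condition is
   emitted around an unconditional "b <label>" instead.  */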
12997
12998char *
a2369ed3 12999output_cbranch (rtx op, const char *label, int reversed, rtx insn)
13000{
13001 static char string[64];
13002 enum rtx_code code = GET_CODE (op);
13003 rtx cc_reg = XEXP (op, 0);
13004 enum machine_mode mode = GET_MODE (cc_reg);
13005 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
39a10a29 13006 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
13007 int really_reversed = reversed ^ need_longbranch;
13008 char *s = string;
13009 const char *ccode;
13010 const char *pred;
13011 rtx note;
13012
13013 validate_condition_mode (code, mode);
13014
13015 /* Work out which way this really branches. We could use
13016 reverse_condition_maybe_unordered here always but this
13017 makes the resulting assembler clearer. */
12a4e8c5 13018 if (really_reversed)
13019 {
13020 /* Reversal of FP compares takes care -- an ordered compare
13021 becomes an unordered compare and vice versa. */
13022 if (mode == CCFPmode)
13023 code = reverse_condition_maybe_unordered (code);
13024 else
13025 code = reverse_condition (code);
13026 }
12a4e8c5 13027
8ef65e3d 13028 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
13029 {
13030 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13031 to the GT bit. */
13032 switch (code)
13033 {
13034 case EQ:
13035 /* Opposite of GT. */
13036 code = GT;
13037 break;
13038
13039 case NE:
13040 code = UNLE;
13041 break;
13042
13043 default:
13044 gcc_unreachable ();
13045 }
13046 }
13047
39a10a29 13048 switch (code)
13049 {
13050 /* Not all of these are actually distinct opcodes, but
13051 we distinguish them for clarity of the resulting assembler. */
13052 case NE: case LTGT:
13053 ccode = "ne"; break;
13054 case EQ: case UNEQ:
13055 ccode = "eq"; break;
f676971a 13056 case GE: case GEU:
50a0b056 13057 ccode = "ge"; break;
f676971a 13058 case GT: case GTU: case UNGT:
50a0b056 13059 ccode = "gt"; break;
f676971a 13060 case LE: case LEU:
50a0b056 13061 ccode = "le"; break;
f676971a 13062 case LT: case LTU: case UNLT:
50a0b056 13063 ccode = "lt"; break;
13064 case UNORDERED: ccode = "un"; break;
13065 case ORDERED: ccode = "nu"; break;
13066 case UNGE: ccode = "nl"; break;
13067 case UNLE: ccode = "ng"; break;
13068 default:
37409796 13069 gcc_unreachable ();
12a4e8c5 13070 }
13071
13072 /* Maybe we have a guess as to how likely the branch is.
94a54f47 13073 The old mnemonics don't have a way to specify this information. */
f4857b9b 13074 pred = "";
13075 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13076 if (note != NULL_RTX)
13077 {
13078 /* PROB is the difference from 50%. */
13079 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
13080
13081 /* Only hint for highly probable/improbable branches on newer
13082 cpus as static prediction overrides processor dynamic
13083 prediction. For older cpus we may as well always hint, but
13084 assume not taken for branches that are very close to 50% as a
13085 mispredicted taken branch is more expensive than a
f676971a 13086 mispredicted not-taken branch. */
ec507f2d 13087 if (rs6000_always_hint
13088 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13089 && br_prob_note_reliable_p (note)))
13090 {
13091 if (abs (prob) > REG_BR_PROB_BASE / 20
13092 && ((prob > 0) ^ need_longbranch))
c4ad648e 13093 pred = "+";
13094 else
13095 pred = "-";
13096 }
12a4e8c5 13097 }
13098
13099 if (label == NULL)
94a54f47 13100 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
12a4e8c5 13101 else
94a54f47 13102 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
12a4e8c5 13103
37c67319 13104 /* We need to escape any '%' characters in the reg_names string.
a3c9585f 13105 Assume they'd only be the first character.... */
13106 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13107 *s++ = '%';
94a54f47 13108 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
13109
13110 if (label != NULL)
13111 {
13112 /* If the branch distance was too far, we may have to use an
13113 unconditional branch to go the distance. */
13114 if (need_longbranch)
44518ddd 13115 s += sprintf (s, ",$+8\n\tb %s", label);
13116 else
13117 s += sprintf (s, ",%s", label);
13118 }
13119
13120 return string;
13121}
50a0b056 13122
64022b5d 13123/* Return the string to flip the GT bit on a CR. */
423c1189 13124char *
64022b5d 13125output_e500_flip_gt_bit (rtx dst, rtx src)
13126{
13127 static char string[64];
13128 int a, b;
13129
13130 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13131 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
423c1189 13132
13133 /* GT bit. */
13134 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13135 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
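 /* For CR6, for instance, this yields a = b = 25 and the string
    "crnot 25,25".  */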
13136
13137 sprintf (string, "crnot %d,%d", a, b);
13138 return string;
13139}
13140
13141/* Return insn index for the vector compare instruction for given CODE,
13142 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
13143 not available. */
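/* For example, an EQ compare of two V4SFmode operands maps to
   UNSPEC_VCMPEQFP and delivers its result mask in V4SImode.  */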
13144
13145static int
94ff898d 13146get_vec_cmp_insn (enum rtx_code code,
13147 enum machine_mode dest_mode,
13148 enum machine_mode op_mode)
13149{
13150 if (!TARGET_ALTIVEC)
13151 return INSN_NOT_AVAILABLE;
13152
13153 switch (code)
13154 {
13155 case EQ:
13156 if (dest_mode == V16QImode && op_mode == V16QImode)
13157 return UNSPEC_VCMPEQUB;
13158 if (dest_mode == V8HImode && op_mode == V8HImode)
13159 return UNSPEC_VCMPEQUH;
13160 if (dest_mode == V4SImode && op_mode == V4SImode)
13161 return UNSPEC_VCMPEQUW;
13162 if (dest_mode == V4SImode && op_mode == V4SFmode)
13163 return UNSPEC_VCMPEQFP;
13164 break;
13165 case GE:
13166 if (dest_mode == V4SImode && op_mode == V4SFmode)
 13167 return UNSPEC_VCMPGEFP;
 break;
13168 case GT:
13169 if (dest_mode == V16QImode && op_mode == V16QImode)
13170 return UNSPEC_VCMPGTSB;
13171 if (dest_mode == V8HImode && op_mode == V8HImode)
13172 return UNSPEC_VCMPGTSH;
13173 if (dest_mode == V4SImode && op_mode == V4SImode)
13174 return UNSPEC_VCMPGTSW;
13175 if (dest_mode == V4SImode && op_mode == V4SFmode)
13176 return UNSPEC_VCMPGTFP;
13177 break;
13178 case GTU:
13179 if (dest_mode == V16QImode && op_mode == V16QImode)
13180 return UNSPEC_VCMPGTUB;
13181 if (dest_mode == V8HImode && op_mode == V8HImode)
13182 return UNSPEC_VCMPGTUH;
13183 if (dest_mode == V4SImode && op_mode == V4SImode)
13184 return UNSPEC_VCMPGTUW;
13185 break;
13186 default:
13187 break;
13188 }
13189 return INSN_NOT_AVAILABLE;
13190}
13191
13192/* Emit vector compare for operands OP0 and OP1 using code RCODE.
 13193 DMODE is the expected destination mode. This is a recursive function. */
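/* When no single AltiVec compare matches, the code below rewrites the
   comparison: LT/LTU become GT/GTU with the operands swapped, GE/GEU/LE/LEU
   become the corresponding strict compare ORed with EQ, and NE and the
   unordered codes are formed by inverting the reversed compare.  */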
13194
13195static rtx
13196rs6000_emit_vector_compare (enum rtx_code rcode,
13197 rtx op0, rtx op1,
13198 enum machine_mode dmode)
13199{
13200 int vec_cmp_insn;
13201 rtx mask;
13202 enum machine_mode dest_mode;
13203 enum machine_mode op_mode = GET_MODE (op1);
13204
13205 gcc_assert (TARGET_ALTIVEC);
13206 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
13207
 13208 /* Floating point vector compare instructions use destination V4SImode.
13209 Move destination to appropriate mode later. */
13210 if (dmode == V4SFmode)
13211 dest_mode = V4SImode;
13212 else
13213 dest_mode = dmode;
13214
13215 mask = gen_reg_rtx (dest_mode);
13216 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13217
13218 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13219 {
13220 bool swap_operands = false;
13221 bool try_again = false;
13222 switch (rcode)
13223 {
13224 case LT:
13225 rcode = GT;
13226 swap_operands = true;
13227 try_again = true;
13228 break;
13229 case LTU:
13230 rcode = GTU;
13231 swap_operands = true;
13232 try_again = true;
13233 break;
13234 case NE:
13235 case UNLE:
13236 case UNLT:
13237 case UNGE:
13238 case UNGT:
13239 /* Invert condition and try again.
13240 e.g., A != B becomes ~(A==B). */
21213b4c 13241 {
370df7db 13242 enum rtx_code rev_code;
21213b4c 13243 enum insn_code nor_code;
d1123cde 13244 rtx eq_rtx;
13245
13246 rev_code = reverse_condition_maybe_unordered (rcode);
13247 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13248 dest_mode);
94ff898d 13249
166cdb08 13250 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
37409796 13251 gcc_assert (nor_code != CODE_FOR_nothing);
13252 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13253
13254 if (dmode != dest_mode)
13255 {
13256 rtx temp = gen_reg_rtx (dest_mode);
13257 convert_move (temp, mask, 0);
13258 return temp;
13259 }
13260 return mask;
13261 }
13262 break;
13263 case GE:
13264 case GEU:
13265 case LE:
13266 case LEU:
13267 /* Try GT/GTU/LT/LTU OR EQ */
13268 {
13269 rtx c_rtx, eq_rtx;
13270 enum insn_code ior_code;
13271 enum rtx_code new_code;
13272
37409796
NS
13273 switch (rcode)
13274 {
13275 case GE:
13276 new_code = GT;
13277 break;
13278
13279 case GEU:
13280 new_code = GTU;
13281 break;
13282
13283 case LE:
13284 new_code = LT;
13285 break;
13286
13287 case LEU:
13288 new_code = LTU;
13289 break;
13290
13291 default:
13292 gcc_unreachable ();
13293 }
21213b4c
DP
13294
13295 c_rtx = rs6000_emit_vector_compare (new_code,
13296 op0, op1, dest_mode);
13297 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13298 dest_mode);
13299
166cdb08 13300 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
37409796 13301 gcc_assert (ior_code != CODE_FOR_nothing);
21213b4c
DP
13302 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13303 if (dmode != dest_mode)
13304 {
13305 rtx temp = gen_reg_rtx (dest_mode);
13306 convert_move (temp, mask, 0);
13307 return temp;
13308 }
13309 return mask;
13310 }
13311 break;
13312 default:
37409796 13313 gcc_unreachable ();
21213b4c
DP
13314 }
13315
13316 if (try_again)
13317 {
13318 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
37409796
NS
13319 /* You only get two chances. */
13320 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
21213b4c
DP
13321 }
13322
13323 if (swap_operands)
13324 {
13325 rtx tmp;
13326 tmp = op0;
13327 op0 = op1;
13328 op1 = tmp;
13329 }
13330 }
13331
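 /* A hardware compare exists for this code: emit it as an UNSPEC of the
 two operands, using the UNSPEC number returned by get_vec_cmp_insn. */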
915167f5
GK
13332 emit_insn (gen_rtx_SET (VOIDmode, mask,
13333 gen_rtx_UNSPEC (dest_mode,
13334 gen_rtvec (2, op0, op1),
13335 vec_cmp_insn)));
21213b4c
DP
13336 if (dmode != dest_mode)
13337 {
13338 rtx temp = gen_reg_rtx (dest_mode);
13339 convert_move (temp, mask, 0);
13340 return temp;
13341 }
13342 return mask;
13343}
13344
13345/* Return the vector select instruction for MODE. Return INSN_NOT_AVAILABLE
13346 if no valid insn exists for the given mode. */
13347
13348static int
13349get_vsel_insn (enum machine_mode mode)
13350{
13351 switch (mode)
13352 {
13353 case V4SImode:
13354 return UNSPEC_VSEL4SI;
13355 break;
13356 case V4SFmode:
13357 return UNSPEC_VSEL4SF;
13358 break;
13359 case V8HImode:
13360 return UNSPEC_VSEL8HI;
13361 break;
13362 case V16QImode:
13363 return UNSPEC_VSEL16QI;
13364 break;
13365 default:
13366 return INSN_NOT_AVAILABLE;
13367 break;
13368 }
13369 return INSN_NOT_AVAILABLE;
13370}
13371
13372/* Emit vector select insn where DEST is destination using
13373 operands OP1, OP2 and MASK. */
13374
13375static void
13376rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13377{
13378 rtx t, temp;
13379 enum machine_mode dest_mode = GET_MODE (dest);
13380 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13381
13382 temp = gen_reg_rtx (dest_mode);
94ff898d 13383
bb8df8a6 13384 /* For each vector element, select op1 when the mask is 1, otherwise
19f1ebc7 13385 select op2. */
915167f5
GK
13386 t = gen_rtx_SET (VOIDmode, temp,
13387 gen_rtx_UNSPEC (dest_mode,
13388 gen_rtvec (3, op2, op1, mask),
13389 vsel_insn_index));
21213b4c
DP
13390 emit_insn (t);
13391 emit_move_insn (dest, temp);
13392 return;
13393}
13394
94ff898d 13395/* Emit vector conditional expression.
21213b4c
DP
13396 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13397 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13398
13399int
13400rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13401 rtx cond, rtx cc_op0, rtx cc_op1)
13402{
13403 enum machine_mode dest_mode = GET_MODE (dest);
13404 enum rtx_code rcode = GET_CODE (cond);
13405 rtx mask;
13406
13407 if (!TARGET_ALTIVEC)
13408 return 0;
13409
13410 /* Get the vector mask for the given relational operations. */
13411 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13412
13413 rs6000_emit_vector_select (dest, op1, op2, mask);
13414
13415 return 1;
13416}
13417
50a0b056
GK
13418/* Emit a conditional move: move TRUE_COND to DEST if OP applied to
13419 the operands of the last comparison is nonzero/true, and FALSE_COND
13420 if it is zero/false. Return 0 if the hardware has no such operation. */
a4f6c312 13421
50a0b056 13422int
a2369ed3 13423rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
50a0b056
GK
13424{
13425 enum rtx_code code = GET_CODE (op);
13426 rtx op0 = rs6000_compare_op0;
13427 rtx op1 = rs6000_compare_op1;
13428 REAL_VALUE_TYPE c1;
3148ad6d
DJ
13429 enum machine_mode compare_mode = GET_MODE (op0);
13430 enum machine_mode result_mode = GET_MODE (dest);
50a0b056 13431 rtx temp;
add2402e 13432 bool is_against_zero;
50a0b056 13433
a3c9585f 13434 /* These modes should always match. */
a3170dc6
AH
13435 if (GET_MODE (op1) != compare_mode
13436 /* In the isel case however, we can use a compare immediate, so
13437 op1 may be a small constant. */
13438 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
3148ad6d 13439 return 0;
178c3eff 13440 if (GET_MODE (true_cond) != result_mode)
3148ad6d 13441 return 0;
178c3eff 13442 if (GET_MODE (false_cond) != result_mode)
3148ad6d
DJ
13443 return 0;
13444
50a0b056 13445 /* First, work out if the hardware can do this at all, or
a3c9585f 13446 if it's too slow.... */
50a0b056 13447 if (! rs6000_compare_fp_p)
a3170dc6
AH
13448 {
13449 if (TARGET_ISEL)
13450 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13451 return 0;
13452 }
8ef65e3d 13453 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
ebb109ad 13454 && SCALAR_FLOAT_MODE_P (compare_mode))
fef98bf2 13455 return 0;
50a0b056 13456
add2402e 13457 is_against_zero = op1 == CONST0_RTX (compare_mode);
94ff898d 13458
add2402e
GK
13459 /* A floating-point subtract might overflow, underflow, or produce
13460 an inexact result, thus changing the floating-point flags, so it
13461 can't be generated if we care about that. It's safe if one side
13462 of the construct is zero, since then no subtract will be
13463 generated. */
ebb109ad 13464 if (SCALAR_FLOAT_MODE_P (compare_mode)
add2402e
GK
13465 && flag_trapping_math && ! is_against_zero)
13466 return 0;
13467
50a0b056
GK
13468 /* Eliminate half of the comparisons by switching operands, this
13469 makes the remaining code simpler. */
13470 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
bc9ec0e0 13471 || code == LTGT || code == LT || code == UNLE)
50a0b056
GK
13472 {
13473 code = reverse_condition_maybe_unordered (code);
13474 temp = true_cond;
13475 true_cond = false_cond;
13476 false_cond = temp;
13477 }
13478
13479 /* UNEQ and LTGT take four instructions for a comparison with zero,
13480 so it'll probably be faster to use a branch here too. */
bc9ec0e0 13481 if (code == UNEQ && HONOR_NANS (compare_mode))
50a0b056 13482 return 0;
f676971a 13483
50a0b056
GK
13484 if (GET_CODE (op1) == CONST_DOUBLE)
13485 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
f676971a 13486
b6d08ca1 13487 /* We're going to try to implement comparisons by performing
50a0b056
GK
13488 a subtract, then comparing against zero. Unfortunately,
13489 Inf - Inf is NaN, which is not zero, and so if we don't
27d30956 13490 know that the operand is finite and the comparison
50a0b056 13491 would treat EQ differently from UNORDERED, we can't do it. */
bc9ec0e0 13492 if (HONOR_INFINITIES (compare_mode)
50a0b056 13493 && code != GT && code != UNGE
045572c7 13494 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
50a0b056
GK
13495 /* Constructs of the form (a OP b ? a : b) are safe. */
13496 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
f676971a 13497 || (! rtx_equal_p (op0, true_cond)
50a0b056
GK
13498 && ! rtx_equal_p (op1, true_cond))))
13499 return 0;
add2402e 13500
50a0b056
GK
13501 /* At this point we know we can use fsel. */
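 /* fsel FRT,FRA,FRC,FRB selects FRC when FRA >= 0.0 and FRB otherwise,
 which is why every remaining case is reduced below to a GE comparison
 against zero. */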
13502
13503 /* Reduce the comparison to a comparison against zero. */
add2402e
GK
13504 if (! is_against_zero)
13505 {
13506 temp = gen_reg_rtx (compare_mode);
13507 emit_insn (gen_rtx_SET (VOIDmode, temp,
13508 gen_rtx_MINUS (compare_mode, op0, op1)));
13509 op0 = temp;
13510 op1 = CONST0_RTX (compare_mode);
13511 }
50a0b056
GK
13512
13513 /* If we don't care about NaNs we can reduce some of the comparisons
13514 down to faster ones. */
bc9ec0e0 13515 if (! HONOR_NANS (compare_mode))
50a0b056
GK
13516 switch (code)
13517 {
13518 case GT:
13519 code = LE;
13520 temp = true_cond;
13521 true_cond = false_cond;
13522 false_cond = temp;
13523 break;
13524 case UNGE:
13525 code = GE;
13526 break;
13527 case UNEQ:
13528 code = EQ;
13529 break;
13530 default:
13531 break;
13532 }
13533
13534 /* Now, reduce everything down to a GE. */
13535 switch (code)
13536 {
13537 case GE:
13538 break;
13539
13540 case LE:
3148ad6d
DJ
13541 temp = gen_reg_rtx (compare_mode);
13542 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13543 op0 = temp;
13544 break;
13545
13546 case ORDERED:
3148ad6d
DJ
13547 temp = gen_reg_rtx (compare_mode);
13548 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
50a0b056
GK
13549 op0 = temp;
13550 break;
13551
13552 case EQ:
3148ad6d 13553 temp = gen_reg_rtx (compare_mode);
f676971a 13554 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d
DJ
13555 gen_rtx_NEG (compare_mode,
13556 gen_rtx_ABS (compare_mode, op0))));
50a0b056
GK
13557 op0 = temp;
13558 break;
13559
13560 case UNGE:
bc9ec0e0 13561 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
3148ad6d 13562 temp = gen_reg_rtx (result_mode);
50a0b056 13563 emit_insn (gen_rtx_SET (VOIDmode, temp,
3148ad6d 13564 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13565 gen_rtx_GE (VOIDmode,
13566 op0, op1),
13567 true_cond, false_cond)));
bc9ec0e0
GK
13568 false_cond = true_cond;
13569 true_cond = temp;
50a0b056 13570
3148ad6d
DJ
13571 temp = gen_reg_rtx (compare_mode);
13572 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13573 op0 = temp;
13574 break;
13575
13576 case GT:
bc9ec0e0 13577 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
3148ad6d 13578 temp = gen_reg_rtx (result_mode);
50a0b056 13579 emit_insn (gen_rtx_SET (VOIDmode, temp,
f676971a 13580 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13581 gen_rtx_GE (VOIDmode,
13582 op0, op1),
13583 true_cond, false_cond)));
bc9ec0e0
GK
13584 true_cond = false_cond;
13585 false_cond = temp;
50a0b056 13586
3148ad6d
DJ
13587 temp = gen_reg_rtx (compare_mode);
13588 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
50a0b056
GK
13589 op0 = temp;
13590 break;
13591
13592 default:
37409796 13593 gcc_unreachable ();
50a0b056
GK
13594 }
13595
13596 emit_insn (gen_rtx_SET (VOIDmode, dest,
3148ad6d 13597 gen_rtx_IF_THEN_ELSE (result_mode,
50a0b056
GK
13598 gen_rtx_GE (VOIDmode,
13599 op0, op1),
13600 true_cond, false_cond)));
13601 return 1;
13602}
13603
a3170dc6
AH
13604/* Same as above, but for ints (isel). */
13605
13606static int
a2369ed3 13607rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
a3170dc6
AH
13608{
13609 rtx condition_rtx, cr;
13610
13611 /* All isel implementations thus far are 32-bit. */
13612 if (GET_MODE (rs6000_compare_op0) != SImode)
13613 return 0;
13614
13615 /* We still have to do the compare, because isel doesn't do a
13616 compare, it just looks at the CRx bits set by a previous compare
13617 instruction. */
13618 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13619 cr = XEXP (condition_rtx, 0);
13620
13621 if (GET_MODE (cr) == CCmode)
13622 emit_insn (gen_isel_signed (dest, condition_rtx,
13623 true_cond, false_cond, cr));
13624 else
13625 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13626 true_cond, false_cond, cr));
13627
13628 return 1;
13629}
13630
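/* Output the assembly for an isel instruction. isel tests a single CR
 bit, so conditions such as GE, LE, and NE are handled by reversing the
 condition code and swapping the two source operands in the template. */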
13631const char *
a2369ed3 13632output_isel (rtx *operands)
a3170dc6
AH
13633{
13634 enum rtx_code code;
13635
13636 code = GET_CODE (operands[1]);
13637 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13638 {
13639 PUT_CODE (operands[1], reverse_condition (code));
13640 return "isel %0,%3,%2,%j1";
13641 }
13642 else
13643 return "isel %0,%2,%3,%j1";
13644}
13645
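/* Emit the signed or unsigned minimum or maximum requested by CODE
 (SMIN, SMAX, UMIN or UMAX) of OP0 and OP1 into DEST, expanded as a
 conditional move. */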
50a0b056 13646void
a2369ed3 13647rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
50a0b056
GK
13648{
13649 enum machine_mode mode = GET_MODE (op0);
5dc8d536 13650 enum rtx_code c;
50a0b056 13651 rtx target;
5dc8d536
AH
13652
13653 if (code == SMAX || code == SMIN)
13654 c = GE;
13655 else
13656 c = GEU;
13657
50a0b056 13658 if (code == SMAX || code == UMAX)
f676971a 13659 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056
GK
13660 op0, op1, mode, 0);
13661 else
f676971a 13662 target = emit_conditional_move (dest, c, op0, op1, mode,
50a0b056 13663 op1, op0, mode, 0);
37409796 13664 gcc_assert (target);
50a0b056
GK
13665 if (target != dest)
13666 emit_move_insn (dest, target);
13667}
46c07df8 13668
915167f5
GK
13669/* Emit instructions to perform a load-reserved/store-conditional operation.
13670 The operation performed is an atomic
13671 (set M (CODE:MODE M OP))
13672 If not NULL, BEFORE is atomically set to M before the operation, and
13673 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
bb8df8a6 13674 If SYNC_P then a memory barrier is emitted before the operation.
915167f5
GK
13675 Either OP or M may be wrapped in a NOT operation. */
13676
13677void
13678rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13679 rtx m, rtx op, rtx before_param, rtx after_param,
13680 bool sync_p)
13681{
13682 enum machine_mode used_mode;
13683 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13684 rtx used_m;
13685 rtvec vec;
13686 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13687 rtx shift = NULL_RTX;
bb8df8a6 13688
915167f5
GK
13689 if (sync_p)
13690 emit_insn (gen_memory_barrier ());
bb8df8a6 13691
915167f5
GK
13692 if (GET_CODE (m) == NOT)
13693 used_m = XEXP (m, 0);
13694 else
13695 used_m = m;
13696
13697 /* If this is smaller than SImode, we'll have to use SImode with
13698 adjustments. */
13699 if (mode == QImode || mode == HImode)
13700 {
13701 rtx newop, oldop;
13702
13703 if (MEM_ALIGN (used_m) >= 32)
13704 {
13705 int ishift = 0;
13706 if (BYTES_BIG_ENDIAN)
13707 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
bb8df8a6 13708
915167f5 13709 shift = GEN_INT (ishift);
c75c6d11 13710 used_m = change_address (used_m, SImode, 0);
915167f5
GK
13711 }
13712 else
13713 {
13714 rtx addrSI, aligned_addr;
a9c9d3fa 13715 int shift_mask = mode == QImode ? 0x18 : 0x10;
bb8df8a6 13716
c75c6d11
JJ
13717 addrSI = gen_lowpart_common (SImode,
13718 force_reg (Pmode, XEXP (used_m, 0)));
13719 addrSI = force_reg (SImode, addrSI);
915167f5
GK
13720 shift = gen_reg_rtx (SImode);
13721
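 /* Bit offset of the subword within its aligned word: (address & 3) * 8,
 XORed with the mask so that lower addresses map to the more significant
 (big-endian) end of the word. */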
13722 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
a9c9d3fa
GK
13723 GEN_INT (shift_mask)));
13724 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
915167f5
GK
13725
13726 aligned_addr = expand_binop (Pmode, and_optab,
13727 XEXP (used_m, 0),
13728 GEN_INT (-4), NULL_RTX,
13729 1, OPTAB_LIB_WIDEN);
13730 used_m = change_address (used_m, SImode, aligned_addr);
13731 set_mem_align (used_m, 32);
915167f5 13732 }
c75c6d11
JJ
13733 /* It's safe to keep the old alias set of USED_M, because
13734 the operation is atomic and only affects the original
13735 USED_M. */
13736 if (GET_CODE (m) == NOT)
13737 m = gen_rtx_NOT (SImode, used_m);
13738 else
13739 m = used_m;
915167f5
GK
13740
13741 if (GET_CODE (op) == NOT)
13742 {
13743 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13744 oldop = gen_rtx_NOT (SImode, oldop);
13745 }
13746 else
13747 oldop = lowpart_subreg (SImode, op, mode);
9f0076e5 13748
915167f5
GK
13749 switch (code)
13750 {
13751 case IOR:
13752 case XOR:
13753 newop = expand_binop (SImode, and_optab,
13754 oldop, GEN_INT (imask), NULL_RTX,
13755 1, OPTAB_LIB_WIDEN);
13756 emit_insn (gen_ashlsi3 (newop, newop, shift));
13757 break;
13758
13759 case AND:
13760 newop = expand_binop (SImode, ior_optab,
13761 oldop, GEN_INT (~imask), NULL_RTX,
13762 1, OPTAB_LIB_WIDEN);
a9c9d3fa 13763 emit_insn (gen_rotlsi3 (newop, newop, shift));
915167f5
GK
13764 break;
13765
13766 case PLUS:
9f0076e5 13767 case MINUS:
915167f5
GK
13768 {
13769 rtx mask;
bb8df8a6 13770
915167f5
GK
13771 newop = expand_binop (SImode, and_optab,
13772 oldop, GEN_INT (imask), NULL_RTX,
13773 1, OPTAB_LIB_WIDEN);
13774 emit_insn (gen_ashlsi3 (newop, newop, shift));
13775
13776 mask = gen_reg_rtx (SImode);
13777 emit_move_insn (mask, GEN_INT (imask));
13778 emit_insn (gen_ashlsi3 (mask, mask, shift));
13779
9f0076e5
DE
13780 if (code == PLUS)
13781 newop = gen_rtx_PLUS (SImode, m, newop);
13782 else
13783 newop = gen_rtx_MINUS (SImode, m, newop);
13784 newop = gen_rtx_AND (SImode, newop, mask);
915167f5
GK
13785 newop = gen_rtx_IOR (SImode, newop,
13786 gen_rtx_AND (SImode,
13787 gen_rtx_NOT (SImode, mask),
13788 m));
13789 break;
13790 }
13791
13792 default:
13793 gcc_unreachable ();
13794 }
13795
a9c9d3fa
GK
13796 if (GET_CODE (m) == NOT)
13797 {
13798 rtx mask, xorm;
13799
13800 mask = gen_reg_rtx (SImode);
13801 emit_move_insn (mask, GEN_INT (imask));
13802 emit_insn (gen_ashlsi3 (mask, mask, shift));
13803
13804 xorm = gen_rtx_XOR (SImode, used_m, mask);
13805 /* Depending on the value of 'op', the XOR or the operation may
13806 simplify away. */
13807 newop = simplify_gen_binary (code, SImode, xorm, newop);
13808 }
915167f5
GK
13809 op = newop;
13810 used_mode = SImode;
13811 before = gen_reg_rtx (used_mode);
13812 after = gen_reg_rtx (used_mode);
13813 }
13814 else
13815 {
13816 used_mode = mode;
13817 before = before_param;
13818 after = after_param;
13819
13820 if (before == NULL_RTX)
13821 before = gen_reg_rtx (used_mode);
13822 if (after == NULL_RTX)
13823 after = gen_reg_rtx (used_mode);
13824 }
bb8df8a6 13825
9f0076e5
DE
13826 if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
13827 && used_mode != mode)
915167f5
GK
13828 the_op = op; /* Computed above. */
13829 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13830 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13831 else
13832 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13833
13834 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13835 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13836 set_atomic = gen_rtx_SET (VOIDmode, used_m,
9f0076e5
DE
13837 gen_rtx_UNSPEC (used_mode,
13838 gen_rtvec (1, the_op),
13839 UNSPEC_SYNC_OP));
915167f5
GK
13840 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13841
9f0076e5 13842 if ((code == PLUS || code == MINUS) && used_mode != mode)
915167f5
GK
13843 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13844 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13845 else
13846 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13847 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13848
13849 /* Shift and mask the return values properly. */
13850 if (used_mode != mode && before_param)
13851 {
13852 emit_insn (gen_lshrsi3 (before, before, shift));
13853 convert_move (before_param, before, 1);
13854 }
13855
13856 if (used_mode != mode && after_param)
13857 {
13858 emit_insn (gen_lshrsi3 (after, after, shift));
13859 convert_move (after_param, after, 1);
13860 }
13861
13862 /* The previous sequence will end with a branch that's dependent on
13863 the conditional store, so placing an isync will ensure that no
13864 other instructions (especially no load or store instructions)
13865 can start before the atomic operation completes. */
13866 if (sync_p)
13867 emit_insn (gen_isync ());
13868}
13869
b52110d4
DE
13870/* A subroutine of the atomic operation splitters. Jump to LABEL if
13871 COND is true. Mark the jump as unlikely to be taken. */
13872
13873static void
13874emit_unlikely_jump (rtx cond, rtx label)
13875{
13876 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
13877 rtx x;
13878
13879 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
13880 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
13881 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
13882}
13883
13884/* A subroutine of the atomic operation splitters. Emit a load-locked
13885 instruction in MODE. */
13886
13887static void
13888emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
13889{
13890 rtx (*fn) (rtx, rtx) = NULL;
13891 if (mode == SImode)
13892 fn = gen_load_locked_si;
13893 else if (mode == DImode)
13894 fn = gen_load_locked_di;
13895 emit_insn (fn (reg, mem));
13896}
13897
13898/* A subroutine of the atomic operation splitters. Emit a store-conditional
13899 instruction in MODE. */
13900
13901static void
13902emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
13903{
13904 rtx (*fn) (rtx, rtx, rtx) = NULL;
13905 if (mode == SImode)
13906 fn = gen_store_conditional_si;
13907 else if (mode == DImode)
13908 fn = gen_store_conditional_di;
13909
9f0076e5 13910 /* Emit sync before stwcx. to address PPC405 Erratum. */
b52110d4
DE
13911 if (PPC405_ERRATUM77)
13912 emit_insn (gen_memory_barrier ());
13913
13914 emit_insn (fn (res, mem, val));
13915}
13916
ea2c620c 13917/* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
bb8df8a6 13918 to perform. MEM is the memory on which to operate. VAL is the second
9f0076e5
DE
13919 operand of the binary operator. BEFORE and AFTER are optional locations to
13920 return the value of MEM either before or after the operation. SCRATCH is
13921 a scratch register. */
13922
13923void
13924rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
13925 rtx before, rtx after, rtx scratch)
13926{
13927 enum machine_mode mode = GET_MODE (mem);
13928 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13929
13930 emit_insn (gen_memory_barrier ());
13931
13932 label = gen_label_rtx ();
13933 emit_label (label);
13934 label = gen_rtx_LABEL_REF (VOIDmode, label);
13935
13936 if (before == NULL_RTX)
13937 before = scratch;
13938 emit_load_locked (mode, before, mem);
13939
13940 if (code == NOT)
13941 x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
13942 else if (code == AND)
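 /* Wrap the AND in an UNSPEC to avoid a clobber, as is done for the
 subword compare-and-swap splitter below. */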
13943 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
13944 else
13945 x = gen_rtx_fmt_ee (code, mode, before, val);
13946
13947 if (after != NULL_RTX)
13948 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
13949 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
13950
13951 emit_store_conditional (mode, cond, mem, scratch);
13952
13953 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13954 emit_unlikely_jump (x, label);
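 /* If the store-conditional failed, loop back and retry with a freshly
 loaded value. */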
13955
13956 emit_insn (gen_isync ());
13957}
13958
b52110d4
DE
13959/* Expand an atomic compare and swap operation. MEM is the memory on which
13960 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
13961 value to be stored. SCRATCH is a scratch GPR. */
13962
13963void
13964rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
13965 rtx scratch)
13966{
13967 enum machine_mode mode = GET_MODE (mem);
13968 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
13969
13970 emit_insn (gen_memory_barrier ());
13971
13972 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13973 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
13974 emit_label (XEXP (label1, 0));
13975
13976 emit_load_locked (mode, retval, mem);
13977
13978 x = gen_rtx_COMPARE (CCmode, retval, oldval);
13979 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
13980
13981 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13982 emit_unlikely_jump (x, label2);
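 /* If the comparison failed, the branch above goes straight to label2;
 RETVAL already holds the value loaded from MEM. */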
13983
13984 emit_move_insn (scratch, newval);
13985 emit_store_conditional (mode, cond, mem, scratch);
13986
13987 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
13988 emit_unlikely_jump (x, label1);
13989
13990 emit_insn (gen_isync ());
13991 emit_label (XEXP (label2, 0));
13992}
13993
13994/* Expand an atomic test and set operation. MEM is the memory on which
13995 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
13996
13997void
13998rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
13999{
14000 enum machine_mode mode = GET_MODE (mem);
14001 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14002
14003 emit_insn (gen_memory_barrier ());
14004
14005 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14006 emit_label (XEXP (label, 0));
14007
14008 emit_load_locked (mode, retval, mem);
14009 emit_move_insn (scratch, val);
14010 emit_store_conditional (mode, cond, mem, scratch);
14011
14012 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14013 emit_unlikely_jump (x, label);
14014
14015 emit_insn (gen_isync ());
14016}
14017
9fc75b97
DE
14018void
14019rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14020{
14021 enum machine_mode mode = GET_MODE (mem);
14022 rtx addrSI, align, wdst, shift, mask;
14023 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14024 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14025
14026 /* Shift amount for subword relative to aligned word. */
14027 addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
14028 shift = gen_reg_rtx (SImode);
14029 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14030 GEN_INT (shift_mask)));
14031 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14032
14033 /* Shift and mask old value into position within word. */
14034 oldval = convert_modes (SImode, mode, oldval, 1);
14035 oldval = expand_binop (SImode, and_optab,
14036 oldval, GEN_INT (imask), NULL_RTX,
14037 1, OPTAB_LIB_WIDEN);
14038 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14039
14040 /* Shift and mask new value into position within word. */
14041 newval = convert_modes (SImode, mode, newval, 1);
14042 newval = expand_binop (SImode, and_optab,
14043 newval, GEN_INT (imask), NULL_RTX,
14044 1, OPTAB_LIB_WIDEN);
14045 emit_insn (gen_ashlsi3 (newval, newval, shift));
14046
14047 /* Mask for insertion. */
14048 mask = gen_reg_rtx (SImode);
14049 emit_move_insn (mask, GEN_INT (imask));
14050 emit_insn (gen_ashlsi3 (mask, mask, shift));
14051
14052 /* Address of aligned word containing subword. */
14053 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14054 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14055 mem = change_address (mem, SImode, align);
14056 set_mem_align (mem, 32);
14057 MEM_VOLATILE_P (mem) = 1;
14058
14059 wdst = gen_reg_rtx (SImode);
14060 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14061 oldval, newval, mem));
14062
2725b75c
JJ
14063 /* Shift the result back. */
14064 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14065
9fc75b97
DE
14066 emit_move_insn (dst, gen_lowpart (mode, wdst));
14067}
14068
14069void
14070rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14071 rtx oldval, rtx newval, rtx mem,
14072 rtx scratch)
14073{
14074 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14075
14076 emit_insn (gen_memory_barrier ());
14077 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14078 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14079 emit_label (XEXP (label1, 0));
14080
14081 emit_load_locked (SImode, scratch, mem);
14082
14083 /* Mask subword within loaded value for comparison with oldval.
14084 Use UNSPEC_AND to avoid a clobber. */
14085 emit_insn (gen_rtx_SET (SImode, dest,
14086 gen_rtx_UNSPEC (SImode,
14087 gen_rtvec (2, scratch, mask),
14088 UNSPEC_AND)));
14089
14090 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14091 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14092
14093 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14094 emit_unlikely_jump (x, label2);
14095
14096 /* Clear subword within loaded value for insertion of new value. */
14097 emit_insn (gen_rtx_SET (SImode, scratch,
14098 gen_rtx_AND (SImode,
14099 gen_rtx_NOT (SImode, mask), scratch)));
14100 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14101 emit_store_conditional (SImode, cond, mem, scratch);
14102
14103 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14104 emit_unlikely_jump (x, label1);
14105
14106 emit_insn (gen_isync ());
14107 emit_label (XEXP (label2, 0));
14108}
14109
14110
b52110d4 14111 /* Emit instructions to move SRC to DST. Called by splitters for
a9baceb1
GK
14112 multi-register moves. It will emit at most one instruction for
14113 each register that is accessed; that is, it won't emit li/lis pairs
14114 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14115 register. */
46c07df8 14116
46c07df8 14117void
a9baceb1 14118rs6000_split_multireg_move (rtx dst, rtx src)
46c07df8 14119{
a9baceb1
GK
14120 /* The register number of the first register being moved. */
14121 int reg;
14122 /* The mode that is to be moved. */
14123 enum machine_mode mode;
14124 /* The mode that the move is being done in, and its size. */
14125 enum machine_mode reg_mode;
14126 int reg_mode_size;
14127 /* The number of registers that will be moved. */
14128 int nregs;
14129
14130 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14131 mode = GET_MODE (dst);
c8b622ff 14132 nregs = hard_regno_nregs[reg][mode];
a9baceb1 14133 if (FP_REGNO_P (reg))
7393f7f8 14134 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
a9baceb1
GK
14135 else if (ALTIVEC_REGNO_P (reg))
14136 reg_mode = V16QImode;
4f011e1e
JM
14137 else if (TARGET_E500_DOUBLE && mode == TFmode)
14138 reg_mode = DFmode;
a9baceb1
GK
14139 else
14140 reg_mode = word_mode;
14141 reg_mode_size = GET_MODE_SIZE (reg_mode);
f676971a 14142
37409796 14143 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
f676971a 14144
a9baceb1
GK
14145 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14146 {
14147 /* Move register range backwards, if we might have destructive
14148 overlap. */
14149 int i;
14150 for (i = nregs - 1; i >= 0; i--)
f676971a 14151 emit_insn (gen_rtx_SET (VOIDmode,
a9baceb1
GK
14152 simplify_gen_subreg (reg_mode, dst, mode,
14153 i * reg_mode_size),
14154 simplify_gen_subreg (reg_mode, src, mode,
14155 i * reg_mode_size)));
14156 }
46c07df8
HP
14157 else
14158 {
a9baceb1
GK
14159 int i;
14160 int j = -1;
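 /* J is the index of the subword that must be moved last; it is set
 below when the memory address uses one of the destination registers,
 so that register is overwritten only after it has been used. */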
14161 bool used_update = false;
46c07df8 14162
c1e55850 14163 if (MEM_P (src) && INT_REGNO_P (reg))
c4ad648e
AM
14164 {
14165 rtx breg;
3a1f863f 14166
a9baceb1
GK
14167 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14168 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
3a1f863f
DE
14169 {
14170 rtx delta_rtx;
a9baceb1 14171 breg = XEXP (XEXP (src, 0), 0);
c4ad648e
AM
14172 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14173 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14174 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
a9baceb1
GK
14175 emit_insn (TARGET_32BIT
14176 ? gen_addsi3 (breg, breg, delta_rtx)
14177 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14178 src = replace_equiv_address (src, breg);
3a1f863f 14179 }
d04b6e6e 14180 else if (! rs6000_offsettable_memref_p (src))
c1e55850 14181 {
13e2e16e 14182 rtx basereg;
c1e55850
GK
14183 basereg = gen_rtx_REG (Pmode, reg);
14184 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
13e2e16e 14185 src = replace_equiv_address (src, basereg);
c1e55850 14186 }
3a1f863f 14187
0423421f
AM
14188 breg = XEXP (src, 0);
14189 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14190 breg = XEXP (breg, 0);
14191
14192 /* If the base register we are using to address memory is
14193 also a destination reg, then change that register last. */
14194 if (REG_P (breg)
14195 && REGNO (breg) >= REGNO (dst)
3a1f863f
DE
14196 && REGNO (breg) < REGNO (dst) + nregs)
14197 j = REGNO (breg) - REGNO (dst);
c4ad648e 14198 }
46c07df8 14199
a9baceb1 14200 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
3a1f863f
DE
14201 {
14202 rtx breg;
14203
a9baceb1
GK
14204 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14205 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
3a1f863f
DE
14206 {
14207 rtx delta_rtx;
a9baceb1 14208 breg = XEXP (XEXP (dst, 0), 0);
c4ad648e
AM
14209 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14210 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14211 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
3a1f863f
DE
14212
14213 /* We have to update the breg before doing the store.
14214 Use store with update, if available. */
14215
14216 if (TARGET_UPDATE)
14217 {
a9baceb1 14218 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
c4ad648e
AM
14219 emit_insn (TARGET_32BIT
14220 ? (TARGET_POWERPC64
14221 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14222 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14223 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
a9baceb1 14224 used_update = true;
3a1f863f
DE
14225 }
14226 else
a9baceb1
GK
14227 emit_insn (TARGET_32BIT
14228 ? gen_addsi3 (breg, breg, delta_rtx)
14229 : gen_adddi3 (breg, breg, delta_rtx));
13e2e16e 14230 dst = replace_equiv_address (dst, breg);
3a1f863f 14231 }
37409796 14232 else
d04b6e6e 14233 gcc_assert (rs6000_offsettable_memref_p (dst));
3a1f863f
DE
14234 }
14235
46c07df8 14236 for (i = 0; i < nregs; i++)
f676971a 14237 {
3a1f863f
DE
14238 /* Calculate index to next subword. */
14239 ++j;
f676971a 14240 if (j == nregs)
3a1f863f 14241 j = 0;
46c07df8 14242
112cdef5 14243 /* If the compiler already emitted the move of the first word
a9baceb1 14244 via a store with update, there is no need to do anything. */
3a1f863f 14245 if (j == 0 && used_update)
a9baceb1 14246 continue;
f676971a 14247
a9baceb1
GK
14248 emit_insn (gen_rtx_SET (VOIDmode,
14249 simplify_gen_subreg (reg_mode, dst, mode,
14250 j * reg_mode_size),
14251 simplify_gen_subreg (reg_mode, src, mode,
14252 j * reg_mode_size)));
3a1f863f 14253 }
46c07df8
HP
14254 }
14255}
14256
12a4e8c5 14257\f
a4f6c312
SS
14258/* This page contains routines that are used to determine what the
14259 function prologue and epilogue code will do and write them out. */
9878760c 14260
a4f6c312
SS
14261/* Return the first fixed-point register that is required to be
14262 saved. 32 if none. */
9878760c
RK
14263
14264int
863d938c 14265first_reg_to_save (void)
9878760c
RK
14266{
14267 int first_reg;
14268
14269 /* Find lowest numbered live register. */
14270 for (first_reg = 13; first_reg <= 31; first_reg++)
6fb5fa3c 14271 if (df_regs_ever_live_p (first_reg)
a38d360d 14272 && (! call_used_regs[first_reg]
1db02437 14273 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14f00213 14274 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
b4db40bf
JJ
14275 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14276 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
9878760c
RK
14277 break;
14278
ee890fe2 14279#if TARGET_MACHO
93638d7a 14280 if (flag_pic
e3b5732b 14281 && crtl->uses_pic_offset_table
93638d7a 14282 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
1db02437 14283 return RS6000_PIC_OFFSET_TABLE_REGNUM;
ee890fe2
SS
14284#endif
14285
9878760c
RK
14286 return first_reg;
14287}
14288
14289/* Similar, for FP regs. */
14290
14291int
863d938c 14292first_fp_reg_to_save (void)
9878760c
RK
14293{
14294 int first_reg;
14295
14296 /* Find lowest numbered live register. */
14297 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6fb5fa3c 14298 if (df_regs_ever_live_p (first_reg))
9878760c
RK
14299 break;
14300
14301 return first_reg;
14302}
00b960c7
AH
14303
14304/* Similar, for AltiVec regs. */
14305
14306static int
863d938c 14307first_altivec_reg_to_save (void)
00b960c7
AH
14308{
14309 int i;
14310
14311 /* Stack frame remains as is unless we are in AltiVec ABI. */
14312 if (! TARGET_ALTIVEC_ABI)
14313 return LAST_ALTIVEC_REGNO + 1;
14314
22fa69da 14315 /* On Darwin, the unwind routines are compiled without
982afe02 14316 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14317 altivec registers when necessary. */
e3b5732b 14318 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14319 && ! TARGET_ALTIVEC)
14320 return FIRST_ALTIVEC_REGNO + 20;
14321
00b960c7
AH
14322 /* Find lowest numbered live register. */
14323 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14324 if (df_regs_ever_live_p (i))
00b960c7
AH
14325 break;
14326
14327 return i;
14328}
14329
14330/* Return a 32-bit mask of the AltiVec registers we need to set in
14331 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
14332 the 32-bit word is 0. */
14333
14334static unsigned int
863d938c 14335compute_vrsave_mask (void)
00b960c7
AH
14336{
14337 unsigned int i, mask = 0;
14338
22fa69da 14339 /* On Darwin, the unwind routines are compiled without
982afe02 14340 TARGET_ALTIVEC, and use save_world to save/restore the
22fa69da 14341 call-saved altivec registers when necessary. */
e3b5732b 14342 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
22fa69da
GK
14343 && ! TARGET_ALTIVEC)
14344 mask |= 0xFFF;
14345
00b960c7
AH
14346 /* First, find out if we use _any_ altivec registers. */
14347 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6fb5fa3c 14348 if (df_regs_ever_live_p (i))
00b960c7
AH
14349 mask |= ALTIVEC_REG_BIT (i);
14350
14351 if (mask == 0)
14352 return mask;
14353
00b960c7
AH
14354 /* Next, remove the argument registers from the set. These must
14355 be in the VRSAVE mask set by the caller, so we don't need to add
14356 them in again. More importantly, the mask we compute here is
14357 used to generate CLOBBERs in the set_vrsave insn, and we do not
14358 wish the argument registers to die. */
38173d38 14359 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
00b960c7
AH
14360 mask &= ~ALTIVEC_REG_BIT (i);
14361
14362 /* Similarly, remove the return value from the set. */
14363 {
14364 bool yes = false;
14365 diddle_return_value (is_altivec_return_reg, &yes);
14366 if (yes)
14367 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14368 }
14369
14370 return mask;
14371}
14372
d62294f5 14373/* For a very restricted set of circumstances, we can cut down the
f57fe068
AM
14374 size of prologues/epilogues by calling our own save/restore-the-world
14375 routines. */
d62294f5
FJ
14376
14377static void
f57fe068
AM
14378compute_save_world_info (rs6000_stack_t *info_ptr)
14379{
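 /* Seed world_save_p so the WORLD_SAVE_P test below can succeed; the
 assignment that follows narrows it to the cases where the out-of-line
 save/restore-the-world routines can actually be used. */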
14380 info_ptr->world_save_p = 1;
14381 info_ptr->world_save_p
14382 = (WORLD_SAVE_P (info_ptr)
14383 && DEFAULT_ABI == ABI_DARWIN
e3b5732b 14384 && ! (cfun->calls_setjmp && flag_exceptions)
f57fe068
AM
14385 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14386 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14387 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14388 && info_ptr->cr_save_p);
f676971a 14389
d62294f5
FJ
14390 /* This will not work in conjunction with sibcalls. Make sure there
14391 are none. (This check is expensive, but seldom executed.) */
f57fe068 14392 if (WORLD_SAVE_P (info_ptr))
f676971a 14393 {
d62294f5
FJ
14394 rtx insn;
14395 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
c4ad648e
AM
14396 if ( GET_CODE (insn) == CALL_INSN
14397 && SIBLING_CALL_P (insn))
14398 {
14399 info_ptr->world_save_p = 0;
14400 break;
14401 }
d62294f5 14402 }
f676971a 14403
f57fe068 14404 if (WORLD_SAVE_P (info_ptr))
d62294f5
FJ
14405 {
14406 /* Even if we're not touching VRsave, make sure there's room on the
14407 stack for it, if it looks like we're calling SAVE_WORLD, which
c4ad648e 14408 will attempt to save it. */
d62294f5
FJ
14409 info_ptr->vrsave_size = 4;
14410
298ac1dd
AP
14411 /* If we are going to save the world, we need to save the link register too. */
14412 info_ptr->lr_save_p = 1;
14413
d62294f5
FJ
14414 /* "Save" the VRsave register too if we're saving the world. */
14415 if (info_ptr->vrsave_mask == 0)
c4ad648e 14416 info_ptr->vrsave_mask = compute_vrsave_mask ();
d62294f5
FJ
14417
14418 /* Because the Darwin register save/restore routines only handle
c4ad648e 14419 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
992d08b1 14420 check. */
37409796
NS
14421 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14422 && (info_ptr->first_altivec_reg_save
14423 >= FIRST_SAVED_ALTIVEC_REGNO));
d62294f5 14424 }
f676971a 14425 return;
d62294f5
FJ
14426}
14427
14428
00b960c7 14429static void
a2369ed3 14430is_altivec_return_reg (rtx reg, void *xyes)
00b960c7
AH
14431{
14432 bool *yes = (bool *) xyes;
14433 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14434 *yes = true;
14435}
14436
4697a36c
MM
14437\f
14438/* Calculate the stack information for the current function. This is
14439 complicated by having two separate calling sequences, the AIX calling
14440 sequence and the V.4 calling sequence.
14441
592696dd 14442 AIX (and Darwin/Mac OS X) stack frames look like:
a260abc9 14443 32-bit 64-bit
4697a36c 14444 SP----> +---------------------------------------+
a260abc9 14445 | back chain to caller | 0 0
4697a36c 14446 +---------------------------------------+
a260abc9 14447 | saved CR | 4 8 (8-11)
4697a36c 14448 +---------------------------------------+
a260abc9 14449 | saved LR | 8 16
4697a36c 14450 +---------------------------------------+
a260abc9 14451 | reserved for compilers | 12 24
4697a36c 14452 +---------------------------------------+
a260abc9 14453 | reserved for binders | 16 32
4697a36c 14454 +---------------------------------------+
a260abc9 14455 | saved TOC pointer | 20 40
4697a36c 14456 +---------------------------------------+
a260abc9 14457 | Parameter save area (P) | 24 48
4697a36c 14458 +---------------------------------------+
a260abc9 14459 | Alloca space (A) | 24+P etc.
802a0058 14460 +---------------------------------------+
a7df97e6 14461 | Local variable space (L) | 24+P+A
4697a36c 14462 +---------------------------------------+
a7df97e6 14463 | Float/int conversion temporary (X) | 24+P+A+L
4697a36c 14464 +---------------------------------------+
00b960c7
AH
14465 | Save area for AltiVec registers (W) | 24+P+A+L+X
14466 +---------------------------------------+
14467 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14468 +---------------------------------------+
14469 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
4697a36c 14470 +---------------------------------------+
00b960c7
AH
14471 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14472 +---------------------------------------+
14473 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
4697a36c
MM
14474 +---------------------------------------+
14475 old SP->| back chain to caller's caller |
14476 +---------------------------------------+
14477
5376a30c
KR
14478 The required alignment for AIX configurations is two words (i.e., 8
14479 or 16 bytes).
14480
14481
4697a36c
MM
14482 V.4 stack frames look like:
14483
14484 SP----> +---------------------------------------+
14485 | back chain to caller | 0
14486 +---------------------------------------+
5eb387b8 14487 | caller's saved LR | 4
4697a36c
MM
14488 +---------------------------------------+
14489 | Parameter save area (P) | 8
14490 +---------------------------------------+
a7df97e6 14491 | Alloca space (A) | 8+P
f676971a 14492 +---------------------------------------+
a7df97e6 14493 | Varargs save area (V) | 8+P+A
f676971a 14494 +---------------------------------------+
a7df97e6 14495 | Local variable space (L) | 8+P+A+V
f676971a 14496 +---------------------------------------+
a7df97e6 14497 | Float/int conversion temporary (X) | 8+P+A+V+L
4697a36c 14498 +---------------------------------------+
00b960c7
AH
14499 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14500 +---------------------------------------+
14501 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14502 +---------------------------------------+
14503 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14504 +---------------------------------------+
c4ad648e
AM
14505 | SPE: area for 64-bit GP registers |
14506 +---------------------------------------+
14507 | SPE alignment padding |
14508 +---------------------------------------+
00b960c7 14509 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
f676971a 14510 +---------------------------------------+
00b960c7 14511 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
f676971a 14512 +---------------------------------------+
00b960c7 14513 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
4697a36c
MM
14514 +---------------------------------------+
14515 old SP->| back chain to caller's caller |
14516 +---------------------------------------+
b6c9286a 14517
5376a30c
KR
14518 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14519 given. (But note below and in sysv4.h that we require only 8 and
14520 may round up the size of our stack frame anyways. The historical
14521 reason is early versions of powerpc-linux which didn't properly
14522 align the stack at program startup. A happy side-effect is that
14523 -mno-eabi libraries can be used with -meabi programs.)
14524
50d440bc 14525 The EABI configuration defaults to the V.4 layout. However,
5376a30c
KR
14526 the stack alignment requirements may differ. If -mno-eabi is not
14527 given, the required stack alignment is 8 bytes; if -mno-eabi is
14528 given, the required alignment is 16 bytes. (But see V.4 comment
14529 above.) */
4697a36c 14530
61b2fbe7
MM
14531#ifndef ABI_STACK_BOUNDARY
14532#define ABI_STACK_BOUNDARY STACK_BOUNDARY
14533#endif
14534
d1d0c603 14535static rs6000_stack_t *
863d938c 14536rs6000_stack_info (void)
4697a36c 14537{
022123e6 14538 static rs6000_stack_t info;
4697a36c 14539 rs6000_stack_t *info_ptr = &info;
327e5343 14540 int reg_size = TARGET_32BIT ? 4 : 8;
83720594 14541 int ehrd_size;
64045029 14542 int save_align;
8070c91a 14543 int first_gp;
44688022 14544 HOST_WIDE_INT non_fixed_size;
4697a36c 14545
022123e6 14546 memset (&info, 0, sizeof (info));
4697a36c 14547
c19de7aa
AH
14548 if (TARGET_SPE)
14549 {
14550 /* Cache value so we don't rescan instruction chain over and over. */
9b7b447f 14551 if (cfun->machine->insn_chain_scanned_p == 0)
b5a5beb9
AH
14552 cfun->machine->insn_chain_scanned_p
14553 = spe_func_has_64bit_regs_p () + 1;
14554 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
c19de7aa
AH
14555 }
14556
a4f6c312 14557 /* Select which calling sequence. */
178274da 14558 info_ptr->abi = DEFAULT_ABI;
9878760c 14559
a4f6c312 14560 /* Calculate which registers need to be saved & save area size. */
4697a36c 14561 info_ptr->first_gp_reg_save = first_reg_to_save ();
f676971a 14562 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8070c91a
DJ
14563 even if it currently looks like we won't. Reload may need it to
14564 get at a constant; if so, it will have already created a constant
14565 pool entry for it. */
2bfcf297 14566 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
178274da
AM
14567 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14568 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
e3b5732b 14569 && crtl->uses_const_pool
1db02437 14570 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8070c91a 14571 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
906fb125 14572 else
8070c91a
DJ
14573 first_gp = info_ptr->first_gp_reg_save;
14574
14575 info_ptr->gp_size = reg_size * (32 - first_gp);
4697a36c 14576
a3170dc6
AH
14577 /* For the SPE, we have an additional upper 32-bits on each GPR.
14578 Ideally we should save the entire 64-bits only when the upper
14579 half is used in SIMD instructions. Since we only record
14580 registers live (not the size they are used in), this proves
14581 difficult because we'd have to traverse the instruction chain at
14582 the right time, taking reload into account. This is a real pain,
c19de7aa
AH
14583 so we opt to save the GPRs in 64-bits always if but one register
14584 gets used in 64-bits. Otherwise, all the registers in the frame
14585 get saved in 32-bits.
a3170dc6 14586
c19de7aa 14587 So... since when we save all GPRs (except the SP) in 64-bits, the
a3170dc6 14588 traditional GP save area will be empty. */
c19de7aa 14589 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
a3170dc6
AH
14590 info_ptr->gp_size = 0;
14591
4697a36c
MM
14592 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14593 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14594
00b960c7
AH
14595 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14596 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14597 - info_ptr->first_altivec_reg_save);
14598
592696dd 14599 /* Does this function call anything? */
71f123ca
FS
14600 info_ptr->calls_p = (! current_function_is_leaf
14601 || cfun->machine->ra_needs_full_frame);
b6c9286a 14602
a4f6c312 14603 /* Determine if we need to save the link register. */
022123e6 14604 if ((DEFAULT_ABI == ABI_AIX
e3b5732b 14605 && crtl->profile
022123e6 14606 && !TARGET_PROFILE_KERNEL)
4697a36c
MM
14607#ifdef TARGET_RELOCATABLE
14608 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14609#endif
14610 || (info_ptr->first_fp_reg_save != 64
14611 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
e3b5732b 14612 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
022123e6
AM
14613 || info_ptr->calls_p
14614 || rs6000_ra_ever_killed ())
4697a36c
MM
14615 {
14616 info_ptr->lr_save_p = 1;
1de43f85 14617 df_set_regs_ever_live (LR_REGNO, true);
4697a36c
MM
14618 }
14619
9ebbca7d 14620 /* Determine if we need to save the condition code registers. */
6fb5fa3c
DB
14621 if (df_regs_ever_live_p (CR2_REGNO)
14622 || df_regs_ever_live_p (CR3_REGNO)
14623 || df_regs_ever_live_p (CR4_REGNO))
4697a36c
MM
14624 {
14625 info_ptr->cr_save_p = 1;
178274da 14626 if (DEFAULT_ABI == ABI_V4)
4697a36c
MM
14627 info_ptr->cr_size = reg_size;
14628 }
14629
83720594
RH
14630 /* If the current function calls __builtin_eh_return, then we need
14631 to allocate stack space for registers that will hold data for
14632 the exception handler. */
e3b5732b 14633 if (crtl->calls_eh_return)
83720594
RH
14634 {
14635 unsigned int i;
14636 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14637 continue;
a3170dc6
AH
14638
14639 /* SPE saves EH registers in 64-bits. */
c19de7aa
AH
14640 ehrd_size = i * (TARGET_SPE_ABI
14641 && info_ptr->spe_64bit_regs_used != 0
14642 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
83720594
RH
14643 }
14644 else
14645 ehrd_size = 0;
14646
592696dd 14647 /* Determine various sizes. */
4697a36c
MM
14648 info_ptr->reg_size = reg_size;
14649 info_ptr->fixed_size = RS6000_SAVE_AREA;
189e03e3 14650 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
38173d38 14651 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
03e007d7 14652 TARGET_ALTIVEC ? 16 : 8);
7d5175e1
JJ
14653 if (FRAME_GROWS_DOWNWARD)
14654 info_ptr->vars_size
5b667039
JJ
14655 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14656 + info_ptr->parm_size,
7d5175e1 14657 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
5b667039
JJ
14658 - (info_ptr->fixed_size + info_ptr->vars_size
14659 + info_ptr->parm_size);
00b960c7 14660
c19de7aa 14661 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
8070c91a 14662 info_ptr->spe_gp_size = 8 * (32 - first_gp);
a3170dc6
AH
14663 else
14664 info_ptr->spe_gp_size = 0;
14665
4d774ff8
HP
14666 if (TARGET_ALTIVEC_ABI)
14667 info_ptr->vrsave_mask = compute_vrsave_mask ();
00b960c7 14668 else
4d774ff8
HP
14669 info_ptr->vrsave_mask = 0;
14670
14671 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14672 info_ptr->vrsave_size = 4;
14673 else
14674 info_ptr->vrsave_size = 0;
b6c9286a 14675
d62294f5
FJ
14676 compute_save_world_info (info_ptr);
14677
592696dd 14678 /* Calculate the offsets. */
178274da 14679 switch (DEFAULT_ABI)
4697a36c 14680 {
b6c9286a 14681 case ABI_NONE:
24d304eb 14682 default:
37409796 14683 gcc_unreachable ();
b6c9286a
MM
14684
14685 case ABI_AIX:
ee890fe2 14686 case ABI_DARWIN:
b6c9286a
MM
14687 info_ptr->fp_save_offset = - info_ptr->fp_size;
14688 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
00b960c7
AH
14689
14690 if (TARGET_ALTIVEC_ABI)
14691 {
14692 info_ptr->vrsave_save_offset
14693 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14694
982afe02 14695 /* Align stack so vector save area is on a quadword boundary.
9278121c 14696 The padding goes above the vectors. */
00b960c7
AH
14697 if (info_ptr->altivec_size != 0)
14698 info_ptr->altivec_padding_size
9278121c 14699 = info_ptr->vrsave_save_offset & 0xF;
00b960c7
AH
14700 else
14701 info_ptr->altivec_padding_size = 0;
14702
14703 info_ptr->altivec_save_offset
14704 = info_ptr->vrsave_save_offset
14705 - info_ptr->altivec_padding_size
14706 - info_ptr->altivec_size;
9278121c
GK
14707 gcc_assert (info_ptr->altivec_size == 0
14708 || info_ptr->altivec_save_offset % 16 == 0);
00b960c7
AH
14709
14710 /* Adjust for AltiVec case. */
14711 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14712 }
14713 else
14714 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
a260abc9
DE
14715 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14716 info_ptr->lr_save_offset = 2*reg_size;
24d304eb
RK
14717 break;
14718
14719 case ABI_V4:
b6c9286a
MM
14720 info_ptr->fp_save_offset = - info_ptr->fp_size;
14721 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
a7df97e6 14722 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
00b960c7 14723
c19de7aa 14724 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
c4ad648e
AM
14725 {
14726 /* Align stack so SPE GPR save area is aligned on a
14727 double-word boundary. */
f78c3290 14728 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
c4ad648e
AM
14729 info_ptr->spe_padding_size
14730 = 8 - (-info_ptr->cr_save_offset % 8);
14731 else
14732 info_ptr->spe_padding_size = 0;
14733
14734 info_ptr->spe_gp_save_offset
14735 = info_ptr->cr_save_offset
14736 - info_ptr->spe_padding_size
14737 - info_ptr->spe_gp_size;
14738
14739 /* Adjust for SPE case. */
022123e6 14740 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
c4ad648e 14741 }
a3170dc6 14742 else if (TARGET_ALTIVEC_ABI)
00b960c7
AH
14743 {
14744 info_ptr->vrsave_save_offset
14745 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14746
14747 /* Align stack so vector save area is on a quadword boundary. */
14748 if (info_ptr->altivec_size != 0)
14749 info_ptr->altivec_padding_size
14750 = 16 - (-info_ptr->vrsave_save_offset % 16);
14751 else
14752 info_ptr->altivec_padding_size = 0;
14753
14754 info_ptr->altivec_save_offset
14755 = info_ptr->vrsave_save_offset
14756 - info_ptr->altivec_padding_size
14757 - info_ptr->altivec_size;
14758
14759 /* Adjust for AltiVec case. */
022123e6 14760 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
00b960c7
AH
14761 }
14762 else
022123e6
AM
14763 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14764 info_ptr->ehrd_offset -= ehrd_size;
b6c9286a
MM
14765 info_ptr->lr_save_offset = reg_size;
14766 break;
4697a36c
MM
14767 }
14768
64045029 14769 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
00b960c7
AH
14770 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14771 + info_ptr->gp_size
14772 + info_ptr->altivec_size
14773 + info_ptr->altivec_padding_size
a3170dc6
AH
14774 + info_ptr->spe_gp_size
14775 + info_ptr->spe_padding_size
00b960c7
AH
14776 + ehrd_size
14777 + info_ptr->cr_size
022123e6 14778 + info_ptr->vrsave_size,
64045029 14779 save_align);
00b960c7 14780
44688022 14781 non_fixed_size = (info_ptr->vars_size
ff381587 14782 + info_ptr->parm_size
5b667039 14783 + info_ptr->save_size);
ff381587 14784
44688022
AM
14785 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14786 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
ff381587
MM
14787
14788 /* Determine if we need to allocate any stack frame:
14789
a4f6c312
SS
14790 For AIX we need to push the stack if a frame pointer is needed
14791 (because the stack might be dynamically adjusted), if we are
14792 debugging, if we make calls, or if the sum of fp_save, gp_save,
14793 and local variables are more than the space needed to save all
14794 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14795 + 18*8 = 288 (GPR13 reserved).
ff381587 14796
a4f6c312
SS
14797 For V.4 we don't have the stack cushion that AIX uses, but assume
14798 that the debugger can handle stackless frames. */
ff381587
MM
14799
14800 if (info_ptr->calls_p)
14801 info_ptr->push_p = 1;
14802
178274da 14803 else if (DEFAULT_ABI == ABI_V4)
44688022 14804 info_ptr->push_p = non_fixed_size != 0;
ff381587 14805
178274da
AM
14806 else if (frame_pointer_needed)
14807 info_ptr->push_p = 1;
14808
14809 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14810 info_ptr->push_p = 1;
14811
ff381587 14812 else
44688022 14813 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
ff381587 14814
a4f6c312 14815 /* Zero offsets if we're not saving those registers. */
8dda1a21 14816 if (info_ptr->fp_size == 0)
4697a36c
MM
14817 info_ptr->fp_save_offset = 0;
14818
8dda1a21 14819 if (info_ptr->gp_size == 0)
4697a36c
MM
14820 info_ptr->gp_save_offset = 0;
14821
00b960c7
AH
14822 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14823 info_ptr->altivec_save_offset = 0;
14824
14825 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14826 info_ptr->vrsave_save_offset = 0;
14827
c19de7aa
AH
14828 if (! TARGET_SPE_ABI
14829 || info_ptr->spe_64bit_regs_used == 0
14830 || info_ptr->spe_gp_size == 0)
a3170dc6
AH
14831 info_ptr->spe_gp_save_offset = 0;
14832
c81fc13e 14833 if (! info_ptr->lr_save_p)
4697a36c
MM
14834 info_ptr->lr_save_offset = 0;
14835
c81fc13e 14836 if (! info_ptr->cr_save_p)
4697a36c
MM
14837 info_ptr->cr_save_offset = 0;
14838
14839 return info_ptr;
14840}
14841
c19de7aa
AH
14842/* Return true if the current function uses any GPRs in 64-bit SIMD
14843 mode. */
14844
14845static bool
863d938c 14846spe_func_has_64bit_regs_p (void)
c19de7aa
AH
14847{
14848 rtx insns, insn;
14849
14850 /* Functions that save and restore all the call-saved registers will
14851 need to save/restore the registers in 64-bits. */
e3b5732b
JH
14852 if (crtl->calls_eh_return
14853 || cfun->calls_setjmp
14854 || crtl->has_nonlocal_goto)
c19de7aa
AH
14855 return true;
14856
14857 insns = get_insns ();
14858
14859 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
14860 {
14861 if (INSN_P (insn))
14862 {
14863 rtx i;
14864
b5a5beb9
AH
14865 /* FIXME: This should be implemented with attributes...
14866
14867 (set_attr "spe64" "true")....then,
14868 if (get_spe64(insn)) return true;
14869
14870 It's the only reliable way to do the stuff below. */
14871
c19de7aa 14872 i = PATTERN (insn);
f82f556d
AH
14873 if (GET_CODE (i) == SET)
14874 {
14875 enum machine_mode mode = GET_MODE (SET_SRC (i));
14876
14877 if (SPE_VECTOR_MODE (mode))
14878 return true;
4f011e1e 14879 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
f82f556d
AH
14880 return true;
14881 }
c19de7aa
AH
14882 }
14883 }
14884
14885 return false;
14886}
14887
d1d0c603 14888static void
a2369ed3 14889debug_stack_info (rs6000_stack_t *info)
9878760c 14890{
d330fd93 14891 const char *abi_string;
24d304eb 14892
c81fc13e 14893 if (! info)
4697a36c
MM
14894 info = rs6000_stack_info ();
14895
14896 fprintf (stderr, "\nStack information for function %s:\n",
14897 ((current_function_decl && DECL_NAME (current_function_decl))
14898 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
14899 : "<unknown>"));
14900
24d304eb
RK
14901 switch (info->abi)
14902 {
b6c9286a
MM
14903 default: abi_string = "Unknown"; break;
14904 case ABI_NONE: abi_string = "NONE"; break;
50d440bc 14905 case ABI_AIX: abi_string = "AIX"; break;
ee890fe2 14906 case ABI_DARWIN: abi_string = "Darwin"; break;
b6c9286a 14907 case ABI_V4: abi_string = "V.4"; break;
24d304eb
RK
14908 }
14909
14910 fprintf (stderr, "\tABI = %5s\n", abi_string);
14911
00b960c7
AH
14912 if (TARGET_ALTIVEC_ABI)
14913 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
14914
a3170dc6
AH
14915 if (TARGET_SPE_ABI)
14916 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
14917
4697a36c
MM
14918 if (info->first_gp_reg_save != 32)
14919 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
14920
14921 if (info->first_fp_reg_save != 64)
14922 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9878760c 14923
00b960c7
AH
14924 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
14925 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
14926 info->first_altivec_reg_save);
14927
4697a36c
MM
14928 if (info->lr_save_p)
14929 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9878760c 14930
4697a36c
MM
14931 if (info->cr_save_p)
14932 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
14933
00b960c7
AH
14934 if (info->vrsave_mask)
14935 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
14936
4697a36c
MM
14937 if (info->push_p)
14938 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
14939
14940 if (info->calls_p)
14941 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
14942
4697a36c
MM
14943 if (info->gp_save_offset)
14944 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
14945
14946 if (info->fp_save_offset)
14947 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
14948
00b960c7
AH
14949 if (info->altivec_save_offset)
14950 fprintf (stderr, "\taltivec_save_offset = %5d\n",
14951 info->altivec_save_offset);
14952
a3170dc6
AH
14953 if (info->spe_gp_save_offset)
14954 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
14955 info->spe_gp_save_offset);
14956
00b960c7
AH
14957 if (info->vrsave_save_offset)
14958 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
14959 info->vrsave_save_offset);
14960
4697a36c
MM
14961 if (info->lr_save_offset)
14962 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
14963
14964 if (info->cr_save_offset)
14965 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
14966
14967 if (info->varargs_save_offset)
14968 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
14969
14970 if (info->total_size)
d1d0c603
JJ
14971 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14972 info->total_size);
4697a36c 14973
4697a36c 14974 if (info->vars_size)
d1d0c603
JJ
14975 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
14976 info->vars_size);
4697a36c
MM
14977
14978 if (info->parm_size)
14979 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
14980
14981 if (info->fixed_size)
14982 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
14983
14984 if (info->gp_size)
14985 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
14986
a3170dc6
AH
14987 if (info->spe_gp_size)
14988 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
14989
4697a36c
MM
14990 if (info->fp_size)
14991 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
14992
00b960c7
AH
14993 if (info->altivec_size)
14994 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
14995
14996 if (info->vrsave_size)
14997 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
14998
14999 if (info->altivec_padding_size)
15000 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15001 info->altivec_padding_size);
15002
a3170dc6
AH
15003 if (info->spe_padding_size)
15004 fprintf (stderr, "\tspe_padding_size = %5d\n",
15005 info->spe_padding_size);
15006
4697a36c
MM
15007 if (info->cr_size)
15008 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15009
15010 if (info->save_size)
15011 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15012
15013 if (info->reg_size != 4)
15014 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15015
15016 fprintf (stderr, "\n");
9878760c 15017}
71f123ca
FS
15018
15019rtx
a2369ed3 15020rs6000_return_addr (int count, rtx frame)
71f123ca 15021{
a4f6c312
SS
15022 /* Currently we don't optimize very well between prolog and body
15023 code and for PIC code the code can be actually quite bad, so
15024 don't try to be too clever here. */
f1384257 15025 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
71f123ca
FS
15026 {
15027 cfun->machine->ra_needs_full_frame = 1;
8ac61af7
RK
15028
15029 return
15030 gen_rtx_MEM
15031 (Pmode,
15032 memory_address
15033 (Pmode,
15034 plus_constant (copy_to_reg
15035 (gen_rtx_MEM (Pmode,
15036 memory_address (Pmode, frame))),
15037 RETURN_ADDRESS_OFFSET)));
71f123ca
FS
15038 }
15039
8c29550d 15040 cfun->machine->ra_need_lr = 1;
1de43f85 15041 return get_hard_reg_initial_val (Pmode, LR_REGNO);
71f123ca
FS
15042}
15043
5e1bf043
DJ
15044/* Say whether a function is a candidate for sibcall handling or not.
15045 We do not allow indirect calls to be optimized into sibling calls.
15046 Also, we can't do it if there are any vector parameters; there's
 15047 nowhere to put the VRsave code so that it works; note that functions with
15048 vector parameters are required to have a prototype, so the argument
15049 type info must be available here. (The tail recursion case can work
15050 with vector parameters, but there's no way to distinguish here.) */
4977bab6 15051static bool
a2369ed3 15052rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
5e1bf043
DJ
15053{
15054 tree type;
4977bab6 15055 if (decl)
5e1bf043
DJ
15056 {
15057 if (TARGET_ALTIVEC_VRSAVE)
c4ad648e 15058 {
4977bab6 15059 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
5e1bf043
DJ
15060 type; type = TREE_CHAIN (type))
15061 {
c15b529f 15062 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
4977bab6 15063 return false;
5e1bf043 15064 }
c4ad648e 15065 }
5e1bf043 15066 if (DEFAULT_ABI == ABI_DARWIN
8aa19d95
JJ
15067 || ((*targetm.binds_local_p) (decl)
15068 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
2bcc50d0 15069 {
4977bab6 15070 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
2bcc50d0
AM
15071
15072 if (!lookup_attribute ("longcall", attr_list)
15073 || lookup_attribute ("shortcall", attr_list))
4977bab6 15074 return true;
2bcc50d0 15075 }
5e1bf043 15076 }
4977bab6 15077 return false;
5e1bf043
DJ
15078}
15079
e7e64a25
AS
 15080/* Return NULL if INSN is valid within a low-overhead loop;
 15081 otherwise return a string describing why doloop cannot be applied.
9419649c
DE
15082 PowerPC uses the COUNT register for branch on table instructions. */
15083
e7e64a25 15084static const char *
3101faab 15085rs6000_invalid_within_doloop (const_rtx insn)
9419649c
DE
15086{
15087 if (CALL_P (insn))
e7e64a25 15088 return "Function call in the loop.";
9419649c
DE
15089
15090 if (JUMP_P (insn)
15091 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15092 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
e7e64a25 15093 return "Computed branch in the loop.";
9419649c 15094
e7e64a25 15095 return NULL;
9419649c
DE
15096}
15097
71f123ca 15098static int
863d938c 15099rs6000_ra_ever_killed (void)
71f123ca
FS
15100{
15101 rtx top;
5e1bf043
DJ
15102 rtx reg;
15103 rtx insn;
71f123ca 15104
e3b5732b 15105 if (crtl->is_thunk)
71f123ca 15106 return 0;
eb0424da 15107
36f7e964
AH
15108 /* regs_ever_live has LR marked as used if any sibcalls are present,
15109 but this should not force saving and restoring in the
15110 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
a3c9585f 15111 clobbers LR, so that is inappropriate. */
36f7e964 15112
5e1bf043
DJ
15113 /* Also, the prologue can generate a store into LR that
15114 doesn't really count, like this:
36f7e964 15115
5e1bf043
DJ
15116 move LR->R0
15117 bcl to set PIC register
15118 move LR->R31
15119 move R0->LR
36f7e964
AH
15120
15121 When we're called from the epilogue, we need to avoid counting
15122 this as a store. */
f676971a 15123
71f123ca
FS
15124 push_topmost_sequence ();
15125 top = get_insns ();
15126 pop_topmost_sequence ();
1de43f85 15127 reg = gen_rtx_REG (Pmode, LR_REGNO);
71f123ca 15128
5e1bf043
DJ
15129 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15130 {
15131 if (INSN_P (insn))
15132 {
022123e6
AM
15133 if (CALL_P (insn))
15134 {
15135 if (!SIBLING_CALL_P (insn))
15136 return 1;
15137 }
1de43f85 15138 else if (find_regno_note (insn, REG_INC, LR_REGNO))
5e1bf043 15139 return 1;
36f7e964
AH
15140 else if (set_of (reg, insn) != NULL_RTX
15141 && !prologue_epilogue_contains (insn))
5e1bf043
DJ
15142 return 1;
15143 }
15144 }
15145 return 0;
71f123ca 15146}
4697a36c 15147\f
9ebbca7d 15148/* Emit instructions needed to load the TOC register.
c7ca610e 15149 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9ebbca7d 15150 a constant pool; or for SVR4 -fpic. */
c7ca610e
RK
15151
15152void
a2369ed3 15153rs6000_emit_load_toc_table (int fromprolog)
c7ca610e 15154{
6fb5fa3c 15155 rtx dest;
1db02437 15156 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
c7ca610e 15157
7f970b70 15158 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
20b71b17 15159 {
7f970b70 15160 char buf[30];
e65a3857 15161 rtx lab, tmp1, tmp2, got;
7f970b70
AM
15162
15163 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15164 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15165 if (flag_pic == 2)
15166 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15167 else
15168 got = rs6000_got_sym ();
15169 tmp1 = tmp2 = dest;
15170 if (!fromprolog)
15171 {
15172 tmp1 = gen_reg_rtx (Pmode);
15173 tmp2 = gen_reg_rtx (Pmode);
15174 }
6fb5fa3c
DB
15175 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15176 emit_move_insn (tmp1,
1de43f85 15177 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c
DB
15178 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15179 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
7f970b70
AM
15180 }
15181 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15182 {
6fb5fa3c 15183 emit_insn (gen_load_toc_v4_pic_si ());
1de43f85 15184 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
20b71b17
AM
15185 }
15186 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15187 {
15188 char buf[30];
20b71b17
AM
15189 rtx temp0 = (fromprolog
15190 ? gen_rtx_REG (Pmode, 0)
15191 : gen_reg_rtx (Pmode));
20b71b17 15192
20b71b17
AM
15193 if (fromprolog)
15194 {
ccbca5e4 15195 rtx symF, symL;
38c1f2d7 15196
20b71b17
AM
15197 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15198 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9ebbca7d 15199
20b71b17
AM
15200 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15201 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15202
6fb5fa3c
DB
15203 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15204 emit_move_insn (dest,
1de43f85 15205 gen_rtx_REG (Pmode, LR_REGNO));
6fb5fa3c 15206 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
9ebbca7d
GK
15207 }
15208 else
20b71b17
AM
15209 {
15210 rtx tocsym;
20b71b17
AM
15211
15212 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
e65a3857
DE
15213 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15214 emit_move_insn (dest,
1de43f85 15215 gen_rtx_REG (Pmode, LR_REGNO));
027fbf43 15216 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
20b71b17 15217 }
6fb5fa3c 15218 emit_insn (gen_addsi3 (dest, temp0, dest));
9ebbca7d 15219 }
20b71b17
AM
15220 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15221 {
15222 /* This is for AIX code running in non-PIC ELF32. */
15223 char buf[30];
15224 rtx realsym;
15225 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15226 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15227
6fb5fa3c
DB
15228 emit_insn (gen_elf_high (dest, realsym));
15229 emit_insn (gen_elf_low (dest, dest, realsym));
20b71b17 15230 }
37409796 15231 else
9ebbca7d 15232 {
37409796 15233 gcc_assert (DEFAULT_ABI == ABI_AIX);
bb8df8a6 15234
9ebbca7d 15235 if (TARGET_32BIT)
6fb5fa3c 15236 emit_insn (gen_load_toc_aix_si (dest));
9ebbca7d 15237 else
6fb5fa3c 15238 emit_insn (gen_load_toc_aix_di (dest));
9ebbca7d
GK
15239 }
15240}
15241
d1d0c603
JJ
15242/* Emit instructions to restore the link register after determining where
15243 its value has been stored. */
15244
15245void
15246rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15247{
15248 rs6000_stack_t *info = rs6000_stack_info ();
15249 rtx operands[2];
15250
15251 operands[0] = source;
15252 operands[1] = scratch;
15253
15254 if (info->lr_save_p)
15255 {
15256 rtx frame_rtx = stack_pointer_rtx;
15257 HOST_WIDE_INT sp_offset = 0;
15258 rtx tmp;
15259
15260 if (frame_pointer_needed
e3b5732b 15261 || cfun->calls_alloca
d1d0c603
JJ
15262 || info->total_size > 32767)
15263 {
0be76840 15264 tmp = gen_frame_mem (Pmode, frame_rtx);
8308679f 15265 emit_move_insn (operands[1], tmp);
d1d0c603
JJ
15266 frame_rtx = operands[1];
15267 }
15268 else if (info->push_p)
15269 sp_offset = info->total_size;
15270
15271 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
0be76840 15272 tmp = gen_frame_mem (Pmode, tmp);
d1d0c603
JJ
15273 emit_move_insn (tmp, operands[0]);
15274 }
15275 else
1de43f85 15276 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
d1d0c603
JJ
15277}
15278
4862826d 15279static GTY(()) alias_set_type set = -1;
f103e34d 15280
4862826d 15281alias_set_type
863d938c 15282get_TOC_alias_set (void)
9ebbca7d 15283{
f103e34d
GK
15284 if (set == -1)
15285 set = new_alias_set ();
15286 return set;
f676971a 15287}
9ebbca7d 15288
c1207243 15289/* This returns nonzero if the current function uses the TOC. This is
3c9eb5f4
AM
15290 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15291 is generated by the ABI_V4 load_toc_* patterns. */
c954844a 15292#if TARGET_ELF
3c9eb5f4 15293static int
f676971a 15294uses_TOC (void)
9ebbca7d 15295{
c4501e62 15296 rtx insn;
38c1f2d7 15297
c4501e62
JJ
15298 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15299 if (INSN_P (insn))
15300 {
15301 rtx pat = PATTERN (insn);
15302 int i;
9ebbca7d 15303
f676971a 15304 if (GET_CODE (pat) == PARALLEL)
c4501e62
JJ
15305 for (i = 0; i < XVECLEN (pat, 0); i++)
15306 {
15307 rtx sub = XVECEXP (pat, 0, i);
15308 if (GET_CODE (sub) == USE)
15309 {
15310 sub = XEXP (sub, 0);
15311 if (GET_CODE (sub) == UNSPEC
15312 && XINT (sub, 1) == UNSPEC_TOC)
15313 return 1;
15314 }
15315 }
15316 }
15317 return 0;
9ebbca7d 15318}
c954844a 15319#endif
38c1f2d7 15320
9ebbca7d 15321rtx
f676971a 15322create_TOC_reference (rtx symbol)
9ebbca7d 15323{
b3a13419 15324 if (!can_create_pseudo_p ())
6fb5fa3c 15325 df_set_regs_ever_live (TOC_REGISTER, true);
f676971a 15326 return gen_rtx_PLUS (Pmode,
a8a05998 15327 gen_rtx_REG (Pmode, TOC_REGISTER),
f676971a
EC
15328 gen_rtx_CONST (Pmode,
15329 gen_rtx_MINUS (Pmode, symbol,
b999aaeb 15330 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9ebbca7d 15331}
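/* Illustrative sketch (not from the original sources): for a SYMBOL argument
   the function above returns RTL of roughly this shape,

     (plus:P (reg:P TOC_REGISTER)
             (const:P (minus:P (symbol_ref SYMBOL)
                               (symbol_ref toc_label_name))))

   i.e. the address is expressed as the offset of SYMBOL from the TOC base
   label, added to the TOC register.  */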
38c1f2d7 15332
fc4767bb
JJ
15333/* If _Unwind_* has been called from within the same module,
 15334 the TOC register is not guaranteed to be saved to 40(1) on function
15335 entry. Save it there in that case. */
c7ca610e 15336
9ebbca7d 15337void
863d938c 15338rs6000_aix_emit_builtin_unwind_init (void)
9ebbca7d
GK
15339{
15340 rtx mem;
15341 rtx stack_top = gen_reg_rtx (Pmode);
15342 rtx opcode_addr = gen_reg_rtx (Pmode);
fc4767bb
JJ
15343 rtx opcode = gen_reg_rtx (SImode);
15344 rtx tocompare = gen_reg_rtx (SImode);
15345 rtx no_toc_save_needed = gen_label_rtx ();
9ebbca7d 15346
8308679f 15347 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
9ebbca7d
GK
15348 emit_move_insn (stack_top, mem);
15349
8308679f
DE
15350 mem = gen_frame_mem (Pmode,
15351 gen_rtx_PLUS (Pmode, stack_top,
15352 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9ebbca7d 15353 emit_move_insn (opcode_addr, mem);
fc4767bb
JJ
15354 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15355 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
2496c7bd 15356 : 0xE8410028, SImode));
9ebbca7d 15357
fc4767bb 15358 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
06f4e019 15359 SImode, NULL_RTX, NULL_RTX,
fc4767bb 15360 no_toc_save_needed);
9ebbca7d 15361
8308679f
DE
15362 mem = gen_frame_mem (Pmode,
15363 gen_rtx_PLUS (Pmode, stack_top,
15364 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
fc4767bb
JJ
15365 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15366 emit_label (no_toc_save_needed);
9ebbca7d 15367}
38c1f2d7 15368\f
0be76840
DE
15369/* This ties together stack memory (MEM with an alias set of frame_alias_set)
15370 and the change to the stack pointer. */
ba4828e0 15371
9ebbca7d 15372static void
863d938c 15373rs6000_emit_stack_tie (void)
9ebbca7d 15374{
0be76840
DE
15375 rtx mem = gen_frame_mem (BLKmode,
15376 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
ba4828e0 15377
9ebbca7d
GK
15378 emit_insn (gen_stack_tie (mem));
15379}
38c1f2d7 15380
9ebbca7d
GK
15381/* Emit the correct code for allocating stack space, as insns.
15382 If COPY_R12, make sure a copy of the old frame is left in r12.
f78c3290
NF
15383 If COPY_R11, make sure a copy of the old frame is left in r11,
15384 in preference to r12 if COPY_R12.
9ebbca7d
GK
15385 The generated code may use hard register 0 as a temporary. */
15386
15387static void
f78c3290 15388rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
38c1f2d7 15389{
9ebbca7d
GK
15390 rtx insn;
15391 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15392 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
61168ff1
RS
15393 rtx todec = gen_int_mode (-size, Pmode);
15394
15395 if (INTVAL (todec) != -size)
15396 {
d4ee4d25 15397 warning (0, "stack frame too large");
61168ff1
RS
15398 emit_insn (gen_trap ());
15399 return;
15400 }
a157febd 15401
e3b5732b 15402 if (crtl->limit_stack)
a157febd
GK
15403 {
15404 if (REG_P (stack_limit_rtx)
f676971a 15405 && REGNO (stack_limit_rtx) > 1
a157febd
GK
15406 && REGNO (stack_limit_rtx) <= 31)
15407 {
5b71a4e7 15408 emit_insn (TARGET_32BIT
9ebbca7d
GK
15409 ? gen_addsi3 (tmp_reg,
15410 stack_limit_rtx,
15411 GEN_INT (size))
15412 : gen_adddi3 (tmp_reg,
15413 stack_limit_rtx,
15414 GEN_INT (size)));
5b71a4e7 15415
9ebbca7d
GK
15416 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15417 const0_rtx));
a157febd
GK
15418 }
15419 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9ebbca7d 15420 && TARGET_32BIT
f607bc57 15421 && DEFAULT_ABI == ABI_V4)
a157febd 15422 {
9ebbca7d 15423 rtx toload = gen_rtx_CONST (VOIDmode,
f676971a
EC
15424 gen_rtx_PLUS (Pmode,
15425 stack_limit_rtx,
9ebbca7d 15426 GEN_INT (size)));
5b71a4e7 15427
9ebbca7d
GK
15428 emit_insn (gen_elf_high (tmp_reg, toload));
15429 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15430 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15431 const0_rtx));
a157febd
GK
15432 }
15433 else
d4ee4d25 15434 warning (0, "stack limit expression is not supported");
a157febd
GK
15435 }
15436
f78c3290
NF
15437 if (copy_r12 || copy_r11 || ! TARGET_UPDATE)
15438 emit_move_insn (copy_r11
15439 ? gen_rtx_REG (Pmode, 11)
15440 : gen_rtx_REG (Pmode, 12),
15441 stack_reg);
9ebbca7d 15442
38c1f2d7
MM
15443 if (TARGET_UPDATE)
15444 {
9ebbca7d 15445 if (size > 32767)
38c1f2d7 15446 {
9ebbca7d 15447 /* Need a note here so that try_split doesn't get confused. */
9390387d 15448 if (get_last_insn () == NULL_RTX)
2e040219 15449 emit_note (NOTE_INSN_DELETED);
9ebbca7d
GK
15450 insn = emit_move_insn (tmp_reg, todec);
15451 try_split (PATTERN (insn), insn, 0);
15452 todec = tmp_reg;
38c1f2d7 15453 }
5b71a4e7
DE
15454
15455 insn = emit_insn (TARGET_32BIT
15456 ? gen_movsi_update (stack_reg, stack_reg,
15457 todec, stack_reg)
c4ad648e 15458 : gen_movdi_di_update (stack_reg, stack_reg,
9ebbca7d 15459 todec, stack_reg));
38c1f2d7
MM
15460 }
15461 else
15462 {
5b71a4e7
DE
15463 insn = emit_insn (TARGET_32BIT
15464 ? gen_addsi3 (stack_reg, stack_reg, todec)
15465 : gen_adddi3 (stack_reg, stack_reg, todec));
9ebbca7d 15466 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
f78c3290
NF
15467 copy_r11
15468 ? gen_rtx_REG (Pmode, 11)
15469 : gen_rtx_REG (Pmode, 12));
9ebbca7d 15470 }
f676971a 15471
9ebbca7d 15472 RTX_FRAME_RELATED_P (insn) = 1;
f676971a 15473 REG_NOTES (insn) =
9ebbca7d 15474 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
f676971a 15475 gen_rtx_SET (VOIDmode, stack_reg,
9ebbca7d
GK
15476 gen_rtx_PLUS (Pmode, stack_reg,
15477 GEN_INT (-size))),
15478 REG_NOTES (insn));
15479}
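/* Illustrative note (not from the original comments): with TARGET_UPDATE and
   a frame that fits in a 16-bit displacement, the code above reduces to a
   single update-form store, roughly

     stwu r1,-SIZE(r1)        (stdu on 64-bit targets)

   which allocates the frame and stores the back chain in one instruction;
   the non-update path adds to r1 and then stores the old stack pointer,
   kept in r11 or r12, at the new top of stack.  */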
15480
a4f6c312
SS
15481/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15482 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15483 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15484 deduce these equivalences by itself so it wasn't necessary to hold
15485 its hand so much. */
9ebbca7d
GK
15486
15487static void
f676971a 15488rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
a2369ed3 15489 rtx reg2, rtx rreg)
9ebbca7d
GK
15490{
15491 rtx real, temp;
15492
e56c4463
JL
15493 /* copy_rtx will not make unique copies of registers, so we need to
15494 ensure we don't have unwanted sharing here. */
15495 if (reg == reg2)
15496 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15497
15498 if (reg == rreg)
15499 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15500
9ebbca7d
GK
15501 real = copy_rtx (PATTERN (insn));
15502
89e7058f
AH
15503 if (reg2 != NULL_RTX)
15504 real = replace_rtx (real, reg2, rreg);
f676971a
EC
15505
15506 real = replace_rtx (real, reg,
9ebbca7d
GK
15507 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15508 STACK_POINTER_REGNUM),
15509 GEN_INT (val)));
f676971a 15510
9ebbca7d
GK
15511 /* We expect that 'real' is either a SET or a PARALLEL containing
15512 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15513 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15514
15515 if (GET_CODE (real) == SET)
15516 {
15517 rtx set = real;
f676971a 15518
9ebbca7d
GK
15519 temp = simplify_rtx (SET_SRC (set));
15520 if (temp)
15521 SET_SRC (set) = temp;
15522 temp = simplify_rtx (SET_DEST (set));
15523 if (temp)
15524 SET_DEST (set) = temp;
15525 if (GET_CODE (SET_DEST (set)) == MEM)
38c1f2d7 15526 {
9ebbca7d
GK
15527 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15528 if (temp)
15529 XEXP (SET_DEST (set), 0) = temp;
38c1f2d7 15530 }
38c1f2d7 15531 }
37409796 15532 else
9ebbca7d
GK
15533 {
15534 int i;
37409796
NS
15535
15536 gcc_assert (GET_CODE (real) == PARALLEL);
9ebbca7d
GK
15537 for (i = 0; i < XVECLEN (real, 0); i++)
15538 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15539 {
15540 rtx set = XVECEXP (real, 0, i);
f676971a 15541
9ebbca7d
GK
15542 temp = simplify_rtx (SET_SRC (set));
15543 if (temp)
15544 SET_SRC (set) = temp;
15545 temp = simplify_rtx (SET_DEST (set));
15546 if (temp)
15547 SET_DEST (set) = temp;
15548 if (GET_CODE (SET_DEST (set)) == MEM)
15549 {
15550 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15551 if (temp)
15552 XEXP (SET_DEST (set), 0) = temp;
15553 }
15554 RTX_FRAME_RELATED_P (set) = 1;
15555 }
15556 }
c19de7aa 15557
9ebbca7d
GK
15558 RTX_FRAME_RELATED_P (insn) = 1;
15559 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15560 real,
15561 REG_NOTES (insn));
38c1f2d7
MM
15562}
15563
00b960c7
AH
15564/* Returns an insn that has a vrsave set operation with the
15565 appropriate CLOBBERs. */
15566
15567static rtx
a2369ed3 15568generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
00b960c7
AH
15569{
15570 int nclobs, i;
15571 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
a004eb82 15572 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
00b960c7 15573
a004eb82
AH
15574 clobs[0]
15575 = gen_rtx_SET (VOIDmode,
15576 vrsave,
15577 gen_rtx_UNSPEC_VOLATILE (SImode,
15578 gen_rtvec (2, reg, vrsave),
3aca4bff 15579 UNSPECV_SET_VRSAVE));
00b960c7
AH
15580
15581 nclobs = 1;
15582
9aa86737
AH
15583 /* We need to clobber the registers in the mask so the scheduler
15584 does not move sets to VRSAVE before sets of AltiVec registers.
15585
15586 However, if the function receives nonlocal gotos, reload will set
15587 all call saved registers live. We will end up with:
15588
15589 (set (reg 999) (mem))
15590 (parallel [ (set (reg vrsave) (unspec blah))
15591 (clobber (reg 999))])
15592
15593 The clobber will cause the store into reg 999 to be dead, and
15594 flow will attempt to delete an epilogue insn. In this case, we
15595 need an unspec use/set of the register. */
00b960c7
AH
15596
15597 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
44688022 15598 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9aa86737
AH
15599 {
15600 if (!epiloguep || call_used_regs [i])
15601 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15602 gen_rtx_REG (V4SImode, i));
15603 else
15604 {
15605 rtx reg = gen_rtx_REG (V4SImode, i);
9aa86737
AH
15606
15607 clobs[nclobs++]
a004eb82
AH
15608 = gen_rtx_SET (VOIDmode,
15609 reg,
15610 gen_rtx_UNSPEC (V4SImode,
15611 gen_rtvec (1, reg), 27));
9aa86737
AH
15612 }
15613 }
00b960c7
AH
15614
15615 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15616
15617 for (i = 0; i < nclobs; ++i)
15618 XVECEXP (insn, 0, i) = clobs[i];
15619
15620 return insn;
15621}
15622
89e7058f
AH
15623/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15624 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15625
15626static void
f676971a 15627emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
d1d0c603 15628 unsigned int regno, int offset, HOST_WIDE_INT total_size)
89e7058f
AH
15629{
15630 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15631 rtx replacea, replaceb;
15632
15633 int_rtx = GEN_INT (offset);
15634
15635 /* Some cases that need register indexed addressing. */
15636 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4f011e1e 15637 || (TARGET_E500_DOUBLE && mode == DFmode)
a3170dc6
AH
15638 || (TARGET_SPE_ABI
15639 && SPE_VECTOR_MODE (mode)
15640 && !SPE_CONST_OFFSET_OK (offset)))
89e7058f
AH
15641 {
 15642 /* Whoever calls us must make sure r11 is available in the
c4ad648e 15643 flow path of instructions in the prologue. */
89e7058f
AH
15644 offset_rtx = gen_rtx_REG (Pmode, 11);
15645 emit_move_insn (offset_rtx, int_rtx);
15646
15647 replacea = offset_rtx;
15648 replaceb = int_rtx;
15649 }
15650 else
15651 {
15652 offset_rtx = int_rtx;
15653 replacea = NULL_RTX;
15654 replaceb = NULL_RTX;
15655 }
15656
15657 reg = gen_rtx_REG (mode, regno);
15658 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
0be76840 15659 mem = gen_frame_mem (mode, addr);
89e7058f
AH
15660
15661 insn = emit_move_insn (mem, reg);
15662
15663 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15664}
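/* Illustrative note (not from the original comments): the register-indexed
   path above is needed because AltiVec loads/stores only accept a [reg+reg]
   address and SPE's evstdd has only a small immediate offset field (see
   SPE_CONST_OFFSET_OK), so large offsets are first materialized in r11;
   the E500 double-precision case is handled the same way.  */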
15665
a3170dc6
AH
15666/* Emit an offset memory reference suitable for a frame store, while
15667 converting to a valid addressing mode. */
15668
15669static rtx
a2369ed3 15670gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
a3170dc6
AH
15671{
15672 rtx int_rtx, offset_rtx;
15673
15674 int_rtx = GEN_INT (offset);
15675
4d4cbc0e 15676 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
4f011e1e 15677 || (TARGET_E500_DOUBLE && mode == DFmode))
a3170dc6
AH
15678 {
15679 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15680 emit_move_insn (offset_rtx, int_rtx);
15681 }
15682 else
15683 offset_rtx = int_rtx;
15684
0be76840 15685 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
a3170dc6
AH
15686}
15687
6d0a8091
DJ
15688/* Look for user-defined global regs. We should not save and restore these,
 15689 and cannot use stmw/lmw if there are any in the range being saved. */
15690
15691static bool
f78c3290 15692no_global_regs_above (int first, bool gpr)
6d0a8091
DJ
15693{
15694 int i;
f78c3290
NF
 15695 for (i = first; i < (gpr ? 32 : 64); i++)
15696 if (global_regs[i])
6d0a8091
DJ
15697 return false;
15698 return true;
15699}
15700
699c914a
MS
15701#ifndef TARGET_FIX_AND_CONTINUE
15702#define TARGET_FIX_AND_CONTINUE 0
15703#endif
15704
f78c3290
NF
15705/* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15706#define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15707#define LAST_SAVRES_REGISTER 31
15708#define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15709
15710static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15711
15712/* Return the symbol for an out-of-line register save/restore routine.
15713 We are saving/restoring GPRs if GPR is true. */
15714
15715static rtx
15716rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15717{
15718 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15719 rtx sym;
15720 int select = ((savep ? 1 : 0) << 2
15721 | (gpr
15722 /* On the SPE, we never have any FPRs, but we do have
15723 32/64-bit versions of the routines. */
15724 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15725 : 0) << 1
15726 | (exitp ? 1: 0));
15727
15728 /* Don't generate bogus routine names. */
15729 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15730
15731 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15732
15733 if (sym == NULL)
15734 {
15735 char name[30];
15736 const char *action;
15737 const char *regkind;
15738 const char *exit_suffix;
15739
15740 action = savep ? "save" : "rest";
15741
15742 /* SPE has slightly different names for its routines depending on
15743 whether we are saving 32-bit or 64-bit registers. */
15744 if (TARGET_SPE_ABI)
15745 {
15746 /* No floating point saves on the SPE. */
15747 gcc_assert (gpr);
15748
15749 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15750 }
15751 else
15752 regkind = gpr ? "gpr" : "fpr";
15753
15754 exit_suffix = exitp ? "_x" : "";
15755
15756 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15757
15758 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15759 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15760 }
15761
15762 return sym;
15763}
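/* Illustrative note (example names, not from the original comments): the
   sprintf format above produces routine names such as "_savegpr_29" for a
   GPR save starting at r29, "_restfpr_14_x" for an "exit"-flavoured FPR
   restore starting at f14, and "_save32gpr_..."/"_save64gpr_..." variants
   on SPE.  */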
15764
15765/* Emit a sequence of insns, including a stack tie if needed, for
15766 resetting the stack pointer. If SAVRES is true, then don't reset the
15767 stack pointer, but move the base of the frame into r11 for use by
15768 out-of-line register restore routines. */
15769
15770static void
15771rs6000_emit_stack_reset (rs6000_stack_t *info,
15772 rtx sp_reg_rtx, rtx frame_reg_rtx,
15773 int sp_offset, bool savres)
15774{
15775 /* This blockage is needed so that sched doesn't decide to move
15776 the sp change before the register restores. */
15777 if (frame_reg_rtx != sp_reg_rtx
15778 || (TARGET_SPE_ABI
15779 && info->spe_64bit_regs_used != 0
15780 && info->first_gp_reg_save != 32))
15781 rs6000_emit_stack_tie ();
15782
15783 if (frame_reg_rtx != sp_reg_rtx)
15784 {
15785 rs6000_emit_stack_tie ();
15786 if (sp_offset != 0)
15787 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15788 GEN_INT (sp_offset)));
15789 else if (!savres)
15790 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15791 }
15792 else if (sp_offset != 0)
15793 {
15794 /* If we are restoring registers out-of-line, we will be using the
15795 "exit" variants of the restore routines, which will reset the
15796 stack for us. But we do need to point r11 into the right place
15797 for those routines. */
15798 rtx dest_reg = (savres
15799 ? gen_rtx_REG (Pmode, 11)
15800 : sp_reg_rtx);
15801
15802 emit_insn (TARGET_32BIT
15803 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15804 GEN_INT (sp_offset))
15805 : gen_adddi3 (dest_reg, sp_reg_rtx,
15806 GEN_INT (sp_offset)));
15807 }
15808}
15809
15810/* Construct a parallel rtx describing the effect of a call to an
15811 out-of-line register save/restore routine. */
15812
15813static rtx
15814rs6000_make_savres_rtx (rs6000_stack_t *info,
15815 rtx frame_reg_rtx, int save_area_offset,
15816 enum machine_mode reg_mode,
15817 bool savep, bool gpr, bool exitp)
15818{
15819 int i;
15820 int offset, start_reg, end_reg, n_regs;
15821 int reg_size = GET_MODE_SIZE (reg_mode);
15822 rtx sym;
15823 rtvec p;
15824
15825 offset = 0;
15826 start_reg = (gpr
15827 ? info->first_gp_reg_save
15828 : info->first_fp_reg_save);
15829 end_reg = gpr ? 32 : 64;
15830 n_regs = end_reg - start_reg;
15831 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15832
15833 /* If we're saving registers, then we should never say we're exiting. */
15834 gcc_assert ((savep && !exitp) || !savep);
15835
15836 if (exitp)
15837 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15838
15839 RTVEC_ELT (p, offset++)
15840 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15841
15842 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15843 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15844 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15845
15846 for (i = 0; i < end_reg - start_reg; i++)
15847 {
15848 rtx addr, reg, mem;
15849 reg = gen_rtx_REG (reg_mode, start_reg + i);
15850 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
15851 GEN_INT (save_area_offset + reg_size*i));
15852 mem = gen_frame_mem (reg_mode, addr);
15853
15854 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
15855 savep ? mem : reg,
15856 savep ? reg : mem);
15857 }
15858
15859 return gen_rtx_PARALLEL (VOIDmode, p);
15860}
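/* Illustrative sketch (not from the original comments): for an "exit" GPR
   restore the PARALLEL built above has roughly this shape, in the order the
   elements are filled in:

     (parallel [(return)
                (clobber (reg:P 65))            ; the link register
                (use (symbol_ref "_restgpr_29_x"))
                (use (reg:P 11))
                (set (reg r29) (mem ...)) ... (set (reg r31) (mem ...))])

   where the routine name is only an example of what
   rs6000_savres_routine_sym returns.  */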
15861
52ff33d0
NF
15862/* Determine whether the gp REG is really used. */
15863
15864static bool
15865rs6000_reg_live_or_pic_offset_p (int reg)
15866{
6fb5fa3c 15867 return ((df_regs_ever_live_p (reg)
52ff33d0
NF
15868 && (!call_used_regs[reg]
15869 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15870 && TARGET_TOC && TARGET_MINIMAL_TOC)))
15871 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
15872 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
15873 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
15874}
15875
f78c3290
NF
15876enum {
15877 SAVRES_MULTIPLE = 0x1,
15878 SAVRES_INLINE_FPRS = 0x2,
15879 SAVRES_INLINE_GPRS = 0x4
15880};
15881
15882/* Determine the strategy for savings/restoring registers. */
15883
15884static int
15885rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
15886 int using_static_chain_p, int sibcall)
15887{
15888 bool using_multiple_p;
15889 bool common;
15890 bool savres_fprs_inline;
15891 bool savres_gprs_inline;
15892 bool noclobber_global_gprs
15893 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
15894
15895 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
15896 && (!TARGET_SPE_ABI
15897 || info->spe_64bit_regs_used == 0)
15898 && info->first_gp_reg_save < 31
15899 && noclobber_global_gprs);
15900 /* Don't bother to try to save things out-of-line if r11 is occupied
15901 by the static chain. It would require too much fiddling and the
15902 static chain is rarely used anyway. */
15903 common = (using_static_chain_p
15904 || sibcall
15905 || crtl->calls_eh_return
15906 || !info->lr_save_p
15907 || cfun->machine->ra_need_lr
15908 || info->total_size > 32767);
15909 savres_fprs_inline = (common
15910 || info->first_fp_reg_save == 64
15911 || !no_global_regs_above (info->first_fp_reg_save,
15912 /*gpr=*/false)
15913 || FP_SAVE_INLINE (info->first_fp_reg_save));
15914 savres_gprs_inline = (common
15915 /* Saving CR interferes with the exit routines
15916 used on the SPE, so just punt here. */
15917 || (!savep
15918 && TARGET_SPE_ABI
15919 && info->spe_64bit_regs_used != 0
15920 && info->cr_save_p != 0)
15921 || info->first_gp_reg_save == 32
15922 || !noclobber_global_gprs
15923 || GP_SAVE_INLINE (info->first_gp_reg_save));
15924
15925 if (savep)
15926 /* If we are going to use store multiple, then don't even bother
15927 with the out-of-line routines, since the store-multiple instruction
15928 will always be smaller. */
15929 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15930 else
15931 {
15932 /* The situation is more complicated with load multiple. We'd
15933 prefer to use the out-of-line routines for restores, since the
15934 "exit" out-of-line routines can handle the restore of LR and
15935 the frame teardown. But we can only use the out-of-line
15936 routines if we know that we've used store multiple or
15937 out-of-line routines in the prologue, i.e. if we've saved all
15938 the registers from first_gp_reg_save. Otherwise, we risk
15939 loading garbage from the stack. Furthermore, we can only use
15940 the "exit" out-of-line gpr restore if we haven't saved any
15941 fprs. */
15942 bool saved_all = !savres_gprs_inline || using_multiple_p;
15943
15944 if (saved_all && info->first_fp_reg_save != 64)
15945 /* We can't use the exit routine; use load multiple if it's
15946 available. */
15947 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
15948 }
15949
15950 return (using_multiple_p
15951 | (savres_fprs_inline << 1)
15952 | (savres_gprs_inline << 2));
15953}
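/* Illustrative note (not from the original comments): the return value is a
   mask of the SAVRES_* flags defined above; for example
   SAVRES_INLINE_FPRS | SAVRES_INLINE_GPRS (0x6) means both FPRs and GPRs
   are saved/restored inline and store/load-multiple is not used.  */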
15954
9ebbca7d
GK
15955/* Emit function prologue as insns. */
15956
9878760c 15957void
863d938c 15958rs6000_emit_prologue (void)
9878760c 15959{
4697a36c 15960 rs6000_stack_t *info = rs6000_stack_info ();
0e67400a 15961 enum machine_mode reg_mode = Pmode;
327e5343 15962 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
15963 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15964 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
15965 rtx frame_reg_rtx = sp_reg_rtx;
b78d48dd 15966 rtx cr_save_rtx = NULL_RTX;
9ebbca7d 15967 rtx insn;
f78c3290 15968 int strategy;
9ebbca7d 15969 int saving_FPRs_inline;
f78c3290 15970 int saving_GPRs_inline;
9ebbca7d 15971 int using_store_multiple;
f78c3290
NF
15972 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
15973 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
15974 && !call_used_regs[STATIC_CHAIN_REGNUM]);
9ebbca7d 15975 HOST_WIDE_INT sp_offset = 0;
f676971a 15976
699c914a
MS
15977 if (TARGET_FIX_AND_CONTINUE)
15978 {
15979 /* gdb on darwin arranges to forward a function from the old
de2ab0ca 15980 address by modifying the first 5 instructions of the function
699c914a
MS
15981 to branch to the overriding function. This is necessary to
15982 permit function pointers that point to the old function to
15983 actually forward to the new function. */
15984 emit_insn (gen_nop ());
15985 emit_insn (gen_nop ());
de2ab0ca 15986 emit_insn (gen_nop ());
699c914a
MS
15987 emit_insn (gen_nop ());
15988 emit_insn (gen_nop ());
15989 }
15990
15991 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
15992 {
15993 reg_mode = V2SImode;
15994 reg_size = 8;
15995 }
a3170dc6 15996
f78c3290
NF
15997 strategy = rs6000_savres_strategy (info, /*savep=*/true,
15998 /*static_chain_p=*/using_static_chain_p,
15999 /*sibcall=*/0);
16000 using_store_multiple = strategy & SAVRES_MULTIPLE;
16001 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16002 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
9ebbca7d
GK
16003
16004 /* For V.4, update stack before we do any saving and set back pointer. */
22fa69da
GK
16005 if (! WORLD_SAVE_P (info)
16006 && info->push_p
acd0b319 16007 && (DEFAULT_ABI == ABI_V4
e3b5732b 16008 || crtl->calls_eh_return))
9ebbca7d 16009 {
f78c3290
NF
16010 bool need_r11 = (TARGET_SPE
16011 ? (!saving_GPRs_inline
16012 && info->spe_64bit_regs_used == 0)
16013 : (!saving_FPRs_inline || !saving_GPRs_inline));
9ebbca7d
GK
16014 if (info->total_size < 32767)
16015 sp_offset = info->total_size;
16016 else
f78c3290
NF
16017 frame_reg_rtx = (need_r11
16018 ? gen_rtx_REG (Pmode, 11)
16019 : frame_ptr_rtx);
f676971a 16020 rs6000_emit_allocate_stack (info->total_size,
9ebbca7d
GK
16021 (frame_reg_rtx != sp_reg_rtx
16022 && (info->cr_save_p
16023 || info->lr_save_p
16024 || info->first_fp_reg_save < 64
16025 || info->first_gp_reg_save < 32
f78c3290
NF
16026 )),
16027 need_r11);
9ebbca7d
GK
16028 if (frame_reg_rtx != sp_reg_rtx)
16029 rs6000_emit_stack_tie ();
16030 }
16031
d62294f5 16032 /* Handle world saves specially here. */
f57fe068 16033 if (WORLD_SAVE_P (info))
d62294f5
FJ
16034 {
16035 int i, j, sz;
16036 rtx treg;
16037 rtvec p;
22fa69da 16038 rtx reg0;
d62294f5
FJ
16039
16040 /* save_world expects lr in r0. */
22fa69da 16041 reg0 = gen_rtx_REG (Pmode, 0);
d62294f5 16042 if (info->lr_save_p)
c4ad648e 16043 {
22fa69da 16044 insn = emit_move_insn (reg0,
1de43f85 16045 gen_rtx_REG (Pmode, LR_REGNO));
c4ad648e
AM
16046 RTX_FRAME_RELATED_P (insn) = 1;
16047 }
d62294f5
FJ
16048
16049 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
c4ad648e 16050 assumptions about the offsets of various bits of the stack
992d08b1 16051 frame. */
37409796
NS
16052 gcc_assert (info->gp_save_offset == -220
16053 && info->fp_save_offset == -144
16054 && info->lr_save_offset == 8
16055 && info->cr_save_offset == 4
16056 && info->push_p
16057 && info->lr_save_p
e3b5732b 16058 && (!crtl->calls_eh_return
37409796
NS
16059 || info->ehrd_offset == -432)
16060 && info->vrsave_save_offset == -224
22fa69da 16061 && info->altivec_save_offset == -416);
d62294f5
FJ
16062
16063 treg = gen_rtx_REG (SImode, 11);
16064 emit_move_insn (treg, GEN_INT (-info->total_size));
16065
16066 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
c4ad648e 16067 in R11. It also clobbers R12, so beware! */
d62294f5
FJ
16068
16069 /* Preserve CR2 for save_world prologues */
22fa69da 16070 sz = 5;
d62294f5
FJ
16071 sz += 32 - info->first_gp_reg_save;
16072 sz += 64 - info->first_fp_reg_save;
16073 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16074 p = rtvec_alloc (sz);
16075 j = 0;
16076 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
a5ad2017 16077 gen_rtx_REG (SImode,
1de43f85 16078 LR_REGNO));
d62294f5 16079 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e
AM
16080 gen_rtx_SYMBOL_REF (Pmode,
16081 "*save_world"));
d62294f5 16082 /* We do floats first so that the instruction pattern matches
c4ad648e
AM
16083 properly. */
16084 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16085 {
16086 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16087 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16088 GEN_INT (info->fp_save_offset
16089 + sp_offset + 8 * i));
0be76840 16090 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16091
16092 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16093 }
d62294f5 16094 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16095 {
16096 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16097 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16098 GEN_INT (info->altivec_save_offset
16099 + sp_offset + 16 * i));
0be76840 16100 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16101
16102 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16103 }
d62294f5 16104 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16105 {
16106 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16107 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16108 GEN_INT (info->gp_save_offset
16109 + sp_offset + reg_size * i));
0be76840 16110 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16111
16112 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16113 }
16114
16115 {
16116 /* CR register traditionally saved as CR2. */
16117 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16118 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16119 GEN_INT (info->cr_save_offset
16120 + sp_offset));
0be76840 16121 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16122
16123 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16124 }
22fa69da
GK
16125 /* Explain about use of R0. */
16126 if (info->lr_save_p)
16127 {
16128 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16129 GEN_INT (info->lr_save_offset
16130 + sp_offset));
16131 rtx mem = gen_frame_mem (reg_mode, addr);
982afe02 16132
22fa69da
GK
16133 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16134 }
16135 /* Explain what happens to the stack pointer. */
16136 {
16137 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16138 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16139 }
d62294f5
FJ
16140
16141 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16142 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
22fa69da
GK
16143 treg, GEN_INT (-info->total_size));
16144 sp_offset = info->total_size;
d62294f5
FJ
16145 }
16146
9ebbca7d 16147 /* If we use the link register, get it into r0. */
f57fe068 16148 if (!WORLD_SAVE_P (info) && info->lr_save_p)
f8a57be8 16149 {
52ff33d0
NF
16150 rtx addr, reg, mem;
16151
f8a57be8 16152 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
1de43f85 16153 gen_rtx_REG (Pmode, LR_REGNO));
f8a57be8 16154 RTX_FRAME_RELATED_P (insn) = 1;
52ff33d0
NF
16155
16156 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16157 GEN_INT (info->lr_save_offset + sp_offset));
16158 reg = gen_rtx_REG (Pmode, 0);
16159 mem = gen_rtx_MEM (Pmode, addr);
16160 /* This should not be of rs6000_sr_alias_set, because of
16161 __builtin_return_address. */
16162
16163 insn = emit_move_insn (mem, reg);
16164 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16165 NULL_RTX, NULL_RTX);
f8a57be8 16166 }
9ebbca7d
GK
16167
16168 /* If we need to save CR, put it into r12. */
f57fe068 16169 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
9ebbca7d 16170 {
f8a57be8 16171 rtx set;
f676971a 16172
9ebbca7d 16173 cr_save_rtx = gen_rtx_REG (SImode, 12);
f8a57be8
GK
16174 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16175 RTX_FRAME_RELATED_P (insn) = 1;
16176 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16177 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16178 But that's OK. All we have to do is specify that _one_ condition
16179 code register is saved in this stack slot. The thrower's epilogue
16180 will then restore all the call-saved registers.
16181 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16182 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16183 gen_rtx_REG (SImode, CR2_REGNO));
16184 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16185 set,
16186 REG_NOTES (insn));
9ebbca7d
GK
16187 }
16188
a4f6c312
SS
 16189 /* Do any required saving of FPRs. If only one or two to save, do
 16190 it ourselves. Otherwise, use the out-of-line save routine. */
f57fe068 16191 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
9ebbca7d
GK
16192 {
16193 int i;
16194 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 16195 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d 16196 && ! call_used_regs[info->first_fp_reg_save+i]))
89e7058f
AH
16197 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
16198 info->first_fp_reg_save + i,
16199 info->fp_save_offset + sp_offset + 8 * i,
16200 info->total_size);
9ebbca7d 16201 }
f57fe068 16202 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
f78c3290
NF
16203 {
16204 rtx par;
16205
16206 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16207 info->fp_save_offset + sp_offset,
16208 DFmode,
16209 /*savep=*/true, /*gpr=*/false,
16210 /*exitp=*/false);
16211 insn = emit_insn (par);
16212 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16213 NULL_RTX, NULL_RTX);
16214 }
16215
16216 /* Save GPRs. This is done as a PARALLEL if we are using
16217 the store-multiple instructions. */
16218 if (!WORLD_SAVE_P (info)
16219 && TARGET_SPE_ABI
16220 && info->spe_64bit_regs_used != 0
16221 && info->first_gp_reg_save != 32)
9ebbca7d
GK
16222 {
16223 int i;
f78c3290
NF
16224 rtx spe_save_area_ptr;
16225
16226 /* Determine whether we can address all of the registers that need
16227 to be saved with an offset from the stack pointer that fits in
16228 the small const field for SPE memory instructions. */
16229 int spe_regs_addressable_via_sp
16230 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16231 + (32 - info->first_gp_reg_save - 1) * reg_size)
16232 && saving_GPRs_inline);
16233 int spe_offset;
16234
16235 if (spe_regs_addressable_via_sp)
16236 {
16237 spe_save_area_ptr = frame_reg_rtx;
16238 spe_offset = info->spe_gp_save_offset + sp_offset;
16239 }
16240 else
16241 {
16242 /* Make r11 point to the start of the SPE save area. We need
16243 to be careful here if r11 is holding the static chain. If
16244 it is, then temporarily save it in r0. We would use r0 as
16245 our base register here, but using r0 as a base register in
16246 loads and stores means something different from what we
16247 would like. */
16248 int ool_adjust = (saving_GPRs_inline
16249 ? 0
16250 : (info->first_gp_reg_save
16251 - (FIRST_SAVRES_REGISTER+1))*8);
16252 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16253 + sp_offset - ool_adjust);
16254
16255 if (using_static_chain_p)
16256 {
16257 rtx r0 = gen_rtx_REG (Pmode, 0);
16258 gcc_assert (info->first_gp_reg_save > 11);
16259
16260 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16261 }
16262
16263 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16264 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16265 frame_reg_rtx,
16266 GEN_INT (offset)));
16267 /* We need to make sure the move to r11 gets noted for
16268 properly outputting unwind information. */
16269 if (!saving_GPRs_inline)
16270 rs6000_frame_related (insn, frame_reg_rtx, offset,
16271 NULL_RTX, NULL_RTX);
16272 spe_offset = 0;
16273 }
16274
16275 if (saving_GPRs_inline)
16276 {
16277 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16278 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16279 {
16280 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16281 rtx offset, addr, mem;
f676971a 16282
f78c3290
NF
16283 /* We're doing all this to ensure that the offset fits into
16284 the immediate offset of 'evstdd'. */
16285 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16286
16287 offset = GEN_INT (reg_size * i + spe_offset);
16288 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16289 mem = gen_rtx_MEM (V2SImode, addr);
16290
16291 insn = emit_move_insn (mem, reg);
16292
16293 rs6000_frame_related (insn, spe_save_area_ptr,
16294 info->spe_gp_save_offset
16295 + sp_offset + reg_size * i,
16296 offset, const0_rtx);
16297 }
16298 }
16299 else
9ebbca7d 16300 {
f78c3290 16301 rtx par;
9ebbca7d 16302
f78c3290
NF
16303 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16304 0, reg_mode,
16305 /*savep=*/true, /*gpr=*/true,
16306 /*exitp=*/false);
16307 insn = emit_insn (par);
16308 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16309 NULL_RTX, NULL_RTX);
9ebbca7d 16310 }
f78c3290
NF
16311
16312
16313 /* Move the static chain pointer back. */
16314 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16315 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16316 }
16317 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16318 {
16319 rtx par;
16320
16321 /* Need to adjust r11 if we saved any FPRs. */
16322 if (info->first_fp_reg_save != 64)
16323 {
16324 rtx r11 = gen_rtx_REG (reg_mode, 11);
16325 rtx offset = GEN_INT (info->total_size
16326 + (-8 * (64-info->first_fp_reg_save)));
16327 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16328 ? sp_reg_rtx : r11);
16329
16330 emit_insn (TARGET_32BIT
16331 ? gen_addsi3 (r11, ptr_reg, offset)
16332 : gen_adddi3 (r11, ptr_reg, offset));
16333 }
16334
16335 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16336 info->gp_save_offset + sp_offset,
16337 reg_mode,
16338 /*savep=*/true, /*gpr=*/true,
16339 /*exitp=*/false);
16340 insn = emit_insn (par);
f676971a 16341 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d
GK
16342 NULL_RTX, NULL_RTX);
16343 }
f78c3290 16344 else if (!WORLD_SAVE_P (info) && using_store_multiple)
b6c9286a 16345 {
308c142a 16346 rtvec p;
9ebbca7d
GK
16347 int i;
16348 p = rtvec_alloc (32 - info->first_gp_reg_save);
9ebbca7d
GK
16349 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16350 {
16351 rtx addr, reg, mem;
16352 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
f676971a
EC
16353 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16354 GEN_INT (info->gp_save_offset
16355 + sp_offset
9ebbca7d 16356 + reg_size * i));
0be76840 16357 mem = gen_frame_mem (reg_mode, addr);
9ebbca7d
GK
16358
16359 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16360 }
16361 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
f676971a 16362 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9ebbca7d 16363 NULL_RTX, NULL_RTX);
b6c9286a 16364 }
f57fe068 16365 else if (!WORLD_SAVE_P (info))
b6c9286a 16366 {
9ebbca7d
GK
16367 int i;
16368 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
52ff33d0
NF
16369 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16370 {
16371 rtx addr, reg, mem;
16372 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
a3170dc6 16373
52ff33d0
NF
16374 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16375 GEN_INT (info->gp_save_offset
16376 + sp_offset
16377 + reg_size * i));
16378 mem = gen_frame_mem (reg_mode, addr);
a3170dc6 16379
52ff33d0
NF
16380 insn = emit_move_insn (mem, reg);
16381 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16382 NULL_RTX, NULL_RTX);
16383 }
9ebbca7d
GK
16384 }
16385
83720594
RH
16386 /* ??? There's no need to emit actual instructions here, but it's the
16387 easiest way to get the frame unwind information emitted. */
e3b5732b 16388 if (crtl->calls_eh_return)
83720594 16389 {
78e1b90d
DE
16390 unsigned int i, regno;
16391
fc4767bb
JJ
16392 /* In AIX ABI we need to pretend we save r2 here. */
16393 if (TARGET_AIX)
16394 {
16395 rtx addr, reg, mem;
16396
16397 reg = gen_rtx_REG (reg_mode, 2);
16398 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16399 GEN_INT (sp_offset + 5 * reg_size));
0be76840 16400 mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
16401
16402 insn = emit_move_insn (mem, reg);
f676971a 16403 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
fc4767bb
JJ
16404 NULL_RTX, NULL_RTX);
16405 PATTERN (insn) = gen_blockage ();
16406 }
16407
83720594
RH
16408 for (i = 0; ; ++i)
16409 {
83720594
RH
16410 regno = EH_RETURN_DATA_REGNO (i);
16411 if (regno == INVALID_REGNUM)
16412 break;
16413
89e7058f
AH
16414 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16415 info->ehrd_offset + sp_offset
16416 + reg_size * (int) i,
16417 info->total_size);
83720594
RH
16418 }
16419 }
16420
9ebbca7d 16421 /* Save CR if we use any that must be preserved. */
f57fe068 16422 if (!WORLD_SAVE_P (info) && info->cr_save_p)
9ebbca7d
GK
16423 {
16424 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16425 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 16426 rtx mem = gen_frame_mem (SImode, addr);
f8a57be8
GK
16427 /* See the large comment above about why CR2_REGNO is used. */
16428 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
ba4828e0 16429
9ebbca7d
GK
16430 /* If r12 was used to hold the original sp, copy cr into r0 now
16431 that it's free. */
16432 if (REGNO (frame_reg_rtx) == 12)
16433 {
f8a57be8
GK
16434 rtx set;
16435
9ebbca7d 16436 cr_save_rtx = gen_rtx_REG (SImode, 0);
f8a57be8
GK
16437 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16438 RTX_FRAME_RELATED_P (insn) = 1;
16439 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16440 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16441 set,
16442 REG_NOTES (insn));
f676971a 16443
9ebbca7d
GK
16444 }
16445 insn = emit_move_insn (mem, cr_save_rtx);
16446
f676971a 16447 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
f8a57be8 16448 NULL_RTX, NULL_RTX);
9ebbca7d
GK
16449 }
16450
f676971a 16451 /* Update stack and set back pointer unless this is V.4,
9ebbca7d 16452 for which it was done previously. */
f57fe068 16453 if (!WORLD_SAVE_P (info) && info->push_p
e3b5732b 16454 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
2b2c2fe5 16455 {
bcb2d701 16456 if (info->total_size < 32767)
2b2c2fe5 16457 sp_offset = info->total_size;
bcb2d701
EC
16458 else
16459 frame_reg_rtx = frame_ptr_rtx;
16460 rs6000_emit_allocate_stack (info->total_size,
16461 (frame_reg_rtx != sp_reg_rtx
16462 && ((info->altivec_size != 0)
16463 || (info->vrsave_mask != 0)
f78c3290
NF
16464 )),
16465 FALSE);
bcb2d701
EC
16466 if (frame_reg_rtx != sp_reg_rtx)
16467 rs6000_emit_stack_tie ();
2b2c2fe5 16468 }
9ebbca7d
GK
16469
16470 /* Set frame pointer, if needed. */
16471 if (frame_pointer_needed)
16472 {
7d5175e1 16473 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
9ebbca7d
GK
16474 sp_reg_rtx);
16475 RTX_FRAME_RELATED_P (insn) = 1;
b6c9286a 16476 }
9878760c 16477
2b2c2fe5
EC
16478 /* Save AltiVec registers if needed. Save here because the red zone does
16479 not include AltiVec registers. */
16480 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16481 {
16482 int i;
16483
 16484 /* There should be a non-inline version of this, for when we
16485 are saving lots of vector registers. */
16486 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16487 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16488 {
16489 rtx areg, savereg, mem;
16490 int offset;
16491
16492 offset = info->altivec_save_offset + sp_offset
16493 + 16 * (i - info->first_altivec_reg_save);
16494
16495 savereg = gen_rtx_REG (V4SImode, i);
16496
16497 areg = gen_rtx_REG (Pmode, 0);
16498 emit_move_insn (areg, GEN_INT (offset));
16499
16500 /* AltiVec addressing mode is [reg+reg]. */
16501 mem = gen_frame_mem (V4SImode,
16502 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16503
16504 insn = emit_move_insn (mem, savereg);
16505
16506 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16507 areg, GEN_INT (offset));
16508 }
16509 }
16510
16511 /* VRSAVE is a bit vector representing which AltiVec registers
16512 are used. The OS uses this to determine which vector
16513 registers to save on a context switch. We need to save
 16514 VRSAVE on the stack frame, add to the mask whatever AltiVec
 16515 registers we used in this function, and do the corresponding magic in the
16516 epilogue. */
16517
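  /* A minimal sketch of the mask computation (illustrative only; the
     real mapping is whatever ALTIVEC_REG_BIT defines, assumed here to
     put vector register N at bit 31-N, and vr_is_used is a hypothetical
     predicate):

       unsigned int mask = 0;
       for (int vr = 0; vr < 32; vr++)
         if (vr_is_used (vr))
           mask |= 0x80000000u >> vr;

     With only v20..v31 live this gives mask == 0x00000fff, which the
     code below ORs into the value read from VRSAVE before writing the
     register back.  */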
16518 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16519 && info->vrsave_mask != 0)
16520 {
16521 rtx reg, mem, vrsave;
16522 int offset;
16523
 16524 /* Get VRSAVE into a GPR. Note that ABI_V4 might be using r12
16525 as frame_reg_rtx and r11 as the static chain pointer for
16526 nested functions. */
16527 reg = gen_rtx_REG (SImode, 0);
16528 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16529 if (TARGET_MACHO)
16530 emit_insn (gen_get_vrsave_internal (reg));
16531 else
16532 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16533
16534 if (!WORLD_SAVE_P (info))
16535 {
16536 /* Save VRSAVE. */
16537 offset = info->vrsave_save_offset + sp_offset;
16538 mem = gen_frame_mem (SImode,
16539 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16540 GEN_INT (offset)));
16541 insn = emit_move_insn (mem, reg);
16542 }
16543
16544 /* Include the registers in the mask. */
16545 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16546
16547 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16548 }
16549
1db02437 16550 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9ebbca7d 16551 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7f970b70
AM
16552 || (DEFAULT_ABI == ABI_V4
16553 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
6fb5fa3c 16554 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
c4ad648e
AM
16555 {
16556 /* If emit_load_toc_table will use the link register, we need to save
16557 it. We use R12 for this purpose because emit_load_toc_table
16558 can use register 0. This allows us to use a plain 'blr' to return
16559 from the procedure more often. */
16560 int save_LR_around_toc_setup = (TARGET_ELF
16561 && DEFAULT_ABI != ABI_AIX
16562 && flag_pic
16563 && ! info->lr_save_p
16564 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16565 if (save_LR_around_toc_setup)
16566 {
1de43f85 16567 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
f8a57be8 16568
c4ad648e 16569 insn = emit_move_insn (frame_ptr_rtx, lr);
c4ad648e 16570 RTX_FRAME_RELATED_P (insn) = 1;
f8a57be8 16571
c4ad648e 16572 rs6000_emit_load_toc_table (TRUE);
f8a57be8 16573
c4ad648e 16574 insn = emit_move_insn (lr, frame_ptr_rtx);
c4ad648e
AM
16575 RTX_FRAME_RELATED_P (insn) = 1;
16576 }
16577 else
16578 rs6000_emit_load_toc_table (TRUE);
16579 }
ee890fe2 16580
fcce224d 16581#if TARGET_MACHO
ee890fe2 16582 if (DEFAULT_ABI == ABI_DARWIN
e3b5732b 16583 && flag_pic && crtl->uses_pic_offset_table)
ee890fe2 16584 {
1de43f85 16585 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
11abc112 16586 rtx src = machopic_function_base_sym ();
ee890fe2 16587
6d0a8091
DJ
16588 /* Save and restore LR locally around this call (in R0). */
16589 if (!info->lr_save_p)
6fb5fa3c 16590 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
6d0a8091 16591
6fb5fa3c 16592 emit_insn (gen_load_macho_picbase (src));
ee890fe2 16593
6fb5fa3c
DB
16594 emit_move_insn (gen_rtx_REG (Pmode,
16595 RS6000_PIC_OFFSET_TABLE_REGNUM),
16596 lr);
6d0a8091
DJ
16597
16598 if (!info->lr_save_p)
6fb5fa3c 16599 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
ee890fe2 16600 }
fcce224d 16601#endif
9ebbca7d
GK
16602}
16603
9ebbca7d 16604/* Write function prologue. */
a4f6c312 16605
08c148a8 16606static void
f676971a 16607rs6000_output_function_prologue (FILE *file,
a2369ed3 16608 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9ebbca7d
GK
16609{
16610 rs6000_stack_t *info = rs6000_stack_info ();
16611
4697a36c
MM
16612 if (TARGET_DEBUG_STACK)
16613 debug_stack_info (info);
9878760c 16614
a4f6c312
SS
16615 /* Write .extern for any function we will call to save and restore
16616 fp values. */
16617 if (info->first_fp_reg_save < 64
16618 && !FP_SAVE_INLINE (info->first_fp_reg_save))
4d30c363 16619 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
4697a36c 16620 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
f78c3290 16621 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9878760c 16622
c764f757
RK
16623 /* Write .extern for AIX common mode routines, if needed. */
16624 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16625 {
f6709c70
JW
16626 fputs ("\t.extern __mulh\n", file);
16627 fputs ("\t.extern __mull\n", file);
16628 fputs ("\t.extern __divss\n", file);
16629 fputs ("\t.extern __divus\n", file);
16630 fputs ("\t.extern __quoss\n", file);
16631 fputs ("\t.extern __quous\n", file);
c764f757
RK
16632 common_mode_defined = 1;
16633 }
9878760c 16634
9ebbca7d 16635 if (! HAVE_prologue)
979721f8 16636 {
9ebbca7d 16637 start_sequence ();
9dda4cc8 16638
a4f6c312
SS
16639 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16640 the "toplevel" insn chain. */
2e040219 16641 emit_note (NOTE_INSN_DELETED);
9ebbca7d 16642 rs6000_emit_prologue ();
2e040219 16643 emit_note (NOTE_INSN_DELETED);
178c3eff 16644
a3c9585f 16645 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
16646 {
16647 rtx insn;
16648 unsigned addr = 0;
16649 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16650 {
16651 INSN_ADDRESSES_NEW (insn, addr);
16652 addr += 4;
16653 }
16654 }
9dda4cc8 16655
9ebbca7d 16656 if (TARGET_DEBUG_STACK)
a4f6c312 16657 debug_rtx_list (get_insns (), 100);
c9d691e9 16658 final (get_insns (), file, FALSE);
9ebbca7d 16659 end_sequence ();
979721f8
MM
16660 }
16661
9ebbca7d
GK
16662 rs6000_pic_labelno++;
16663}
f676971a 16664
1c9c5e43
AM
 16665/* Non-zero if VMX regs are restored before the frame pop, zero if
16666 we restore after the pop when possible. */
16667#define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16668
f78c3290
NF
16669/* Reload CR from REG. */
16670
16671static void
16672rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16673{
16674 int count = 0;
16675 int i;
16676
16677 if (using_mfcr_multiple)
16678 {
16679 for (i = 0; i < 8; i++)
16680 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16681 count++;
16682 gcc_assert (count);
16683 }
16684
16685 if (using_mfcr_multiple && count > 1)
16686 {
16687 rtvec p;
16688 int ndx;
16689
16690 p = rtvec_alloc (count);
16691
16692 ndx = 0;
16693 for (i = 0; i < 8; i++)
16694 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16695 {
16696 rtvec r = rtvec_alloc (2);
16697 RTVEC_ELT (r, 0) = reg;
16698 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16699 RTVEC_ELT (p, ndx) =
16700 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16701 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16702 ndx++;
16703 }
16704 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16705 gcc_assert (ndx == count);
16706 }
16707 else
16708 for (i = 0; i < 8; i++)
16709 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16710 {
16711 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16712 CR0_REGNO+i),
16713 reg));
16714 }
16715}
16716
9ebbca7d 16717/* Emit function epilogue as insns.
9878760c 16718
9ebbca7d
GK
16719 At present, dwarf2out_frame_debug_expr doesn't understand
16720 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16721 anywhere in the epilogue. Most of the insns below would in any case
16722 need special notes to explain where r11 is in relation to the stack. */
9878760c 16723
9ebbca7d 16724void
a2369ed3 16725rs6000_emit_epilogue (int sibcall)
9ebbca7d
GK
16726{
16727 rs6000_stack_t *info;
f78c3290 16728 int restoring_GPRs_inline;
9ebbca7d
GK
16729 int restoring_FPRs_inline;
16730 int using_load_multiple;
d296e02e 16731 int using_mtcr_multiple;
9ebbca7d 16732 int use_backchain_to_restore_sp;
f78c3290
NF
16733 int restore_lr;
16734 int strategy;
9ebbca7d
GK
16735 int sp_offset = 0;
16736 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16737 rtx frame_reg_rtx = sp_reg_rtx;
0e67400a 16738 enum machine_mode reg_mode = Pmode;
327e5343 16739 int reg_size = TARGET_32BIT ? 4 : 8;
9ebbca7d
GK
16740 int i;
16741
c19de7aa
AH
16742 info = rs6000_stack_info ();
16743
16744 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
a3170dc6
AH
16745 {
16746 reg_mode = V2SImode;
16747 reg_size = 8;
16748 }
16749
f78c3290
NF
16750 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16751 /*static_chain_p=*/0, sibcall);
16752 using_load_multiple = strategy & SAVRES_MULTIPLE;
16753 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16754 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
d296e02e 16755 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9ebbca7d
GK
16756 || rs6000_cpu == PROCESSOR_PPC603
16757 || rs6000_cpu == PROCESSOR_PPC750
16758 || optimize_size);
1c9c5e43
AM
16759 /* Restore via the backchain when we have a large frame, since this
16760 is more efficient than an addis, addi pair. The second condition
 16761 here will not trigger at the moment; we don't actually need a
16762 frame pointer for alloca, but the generic parts of the compiler
16763 give us one anyway. */
16764 use_backchain_to_restore_sp = (info->total_size > 32767
d2492102
AP
16765 || info->total_size
16766 + (info->lr_save_p ? info->lr_save_offset : 0)
16767 > 32767
1c9c5e43
AM
16768 || (cfun->calls_alloca
16769 && !frame_pointer_needed));
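  /* For example (illustrative numbers): a 40000-byte frame cannot be
     popped with a single addi, so reloading the saved back chain word
     at offset 0 of the current frame is one load instead of an
     addis/addi pair.  */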
f78c3290
NF
16770 restore_lr = (info->lr_save_p
16771 && restoring_GPRs_inline
16772 && restoring_FPRs_inline);
9ebbca7d 16773
f57fe068 16774 if (WORLD_SAVE_P (info))
d62294f5
FJ
16775 {
16776 int i, j;
16777 char rname[30];
16778 const char *alloc_rname;
16779 rtvec p;
16780
16781 /* eh_rest_world_r10 will return to the location saved in the LR
c4ad648e
AM
 16782 stack slot (which is not likely to be our caller).
16783 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16784 rest_world is similar, except any R10 parameter is ignored.
16785 The exception-handling stuff that was here in 2.95 is no
16786 longer necessary. */
d62294f5
FJ
16787
16788 p = rtvec_alloc (9
16789 + 1
f676971a 16790 + 32 - info->first_gp_reg_save
c4ad648e
AM
16791 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16792 + 63 + 1 - info->first_fp_reg_save);
d62294f5 16793
e3b5732b 16794 strcpy (rname, ((crtl->calls_eh_return) ?
c4ad648e 16795 "*eh_rest_world_r10" : "*rest_world"));
d62294f5
FJ
16796 alloc_rname = ggc_strdup (rname);
16797
16798 j = 0;
16799 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16800 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
c4ad648e 16801 gen_rtx_REG (Pmode,
1de43f85 16802 LR_REGNO));
d62294f5 16803 RTVEC_ELT (p, j++)
c4ad648e 16804 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
d62294f5 16805 /* The instruction pattern requires a clobber here;
c4ad648e 16806 it is shared with the restVEC helper. */
d62294f5 16807 RTVEC_ELT (p, j++)
c4ad648e 16808 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
d62294f5
FJ
16809
16810 {
c4ad648e
AM
16811 /* CR register traditionally saved as CR2. */
16812 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16813 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16814 GEN_INT (info->cr_save_offset));
0be76840 16815 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16816
16817 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
d62294f5
FJ
16818 }
16819
16820 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
c4ad648e
AM
16821 {
16822 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16823 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16824 GEN_INT (info->gp_save_offset
16825 + reg_size * i));
0be76840 16826 rtx mem = gen_frame_mem (reg_mode, addr);
c4ad648e
AM
16827
16828 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16829 }
d62294f5 16830 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
c4ad648e
AM
16831 {
16832 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16833 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16834 GEN_INT (info->altivec_save_offset
16835 + 16 * i));
0be76840 16836 rtx mem = gen_frame_mem (V4SImode, addr);
c4ad648e
AM
16837
16838 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16839 }
d62294f5 16840 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
c4ad648e
AM
16841 {
16842 rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
16843 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16844 GEN_INT (info->fp_save_offset
16845 + 8 * i));
0be76840 16846 rtx mem = gen_frame_mem (DFmode, addr);
c4ad648e
AM
16847
16848 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16849 }
d62294f5 16850 RTVEC_ELT (p, j++)
c4ad648e 16851 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
d62294f5 16852 RTVEC_ELT (p, j++)
c4ad648e 16853 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
d62294f5 16854 RTVEC_ELT (p, j++)
c4ad648e 16855 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
d62294f5 16856 RTVEC_ELT (p, j++)
c4ad648e 16857 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
d62294f5 16858 RTVEC_ELT (p, j++)
c4ad648e 16859 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
d62294f5
FJ
16860 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
16861
16862 return;
16863 }
16864
45b194f8
AM
16865 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
16866 if (info->push_p)
2b2c2fe5 16867 sp_offset = info->total_size;
f676971a 16868
e6477eaa
AM
16869 /* Restore AltiVec registers if we must do so before adjusting the
16870 stack. */
16871 if (TARGET_ALTIVEC_ABI
16872 && info->altivec_size != 0
1c9c5e43
AM
16873 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16874 || (DEFAULT_ABI != ABI_V4
16875 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
9aa86737
AH
16876 {
16877 int i;
16878
e6477eaa
AM
16879 if (use_backchain_to_restore_sp)
16880 {
16881 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16882 emit_move_insn (frame_reg_rtx,
16883 gen_rtx_MEM (Pmode, sp_reg_rtx));
16884 sp_offset = 0;
16885 }
1c9c5e43
AM
16886 else if (frame_pointer_needed)
16887 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa 16888
9aa86737
AH
16889 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16890 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16891 {
16892 rtx addr, areg, mem;
16893
16894 areg = gen_rtx_REG (Pmode, 0);
16895 emit_move_insn
16896 (areg, GEN_INT (info->altivec_save_offset
16897 + sp_offset
16898 + 16 * (i - info->first_altivec_reg_save)));
16899
16900 /* AltiVec addressing mode is [reg+reg]. */
16901 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
0be76840 16902 mem = gen_frame_mem (V4SImode, addr);
9aa86737
AH
16903
16904 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
16905 }
16906 }
16907
e6477eaa
AM
16908 /* Restore VRSAVE if we must do so before adjusting the stack. */
16909 if (TARGET_ALTIVEC
16910 && TARGET_ALTIVEC_VRSAVE
16911 && info->vrsave_mask != 0
1c9c5e43
AM
16912 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16913 || (DEFAULT_ABI != ABI_V4
16914 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
e6477eaa
AM
16915 {
16916 rtx addr, mem, reg;
16917
1c9c5e43 16918 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa 16919 {
1c9c5e43
AM
16920 if (use_backchain_to_restore_sp)
16921 {
16922 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16923 emit_move_insn (frame_reg_rtx,
16924 gen_rtx_MEM (Pmode, sp_reg_rtx));
16925 sp_offset = 0;
16926 }
16927 else if (frame_pointer_needed)
16928 frame_reg_rtx = hard_frame_pointer_rtx;
e6477eaa
AM
16929 }
16930
16931 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16932 GEN_INT (info->vrsave_save_offset + sp_offset));
16933 mem = gen_frame_mem (SImode, addr);
16934 reg = gen_rtx_REG (SImode, 12);
16935 emit_move_insn (reg, mem);
16936
16937 emit_insn (generate_set_vrsave (reg, info, 1));
16938 }
16939
1c9c5e43
AM
16940 /* If we have a large stack frame, restore the old stack pointer
16941 using the backchain. */
2b2c2fe5
EC
16942 if (use_backchain_to_restore_sp)
16943 {
1c9c5e43 16944 if (frame_reg_rtx == sp_reg_rtx)
e6477eaa
AM
16945 {
16946 /* Under V.4, don't reset the stack pointer until after we're done
16947 loading the saved registers. */
16948 if (DEFAULT_ABI == ABI_V4)
16949 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16950
16951 emit_move_insn (frame_reg_rtx,
16952 gen_rtx_MEM (Pmode, sp_reg_rtx));
16953 sp_offset = 0;
16954 }
1c9c5e43
AM
16955 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16956 && DEFAULT_ABI == ABI_V4)
16957 /* frame_reg_rtx has been set up by the altivec restore. */
16958 ;
16959 else
16960 {
16961 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
16962 frame_reg_rtx = sp_reg_rtx;
16963 }
16964 }
16965 /* If we have a frame pointer, we can restore the old stack pointer
16966 from it. */
16967 else if (frame_pointer_needed)
16968 {
16969 frame_reg_rtx = sp_reg_rtx;
16970 if (DEFAULT_ABI == ABI_V4)
16971 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
16972
16973 emit_insn (TARGET_32BIT
16974 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16975 GEN_INT (info->total_size))
16976 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
16977 GEN_INT (info->total_size)));
16978 sp_offset = 0;
2b2c2fe5 16979 }
45b194f8
AM
16980 else if (info->push_p
16981 && DEFAULT_ABI != ABI_V4
e3b5732b 16982 && !crtl->calls_eh_return)
2b2c2fe5 16983 {
45b194f8
AM
16984 emit_insn (TARGET_32BIT
16985 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
16986 GEN_INT (info->total_size))
16987 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
16988 GEN_INT (info->total_size)));
16989 sp_offset = 0;
2b2c2fe5
EC
16990 }
16991
e6477eaa 16992 /* Restore AltiVec registers if we have not done so already. */
1c9c5e43
AM
16993 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
16994 && TARGET_ALTIVEC_ABI
e6477eaa
AM
16995 && info->altivec_size != 0
16996 && (DEFAULT_ABI == ABI_V4
16997 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
16998 {
16999 int i;
17000
17001 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17002 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17003 {
17004 rtx addr, areg, mem;
17005
17006 areg = gen_rtx_REG (Pmode, 0);
17007 emit_move_insn
17008 (areg, GEN_INT (info->altivec_save_offset
17009 + sp_offset
17010 + 16 * (i - info->first_altivec_reg_save)));
17011
17012 /* AltiVec addressing mode is [reg+reg]. */
17013 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17014 mem = gen_frame_mem (V4SImode, addr);
17015
17016 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17017 }
17018 }
17019
17020 /* Restore VRSAVE if we have not done so already. */
1c9c5e43
AM
17021 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17022 && TARGET_ALTIVEC
e6477eaa
AM
17023 && TARGET_ALTIVEC_VRSAVE
17024 && info->vrsave_mask != 0
17025 && (DEFAULT_ABI == ABI_V4
17026 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
554c2941
AM
17027 {
17028 rtx addr, mem, reg;
17029
17030 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17031 GEN_INT (info->vrsave_save_offset + sp_offset));
17032 mem = gen_frame_mem (SImode, addr);
17033 reg = gen_rtx_REG (SImode, 12);
17034 emit_move_insn (reg, mem);
17035
17036 emit_insn (generate_set_vrsave (reg, info, 1));
17037 }
17038
f78c3290
NF
17039 /* Get the old lr if we saved it. If we are restoring registers
17040 out-of-line, then the out-of-line routines can do this for us. */
17041 if (restore_lr)
b6c9286a 17042 {
a3170dc6
AH
17043 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17044 info->lr_save_offset + sp_offset);
ba4828e0 17045
9ebbca7d 17046 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
b6c9286a 17047 }
f676971a 17048
9ebbca7d
GK
17049 /* Get the old cr if we saved it. */
17050 if (info->cr_save_p)
17051 {
17052 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17053 GEN_INT (info->cr_save_offset + sp_offset));
0be76840 17054 rtx mem = gen_frame_mem (SImode, addr);
b6c9286a 17055
9ebbca7d
GK
17056 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17057 }
f676971a 17058
9ebbca7d 17059 /* Set LR here to try to overlap restores below. */
f78c3290 17060 if (restore_lr)
1de43f85 17061 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
9ebbca7d 17062 gen_rtx_REG (Pmode, 0));
f676971a 17063
83720594 17064 /* Load exception handler data registers, if needed. */
e3b5732b 17065 if (crtl->calls_eh_return)
83720594 17066 {
78e1b90d
DE
17067 unsigned int i, regno;
17068
fc4767bb
JJ
17069 if (TARGET_AIX)
17070 {
17071 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17072 GEN_INT (sp_offset + 5 * reg_size));
0be76840 17073 rtx mem = gen_frame_mem (reg_mode, addr);
fc4767bb
JJ
17074
17075 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17076 }
17077
83720594
RH
17078 for (i = 0; ; ++i)
17079 {
a3170dc6 17080 rtx mem;
83720594
RH
17081
17082 regno = EH_RETURN_DATA_REGNO (i);
17083 if (regno == INVALID_REGNUM)
17084 break;
17085
a3170dc6
AH
17086 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17087 info->ehrd_offset + sp_offset
17088 + reg_size * (int) i);
83720594
RH
17089
17090 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17091 }
17092 }
f676971a 17093
9ebbca7d
GK
17094 /* Restore GPRs. This is done as a PARALLEL if we are using
17095 the load-multiple instructions. */
f78c3290
NF
17096 if (TARGET_SPE_ABI
17097 && info->spe_64bit_regs_used != 0
17098 && info->first_gp_reg_save != 32)
52ff33d0 17099 {
52ff33d0
NF
17100 /* Determine whether we can address all of the registers that need
17101 to be saved with an offset from the stack pointer that fits in
17102 the small const field for SPE memory instructions. */
17103 int spe_regs_addressable_via_sp
f78c3290
NF
17104 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17105 + (32 - info->first_gp_reg_save - 1) * reg_size)
17106 && restoring_GPRs_inline);
52ff33d0
NF
17107 int spe_offset;
17108
17109 if (spe_regs_addressable_via_sp)
45b194f8 17110 spe_offset = info->spe_gp_save_offset + sp_offset;
52ff33d0
NF
17111 else
17112 {
45b194f8 17113 rtx old_frame_reg_rtx = frame_reg_rtx;
52ff33d0 17114 /* Make r11 point to the start of the SPE save area. We worried about
6ed3da00 17115 not clobbering it when we were saving registers in the prologue.
52ff33d0
NF
17116 There's no need to worry here because the static chain is passed
17117 anew to every function. */
f78c3290
NF
17118 int ool_adjust = (restoring_GPRs_inline
17119 ? 0
17120 : (info->first_gp_reg_save
17121 - (FIRST_SAVRES_REGISTER+1))*8);
17122
45b194f8
AM
17123 if (frame_reg_rtx == sp_reg_rtx)
17124 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17125 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
f78c3290
NF
17126 GEN_INT (info->spe_gp_save_offset
17127 + sp_offset
17128 - ool_adjust)));
45b194f8
AM
17129 /* Keep the invariant that frame_reg_rtx + sp_offset points
17130 at the top of the stack frame. */
17131 sp_offset = -info->spe_gp_save_offset;
52ff33d0
NF
17132
17133 spe_offset = 0;
17134 }
17135
f78c3290
NF
17136 if (restoring_GPRs_inline)
17137 {
17138 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17139 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17140 {
17141 rtx offset, addr, mem;
52ff33d0 17142
f78c3290
NF
17143 /* We're doing all this to ensure that the immediate offset
17144 fits into the immediate field of 'evldd'. */
17145 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
52ff33d0 17146
f78c3290
NF
17147 offset = GEN_INT (spe_offset + reg_size * i);
17148 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17149 mem = gen_rtx_MEM (V2SImode, addr);
52ff33d0 17150
f78c3290
NF
17151 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17152 mem);
17153 }
17154 }
17155 else
17156 {
17157 rtx par;
17158
17159 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17160 0, reg_mode,
17161 /*savep=*/false, /*gpr=*/true,
17162 /*exitp=*/true);
17163 emit_jump_insn (par);
17164
17165 /* We don't want anybody else emitting things after we jumped
17166 back. */
17167 return;
17168 }
52ff33d0 17169 }
f78c3290
NF
17170 else if (!restoring_GPRs_inline)
17171 {
17172 /* We are jumping to an out-of-line function. */
17173 bool can_use_exit = info->first_fp_reg_save == 64;
17174 rtx par;
17175
17176 /* Emit stack reset code if we need it. */
17177 if (can_use_exit)
17178 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17179 sp_offset, can_use_exit);
17180 else
17181 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17182 sp_reg_rtx,
17183 GEN_INT (sp_offset - info->fp_size)));
17184
17185 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17186 info->gp_save_offset, reg_mode,
17187 /*savep=*/false, /*gpr=*/true,
17188 /*exitp=*/can_use_exit);
17189
17190 if (can_use_exit)
17191 {
17192 if (info->cr_save_p)
17193 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17194 using_mtcr_multiple);
17195
17196 emit_jump_insn (par);
17197
17198 /* We don't want anybody else emitting things after we jumped
17199 back. */
17200 return;
17201 }
17202 else
17203 emit_insn (par);
17204 }
17205 else if (using_load_multiple)
17206 {
17207 rtvec p;
17208 p = rtvec_alloc (32 - info->first_gp_reg_save);
17209 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9ebbca7d 17210 {
f676971a
EC
17211 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17212 GEN_INT (info->gp_save_offset
17213 + sp_offset
9ebbca7d 17214 + reg_size * i));
0be76840 17215 rtx mem = gen_frame_mem (reg_mode, addr);
ba4828e0 17216
f78c3290
NF
17217 RTVEC_ELT (p, i) =
17218 gen_rtx_SET (VOIDmode,
17219 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17220 mem);
9ebbca7d 17221 }
f78c3290
NF
17222 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17223 }
17224 else
17225 {
17226 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17227 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17228 {
17229 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17230 GEN_INT (info->gp_save_offset
17231 + sp_offset
17232 + reg_size * i));
17233 rtx mem = gen_frame_mem (reg_mode, addr);
17234
17235 emit_move_insn (gen_rtx_REG (reg_mode,
17236 info->first_gp_reg_save + i), mem);
17237 }
17238 }
9878760c 17239
9ebbca7d
GK
 17240 /* Restore FPRs if we need to do it without calling a function. */
17241 if (restoring_FPRs_inline)
17242 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
6fb5fa3c 17243 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
9ebbca7d
GK
17244 && ! call_used_regs[info->first_fp_reg_save+i]))
17245 {
17246 rtx addr, mem;
17247 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
f676971a
EC
17248 GEN_INT (info->fp_save_offset
17249 + sp_offset
a4f6c312 17250 + 8 * i));
0be76840 17251 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17252
f676971a 17253 emit_move_insn (gen_rtx_REG (DFmode,
9ebbca7d
GK
17254 info->first_fp_reg_save + i),
17255 mem);
17256 }
8d30c4ee 17257
9ebbca7d
GK
17258 /* If we saved cr, restore it here. Just those that were used. */
17259 if (info->cr_save_p)
f78c3290 17260 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
979721f8 17261
9ebbca7d 17262 /* If this is V.4, unwind the stack pointer after all of the loads
022123e6 17263 have been done. */
f78c3290
NF
17264 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17265 sp_offset, !restoring_FPRs_inline);
b6c9286a 17266
e3b5732b 17267 if (crtl->calls_eh_return)
83720594
RH
17268 {
17269 rtx sa = EH_RETURN_STACKADJ_RTX;
5b71a4e7 17270 emit_insn (TARGET_32BIT
83720594
RH
17271 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17272 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17273 }
17274
9ebbca7d
GK
17275 if (!sibcall)
17276 {
17277 rtvec p;
17278 if (! restoring_FPRs_inline)
f78c3290 17279 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
9ebbca7d
GK
17280 else
17281 p = rtvec_alloc (2);
b6c9286a 17282
e35b9579 17283 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
f78c3290
NF
17284 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17285 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17286 : gen_rtx_CLOBBER (VOIDmode,
17287 gen_rtx_REG (Pmode, 65)));
9ebbca7d
GK
17288
17289 /* If we have to restore more than two FP registers, branch to the
17290 restore function. It will return to our caller. */
17291 if (! restoring_FPRs_inline)
17292 {
17293 int i;
f78c3290
NF
17294 rtx sym;
17295
17296 sym = rs6000_savres_routine_sym (info,
17297 /*savep=*/false,
17298 /*gpr=*/false,
17299 /*exitp=*/true);
17300 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17301 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17302 gen_rtx_REG (Pmode, 11));
9ebbca7d
GK
17303 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17304 {
17305 rtx addr, mem;
17306 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17307 GEN_INT (info->fp_save_offset + 8*i));
0be76840 17308 mem = gen_frame_mem (DFmode, addr);
9ebbca7d 17309
f78c3290 17310 RTVEC_ELT (p, i+4) =
9ebbca7d
GK
17311 gen_rtx_SET (VOIDmode,
17312 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17313 mem);
b6c9286a
MM
17314 }
17315 }
f676971a 17316
9ebbca7d 17317 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3daf36a4 17318 }
9878760c
RK
17319}
17320
17321/* Write function epilogue. */
17322
08c148a8 17323static void
f676971a 17324rs6000_output_function_epilogue (FILE *file,
a2369ed3 17325 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
9878760c 17326{
9ebbca7d 17327 if (! HAVE_epilogue)
9878760c 17328 {
9ebbca7d
GK
17329 rtx insn = get_last_insn ();
17330 /* If the last insn was a BARRIER, we don't have to write anything except
17331 the trace table. */
17332 if (GET_CODE (insn) == NOTE)
17333 insn = prev_nonnote_insn (insn);
17334 if (insn == 0 || GET_CODE (insn) != BARRIER)
4697a36c 17335 {
9ebbca7d
GK
17336 /* This is slightly ugly, but at least we don't have two
17337 copies of the epilogue-emitting code. */
17338 start_sequence ();
17339
17340 /* A NOTE_INSN_DELETED is supposed to be at the start
17341 and end of the "toplevel" insn chain. */
2e040219 17342 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17343 rs6000_emit_epilogue (FALSE);
2e040219 17344 emit_note (NOTE_INSN_DELETED);
9ebbca7d 17345
a3c9585f 17346 /* Expand INSN_ADDRESSES so final() doesn't crash. */
178c3eff
DJ
17347 {
17348 rtx insn;
17349 unsigned addr = 0;
17350 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17351 {
17352 INSN_ADDRESSES_NEW (insn, addr);
17353 addr += 4;
17354 }
17355 }
17356
9ebbca7d 17357 if (TARGET_DEBUG_STACK)
a4f6c312 17358 debug_rtx_list (get_insns (), 100);
c9d691e9 17359 final (get_insns (), file, FALSE);
9ebbca7d 17360 end_sequence ();
4697a36c 17361 }
9878760c 17362 }
b4ac57ab 17363
efdba735
SH
17364#if TARGET_MACHO
17365 macho_branch_islands ();
0e5da0be
GK
17366 /* Mach-O doesn't support labels at the end of objects, so if
17367 it looks like we might want one, insert a NOP. */
17368 {
17369 rtx insn = get_last_insn ();
17370 while (insn
17371 && NOTE_P (insn)
a38e7aa5 17372 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
0e5da0be 17373 insn = PREV_INSN (insn);
f676971a
EC
17374 if (insn
17375 && (LABEL_P (insn)
0e5da0be 17376 || (NOTE_P (insn)
a38e7aa5 17377 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
0e5da0be
GK
17378 fputs ("\tnop\n", file);
17379 }
17380#endif
17381
9b30bae2 17382 /* Output a traceback table here. See /usr/include/sys/debug.h for info
314fc5a9
ILT
17383 on its format.
17384
17385 We don't output a traceback table if -finhibit-size-directive was
17386 used. The documentation for -finhibit-size-directive reads
17387 ``don't output a @code{.size} assembler directive, or anything
17388 else that would cause trouble if the function is split in the
17389 middle, and the two halves are placed at locations far apart in
17390 memory.'' The traceback table has this property, since it
17391 includes the offset from the start of the function to the
4d30c363
MM
17392 traceback table itself.
17393
 17394 System V.4 PowerPC (and the embedded ABI derived from it) uses a
b6c9286a 17395 different traceback table. */
57ac7be9 17396 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
e3b5732b 17397 && rs6000_traceback != traceback_none && !crtl->is_thunk)
9b30bae2 17398 {
69c75916 17399 const char *fname = NULL;
3ac88239 17400 const char *language_string = lang_hooks.name;
6041bf2f 17401 int fixed_parms = 0, float_parms = 0, parm_info = 0;
314fc5a9 17402 int i;
57ac7be9 17403 int optional_tbtab;
8097c268 17404 rs6000_stack_t *info = rs6000_stack_info ();
57ac7be9
AM
17405
17406 if (rs6000_traceback == traceback_full)
17407 optional_tbtab = 1;
17408 else if (rs6000_traceback == traceback_part)
17409 optional_tbtab = 0;
17410 else
17411 optional_tbtab = !optimize_size && !TARGET_ELF;
314fc5a9 17412
69c75916
AM
17413 if (optional_tbtab)
17414 {
17415 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17416 while (*fname == '.') /* V.4 encodes . in the name */
17417 fname++;
17418
17419 /* Need label immediately before tbtab, so we can compute
17420 its offset from the function start. */
17421 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17422 ASM_OUTPUT_LABEL (file, fname);
17423 }
314fc5a9
ILT
17424
17425 /* The .tbtab pseudo-op can only be used for the first eight
17426 expressions, since it can't handle the possibly variable
17427 length fields that follow. However, if you omit the optional
17428 fields, the assembler outputs zeros for all optional fields
 17429 anyway, giving each variable-length field its minimum length
 17430 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17431 pseudo-op at all. */
17432
17433 /* An all-zero word flags the start of the tbtab, for debuggers
17434 that have to find it by searching forward from the entry
17435 point or from the current pc. */
19d2d16f 17436 fputs ("\t.long 0\n", file);
314fc5a9
ILT
17437
17438 /* Tbtab format type. Use format type 0. */
19d2d16f 17439 fputs ("\t.byte 0,", file);
314fc5a9 17440
5fc921c1
DE
17441 /* Language type. Unfortunately, there does not seem to be any
17442 official way to discover the language being compiled, so we
17443 use language_string.
17444 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
56438901
AM
17445 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17446 a number, so for now use 9. */
5fc921c1 17447 if (! strcmp (language_string, "GNU C"))
314fc5a9 17448 i = 0;
6de9cd9a 17449 else if (! strcmp (language_string, "GNU F77")
7f62878c 17450 || ! strcmp (language_string, "GNU Fortran"))
314fc5a9 17451 i = 1;
8b83775b 17452 else if (! strcmp (language_string, "GNU Pascal"))
314fc5a9 17453 i = 2;
5fc921c1
DE
17454 else if (! strcmp (language_string, "GNU Ada"))
17455 i = 3;
56438901
AM
17456 else if (! strcmp (language_string, "GNU C++")
17457 || ! strcmp (language_string, "GNU Objective-C++"))
314fc5a9 17458 i = 9;
9517ead8
AG
17459 else if (! strcmp (language_string, "GNU Java"))
17460 i = 13;
5fc921c1
DE
17461 else if (! strcmp (language_string, "GNU Objective-C"))
17462 i = 14;
314fc5a9 17463 else
37409796 17464 gcc_unreachable ();
314fc5a9
ILT
17465 fprintf (file, "%d,", i);
17466
17467 /* 8 single bit fields: global linkage (not set for C extern linkage,
17468 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17469 from start of procedure stored in tbtab, internal function, function
17470 has controlled storage, function has no toc, function uses fp,
17471 function logs/aborts fp operations. */
17472 /* Assume that fp operations are used if any fp reg must be saved. */
6041bf2f
DE
17473 fprintf (file, "%d,",
17474 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
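      /* Worked example (illustrative, not taken from a real build): with
         a full traceback table (optional_tbtab == 1) and at least one FPR
         saved (first_fp_reg_save != 64), the field is
         (1 << 5) | (1 << 1) == 34.  */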
314fc5a9
ILT
17475
17476 /* 6 bitfields: function is interrupt handler, name present in
17477 proc table, function calls alloca, on condition directives
17478 (controls stack walks, 3 bits), saves condition reg, saves
17479 link reg. */
17480 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17481 set up as a frame pointer, even when there is no alloca call. */
17482 fprintf (file, "%d,",
6041bf2f
DE
17483 ((optional_tbtab << 6)
17484 | ((optional_tbtab & frame_pointer_needed) << 5)
17485 | (info->cr_save_p << 1)
17486 | (info->lr_save_p)));
314fc5a9 17487
6041bf2f 17488 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
314fc5a9
ILT
17489 (6 bits). */
17490 fprintf (file, "%d,",
4697a36c 17491 (info->push_p << 7) | (64 - info->first_fp_reg_save));
314fc5a9
ILT
17492
17493 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17494 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17495
6041bf2f
DE
17496 if (optional_tbtab)
17497 {
17498 /* Compute the parameter info from the function decl argument
17499 list. */
17500 tree decl;
17501 int next_parm_info_bit = 31;
314fc5a9 17502
6041bf2f
DE
17503 for (decl = DECL_ARGUMENTS (current_function_decl);
17504 decl; decl = TREE_CHAIN (decl))
17505 {
17506 rtx parameter = DECL_INCOMING_RTL (decl);
17507 enum machine_mode mode = GET_MODE (parameter);
314fc5a9 17508
6041bf2f
DE
17509 if (GET_CODE (parameter) == REG)
17510 {
ebb109ad 17511 if (SCALAR_FLOAT_MODE_P (mode))
6041bf2f
DE
17512 {
17513 int bits;
17514
17515 float_parms++;
17516
37409796
NS
17517 switch (mode)
17518 {
17519 case SFmode:
e41b2a33 17520 case SDmode:
37409796
NS
17521 bits = 0x2;
17522 break;
17523
17524 case DFmode:
7393f7f8 17525 case DDmode:
37409796 17526 case TFmode:
7393f7f8 17527 case TDmode:
37409796
NS
17528 bits = 0x3;
17529 break;
17530
17531 default:
17532 gcc_unreachable ();
17533 }
6041bf2f
DE
17534
17535 /* If only one bit will fit, don't or in this entry. */
17536 if (next_parm_info_bit > 0)
17537 parm_info |= (bits << (next_parm_info_bit - 1));
17538 next_parm_info_bit -= 2;
17539 }
17540 else
17541 {
17542 fixed_parms += ((GET_MODE_SIZE (mode)
17543 + (UNITS_PER_WORD - 1))
17544 / UNITS_PER_WORD);
17545 next_parm_info_bit -= 1;
17546 }
17547 }
17548 }
17549 }
314fc5a9
ILT
17550
17551 /* Number of fixed point parameters. */
 17552 /* This is actually the number of words of fixed-point parameters; thus
 17553 an 8-byte struct counts as 2, and thus the maximum value is 8. */
17554 fprintf (file, "%d,", fixed_parms);
17555
17556 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17557 all on stack. */
17558 /* This is actually the number of fp registers that hold parameters;
17559 and thus the maximum value is 13. */
17560 /* Set parameters on stack bit if parameters are not in their original
17561 registers, regardless of whether they are on the stack? Xlc
17562 seems to set the bit when not optimizing. */
17563 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17564
6041bf2f
DE
17565 if (! optional_tbtab)
17566 return;
17567
314fc5a9
ILT
17568 /* Optional fields follow. Some are variable length. */
17569
 17570 /* Parameter types, left-adjusted bit fields: 0 fixed, 10 single float,
17571 11 double float. */
17572 /* There is an entry for each parameter in a register, in the order that
17573 they occur in the parameter list. Any intervening arguments on the
17574 stack are ignored. If the list overflows a long (max possible length
17575 34 bits) then completely leave off all elements that don't fit. */
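      /* Worked example (hypothetical signature): for register parameters
         (int, double, float), the loop above leaves bit 31 clear for the
         int, puts 11 in bits 30-29 for the double and 10 in bits 28-27
         for the float, so parm_info == 0x70000000.  */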
17576 /* Only emit this long if there was at least one parameter. */
17577 if (fixed_parms || float_parms)
17578 fprintf (file, "\t.long %d\n", parm_info);
17579
17580 /* Offset from start of code to tb table. */
19d2d16f 17581 fputs ("\t.long ", file);
314fc5a9 17582 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
85b776df
AM
17583 if (TARGET_AIX)
17584 RS6000_OUTPUT_BASENAME (file, fname);
17585 else
17586 assemble_name (file, fname);
17587 putc ('-', file);
17588 rs6000_output_function_entry (file, fname);
19d2d16f 17589 putc ('\n', file);
314fc5a9
ILT
17590
17591 /* Interrupt handler mask. */
17592 /* Omit this long, since we never set the interrupt handler bit
17593 above. */
17594
17595 /* Number of CTL (controlled storage) anchors. */
17596 /* Omit this long, since the has_ctl bit is never set above. */
17597
17598 /* Displacement into stack of each CTL anchor. */
17599 /* Omit this list of longs, because there are no CTL anchors. */
17600
17601 /* Length of function name. */
69c75916
AM
17602 if (*fname == '*')
17603 ++fname;
296b8152 17604 fprintf (file, "\t.short %d\n", (int) strlen (fname));
314fc5a9
ILT
17605
17606 /* Function name. */
17607 assemble_string (fname, strlen (fname));
17608
17609 /* Register for alloca automatic storage; this is always reg 31.
17610 Only emit this if the alloca bit was set above. */
17611 if (frame_pointer_needed)
19d2d16f 17612 fputs ("\t.byte 31\n", file);
b1765bde
DE
17613
17614 fputs ("\t.align 2\n", file);
9b30bae2 17615 }
9878760c 17616}
17167fd8 17617\f
a4f6c312
SS
17618/* A C compound statement that outputs the assembler code for a thunk
17619 function, used to implement C++ virtual function calls with
17620 multiple inheritance. The thunk acts as a wrapper around a virtual
17621 function, adjusting the implicit object parameter before handing
17622 control off to the real function.
17623
17624 First, emit code to add the integer DELTA to the location that
17625 contains the incoming first argument. Assume that this argument
17626 contains a pointer, and is the one used to pass the `this' pointer
17627 in C++. This is the incoming argument *before* the function
17628 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17629 values of all other incoming arguments.
17167fd8
MM
17630
17631 After the addition, emit code to jump to FUNCTION, which is a
a4f6c312
SS
17632 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17633 not touch the return address. Hence returning from FUNCTION will
17634 return to whoever called the current `thunk'.
17167fd8 17635
a4f6c312
SS
17636 The effect must be as if FUNCTION had been called directly with the
17637 adjusted first argument. This macro is responsible for emitting
17638 all of the code for a thunk function; output_function_prologue()
17639 and output_function_epilogue() are not invoked.
17167fd8 17640
a4f6c312
SS
17641 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17642 been extracted from it.) It might possibly be useful on some
17643 targets, but probably not.
17167fd8 17644
a4f6c312
SS
17645 If you do not define this macro, the target-independent code in the
17646 C++ frontend will generate a less efficient heavyweight thunk that
17647 calls FUNCTION instead of jumping to it. The generic approach does
17648 not support varargs. */
17167fd8 17649
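/* A C-level sketch of what the emitted thunk does (illustrative only;
   the real code below builds RTL and ends in a sibling call rather
   than a normal call):

     this += delta;
     if (vcall_offset != 0)
       this += *(ptrdiff_t *) (*(char **) this + vcall_offset);
     jump to FUNCTION with the adjusted this as the first argument.  */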
3961e8fe 17650static void
f676971a
EC
17651rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17652 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
a2369ed3 17653 tree function)
17167fd8 17654{
0a2aaacc 17655 rtx this_rtx, insn, funexp;
17167fd8 17656
5b71a4e7 17657 reload_completed = 1;
fe3ad572 17658 epilogue_completed = 1;
56a7189a 17659
5b71a4e7 17660 /* Mark the end of the (empty) prologue. */
2e040219 17661 emit_note (NOTE_INSN_PROLOGUE_END);
17167fd8 17662
5b71a4e7
DE
17663 /* Find the "this" pointer. If the function returns a structure,
17664 the structure return pointer is in r3. */
61f71b34 17665 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
0a2aaacc 17666 this_rtx = gen_rtx_REG (Pmode, 4);
56a7189a 17667 else
0a2aaacc 17668 this_rtx = gen_rtx_REG (Pmode, 3);
17167fd8 17669
5b71a4e7
DE
17670 /* Apply the constant offset, if required. */
17671 if (delta)
17672 {
17673 rtx delta_rtx = GEN_INT (delta);
17674 emit_insn (TARGET_32BIT
0a2aaacc
KG
17675 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17676 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17167fd8
MM
17677 }
17678
5b71a4e7
DE
17679 /* Apply the offset from the vtable, if required. */
17680 if (vcall_offset)
17167fd8 17681 {
5b71a4e7
DE
17682 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17683 rtx tmp = gen_rtx_REG (Pmode, 12);
17167fd8 17684
0a2aaacc 17685 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
eeff9307
JJ
17686 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17687 {
17688 emit_insn (TARGET_32BIT
17689 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17690 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17691 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17692 }
17693 else
17694 {
17695 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17696
17697 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17698 }
5b71a4e7 17699 emit_insn (TARGET_32BIT
0a2aaacc
KG
17700 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17701 : gen_adddi3 (this_rtx, this_rtx, tmp));
17167fd8
MM
17702 }
17703
5b71a4e7
DE
17704 /* Generate a tail call to the target function. */
17705 if (!TREE_USED (function))
17706 {
17707 assemble_external (function);
17708 TREE_USED (function) = 1;
17709 }
17710 funexp = XEXP (DECL_RTL (function), 0);
5b71a4e7 17711 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
ee890fe2
SS
17712
17713#if TARGET_MACHO
ab82a49f 17714 if (MACHOPIC_INDIRECT)
5b71a4e7 17715 funexp = machopic_indirect_call_target (funexp);
ee890fe2 17716#endif
5b71a4e7
DE
17717
 17718 /* gen_sibcall expects reload to convert a scratch pseudo to LR, so we must
992d08b1 17719 generate sibcall RTL explicitly. */
5b71a4e7
DE
17720 insn = emit_call_insn (
17721 gen_rtx_PARALLEL (VOIDmode,
17722 gen_rtvec (4,
17723 gen_rtx_CALL (VOIDmode,
17724 funexp, const0_rtx),
17725 gen_rtx_USE (VOIDmode, const0_rtx),
17726 gen_rtx_USE (VOIDmode,
17727 gen_rtx_REG (SImode,
1de43f85 17728 LR_REGNO)),
5b71a4e7
DE
17729 gen_rtx_RETURN (VOIDmode))));
17730 SIBLING_CALL_P (insn) = 1;
17731 emit_barrier ();
17732
17733 /* Run just enough of rest_of_compilation to get the insns emitted.
17734 There's not really enough bulk here to make other passes such as
 17735 instruction scheduling worthwhile. Note that use_thunk calls
17736 assemble_start_function and assemble_end_function. */
17737 insn = get_insns ();
55e092c4 17738 insn_locators_alloc ();
5b71a4e7
DE
17739 shorten_branches (insn);
17740 final_start_function (insn, file, 1);
c9d691e9 17741 final (insn, file, 1);
5b71a4e7 17742 final_end_function ();
d7087dd2 17743 free_after_compilation (cfun);
5b71a4e7
DE
17744
17745 reload_completed = 0;
fe3ad572 17746 epilogue_completed = 0;
9ebbca7d 17747}
9ebbca7d
GK
17748\f
17749/* A quick summary of the various types of 'constant-pool tables'
17750 under PowerPC:
17751
f676971a 17752 Target Flags Name One table per
9ebbca7d
GK
17753 AIX (none) AIX TOC object file
17754 AIX -mfull-toc AIX TOC object file
17755 AIX -mminimal-toc AIX minimal TOC translation unit
17756 SVR4/EABI (none) SVR4 SDATA object file
17757 SVR4/EABI -fpic SVR4 pic object file
17758 SVR4/EABI -fPIC SVR4 PIC translation unit
17759 SVR4/EABI -mrelocatable EABI TOC function
17760 SVR4/EABI -maix AIX TOC object file
f676971a 17761 SVR4/EABI -maix -mminimal-toc
9ebbca7d
GK
17762 AIX minimal TOC translation unit
17763
17764 Name Reg. Set by entries contains:
17765 made by addrs? fp? sum?
17766
17767 AIX TOC 2 crt0 as Y option option
17768 AIX minimal TOC 30 prolog gcc Y Y option
17769 SVR4 SDATA 13 crt0 gcc N Y N
17770 SVR4 pic 30 prolog ld Y not yet N
17771 SVR4 PIC 30 prolog gcc Y option option
17772 EABI TOC 30 prolog gcc Y option option
17773
17774*/
17775
9ebbca7d
GK
17776/* Hash functions for the hash table. */
17777
17778static unsigned
a2369ed3 17779rs6000_hash_constant (rtx k)
9ebbca7d 17780{
46b33600
RH
17781 enum rtx_code code = GET_CODE (k);
17782 enum machine_mode mode = GET_MODE (k);
17783 unsigned result = (code << 3) ^ mode;
17784 const char *format;
17785 int flen, fidx;
f676971a 17786
46b33600
RH
17787 format = GET_RTX_FORMAT (code);
17788 flen = strlen (format);
17789 fidx = 0;
9ebbca7d 17790
46b33600
RH
17791 switch (code)
17792 {
17793 case LABEL_REF:
17794 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17795
17796 case CONST_DOUBLE:
17797 if (mode != VOIDmode)
17798 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17799 flen = 2;
17800 break;
17801
17802 case CODE_LABEL:
17803 fidx = 3;
17804 break;
17805
17806 default:
17807 break;
17808 }
9ebbca7d
GK
17809
17810 for (; fidx < flen; fidx++)
17811 switch (format[fidx])
17812 {
17813 case 's':
17814 {
17815 unsigned i, len;
17816 const char *str = XSTR (k, fidx);
17817 len = strlen (str);
17818 result = result * 613 + len;
17819 for (i = 0; i < len; i++)
17820 result = result * 613 + (unsigned) str[i];
17167fd8
MM
17821 break;
17822 }
9ebbca7d
GK
17823 case 'u':
17824 case 'e':
17825 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17826 break;
17827 case 'i':
17828 case 'n':
17829 result = result * 613 + (unsigned) XINT (k, fidx);
17830 break;
17831 case 'w':
17832 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17833 result = result * 613 + (unsigned) XWINT (k, fidx);
17834 else
17835 {
17836 size_t i;
9390387d 17837 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
9ebbca7d
GK
17838 result = result * 613 + (unsigned) (XWINT (k, fidx)
17839 >> CHAR_BIT * i);
17840 }
17841 break;
09501938
DE
17842 case '0':
17843 break;
9ebbca7d 17844 default:
37409796 17845 gcc_unreachable ();
9ebbca7d 17846 }
46b33600 17847
9ebbca7d
GK
17848 return result;
17849}
17850
17851static unsigned
a2369ed3 17852toc_hash_function (const void *hash_entry)
9ebbca7d 17853{
f676971a 17854 const struct toc_hash_struct *thc =
a9098fd0
GK
17855 (const struct toc_hash_struct *) hash_entry;
17856 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9ebbca7d
GK
17857}
17858
17859/* Compare H1 and H2 for equivalence. */
17860
17861static int
a2369ed3 17862toc_hash_eq (const void *h1, const void *h2)
9ebbca7d
GK
17863{
17864 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
17865 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
17866
a9098fd0
GK
17867 if (((const struct toc_hash_struct *) h1)->key_mode
17868 != ((const struct toc_hash_struct *) h2)->key_mode)
17869 return 0;
17870
5692c7bc 17871 return rtx_equal_p (r1, r2);
9ebbca7d
GK
17872}
17873
28e510bd
MM
17874/* These are the names given by the C++ front-end to vtables, and
17875 vtable-like objects. Ideally, this logic should not be here;
17876 instead, there should be some programmatic way of inquiring as
17877 to whether or not an object is a vtable. */
17878
17879#define VTABLE_NAME_P(NAME) \
9390387d 17880 (strncmp ("_vt.", name, strlen ("_vt.")) == 0 \
28e510bd
MM
17881 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
17882 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
26be75db 17883 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
f676971a 17884 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
28e510bd
MM
17885
17886void
a2369ed3 17887rs6000_output_symbol_ref (FILE *file, rtx x)
28e510bd
MM
17888{
 17889 /* Currently C++ TOC references to vtables can be emitted before it
17890 is decided whether the vtable is public or private. If this is
17891 the case, then the linker will eventually complain that there is
f676971a 17892 a reference to an unknown section. Thus, for vtables only,
28e510bd
MM
17893 we emit the TOC reference to reference the symbol and not the
17894 section. */
17895 const char *name = XSTR (x, 0);
54ee9799 17896
f676971a 17897 if (VTABLE_NAME_P (name))
54ee9799
DE
17898 {
17899 RS6000_OUTPUT_BASENAME (file, name);
17900 }
17901 else
17902 assemble_name (file, name);
28e510bd
MM
17903}
17904
a4f6c312
SS
17905/* Output a TOC entry. We derive the entry name from what is being
17906 written. */
9878760c
RK
17907
17908void
a2369ed3 17909output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
9878760c
RK
17910{
17911 char buf[256];
3cce094d 17912 const char *name = buf;
ec940faa 17913 const char *real_name;
9878760c 17914 rtx base = x;
16fdeb48 17915 HOST_WIDE_INT offset = 0;
9878760c 17916
37409796 17917 gcc_assert (!TARGET_NO_TOC);
4697a36c 17918
9ebbca7d
GK
17919 /* When the linker won't eliminate them, don't output duplicate
17920 TOC entries (this happens on AIX if there is any kind of TOC,
17211ab5
GK
17921 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
17922 CODE_LABELs. */
17923 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
9ebbca7d
GK
17924 {
17925 struct toc_hash_struct *h;
17926 void * * found;
f676971a 17927
17211ab5 17928 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
c4ad648e 17929 time because GGC is not initialized at that point. */
17211ab5 17930 if (toc_hash_table == NULL)
f676971a 17931 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
17211ab5
GK
17932 toc_hash_eq, NULL);
17933
5ead67f6 17934 h = GGC_NEW (struct toc_hash_struct);
9ebbca7d 17935 h->key = x;
a9098fd0 17936 h->key_mode = mode;
9ebbca7d 17937 h->labelno = labelno;
f676971a 17938
9ebbca7d
GK
17939 found = htab_find_slot (toc_hash_table, h, 1);
17940 if (*found == NULL)
17941 *found = h;
f676971a 17942 else /* This is indeed a duplicate.
9ebbca7d
GK
17943 Set this label equal to that label. */
17944 {
17945 fputs ("\t.set ", file);
17946 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
17947 fprintf (file, "%d,", labelno);
17948 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
f676971a 17949 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9ebbca7d
GK
17950 found)->labelno));
17951 return;
17952 }
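	  /* The net effect is a single directive along the lines of
	     "\t.set LC5,LC2" (label numbers made up for illustration,
	     and the local label prefix is target-dependent), aliasing
	     the duplicate entry to the earlier one.  */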
17953 }
17954
17955 /* If we're going to put a double constant in the TOC, make sure it's
17956 aligned properly when strict alignment is on. */
ff1720ed
RK
17957 if (GET_CODE (x) == CONST_DOUBLE
17958 && STRICT_ALIGNMENT
a9098fd0 17959 && GET_MODE_BITSIZE (mode) >= 64
ff1720ed
RK
17960 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
17961 ASM_OUTPUT_ALIGN (file, 3);
17962 }
17963
4977bab6 17964 (*targetm.asm_out.internal_label) (file, "LC", labelno);
9878760c 17965
37c37a57
RK
17966 /* Handle FP constants specially. Note that if we have a minimal
17967 TOC, things we put here aren't actually in the TOC, so we can allow
17968 FP constants. */
00b79d54
BE
17969 if (GET_CODE (x) == CONST_DOUBLE &&
17970 (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
fcce224d
DE
17971 {
17972 REAL_VALUE_TYPE rv;
17973 long k[4];
17974
17975 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
17976 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
17977 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
17978 else
17979 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
fcce224d
DE
17980
17981 if (TARGET_64BIT)
17982 {
17983 if (TARGET_MINIMAL_TOC)
17984 fputs (DOUBLE_INT_ASM_OP, file);
17985 else
17986 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
17987 k[0] & 0xffffffff, k[1] & 0xffffffff,
17988 k[2] & 0xffffffff, k[3] & 0xffffffff);
17989 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
17990 k[0] & 0xffffffff, k[1] & 0xffffffff,
17991 k[2] & 0xffffffff, k[3] & 0xffffffff);
17992 return;
17993 }
17994 else
17995 {
17996 if (TARGET_MINIMAL_TOC)
17997 fputs ("\t.long ", file);
17998 else
17999 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18000 k[0] & 0xffffffff, k[1] & 0xffffffff,
18001 k[2] & 0xffffffff, k[3] & 0xffffffff);
18002 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18003 k[0] & 0xffffffff, k[1] & 0xffffffff,
18004 k[2] & 0xffffffff, k[3] & 0xffffffff);
18005 return;
18006 }
18007 }
00b79d54
BE
18008 else if (GET_CODE (x) == CONST_DOUBLE &&
18009 (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
9878760c 18010 {
042259f2
DE
18011 REAL_VALUE_TYPE rv;
18012 long k[2];
0adc764e 18013
042259f2 18014 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18015
18016 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18017 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18018 else
18019 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
31bfaa0b 18020
13ded975
DE
18021 if (TARGET_64BIT)
18022 {
18023 if (TARGET_MINIMAL_TOC)
2bfcf297 18024 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18025 else
2f0552b6
AM
18026 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18027 k[0] & 0xffffffff, k[1] & 0xffffffff);
18028 fprintf (file, "0x%lx%08lx\n",
18029 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18030 return;
18031 }
1875cc88 18032 else
13ded975
DE
18033 {
18034 if (TARGET_MINIMAL_TOC)
2bfcf297 18035 fputs ("\t.long ", file);
13ded975 18036 else
2f0552b6
AM
18037 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18038 k[0] & 0xffffffff, k[1] & 0xffffffff);
18039 fprintf (file, "0x%lx,0x%lx\n",
18040 k[0] & 0xffffffff, k[1] & 0xffffffff);
13ded975
DE
18041 return;
18042 }
9878760c 18043 }
00b79d54
BE
18044 else if (GET_CODE (x) == CONST_DOUBLE &&
18045 (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
9878760c 18046 {
042259f2
DE
18047 REAL_VALUE_TYPE rv;
18048 long l;
9878760c 18049
042259f2 18050 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
00b79d54
BE
18051 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18052 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18053 else
18054 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
042259f2 18055
31bfaa0b
DE
18056 if (TARGET_64BIT)
18057 {
18058 if (TARGET_MINIMAL_TOC)
2bfcf297 18059 fputs (DOUBLE_INT_ASM_OP, file);
31bfaa0b 18060 else
2f0552b6
AM
18061 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18062 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
31bfaa0b
DE
18063 return;
18064 }
042259f2 18065 else
31bfaa0b
DE
18066 {
18067 if (TARGET_MINIMAL_TOC)
2bfcf297 18068 fputs ("\t.long ", file);
31bfaa0b 18069 else
2f0552b6
AM
18070 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18071 fprintf (file, "0x%lx\n", l & 0xffffffff);
31bfaa0b
DE
18072 return;
18073 }
042259f2 18074 }
f176e826 18075 else if (GET_MODE (x) == VOIDmode
a9098fd0 18076 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
042259f2 18077 {
e2c953b6 18078 unsigned HOST_WIDE_INT low;
042259f2
DE
18079 HOST_WIDE_INT high;
18080
18081 if (GET_CODE (x) == CONST_DOUBLE)
18082 {
18083 low = CONST_DOUBLE_LOW (x);
18084 high = CONST_DOUBLE_HIGH (x);
18085 }
18086 else
18087#if HOST_BITS_PER_WIDE_INT == 32
18088 {
18089 low = INTVAL (x);
0858c623 18090 high = (low & 0x80000000) ? ~0 : 0;
042259f2
DE
18091 }
18092#else
18093 {
c4ad648e
AM
18094 low = INTVAL (x) & 0xffffffff;
18095 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
042259f2
DE
18096 }
18097#endif
9878760c 18098
a9098fd0
GK
18099 /* TOC entries are always Pmode-sized, but since this
18100 is a big-endian machine, if we're putting smaller
18101 integer constants in the TOC we have to pad them.
18102 (This is still a win over putting the constants in
18103 a separate constant pool, because then we'd have
02a4ec28
FS
18104 to have both a TOC entry _and_ the actual constant.)
18105
18106 For a 32-bit target, CONST_INT values are loaded and shifted
18107 entirely within `low' and can be stored in one TOC entry. */
18108
37409796
NS
18109 /* It would be easy to make this work, but it doesn't now. */
18110 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
02a4ec28
FS
18111
18112 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
fb52d8de
AM
18113 {
18114#if HOST_BITS_PER_WIDE_INT == 32
18115 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18116 POINTER_SIZE, &low, &high, 0);
18117#else
18118 low |= high << 32;
18119 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18120 high = (HOST_WIDE_INT) low >> 32;
18121 low &= 0xffffffff;
18122#endif
18123 }
a9098fd0 18124
13ded975
DE
18125 if (TARGET_64BIT)
18126 {
18127 if (TARGET_MINIMAL_TOC)
2bfcf297 18128 fputs (DOUBLE_INT_ASM_OP, file);
13ded975 18129 else
2f0552b6
AM
18130 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18131 (long) high & 0xffffffff, (long) low & 0xffffffff);
18132 fprintf (file, "0x%lx%08lx\n",
18133 (long) high & 0xffffffff, (long) low & 0xffffffff);
13ded975
DE
18134 return;
18135 }
1875cc88 18136 else
13ded975 18137 {
02a4ec28
FS
18138 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18139 {
18140 if (TARGET_MINIMAL_TOC)
2bfcf297 18141 fputs ("\t.long ", file);
02a4ec28 18142 else
2bfcf297 18143 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
2f0552b6
AM
18144 (long) high & 0xffffffff, (long) low & 0xffffffff);
18145 fprintf (file, "0x%lx,0x%lx\n",
18146 (long) high & 0xffffffff, (long) low & 0xffffffff);
02a4ec28 18147 }
13ded975 18148 else
02a4ec28
FS
18149 {
18150 if (TARGET_MINIMAL_TOC)
2bfcf297 18151 fputs ("\t.long ", file);
02a4ec28 18152 else
2f0552b6
AM
18153 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18154 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
02a4ec28 18155 }
13ded975
DE
18156 return;
18157 }
9878760c
RK
18158 }
18159
18160 if (GET_CODE (x) == CONST)
18161 {
37409796 18162 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
2bfcf297 18163
9878760c
RK
18164 base = XEXP (XEXP (x, 0), 0);
18165 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18166 }
f676971a 18167
37409796
NS
18168 switch (GET_CODE (base))
18169 {
18170 case SYMBOL_REF:
18171 name = XSTR (base, 0);
18172 break;
18173
18174 case LABEL_REF:
18175 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18176 CODE_LABEL_NUMBER (XEXP (base, 0)));
18177 break;
18178
18179 case CODE_LABEL:
18180 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18181 break;
18182
18183 default:
18184 gcc_unreachable ();
18185 }
9878760c 18186
772c5265 18187 real_name = (*targetm.strip_name_encoding) (name);
1875cc88 18188 if (TARGET_MINIMAL_TOC)
2bfcf297 18189 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
1875cc88
JW
18190 else
18191 {
b6c9286a 18192 fprintf (file, "\t.tc %s", real_name);
9878760c 18193
1875cc88 18194 if (offset < 0)
16fdeb48 18195 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
1875cc88 18196 else if (offset)
16fdeb48 18197 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
9878760c 18198
19d2d16f 18199 fputs ("[TC],", file);
1875cc88 18200 }
581bc4de
MM
18201
18202 /* Currently C++ TOC references to vtables can be emitted before it
18203 is decided whether the vtable is public or private. If this is
18204 the case, then the linker will eventually complain that there is
18205 a TOC reference to an unknown section. Thus, for vtables only,
18206 we emit the TOC reference to reference the symbol and not the
18207 section. */
28e510bd 18208 if (VTABLE_NAME_P (name))
581bc4de 18209 {
54ee9799 18210 RS6000_OUTPUT_BASENAME (file, name);
581bc4de 18211 if (offset < 0)
16fdeb48 18212 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de 18213 else if (offset > 0)
16fdeb48 18214 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
581bc4de
MM
18215 }
18216 else
18217 output_addr_const (file, x);
19d2d16f 18218 putc ('\n', file);
9878760c
RK
18219}
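/* Illustrative note added for this annotated listing (not from the original
   GCC sources): on a 64-bit target without -mminimal-toc, a DFmode constant
   such as 1.0 would come out of the code above roughly as

	.tc FD_3ff00000_0[TC],0x3ff0000000000000

   while -mminimal-toc emits only the raw doubleword via DOUBLE_INT_ASM_OP.
   A symbolic entry such as "foo + 8" (again without -mminimal-toc) comes
   out as

	.tc foo.P8[TC],foo+8

   The FD_/FS_/ID_ label digits are just the hex words of the constant, so
   the exact digits depend on the target encoding; treat these lines as a
   sketch of the format only.  */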
18220\f
18221/* Output an assembler pseudo-op to write an ASCII string of N characters
18222 starting at P to FILE.
18223
18224 On the RS/6000, we have to do this using the .byte operation and
18225 write out special characters outside the quoted string.
18226 Also, the assembler is broken; very long strings are truncated,
a4f6c312 18227 so we must artificially break them up early. */
9878760c
RK
18228
18229void
a2369ed3 18230output_ascii (FILE *file, const char *p, int n)
9878760c
RK
18231{
18232 char c;
18233 int i, count_string;
d330fd93
KG
18234 const char *for_string = "\t.byte \"";
18235 const char *for_decimal = "\t.byte ";
18236 const char *to_close = NULL;
9878760c
RK
18237
18238 count_string = 0;
18239 for (i = 0; i < n; i++)
18240 {
18241 c = *p++;
18242 if (c >= ' ' && c < 0177)
18243 {
18244 if (for_string)
18245 fputs (for_string, file);
18246 putc (c, file);
18247
18248 /* Write two quotes to get one. */
18249 if (c == '"')
18250 {
18251 putc (c, file);
18252 ++count_string;
18253 }
18254
18255 for_string = NULL;
18256 for_decimal = "\"\n\t.byte ";
18257 to_close = "\"\n";
18258 ++count_string;
18259
18260 if (count_string >= 512)
18261 {
18262 fputs (to_close, file);
18263
18264 for_string = "\t.byte \"";
18265 for_decimal = "\t.byte ";
18266 to_close = NULL;
18267 count_string = 0;
18268 }
18269 }
18270 else
18271 {
18272 if (for_decimal)
18273 fputs (for_decimal, file);
18274 fprintf (file, "%d", c);
18275
18276 for_string = "\n\t.byte \"";
18277 for_decimal = ", ";
18278 to_close = "\n";
18279 count_string = 0;
18280 }
18281 }
18282
18283 /* Now close the string if we have written one. Then end the line. */
18284 if (to_close)
9ebbca7d 18285 fputs (to_close, file);
9878760c
RK
18286}
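/* Illustrative example added for this annotated listing (not from the
   original GCC sources): output_ascii (file, "Hi\n", 3) would produce

	.byte "Hi"
	.byte 10

   i.e. printable characters are accumulated inside one quoted .byte string
   (flushed every 512 characters) and each non-printable byte is emitted as
   a decimal operand instead.  */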
18287\f
18288/* Generate a unique section name for FILENAME for a section type
18289 represented by SECTION_DESC. Output goes into BUF.
18290
18291 SECTION_DESC can be any string, as long as it is different for each
18292 possible section type.
18293
18294 We name the section in the same manner as xlc. The name begins with an
18295 underscore followed by the filename (after stripping any leading directory
11e5fe42
RK
18296 names) with the last period replaced by the string SECTION_DESC. If
18297 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18298 the name. */
9878760c
RK
18299
18300void
f676971a 18301rs6000_gen_section_name (char **buf, const char *filename,
c4ad648e 18302 const char *section_desc)
9878760c 18303{
9ebbca7d 18304 const char *q, *after_last_slash, *last_period = 0;
9878760c
RK
18305 char *p;
18306 int len;
9878760c
RK
18307
18308 after_last_slash = filename;
18309 for (q = filename; *q; q++)
11e5fe42
RK
18310 {
18311 if (*q == '/')
18312 after_last_slash = q + 1;
18313 else if (*q == '.')
18314 last_period = q;
18315 }
9878760c 18316
11e5fe42 18317 len = strlen (after_last_slash) + strlen (section_desc) + 2;
6d9f628e 18318 *buf = (char *) xmalloc (len);
9878760c
RK
18319
18320 p = *buf;
18321 *p++ = '_';
18322
18323 for (q = after_last_slash; *q; q++)
18324 {
11e5fe42 18325 if (q == last_period)
c4ad648e 18326 {
9878760c
RK
18327 strcpy (p, section_desc);
18328 p += strlen (section_desc);
e3981aab 18329 break;
c4ad648e 18330 }
9878760c 18331
e9a780ec 18332 else if (ISALNUM (*q))
c4ad648e 18333 *p++ = *q;
9878760c
RK
18334 }
18335
11e5fe42 18336 if (last_period == 0)
9878760c
RK
18337 strcpy (p, section_desc);
18338 else
18339 *p = '\0';
18340}
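/* Worked example added for this annotated listing (not from the original
   GCC sources): for FILENAME "src/foo.c" and SECTION_DESC ".bss_" (the kind
   of descriptor the XCOFF support code passes in), the buffer ends up
   holding "_foo.bss_": the directory part is dropped, a leading '_' is
   added, and the last '.' of the filename is replaced by the descriptor.
   A FILENAME with no period, say "foo", yields the same "_foo.bss_" by
   plain concatenation.  */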
e165f3f0 18341\f
a4f6c312 18342/* Emit profile function. */
411707f4 18343
411707f4 18344void
a2369ed3 18345output_profile_hook (int labelno ATTRIBUTE_UNUSED)
411707f4 18346{
858081ad
AH
18347 /* Non-standard profiling for kernels, which just saves LR then calls
18348 _mcount without worrying about arg saves. The idea is to change
18349 the function prologue as little as possible as it isn't easy to
18350 account for arg save/restore code added just for _mcount. */
ffcfcb5f
AM
18351 if (TARGET_PROFILE_KERNEL)
18352 return;
18353
8480e480
CC
18354 if (DEFAULT_ABI == ABI_AIX)
18355 {
9739c90c
JJ
18356#ifndef NO_PROFILE_COUNTERS
18357# define NO_PROFILE_COUNTERS 0
18358#endif
f676971a 18359 if (NO_PROFILE_COUNTERS)
9739c90c
JJ
18360 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18361 else
18362 {
18363 char buf[30];
18364 const char *label_name;
18365 rtx fun;
411707f4 18366
9739c90c
JJ
18367 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18368 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18369 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
411707f4 18370
9739c90c
JJ
18371 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18372 fun, Pmode);
18373 }
8480e480 18374 }
ee890fe2
SS
18375 else if (DEFAULT_ABI == ABI_DARWIN)
18376 {
d5fa86ba 18377 const char *mcount_name = RS6000_MCOUNT;
1de43f85 18378 int caller_addr_regno = LR_REGNO;
ee890fe2
SS
18379
18380 /* Be conservative and always set this, at least for now. */
e3b5732b 18381 crtl->uses_pic_offset_table = 1;
ee890fe2
SS
18382
18383#if TARGET_MACHO
18384 /* For PIC code, set up a stub and collect the caller's address
18385 from r0, which is where the prologue puts it. */
11abc112 18386 if (MACHOPIC_INDIRECT
e3b5732b 18387 && crtl->uses_pic_offset_table)
11abc112 18388 caller_addr_regno = 0;
ee890fe2
SS
18389#endif
18390 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18391 0, VOIDmode, 1,
18392 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18393 }
411707f4
CC
18394}
18395
a4f6c312 18396/* Write function profiler code. */
e165f3f0
RK
18397
18398void
a2369ed3 18399output_function_profiler (FILE *file, int labelno)
e165f3f0 18400{
3daf36a4 18401 char buf[100];
e165f3f0 18402
38c1f2d7 18403 switch (DEFAULT_ABI)
3daf36a4 18404 {
38c1f2d7 18405 default:
37409796 18406 gcc_unreachable ();
38c1f2d7
MM
18407
18408 case ABI_V4:
09eeeacb
AM
18409 if (!TARGET_32BIT)
18410 {
d4ee4d25 18411 warning (0, "no profiling of 64-bit code for this ABI");
09eeeacb
AM
18412 return;
18413 }
ffcfcb5f 18414 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
38c1f2d7 18415 fprintf (file, "\tmflr %s\n", reg_names[0]);
71625f3d
AM
18416 if (NO_PROFILE_COUNTERS)
18417 {
18418 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18419 reg_names[0], reg_names[1]);
18420 }
18421 else if (TARGET_SECURE_PLT && flag_pic)
18422 {
18423 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18424 reg_names[0], reg_names[1]);
18425 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18426 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18427 reg_names[12], reg_names[12]);
18428 assemble_name (file, buf);
18429 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18430 assemble_name (file, buf);
18431 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18432 }
18433 else if (flag_pic == 1)
38c1f2d7 18434 {
dfdfa60f 18435 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
71625f3d
AM
18436 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18437 reg_names[0], reg_names[1]);
17167fd8 18438 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
dfdfa60f 18439 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
38c1f2d7 18440 assemble_name (file, buf);
17167fd8 18441 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
38c1f2d7 18442 }
9ebbca7d 18443 else if (flag_pic > 1)
38c1f2d7 18444 {
71625f3d
AM
18445 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18446 reg_names[0], reg_names[1]);
9ebbca7d 18447 /* Now, we need to get the address of the label. */
71625f3d 18448 fputs ("\tbcl 20,31,1f\n\t.long ", file);
034e84c4 18449 assemble_name (file, buf);
9ebbca7d
GK
18450 fputs ("-.\n1:", file);
18451 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
f676971a 18452 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9ebbca7d
GK
18453 reg_names[0], reg_names[11]);
18454 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18455 reg_names[0], reg_names[0], reg_names[11]);
38c1f2d7 18456 }
38c1f2d7
MM
18457 else
18458 {
17167fd8 18459 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
38c1f2d7 18460 assemble_name (file, buf);
dfdfa60f 18461 fputs ("@ha\n", file);
71625f3d
AM
18462 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18463 reg_names[0], reg_names[1]);
a260abc9 18464 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
38c1f2d7 18465 assemble_name (file, buf);
17167fd8 18466 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
38c1f2d7
MM
18467 }
18468
50d440bc 18469 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
3b6ce0af
DE
18470 fprintf (file, "\tbl %s%s\n",
18471 RS6000_MCOUNT, flag_pic ? "@plt" : "");
38c1f2d7
MM
18472 break;
18473
18474 case ABI_AIX:
ee890fe2 18475 case ABI_DARWIN:
ffcfcb5f
AM
18476 if (!TARGET_PROFILE_KERNEL)
18477 {
a3c9585f 18478 /* Don't do anything, done in output_profile_hook (). */
ffcfcb5f
AM
18479 }
18480 else
18481 {
37409796 18482 gcc_assert (!TARGET_32BIT);
ffcfcb5f
AM
18483
18484 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18485 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18486
6de9cd9a 18487 if (cfun->static_chain_decl != NULL)
ffcfcb5f
AM
18488 {
18489 asm_fprintf (file, "\tstd %s,24(%s)\n",
18490 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18491 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18492 asm_fprintf (file, "\tld %s,24(%s)\n",
18493 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18494 }
18495 else
18496 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18497 }
38c1f2d7
MM
18498 break;
18499 }
e165f3f0 18500}
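/* Illustrative note added for this annotated listing (not from the original
   GCC sources): for the 32-bit SVR4 ABI without -fPIC and with profile
   counters enabled, the sequence emitted above comes out roughly as

	mflr 0
	lis 12,.LP0@ha
	stw 0,4(1)
	la 0,.LP0@l(12)
	bl _mcount

   where ".LP0" stands for whatever ASM_GENERATE_INTERNAL_LABEL produced for
   the per-call-site counter label; the exact label spelling and the choice
   between old and new mnemonics ({cau|addis} etc.) depend on the target
   headers, so treat this only as a sketch of the shape of the code.  */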
a251ffd0 18501
b54cf83a 18502\f
44cd321e
PS
18503
18504/* The following variable value is the last issued insn. */
18505
18506static rtx last_scheduled_insn;
18507
18508/* The following variable helps to balance the issuing of load and
18509 store instructions.  */
18510
18511static int load_store_pendulum;
18512
b54cf83a
DE
18513/* Power4 load update and store update instructions are cracked into a
18514 load or store and an integer insn which are executed in the same cycle.
18515 Branches have their own dispatch slot which does not count against the
18516 GCC issue rate, but it changes the program flow so there are no other
18517 instructions to issue in this cycle. */
18518
18519static int
f676971a
EC
18520rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18521 int verbose ATTRIBUTE_UNUSED,
a2369ed3 18522 rtx insn, int more)
b54cf83a 18523{
44cd321e 18524 last_scheduled_insn = insn;
b54cf83a
DE
18525 if (GET_CODE (PATTERN (insn)) == USE
18526 || GET_CODE (PATTERN (insn)) == CLOBBER)
44cd321e
PS
18527 {
18528 cached_can_issue_more = more;
18529 return cached_can_issue_more;
18530 }
18531
18532 if (insn_terminates_group_p (insn, current_group))
18533 {
18534 cached_can_issue_more = 0;
18535 return cached_can_issue_more;
18536 }
b54cf83a 18537
d296e02e
AP
18538 /* If the insn has no reservation but we got here anyway, leave MORE unchanged. */
18539 if (recog_memoized (insn) < 0)
18540 return more;
18541
ec507f2d 18542 if (rs6000_sched_groups)
b54cf83a 18543 {
cbe26ab8 18544 if (is_microcoded_insn (insn))
44cd321e 18545 cached_can_issue_more = 0;
cbe26ab8 18546 else if (is_cracked_insn (insn))
44cd321e
PS
18547 cached_can_issue_more = more > 2 ? more - 2 : 0;
18548 else
18549 cached_can_issue_more = more - 1;
18550
18551 return cached_can_issue_more;
b54cf83a 18552 }
165b263e 18553
d296e02e
AP
18554 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18555 return 0;
18556
44cd321e
PS
18557 cached_can_issue_more = more - 1;
18558 return cached_can_issue_more;
b54cf83a
DE
18559}
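/* Summary note added for this annotated listing (not from the original GCC
   sources): for the group-based processors (POWER4/POWER5) the hook above
   charges a microcoded insn all of the remaining issue slots, a cracked insn
   two slots, and anything else one slot; on the Cell a non-pipelined insn
   ends the cycle, and USE/CLOBBER markers consume no slot at all.  */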
18560
a251ffd0
TG
18561/* Adjust the cost of a scheduling dependency. Return the new cost of
18562 the dependence LINK of INSN on DEP_INSN. COST is the current cost. */
18563
c237e94a 18564static int
0a4f0294 18565rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
a251ffd0 18566{
44cd321e 18567 enum attr_type attr_type;
a251ffd0 18568
44cd321e 18569 if (! recog_memoized (insn))
a251ffd0
TG
18570 return 0;
18571
44cd321e 18572 switch (REG_NOTE_KIND (link))
a251ffd0 18573 {
44cd321e
PS
18574 case REG_DEP_TRUE:
18575 {
18576 /* Data dependency; DEP_INSN writes a register that INSN reads
18577 some cycles later. */
18578
18579 /* Separate a load from a narrower, dependent store. */
18580 if (rs6000_sched_groups
18581 && GET_CODE (PATTERN (insn)) == SET
18582 && GET_CODE (PATTERN (dep_insn)) == SET
18583 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18584 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18585 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18586 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18587 return cost + 14;
18588
18589 attr_type = get_attr_type (insn);
18590
18591 switch (attr_type)
18592 {
18593 case TYPE_JMPREG:
18594 /* Tell the first scheduling pass about the latency between
18595 a mtctr and bctr (and mtlr and br/blr). The first
18596 scheduling pass will not know about this latency since
18597 the mtctr instruction, which has the latency associated
18598 with it, will be generated by reload. */
18599 return TARGET_POWER ? 5 : 4;
18600 case TYPE_BRANCH:
18601 /* Leave some extra cycles between a compare and its
18602 dependent branch, to inhibit expensive mispredicts. */
18603 if ((rs6000_cpu_attr == CPU_PPC603
18604 || rs6000_cpu_attr == CPU_PPC604
18605 || rs6000_cpu_attr == CPU_PPC604E
18606 || rs6000_cpu_attr == CPU_PPC620
18607 || rs6000_cpu_attr == CPU_PPC630
18608 || rs6000_cpu_attr == CPU_PPC750
18609 || rs6000_cpu_attr == CPU_PPC7400
18610 || rs6000_cpu_attr == CPU_PPC7450
18611 || rs6000_cpu_attr == CPU_POWER4
d296e02e
AP
18612 || rs6000_cpu_attr == CPU_POWER5
18613 || rs6000_cpu_attr == CPU_CELL)
44cd321e
PS
18614 && recog_memoized (dep_insn)
18615 && (INSN_CODE (dep_insn) >= 0))
982afe02 18616
44cd321e
PS
18617 switch (get_attr_type (dep_insn))
18618 {
18619 case TYPE_CMP:
18620 case TYPE_COMPARE:
18621 case TYPE_DELAYED_COMPARE:
18622 case TYPE_IMUL_COMPARE:
18623 case TYPE_LMUL_COMPARE:
18624 case TYPE_FPCOMPARE:
18625 case TYPE_CR_LOGICAL:
18626 case TYPE_DELAYED_CR:
18627 return cost + 2;
18628 default:
18629 break;
18630 }
18631 break;
18632
18633 case TYPE_STORE:
18634 case TYPE_STORE_U:
18635 case TYPE_STORE_UX:
18636 case TYPE_FPSTORE:
18637 case TYPE_FPSTORE_U:
18638 case TYPE_FPSTORE_UX:
18639 if ((rs6000_cpu == PROCESSOR_POWER6)
18640 && recog_memoized (dep_insn)
18641 && (INSN_CODE (dep_insn) >= 0))
18642 {
18643
18644 if (GET_CODE (PATTERN (insn)) != SET)
18645 /* If this happens, we have to extend this to schedule
18646 optimally. Return default for now. */
18647 return cost;
18648
18649 /* Adjust the cost for the case where the value written
18650 by a fixed point operation is used as the address
18651 gen value on a store. */
18652 switch (get_attr_type (dep_insn))
18653 {
18654 case TYPE_LOAD:
18655 case TYPE_LOAD_U:
18656 case TYPE_LOAD_UX:
18657 case TYPE_CNTLZ:
18658 {
18659 if (! store_data_bypass_p (dep_insn, insn))
18660 return 4;
18661 break;
18662 }
18663 case TYPE_LOAD_EXT:
18664 case TYPE_LOAD_EXT_U:
18665 case TYPE_LOAD_EXT_UX:
18666 case TYPE_VAR_SHIFT_ROTATE:
18667 case TYPE_VAR_DELAYED_COMPARE:
18668 {
18669 if (! store_data_bypass_p (dep_insn, insn))
18670 return 6;
18671 break;
18672 }
18673 case TYPE_INTEGER:
18674 case TYPE_COMPARE:
18675 case TYPE_FAST_COMPARE:
18676 case TYPE_EXTS:
18677 case TYPE_SHIFT:
18678 case TYPE_INSERT_WORD:
18679 case TYPE_INSERT_DWORD:
18680 case TYPE_FPLOAD_U:
18681 case TYPE_FPLOAD_UX:
18682 case TYPE_STORE_U:
18683 case TYPE_STORE_UX:
18684 case TYPE_FPSTORE_U:
18685 case TYPE_FPSTORE_UX:
18686 {
18687 if (! store_data_bypass_p (dep_insn, insn))
18688 return 3;
18689 break;
18690 }
18691 case TYPE_IMUL:
18692 case TYPE_IMUL2:
18693 case TYPE_IMUL3:
18694 case TYPE_LMUL:
18695 case TYPE_IMUL_COMPARE:
18696 case TYPE_LMUL_COMPARE:
18697 {
18698 if (! store_data_bypass_p (dep_insn, insn))
18699 return 17;
18700 break;
18701 }
18702 case TYPE_IDIV:
18703 {
18704 if (! store_data_bypass_p (dep_insn, insn))
18705 return 45;
18706 break;
18707 }
18708 case TYPE_LDIV:
18709 {
18710 if (! store_data_bypass_p (dep_insn, insn))
18711 return 57;
18712 break;
18713 }
18714 default:
18715 break;
18716 }
18717 }
18718 break;
18719
18720 case TYPE_LOAD:
18721 case TYPE_LOAD_U:
18722 case TYPE_LOAD_UX:
18723 case TYPE_LOAD_EXT:
18724 case TYPE_LOAD_EXT_U:
18725 case TYPE_LOAD_EXT_UX:
18726 if ((rs6000_cpu == PROCESSOR_POWER6)
18727 && recog_memoized (dep_insn)
18728 && (INSN_CODE (dep_insn) >= 0))
18729 {
18730
18731 /* Adjust the cost for the case where the value written
18732 by a fixed point instruction is used within the address
18733 gen portion of a subsequent load(u)(x) */
18734 switch (get_attr_type (dep_insn))
18735 {
18736 case TYPE_LOAD:
18737 case TYPE_LOAD_U:
18738 case TYPE_LOAD_UX:
18739 case TYPE_CNTLZ:
18740 {
18741 if (set_to_load_agen (dep_insn, insn))
18742 return 4;
18743 break;
18744 }
18745 case TYPE_LOAD_EXT:
18746 case TYPE_LOAD_EXT_U:
18747 case TYPE_LOAD_EXT_UX:
18748 case TYPE_VAR_SHIFT_ROTATE:
18749 case TYPE_VAR_DELAYED_COMPARE:
18750 {
18751 if (set_to_load_agen (dep_insn, insn))
18752 return 6;
18753 break;
18754 }
18755 case TYPE_INTEGER:
18756 case TYPE_COMPARE:
18757 case TYPE_FAST_COMPARE:
18758 case TYPE_EXTS:
18759 case TYPE_SHIFT:
18760 case TYPE_INSERT_WORD:
18761 case TYPE_INSERT_DWORD:
18762 case TYPE_FPLOAD_U:
18763 case TYPE_FPLOAD_UX:
18764 case TYPE_STORE_U:
18765 case TYPE_STORE_UX:
18766 case TYPE_FPSTORE_U:
18767 case TYPE_FPSTORE_UX:
18768 {
18769 if (set_to_load_agen (dep_insn, insn))
18770 return 3;
18771 break;
18772 }
18773 case TYPE_IMUL:
18774 case TYPE_IMUL2:
18775 case TYPE_IMUL3:
18776 case TYPE_LMUL:
18777 case TYPE_IMUL_COMPARE:
18778 case TYPE_LMUL_COMPARE:
18779 {
18780 if (set_to_load_agen (dep_insn, insn))
18781 return 17;
18782 break;
18783 }
18784 case TYPE_IDIV:
18785 {
18786 if (set_to_load_agen (dep_insn, insn))
18787 return 45;
18788 break;
18789 }
18790 case TYPE_LDIV:
18791 {
18792 if (set_to_load_agen (dep_insn, insn))
18793 return 57;
18794 break;
18795 }
18796 default:
18797 break;
18798 }
18799 }
18800 break;
18801
18802 case TYPE_FPLOAD:
18803 if ((rs6000_cpu == PROCESSOR_POWER6)
18804 && recog_memoized (dep_insn)
18805 && (INSN_CODE (dep_insn) >= 0)
18806 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18807 return 2;
18808
18809 default:
18810 break;
18811 }
c9dbf840 18812
a251ffd0 18813 /* Fall out to return default cost. */
44cd321e
PS
18814 }
18815 break;
18816
18817 case REG_DEP_OUTPUT:
18818 /* Output dependency; DEP_INSN writes a register that INSN writes some
18819 cycles later. */
18820 if ((rs6000_cpu == PROCESSOR_POWER6)
18821 && recog_memoized (dep_insn)
18822 && (INSN_CODE (dep_insn) >= 0))
18823 {
18824 attr_type = get_attr_type (insn);
18825
18826 switch (attr_type)
18827 {
18828 case TYPE_FP:
18829 if (get_attr_type (dep_insn) == TYPE_FP)
18830 return 1;
18831 break;
18832 case TYPE_FPLOAD:
18833 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
18834 return 2;
18835 break;
18836 default:
18837 break;
18838 }
18839 }
18840 case REG_DEP_ANTI:
18841 /* Anti dependency; DEP_INSN reads a register that INSN writes some
18842 cycles later. */
18843 return 0;
18844
18845 default:
18846 gcc_unreachable ();
a251ffd0
TG
18847 }
18848
18849 return cost;
18850}
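/* Worked example added for this annotated listing (not from the original GCC
   sources): on POWER6, if DEP_INSN is an integer add that computes r9 and
   INSN is a store through r9, the true dependence feeds the store's address
   generation, so the cost is raised to 3 above (17 when the address comes
   from a multiply, 45/57 for integer divides) unless store_data_bypass_p
   shows that r9 only supplies the value being stored, in which case the
   bypass applies and the default cost stands.  Register numbers here are
   purely illustrative.  */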
b6c9286a 18851
cbe26ab8 18852/* The function returns true if INSN is microcoded.
839a4992 18853 Return false otherwise. */
cbe26ab8
DN
18854
18855static bool
18856is_microcoded_insn (rtx insn)
18857{
18858 if (!insn || !INSN_P (insn)
18859 || GET_CODE (PATTERN (insn)) == USE
18860 || GET_CODE (PATTERN (insn)) == CLOBBER)
18861 return false;
18862
d296e02e
AP
18863 if (rs6000_cpu_attr == CPU_CELL)
18864 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
18865
ec507f2d 18866 if (rs6000_sched_groups)
cbe26ab8
DN
18867 {
18868 enum attr_type type = get_attr_type (insn);
18869 if (type == TYPE_LOAD_EXT_U
18870 || type == TYPE_LOAD_EXT_UX
18871 || type == TYPE_LOAD_UX
18872 || type == TYPE_STORE_UX
18873 || type == TYPE_MFCR)
c4ad648e 18874 return true;
cbe26ab8
DN
18875 }
18876
18877 return false;
18878}
18879
cbe26ab8
DN
18880/* The function returns true if INSN is cracked into 2 instructions
18881 by the processor (and therefore occupies 2 issue slots). */
18882
18883static bool
18884is_cracked_insn (rtx insn)
18885{
18886 if (!insn || !INSN_P (insn)
18887 || GET_CODE (PATTERN (insn)) == USE
18888 || GET_CODE (PATTERN (insn)) == CLOBBER)
18889 return false;
18890
ec507f2d 18891 if (rs6000_sched_groups)
cbe26ab8
DN
18892 {
18893 enum attr_type type = get_attr_type (insn);
18894 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
c4ad648e
AM
18895 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
18896 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
18897 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
18898 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
18899 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
18900 || type == TYPE_IDIV || type == TYPE_LDIV
18901 || type == TYPE_INSERT_WORD)
18902 return true;
cbe26ab8
DN
18903 }
18904
18905 return false;
18906}
18907
18908/* The function returns true if INSN can be issued only from
a3c9585f 18909 the branch slot. */
cbe26ab8
DN
18910
18911static bool
18912is_branch_slot_insn (rtx insn)
18913{
18914 if (!insn || !INSN_P (insn)
18915 || GET_CODE (PATTERN (insn)) == USE
18916 || GET_CODE (PATTERN (insn)) == CLOBBER)
18917 return false;
18918
ec507f2d 18919 if (rs6000_sched_groups)
cbe26ab8
DN
18920 {
18921 enum attr_type type = get_attr_type (insn);
18922 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
f676971a 18923 return true;
cbe26ab8
DN
18924 return false;
18925 }
18926
18927 return false;
18928}
79ae11c4 18929
44cd321e
PS
18930/* The function returns true if OUT_INSN sets a value that is
18931 used in the address generation computation of IN_INSN. */
18932static bool
18933set_to_load_agen (rtx out_insn, rtx in_insn)
18934{
18935 rtx out_set, in_set;
18936
18937 /* For performance reasons, only handle the simple case where
18938 both insns are a single_set. */
18939 out_set = single_set (out_insn);
18940 if (out_set)
18941 {
18942 in_set = single_set (in_insn);
18943 if (in_set)
18944 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
18945 }
18946
18947 return false;
18948}
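/* Worked example added for this annotated listing (not from the original GCC
   sources): with OUT_INSN "addi 9,10,16" and IN_INSN "lwz 3,0(9)", the add's
   SET_DEST (r9) is mentioned in the load's source MEM, so the function
   returns true and rs6000_adjust_cost charges the POWER6 address-generation
   penalty for that pair; the register numbers are purely illustrative.  */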
18949
18950/* Return true if the target storage location of the store INSN1 is
18951 adjacent to the target storage location of the store INSN2. */
18953
18954static bool
18955adjacent_mem_locations (rtx insn1, rtx insn2)
18956{
18957
e3a0e200
PB
18958 rtx a = get_store_dest (PATTERN (insn1));
18959 rtx b = get_store_dest (PATTERN (insn2));
18960
44cd321e
PS
18961 if ((GET_CODE (XEXP (a, 0)) == REG
18962 || (GET_CODE (XEXP (a, 0)) == PLUS
18963 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
18964 && (GET_CODE (XEXP (b, 0)) == REG
18965 || (GET_CODE (XEXP (b, 0)) == PLUS
18966 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
18967 {
f98e8938 18968 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
44cd321e 18969 rtx reg0, reg1;
44cd321e
PS
18970
18971 if (GET_CODE (XEXP (a, 0)) == PLUS)
18972 {
18973 reg0 = XEXP (XEXP (a, 0), 0);
18974 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
18975 }
18976 else
18977 reg0 = XEXP (a, 0);
18978
18979 if (GET_CODE (XEXP (b, 0)) == PLUS)
18980 {
18981 reg1 = XEXP (XEXP (b, 0), 0);
18982 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
18983 }
18984 else
18985 reg1 = XEXP (b, 0);
18986
18987 val_diff = val1 - val0;
18988
18989 return ((REGNO (reg0) == REGNO (reg1))
f98e8938
JJ
18990 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
18991 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
44cd321e
PS
18992 }
18993
18994 return false;
18995}
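/* Worked example added for this annotated listing (not from the original GCC
   sources): a 4-byte store to 16(r9) followed by a 4-byte store to 20(r9)
   gives val_diff == 4 == MEM_SIZE of the first store, so the locations are
   considered adjacent; a second store to 12(r9) is likewise adjacent through
   the -MEM_SIZE (b) test.  Different base registers, or a gap that matches
   neither size, make the function return false.  */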
18996
a4f6c312 18997/* A C statement (sans semicolon) to update the integer scheduling
79ae11c4
DN
18998 priority INSN_PRIORITY (INSN). Increase the priority to execute the
18999 INSN earlier, reduce the priority to execute INSN later. Do not
a4f6c312
SS
19000 define this macro if you do not need to adjust the scheduling
19001 priorities of insns. */
bef84347 19002
c237e94a 19003static int
a2369ed3 19004rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
bef84347 19005{
a4f6c312
SS
19006 /* On machines (like the 750) which have asymmetric integer units,
19007 where one integer unit can do multiply and divides and the other
19008 can't, reduce the priority of multiply/divide so it is scheduled
19009 before other integer operations. */
bef84347
VM
19010
19011#if 0
2c3c49de 19012 if (! INSN_P (insn))
bef84347
VM
19013 return priority;
19014
19015 if (GET_CODE (PATTERN (insn)) == USE)
19016 return priority;
19017
19018 switch (rs6000_cpu_attr) {
19019 case CPU_PPC750:
19020 switch (get_attr_type (insn))
19021 {
19022 default:
19023 break;
19024
19025 case TYPE_IMUL:
19026 case TYPE_IDIV:
3cb999d8
DE
19027 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19028 priority, priority);
bef84347
VM
19029 if (priority >= 0 && priority < 0x01000000)
19030 priority >>= 3;
19031 break;
19032 }
19033 }
19034#endif
19035
44cd321e 19036 if (insn_must_be_first_in_group (insn)
79ae11c4 19037 && reload_completed
f676971a 19038 && current_sched_info->sched_max_insns_priority
79ae11c4
DN
19039 && rs6000_sched_restricted_insns_priority)
19040 {
19041
c4ad648e
AM
19042 /* Prioritize insns that can be dispatched only in the first
19043 dispatch slot. */
79ae11c4 19044 if (rs6000_sched_restricted_insns_priority == 1)
f676971a
EC
19045 /* Attach highest priority to insn. This means that in
19046 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
79ae11c4 19047 precede 'priority' (critical path) considerations. */
f676971a 19048 return current_sched_info->sched_max_insns_priority;
79ae11c4 19049 else if (rs6000_sched_restricted_insns_priority == 2)
f676971a 19050 /* Increase priority of insn by a minimal amount. This means that in
c4ad648e
AM
19051 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19052 considerations precede dispatch-slot restriction considerations. */
f676971a
EC
19053 return (priority + 1);
19054 }
79ae11c4 19055
44cd321e
PS
19056 if (rs6000_cpu == PROCESSOR_POWER6
19057 && ((load_store_pendulum == -2 && is_load_insn (insn))
19058 || (load_store_pendulum == 2 && is_store_insn (insn))))
19059 /* Attach highest priority to insn if the scheduler has just issued two
19060 stores and this instruction is a load, or two loads and this instruction
19061 is a store. Power6 wants loads and stores scheduled alternately
19062 when possible */
19063 return current_sched_info->sched_max_insns_priority;
19064
bef84347
VM
19065 return priority;
19066}
19067
d296e02e
AP
19068/* Return true if the instruction is nonpipelined on the Cell. */
19069static bool
19070is_nonpipeline_insn (rtx insn)
19071{
19072 enum attr_type type;
19073 if (!insn || !INSN_P (insn)
19074 || GET_CODE (PATTERN (insn)) == USE
19075 || GET_CODE (PATTERN (insn)) == CLOBBER)
19076 return false;
19077
19078 type = get_attr_type (insn);
19079 if (type == TYPE_IMUL
19080 || type == TYPE_IMUL2
19081 || type == TYPE_IMUL3
19082 || type == TYPE_LMUL
19083 || type == TYPE_IDIV
19084 || type == TYPE_LDIV
19085 || type == TYPE_SDIV
19086 || type == TYPE_DDIV
19087 || type == TYPE_SSQRT
19088 || type == TYPE_DSQRT
19089 || type == TYPE_MFCR
19090 || type == TYPE_MFCRF
19091 || type == TYPE_MFJMPR)
19092 {
19093 return true;
19094 }
19095 return false;
19096}
19097
19098
a4f6c312
SS
19099/* Return how many instructions the machine can issue per cycle. */
19100
c237e94a 19101static int
863d938c 19102rs6000_issue_rate (void)
b6c9286a 19103{
3317bab1
DE
19104 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19105 if (!reload_completed)
19106 return 1;
19107
b6c9286a 19108 switch (rs6000_cpu_attr) {
3cb999d8
DE
19109 case CPU_RIOS1: /* ? */
19110 case CPU_RS64A:
19111 case CPU_PPC601: /* ? */
ed947a96 19112 case CPU_PPC7450:
3cb999d8 19113 return 3;
b54cf83a 19114 case CPU_PPC440:
b6c9286a 19115 case CPU_PPC603:
bef84347 19116 case CPU_PPC750:
ed947a96 19117 case CPU_PPC7400:
be12c2b0 19118 case CPU_PPC8540:
d296e02e 19119 case CPU_CELL:
fa41c305
EW
19120 case CPU_PPCE300C2:
19121 case CPU_PPCE300C3:
edae5fe3 19122 case CPU_PPCE500MC:
f676971a 19123 return 2;
3cb999d8 19124 case CPU_RIOS2:
b6c9286a 19125 case CPU_PPC604:
19684119 19126 case CPU_PPC604E:
b6c9286a 19127 case CPU_PPC620:
3cb999d8 19128 case CPU_PPC630:
b6c9286a 19129 return 4;
cbe26ab8 19130 case CPU_POWER4:
ec507f2d 19131 case CPU_POWER5:
44cd321e 19132 case CPU_POWER6:
cbe26ab8 19133 return 5;
b6c9286a
MM
19134 default:
19135 return 1;
19136 }
19137}
19138
be12c2b0
VM
19139/* Return how many instructions to look ahead for better insn
19140 scheduling. */
19141
19142static int
863d938c 19143rs6000_use_sched_lookahead (void)
be12c2b0
VM
19144{
19145 if (rs6000_cpu_attr == CPU_PPC8540)
19146 return 4;
d296e02e
AP
19147 if (rs6000_cpu_attr == CPU_CELL)
19148 return (reload_completed ? 8 : 0);
be12c2b0
VM
19149 return 0;
19150}
19151
d296e02e
AP
19152/* We are choosing insn from the ready queue. Return nonzero if INSN can be chosen. */
19153static int
19154rs6000_use_sched_lookahead_guard (rtx insn)
19155{
19156 if (rs6000_cpu_attr != CPU_CELL)
19157 return 1;
19158
19159 if (insn == NULL_RTX || !INSN_P (insn))
19160 abort ();
982afe02 19161
d296e02e
AP
19162 if (!reload_completed
19163 || is_nonpipeline_insn (insn)
19164 || is_microcoded_insn (insn))
19165 return 0;
19166
19167 return 1;
19168}
19169
569fa502
DN
19170/* Determine if PAT refers to memory. */
19171
19172static bool
19173is_mem_ref (rtx pat)
19174{
19175 const char * fmt;
19176 int i, j;
19177 bool ret = false;
19178
1de59bbd
DE
19179 /* stack_tie does not produce any real memory traffic. */
19180 if (GET_CODE (pat) == UNSPEC
19181 && XINT (pat, 1) == UNSPEC_TIE)
19182 return false;
19183
569fa502
DN
19184 if (GET_CODE (pat) == MEM)
19185 return true;
19186
19187 /* Recursively process the pattern. */
19188 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19189
19190 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19191 {
19192 if (fmt[i] == 'e')
19193 ret |= is_mem_ref (XEXP (pat, i));
19194 else if (fmt[i] == 'E')
19195 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19196 ret |= is_mem_ref (XVECEXP (pat, i, j));
19197 }
19198
19199 return ret;
19200}
19201
19202/* Determine if PAT is a PATTERN of a load insn. */
f676971a 19203
569fa502
DN
19204static bool
19205is_load_insn1 (rtx pat)
19206{
19207 if (!pat || pat == NULL_RTX)
19208 return false;
19209
19210 if (GET_CODE (pat) == SET)
19211 return is_mem_ref (SET_SRC (pat));
19212
19213 if (GET_CODE (pat) == PARALLEL)
19214 {
19215 int i;
19216
19217 for (i = 0; i < XVECLEN (pat, 0); i++)
19218 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19219 return true;
19220 }
19221
19222 return false;
19223}
19224
19225/* Determine if INSN loads from memory. */
19226
19227static bool
19228is_load_insn (rtx insn)
19229{
19230 if (!insn || !INSN_P (insn))
19231 return false;
19232
19233 if (GET_CODE (insn) == CALL_INSN)
19234 return false;
19235
19236 return is_load_insn1 (PATTERN (insn));
19237}
19238
19239/* Determine if PAT is a PATTERN of a store insn. */
19240
19241static bool
19242is_store_insn1 (rtx pat)
19243{
19244 if (!pat || pat == NULL_RTX)
19245 return false;
19246
19247 if (GET_CODE (pat) == SET)
19248 return is_mem_ref (SET_DEST (pat));
19249
19250 if (GET_CODE (pat) == PARALLEL)
19251 {
19252 int i;
19253
19254 for (i = 0; i < XVECLEN (pat, 0); i++)
19255 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19256 return true;
19257 }
19258
19259 return false;
19260}
19261
19262/* Determine if INSN stores to memory. */
19263
19264static bool
19265is_store_insn (rtx insn)
19266{
19267 if (!insn || !INSN_P (insn))
19268 return false;
19269
19270 return is_store_insn1 (PATTERN (insn));
19271}
19272
e3a0e200
PB
19273/* Return the dest of a store insn. */
19274
19275static rtx
19276get_store_dest (rtx pat)
19277{
19278 gcc_assert (is_store_insn1 (pat));
19279
19280 if (GET_CODE (pat) == SET)
19281 return SET_DEST (pat);
19282 else if (GET_CODE (pat) == PARALLEL)
19283 {
19284 int i;
19285
19286 for (i = 0; i < XVECLEN (pat, 0); i++)
19287 {
19288 rtx inner_pat = XVECEXP (pat, 0, i);
19289 if (GET_CODE (inner_pat) == SET
19290 && is_mem_ref (SET_DEST (inner_pat)))
19291 return inner_pat;
19292 }
19293 }
19294 /* We shouldn't get here, because we should have either a simple
19295 store insn or a store with update which are covered above. */
19296 gcc_unreachable();
19297}
19298
569fa502
DN
19299/* Returns whether the dependence between INSN and NEXT is considered
19300 costly by the given target. */
19301
19302static bool
b198261f 19303rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
f676971a 19304{
b198261f
MK
19305 rtx insn;
19306 rtx next;
19307
aabcd309 19308 /* If the flag is not enabled - no dependence is considered costly;
f676971a 19309 allow all dependent insns in the same group.
569fa502
DN
19310 This is the most aggressive option. */
19311 if (rs6000_sched_costly_dep == no_dep_costly)
19312 return false;
19313
f676971a 19314 /* If the flag is set to 1 - a dependence is always considered costly;
569fa502
DN
19315 do not allow dependent instructions in the same group.
19316 This is the most conservative option. */
19317 if (rs6000_sched_costly_dep == all_deps_costly)
f676971a 19318 return true;
569fa502 19319
b198261f
MK
19320 insn = DEP_PRO (dep);
19321 next = DEP_CON (dep);
19322
f676971a
EC
19323 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19324 && is_load_insn (next)
569fa502
DN
19325 && is_store_insn (insn))
19326 /* Prevent load after store in the same group. */
19327 return true;
19328
19329 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
f676971a 19330 && is_load_insn (next)
569fa502 19331 && is_store_insn (insn)
e2f6ff94 19332 && DEP_TYPE (dep) == REG_DEP_TRUE)
c4ad648e
AM
19333 /* Prevent load after store in the same group if it is a true
19334 dependence. */
569fa502 19335 return true;
f676971a
EC
19336
19337 /* The flag is set to X; dependences with latency >= X are considered costly,
569fa502
DN
19338 and will not be scheduled in the same group. */
19339 if (rs6000_sched_costly_dep <= max_dep_latency
19340 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19341 return true;
19342
19343 return false;
19344}
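/* Illustrative note added for this annotated listing (not from the original
   GCC sources): under the store-to-load settings of -msched-costly-dep, a
   store followed by a dependent load is flagged costly here, and the
   group-formation code below then keeps the two insns in separate dispatch
   groups; with a numeric setting N, any dependence whose remaining latency
   (cost - distance) is at least N is treated the same way.  */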
19345
f676971a 19346/* Return the next insn after INSN that is found before TAIL is reached,
cbe26ab8
DN
19347 skipping any "non-active" insns - insns that will not actually occupy
19348 an issue slot. Return NULL_RTX if such an insn is not found. */
19349
19350static rtx
19351get_next_active_insn (rtx insn, rtx tail)
19352{
f489aff8 19353 if (insn == NULL_RTX || insn == tail)
cbe26ab8
DN
19354 return NULL_RTX;
19355
f489aff8 19356 while (1)
cbe26ab8 19357 {
f489aff8
AM
19358 insn = NEXT_INSN (insn);
19359 if (insn == NULL_RTX || insn == tail)
19360 return NULL_RTX;
cbe26ab8 19361
f489aff8
AM
19362 if (CALL_P (insn)
19363 || JUMP_P (insn)
19364 || (NONJUMP_INSN_P (insn)
19365 && GET_CODE (PATTERN (insn)) != USE
19366 && GET_CODE (PATTERN (insn)) != CLOBBER
309ebcd0 19367 && INSN_CODE (insn) != CODE_FOR_stack_tie))
f489aff8
AM
19368 break;
19369 }
19370 return insn;
cbe26ab8
DN
19371}
19372
44cd321e
PS
19373/* We are about to begin issuing insns for this clock cycle. */
19374
19375static int
19376rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19377 rtx *ready ATTRIBUTE_UNUSED,
19378 int *pn_ready ATTRIBUTE_UNUSED,
19379 int clock_var ATTRIBUTE_UNUSED)
19380{
d296e02e
AP
19381 int n_ready = *pn_ready;
19382
44cd321e
PS
19383 if (sched_verbose)
19384 fprintf (dump, "// rs6000_sched_reorder :\n");
19385
d296e02e
AP
19386 /* Reorder the ready list, if the second to last ready insn
19387 is a non-pipelined insn. */
19388 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19389 {
19390 if (is_nonpipeline_insn (ready[n_ready - 1])
19391 && (recog_memoized (ready[n_ready - 2]) > 0))
19392 /* Simply swap first two insns. */
19393 {
19394 rtx tmp = ready[n_ready - 1];
19395 ready[n_ready - 1] = ready[n_ready - 2];
19396 ready[n_ready - 2] = tmp;
19397 }
19398 }
19399
44cd321e
PS
19400 if (rs6000_cpu == PROCESSOR_POWER6)
19401 load_store_pendulum = 0;
19402
19403 return rs6000_issue_rate ();
19404}
19405
19406/* Like rs6000_sched_reorder, but called after issuing each insn. */
19407
19408static int
19409rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19410 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19411{
19412 if (sched_verbose)
19413 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19414
19415 /* For Power6, we need to handle some special cases to try and keep the
19416 store queue from overflowing and triggering expensive flushes.
19417
19418 This code monitors how load and store instructions are being issued
19419 and skews the ready list one way or the other to increase the likelihood
19420 that a desired instruction is issued at the proper time.
19421
19422 A couple of things are done. First, we maintain a "load_store_pendulum"
19423 to track the current state of load/store issue.
19424
19425 - If the pendulum is at zero, then no loads or stores have been
19426 issued in the current cycle so we do nothing.
19427
19428 - If the pendulum is 1, then a single load has been issued in this
19429 cycle and we attempt to locate another load in the ready list to
19430 issue with it.
19431
2f8e468b 19432 - If the pendulum is -2, then two stores have already been
44cd321e
PS
19433 issued in this cycle, so we increase the priority of the first load
19434 in the ready list to increase its likelihood of being chosen first
19435 in the next cycle.
19436
19437 - If the pendulum is -1, then a single store has been issued in this
19438 cycle and we attempt to locate another store in the ready list to
19439 issue with it, preferring a store to an adjacent memory location to
19440 facilitate store pairing in the store queue.
19441
19442 - If the pendulum is 2, then two loads have already been
19443 issued in this cycle, so we increase the priority of the first store
19444 in the ready list to increase its likelihood of being chosen first
19445 in the next cycle.
19446
19447 - If the pendulum < -2 or > 2, then do nothing.
19448
19449 Note: This code covers the most common scenarios. There exist non
19450 load/store instructions which make use of the LSU and which
19451 would need to be accounted for to strictly model the behavior
19452 of the machine. Those instructions are currently unaccounted
19453 for to help minimize compile time overhead of this code.
19454 */
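  /* Illustrative trace added for this annotated listing (not from the
     original GCC sources), for one POWER6 cycle with a mixed ready list:

       1st insn issued is a load  -> pendulum  1: try to pull a second load
                                     to the head of the ready list
       2nd insn issued is a load  -> pendulum  2: bump the first store so it
                                     tends to go early next cycle

     and symmetrically for stores (-1: look for a second store, preferably
     an adjacent one; -2: bump the first load).  Issuing anything that is
     neither a load nor a store leaves the pendulum alone, and
     rs6000_sched_reorder resets it to 0 at the start of each cycle.  */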
19455 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19456 {
19457 int pos;
19458 int i;
19459 rtx tmp;
19460
19461 if (is_store_insn (last_scheduled_insn))
19462 /* Issuing a store, swing the load_store_pendulum to the left */
19463 load_store_pendulum--;
19464 else if (is_load_insn (last_scheduled_insn))
19465 /* Issuing a load, swing the load_store_pendulum to the right */
19466 load_store_pendulum++;
19467 else
19468 return cached_can_issue_more;
19469
19470 /* If the pendulum is balanced, or there is only one instruction on
19471 the ready list, then all is well, so return. */
19472 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19473 return cached_can_issue_more;
19474
19475 if (load_store_pendulum == 1)
19476 {
19477 /* A load has been issued in this cycle. Scan the ready list
19478 for another load to issue with it */
19479 pos = *pn_ready-1;
19480
19481 while (pos >= 0)
19482 {
19483 if (is_load_insn (ready[pos]))
19484 {
19485 /* Found a load. Move it to the head of the ready list,
19486 and adjust its priority so that it is more likely to
19487 stay there */
19488 tmp = ready[pos];
19489 for (i=pos; i<*pn_ready-1; i++)
19490 ready[i] = ready[i + 1];
19491 ready[*pn_ready-1] = tmp;
e855c69d
AB
19492
19493 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19494 INSN_PRIORITY (tmp)++;
19495 break;
19496 }
19497 pos--;
19498 }
19499 }
19500 else if (load_store_pendulum == -2)
19501 {
19502 /* Two stores have been issued in this cycle. Increase the
19503 priority of the first load in the ready list to favor it for
19504 issuing in the next cycle. */
19505 pos = *pn_ready-1;
19506
19507 while (pos >= 0)
19508 {
19509 if (is_load_insn (ready[pos])
e855c69d
AB
19510 && !sel_sched_p ()
19511 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19512 {
19513 INSN_PRIORITY (ready[pos])++;
19514
19515 /* Adjust the pendulum to account for the fact that a load
19516 was found and increased in priority. This is to prevent
19517 increasing the priority of multiple loads */
19518 load_store_pendulum--;
19519
19520 break;
19521 }
19522 pos--;
19523 }
19524 }
19525 else if (load_store_pendulum == -1)
19526 {
19527 /* A store has been issued in this cycle. Scan the ready list for
19528 another store to issue with it, preferring a store to an adjacent
19529 memory location */
19530 int first_store_pos = -1;
19531
19532 pos = *pn_ready-1;
19533
19534 while (pos >= 0)
19535 {
19536 if (is_store_insn (ready[pos]))
19537 {
19538 /* Maintain the index of the first store found on the
19539 list */
19540 if (first_store_pos == -1)
19541 first_store_pos = pos;
19542
19543 if (is_store_insn (last_scheduled_insn)
19544 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19545 {
19546 /* Found an adjacent store. Move it to the head of the
19547 ready list, and adjust its priority so that it is
19548 more likely to stay there */
19549 tmp = ready[pos];
19550 for (i=pos; i<*pn_ready-1; i++)
19551 ready[i] = ready[i + 1];
19552 ready[*pn_ready-1] = tmp;
e855c69d
AB
19553
19554 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e 19555 INSN_PRIORITY (tmp)++;
e855c69d 19556
44cd321e
PS
19557 first_store_pos = -1;
19558
19559 break;
19560 };
19561 }
19562 pos--;
19563 }
19564
19565 if (first_store_pos >= 0)
19566 {
19567 /* An adjacent store wasn't found, but a non-adjacent store was,
19568 so move the non-adjacent store to the front of the ready
19569 list, and adjust its priority so that it is more likely to
19570 stay there. */
19571 tmp = ready[first_store_pos];
19572 for (i=first_store_pos; i<*pn_ready-1; i++)
19573 ready[i] = ready[i + 1];
19574 ready[*pn_ready-1] = tmp;
e855c69d 19575 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
44cd321e
PS
19576 INSN_PRIORITY (tmp)++;
19577 }
19578 }
19579 else if (load_store_pendulum == 2)
19580 {
19581 /* Two loads have been issued in this cycle. Increase the priority
19582 of the first store in the ready list to favor it for issuing in
19583 the next cycle. */
19584 pos = *pn_ready-1;
19585
19586 while (pos >= 0)
19587 {
19588 if (is_store_insn (ready[pos])
e855c69d
AB
19589 && !sel_sched_p ()
19590 && INSN_PRIORITY_KNOWN (ready[pos]))
44cd321e
PS
19591 {
19592 INSN_PRIORITY (ready[pos])++;
19593
19594 /* Adjust the pendulum to account for the fact that a store
19595 was found and increased in priority. This is to prevent
19596 increasing the priority of multiple stores */
19597 load_store_pendulum++;
19598
19599 break;
19600 }
19601 pos--;
19602 }
19603 }
19604 }
19605
19606 return cached_can_issue_more;
19607}
19608
839a4992 19609/* Return whether the presence of INSN causes a dispatch group termination
cbe26ab8
DN
19610 of group WHICH_GROUP.
19611
19612 If WHICH_GROUP == current_group, this function will return true if INSN
19613 causes the termination of the current group (i.e, the dispatch group to
19614 which INSN belongs). This means that INSN will be the last insn in the
19615 group it belongs to.
19616
19617 If WHICH_GROUP == previous_group, this function will return true if INSN
19618 causes the termination of the previous group (i.e, the dispatch group that
19619 precedes the group to which INSN belongs). This means that INSN will be
19620 the first insn in the group it belongs to). */
19621
19622static bool
19623insn_terminates_group_p (rtx insn, enum group_termination which_group)
19624{
44cd321e 19625 bool first, last;
cbe26ab8
DN
19626
19627 if (! insn)
19628 return false;
569fa502 19629
44cd321e
PS
19630 first = insn_must_be_first_in_group (insn);
19631 last = insn_must_be_last_in_group (insn);
cbe26ab8 19632
44cd321e 19633 if (first && last)
cbe26ab8
DN
19634 return true;
19635
19636 if (which_group == current_group)
44cd321e 19637 return last;
cbe26ab8 19638 else if (which_group == previous_group)
44cd321e
PS
19639 return first;
19640
19641 return false;
19642}
19643
19644
19645static bool
19646insn_must_be_first_in_group (rtx insn)
19647{
19648 enum attr_type type;
19649
19650 if (!insn
19651 || insn == NULL_RTX
19652 || GET_CODE (insn) == NOTE
19653 || GET_CODE (PATTERN (insn)) == USE
19654 || GET_CODE (PATTERN (insn)) == CLOBBER)
19655 return false;
19656
19657 switch (rs6000_cpu)
cbe26ab8 19658 {
44cd321e
PS
19659 case PROCESSOR_POWER5:
19660 if (is_cracked_insn (insn))
19661 return true;
19662 case PROCESSOR_POWER4:
19663 if (is_microcoded_insn (insn))
19664 return true;
19665
19666 if (!rs6000_sched_groups)
19667 return false;
19668
19669 type = get_attr_type (insn);
19670
19671 switch (type)
19672 {
19673 case TYPE_MFCR:
19674 case TYPE_MFCRF:
19675 case TYPE_MTCR:
19676 case TYPE_DELAYED_CR:
19677 case TYPE_CR_LOGICAL:
19678 case TYPE_MTJMPR:
19679 case TYPE_MFJMPR:
19680 case TYPE_IDIV:
19681 case TYPE_LDIV:
19682 case TYPE_LOAD_L:
19683 case TYPE_STORE_C:
19684 case TYPE_ISYNC:
19685 case TYPE_SYNC:
19686 return true;
19687 default:
19688 break;
19689 }
19690 break;
19691 case PROCESSOR_POWER6:
19692 type = get_attr_type (insn);
19693
19694 switch (type)
19695 {
19696 case TYPE_INSERT_DWORD:
19697 case TYPE_EXTS:
19698 case TYPE_CNTLZ:
19699 case TYPE_SHIFT:
19700 case TYPE_VAR_SHIFT_ROTATE:
19701 case TYPE_TRAP:
19702 case TYPE_IMUL:
19703 case TYPE_IMUL2:
19704 case TYPE_IMUL3:
19705 case TYPE_LMUL:
19706 case TYPE_IDIV:
19707 case TYPE_INSERT_WORD:
19708 case TYPE_DELAYED_COMPARE:
19709 case TYPE_IMUL_COMPARE:
19710 case TYPE_LMUL_COMPARE:
19711 case TYPE_FPCOMPARE:
19712 case TYPE_MFCR:
19713 case TYPE_MTCR:
19714 case TYPE_MFJMPR:
19715 case TYPE_MTJMPR:
19716 case TYPE_ISYNC:
19717 case TYPE_SYNC:
19718 case TYPE_LOAD_L:
19719 case TYPE_STORE_C:
19720 case TYPE_LOAD_U:
19721 case TYPE_LOAD_UX:
19722 case TYPE_LOAD_EXT_UX:
19723 case TYPE_STORE_U:
19724 case TYPE_STORE_UX:
19725 case TYPE_FPLOAD_U:
19726 case TYPE_FPLOAD_UX:
19727 case TYPE_FPSTORE_U:
19728 case TYPE_FPSTORE_UX:
19729 return true;
19730 default:
19731 break;
19732 }
19733 break;
19734 default:
19735 break;
19736 }
19737
19738 return false;
19739}
19740
19741static bool
19742insn_must_be_last_in_group (rtx insn)
19743{
19744 enum attr_type type;
19745
19746 if (!insn
19747 || insn == NULL_RTX
19748 || GET_CODE (insn) == NOTE
19749 || GET_CODE (PATTERN (insn)) == USE
19750 || GET_CODE (PATTERN (insn)) == CLOBBER)
19751 return false;
19752
19753 switch (rs6000_cpu) {
19754 case PROCESSOR_POWER4:
19755 case PROCESSOR_POWER5:
19756 if (is_microcoded_insn (insn))
19757 return true;
19758
19759 if (is_branch_slot_insn (insn))
19760 return true;
19761
19762 break;
19763 case PROCESSOR_POWER6:
19764 type = get_attr_type (insn);
19765
19766 switch (type)
19767 {
19768 case TYPE_EXTS:
19769 case TYPE_CNTLZ:
19770 case TYPE_SHIFT:
19771 case TYPE_VAR_SHIFT_ROTATE:
19772 case TYPE_TRAP:
19773 case TYPE_IMUL:
19774 case TYPE_IMUL2:
19775 case TYPE_IMUL3:
19776 case TYPE_LMUL:
19777 case TYPE_IDIV:
19778 case TYPE_DELAYED_COMPARE:
19779 case TYPE_IMUL_COMPARE:
19780 case TYPE_LMUL_COMPARE:
19781 case TYPE_FPCOMPARE:
19782 case TYPE_MFCR:
19783 case TYPE_MTCR:
19784 case TYPE_MFJMPR:
19785 case TYPE_MTJMPR:
19786 case TYPE_ISYNC:
19787 case TYPE_SYNC:
19788 case TYPE_LOAD_L:
19789 case TYPE_STORE_C:
19790 return true;
19791 default:
19792 break;
cbe26ab8 19793 }
44cd321e
PS
19794 break;
19795 default:
19796 break;
19797 }
cbe26ab8
DN
19798
19799 return false;
19800}
19801
839a4992 19802/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
cbe26ab8
DN
19803 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19804
19805static bool
19806is_costly_group (rtx *group_insns, rtx next_insn)
19807{
19808 int i;
cbe26ab8
DN
19809 int issue_rate = rs6000_issue_rate ();
19810
19811 for (i = 0; i < issue_rate; i++)
19812 {
e2f6ff94
MK
19813 sd_iterator_def sd_it;
19814 dep_t dep;
cbe26ab8 19815 rtx insn = group_insns[i];
b198261f 19816
cbe26ab8 19817 if (!insn)
c4ad648e 19818 continue;
b198261f 19819
e2f6ff94 19820 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
c4ad648e 19821 {
b198261f
MK
19822 rtx next = DEP_CON (dep);
19823
19824 if (next == next_insn
19825 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
19826 return true;
c4ad648e 19827 }
cbe26ab8
DN
19828 }
19829
19830 return false;
19831}
19832
f676971a 19833/* Utility of the function redefine_groups.
cbe26ab8
DN
19834 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
19835 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
19836 to keep it "far" (in a separate group) from GROUP_INSNS, following
19837 one of the following schemes, depending on the value of the flag
19838 -minsert_sched_nops = X:
19839 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
839a4992 19840 in order to force NEXT_INSN into a separate group.
f676971a
EC
19841 (2) X < sched_finish_regroup_exact: insert exactly X nops.
19842 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
cbe26ab8
DN
19843 insertion (has a group just ended, how many vacant issue slots remain in the
19844 last group, and how many dispatch groups were encountered so far). */
19845
f676971a 19846static int
c4ad648e
AM
19847force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
19848 rtx next_insn, bool *group_end, int can_issue_more,
19849 int *group_count)
cbe26ab8
DN
19850{
19851 rtx nop;
19852 bool force;
19853 int issue_rate = rs6000_issue_rate ();
19854 bool end = *group_end;
19855 int i;
19856
19857 if (next_insn == NULL_RTX)
19858 return can_issue_more;
19859
19860 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
19861 return can_issue_more;
19862
19863 force = is_costly_group (group_insns, next_insn);
19864 if (!force)
19865 return can_issue_more;
19866
19867 if (sched_verbose > 6)
 19868 fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
c4ad648e 19869 *group_count, can_issue_more);
cbe26ab8
DN
19870
19871 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
19872 {
19873 if (*group_end)
c4ad648e 19874 can_issue_more = 0;
cbe26ab8
DN
19875
19876 /* Since only a branch can be issued in the last issue_slot, it is
19877 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
19878 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
c4ad648e
AM
19879 in this case the last nop will start a new group and the branch
19880 will be forced to the new group. */
cbe26ab8 19881 if (can_issue_more && !is_branch_slot_insn (next_insn))
c4ad648e 19882 can_issue_more--;
cbe26ab8
DN
19883
19884 while (can_issue_more > 0)
c4ad648e 19885 {
9390387d 19886 nop = gen_nop ();
c4ad648e
AM
19887 emit_insn_before (nop, next_insn);
19888 can_issue_more--;
19889 }
cbe26ab8
DN
19890
19891 *group_end = true;
19892 return 0;
f676971a 19893 }
cbe26ab8
DN
19894
19895 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
19896 {
19897 int n_nops = rs6000_sched_insert_nops;
19898
f676971a 19899 /* Nops can't be issued from the branch slot, so the effective
c4ad648e 19900 issue_rate for nops is 'issue_rate - 1'. */
cbe26ab8 19901 if (can_issue_more == 0)
c4ad648e 19902 can_issue_more = issue_rate;
cbe26ab8
DN
19903 can_issue_more--;
19904 if (can_issue_more == 0)
c4ad648e
AM
19905 {
19906 can_issue_more = issue_rate - 1;
19907 (*group_count)++;
19908 end = true;
19909 for (i = 0; i < issue_rate; i++)
19910 {
19911 group_insns[i] = 0;
19912 }
19913 }
cbe26ab8
DN
19914
19915 while (n_nops > 0)
c4ad648e
AM
19916 {
19917 nop = gen_nop ();
19918 emit_insn_before (nop, next_insn);
19919 if (can_issue_more == issue_rate - 1) /* new group begins */
19920 end = false;
19921 can_issue_more--;
19922 if (can_issue_more == 0)
19923 {
19924 can_issue_more = issue_rate - 1;
19925 (*group_count)++;
19926 end = true;
19927 for (i = 0; i < issue_rate; i++)
19928 {
19929 group_insns[i] = 0;
19930 }
19931 }
19932 n_nops--;
19933 }
cbe26ab8
DN
19934
19935 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
f676971a 19936 can_issue_more++;
cbe26ab8 19937
c4ad648e
AM
19938 /* Is next_insn going to start a new group? */
19939 *group_end
19940 = (end
cbe26ab8
DN
19941 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
19942 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
19943 || (can_issue_more < issue_rate &&
c4ad648e 19944 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 19945 if (*group_end && end)
c4ad648e 19946 (*group_count)--;
cbe26ab8
DN
19947
19948 if (sched_verbose > 6)
c4ad648e
AM
19949 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
19950 *group_count, can_issue_more);
f676971a
EC
19951 return can_issue_more;
19952 }
cbe26ab8
DN
19953
19954 return can_issue_more;
19955}
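
/* Worked example (added for clarity; not part of the original source),
   assuming an issue rate of 5 as on POWER4/POWER5 and the option spelling
   -minsert-sched-nops=regroup_exact: suppose NEXT_INSN has a costly
   dependence on an insn already in GROUP_INSNS and 3 issue slots are still
   vacant.  The regroup_exact path above then emits 2 nops if NEXT_INSN is
   not a branch (leaving only the branch slot, which a non-branch cannot
   use) or 3 nops if it is a branch, so NEXT_INSN is pushed into a fresh
   dispatch group.  With -minsert-sched-nops=N (N < regroup_exact) exactly
   N nops are emitted instead, and the group bookkeeping above tracks the
   slots they consume.  */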
19956
19957/* This function tries to synch the dispatch groups that the compiler "sees"
f676971a 19958 with the dispatch groups that the processor dispatcher is expected to
cbe26ab8
DN
19959 form in practice. It tries to achieve this synchronization by forcing the
19960 estimated processor grouping on the compiler (as opposed to the function
 19961 'pad_groups' which tries to force the scheduler's grouping on the processor).
19962
19963 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
19964 examines the (estimated) dispatch groups that will be formed by the processor
19965 dispatcher. It marks these group boundaries to reflect the estimated
19966 processor grouping, overriding the grouping that the scheduler had marked.
19967 Depending on the value of the flag '-minsert-sched-nops' this function can
19968 force certain insns into separate groups or force a certain distance between
19969 them by inserting nops, for example, if there exists a "costly dependence"
19970 between the insns.
19971
19972 The function estimates the group boundaries that the processor will form as
0fa2e4df 19973 follows: It keeps track of how many vacant issue slots are available after
cbe26ab8
DN
19974 each insn. A subsequent insn will start a new group if one of the following
19975 4 cases applies:
19976 - no more vacant issue slots remain in the current dispatch group.
19977 - only the last issue slot, which is the branch slot, is vacant, but the next
19978 insn is not a branch.
 19979 - only the last 2 or fewer issue slots, including the branch slot, are vacant,
19980 which means that a cracked insn (which occupies two issue slots) can't be
19981 issued in this group.
f676971a 19982 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
cbe26ab8
DN
19983 start a new group. */
19984
19985static int
19986redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
19987{
19988 rtx insn, next_insn;
19989 int issue_rate;
19990 int can_issue_more;
19991 int slot, i;
19992 bool group_end;
19993 int group_count = 0;
19994 rtx *group_insns;
19995
19996 /* Initialize. */
19997 issue_rate = rs6000_issue_rate ();
5ead67f6 19998 group_insns = XALLOCAVEC (rtx, issue_rate);
f676971a 19999 for (i = 0; i < issue_rate; i++)
cbe26ab8
DN
20000 {
20001 group_insns[i] = 0;
20002 }
20003 can_issue_more = issue_rate;
20004 slot = 0;
20005 insn = get_next_active_insn (prev_head_insn, tail);
20006 group_end = false;
20007
20008 while (insn != NULL_RTX)
20009 {
20010 slot = (issue_rate - can_issue_more);
20011 group_insns[slot] = insn;
20012 can_issue_more =
c4ad648e 20013 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
cbe26ab8 20014 if (insn_terminates_group_p (insn, current_group))
c4ad648e 20015 can_issue_more = 0;
cbe26ab8
DN
20016
20017 next_insn = get_next_active_insn (insn, tail);
20018 if (next_insn == NULL_RTX)
c4ad648e 20019 return group_count + 1;
cbe26ab8 20020
c4ad648e
AM
20021 /* Is next_insn going to start a new group? */
20022 group_end
20023 = (can_issue_more == 0
20024 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20025 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20026 || (can_issue_more < issue_rate &&
20027 insn_terminates_group_p (next_insn, previous_group)));
cbe26ab8 20028
f676971a 20029 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
c4ad648e
AM
20030 next_insn, &group_end, can_issue_more,
20031 &group_count);
cbe26ab8
DN
20032
20033 if (group_end)
c4ad648e
AM
20034 {
20035 group_count++;
20036 can_issue_more = 0;
20037 for (i = 0; i < issue_rate; i++)
20038 {
20039 group_insns[i] = 0;
20040 }
20041 }
cbe26ab8
DN
20042
20043 if (GET_MODE (next_insn) == TImode && can_issue_more)
9390387d 20044 PUT_MODE (next_insn, VOIDmode);
cbe26ab8 20045 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
c4ad648e 20046 PUT_MODE (next_insn, TImode);
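	  /* Note added for clarity (not in the original source): the scheduler
	     marks the first insn of a dispatch group by giving it TImode, so
	     the two branches above clear or set that marker to agree with the
	     group boundaries recomputed here.  */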
cbe26ab8
DN
20047
20048 insn = next_insn;
20049 if (can_issue_more == 0)
c4ad648e
AM
20050 can_issue_more = issue_rate;
20051 } /* while */
cbe26ab8
DN
20052
20053 return group_count;
20054}
20055
20056/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20057 dispatch group boundaries that the scheduler had marked. Pad with nops
20058 any dispatch groups which have vacant issue slots, in order to force the
20059 scheduler's grouping on the processor dispatcher. The function
20060 returns the number of dispatch groups found. */
20061
20062static int
20063pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20064{
20065 rtx insn, next_insn;
20066 rtx nop;
20067 int issue_rate;
20068 int can_issue_more;
20069 int group_end;
20070 int group_count = 0;
20071
20072 /* Initialize issue_rate. */
20073 issue_rate = rs6000_issue_rate ();
20074 can_issue_more = issue_rate;
20075
20076 insn = get_next_active_insn (prev_head_insn, tail);
20077 next_insn = get_next_active_insn (insn, tail);
20078
20079 while (insn != NULL_RTX)
20080 {
20081 can_issue_more =
20082 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20083
20084 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20085
20086 if (next_insn == NULL_RTX)
c4ad648e 20087 break;
cbe26ab8
DN
20088
20089 if (group_end)
c4ad648e
AM
20090 {
20091 /* If the scheduler had marked group termination at this location
e855c69d 20092 (between insn and next_insn), and neither insn nor next_insn will
c4ad648e
AM
20093 force group termination, pad the group with nops to force group
20094 termination. */
20095 if (can_issue_more
20096 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20097 && !insn_terminates_group_p (insn, current_group)
20098 && !insn_terminates_group_p (next_insn, previous_group))
20099 {
9390387d 20100 if (!is_branch_slot_insn (next_insn))
c4ad648e
AM
20101 can_issue_more--;
20102
20103 while (can_issue_more)
20104 {
20105 nop = gen_nop ();
20106 emit_insn_before (nop, next_insn);
20107 can_issue_more--;
20108 }
20109 }
20110
20111 can_issue_more = issue_rate;
20112 group_count++;
20113 }
cbe26ab8
DN
20114
20115 insn = next_insn;
20116 next_insn = get_next_active_insn (insn, tail);
20117 }
20118
20119 return group_count;
20120}
20121
44cd321e
PS
20122/* We're beginning a new block. Initialize data structures as necessary. */
20123
20124static void
20125rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20126 int sched_verbose ATTRIBUTE_UNUSED,
20127 int max_ready ATTRIBUTE_UNUSED)
982afe02 20128{
44cd321e
PS
20129 last_scheduled_insn = NULL_RTX;
20130 load_store_pendulum = 0;
20131}
20132
cbe26ab8
DN
20133/* The following function is called at the end of scheduling BB.
 20134 After reload, it inserts nops to enforce insn group bundling. */
20135
20136static void
38f391a5 20137rs6000_sched_finish (FILE *dump, int sched_verbose)
cbe26ab8
DN
20138{
20139 int n_groups;
20140
20141 if (sched_verbose)
20142 fprintf (dump, "=== Finishing schedule.\n");
20143
ec507f2d 20144 if (reload_completed && rs6000_sched_groups)
cbe26ab8 20145 {
e855c69d
AB
20146 /* Do not run sched_finish hook when selective scheduling enabled. */
20147 if (sel_sched_p ())
20148 return;
20149
cbe26ab8 20150 if (rs6000_sched_insert_nops == sched_finish_none)
c4ad648e 20151 return;
cbe26ab8
DN
20152
20153 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
c4ad648e
AM
20154 n_groups = pad_groups (dump, sched_verbose,
20155 current_sched_info->prev_head,
20156 current_sched_info->next_tail);
cbe26ab8 20157 else
c4ad648e
AM
20158 n_groups = redefine_groups (dump, sched_verbose,
20159 current_sched_info->prev_head,
20160 current_sched_info->next_tail);
cbe26ab8
DN
20161
20162 if (sched_verbose >= 6)
20163 {
20164 fprintf (dump, "ngroups = %d\n", n_groups);
20165 print_rtl (dump, current_sched_info->prev_head);
20166 fprintf (dump, "Done finish_sched\n");
20167 }
20168 }
20169}
e855c69d
AB
20170
20171struct _rs6000_sched_context
20172{
20173 short cached_can_issue_more;
20174 rtx last_scheduled_insn;
20175 int load_store_pendulum;
20176};
20177
20178typedef struct _rs6000_sched_context rs6000_sched_context_def;
20179typedef rs6000_sched_context_def *rs6000_sched_context_t;
20180
 20181/* Allocate storage for a new scheduling context. */
20182static void *
20183rs6000_alloc_sched_context (void)
20184{
20185 return xmalloc (sizeof (rs6000_sched_context_def));
20186}
20187
 20188/* If CLEAN_P is true, initialize _SC with clean data;
 20189 otherwise, initialize it from the global context. */
20190static void
20191rs6000_init_sched_context (void *_sc, bool clean_p)
20192{
20193 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20194
20195 if (clean_p)
20196 {
20197 sc->cached_can_issue_more = 0;
20198 sc->last_scheduled_insn = NULL_RTX;
20199 sc->load_store_pendulum = 0;
20200 }
20201 else
20202 {
20203 sc->cached_can_issue_more = cached_can_issue_more;
20204 sc->last_scheduled_insn = last_scheduled_insn;
20205 sc->load_store_pendulum = load_store_pendulum;
20206 }
20207}
20208
20209/* Sets the global scheduling context to the one pointed to by _SC. */
20210static void
20211rs6000_set_sched_context (void *_sc)
20212{
20213 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20214
20215 gcc_assert (sc != NULL);
20216
20217 cached_can_issue_more = sc->cached_can_issue_more;
20218 last_scheduled_insn = sc->last_scheduled_insn;
20219 load_store_pendulum = sc->load_store_pendulum;
20220}
20221
20222/* Free _SC. */
20223static void
20224rs6000_free_sched_context (void *_sc)
20225{
20226 gcc_assert (_sc != NULL);
20227
20228 free (_sc);
20229}
20230
b6c9286a 20231\f
b6c9286a
MM
 20232/* Length in units (bytes) of the trampoline for entering a nested function. */
20233
20234int
863d938c 20235rs6000_trampoline_size (void)
b6c9286a
MM
20236{
20237 int ret = 0;
20238
20239 switch (DEFAULT_ABI)
20240 {
20241 default:
37409796 20242 gcc_unreachable ();
b6c9286a
MM
20243
20244 case ABI_AIX:
8f802bfb 20245 ret = (TARGET_32BIT) ? 12 : 24;
b6c9286a
MM
20246 break;
20247
4dabc42d 20248 case ABI_DARWIN:
b6c9286a 20249 case ABI_V4:
03a7e1a5 20250 ret = (TARGET_32BIT) ? 40 : 48;
b6c9286a 20251 break;
b6c9286a
MM
20252 }
20253
20254 return ret;
20255}
20256
20257/* Emit RTL insns to initialize the variable parts of a trampoline.
20258 FNADDR is an RTX for the address of the function's pure code.
20259 CXT is an RTX for the static chain value for the function. */
20260
20261void
a2369ed3 20262rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
b6c9286a 20263{
8bd04c56 20264 int regsize = (TARGET_32BIT) ? 4 : 8;
9613eaff 20265 rtx ctx_reg = force_reg (Pmode, cxt);
b6c9286a
MM
20266
20267 switch (DEFAULT_ABI)
20268 {
20269 default:
37409796 20270 gcc_unreachable ();
b6c9286a 20271
8bd04c56 20272/* Macros to shorten the code expansions below. */
9613eaff 20273#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
c5c76735 20274#define MEM_PLUS(addr,offset) \
9613eaff 20275 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
7c59dc5d 20276
b6c9286a
MM
 20277 /* Under AIX, just build the 3-word function descriptor. */
20278 case ABI_AIX:
8bd04c56 20279 {
9613eaff
SH
20280 rtx fn_reg = gen_reg_rtx (Pmode);
20281 rtx toc_reg = gen_reg_rtx (Pmode);
8bd04c56 20282 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
1cb18e3c 20283 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
8bd04c56
MM
20284 emit_move_insn (MEM_DEREF (addr), fn_reg);
20285 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20286 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20287 }
b6c9286a
MM
20288 break;
20289
4dabc42d
TC
20290 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20291 case ABI_DARWIN:
b6c9286a 20292 case ABI_V4:
9613eaff 20293 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
eaf1bcf1 20294 FALSE, VOIDmode, 4,
9613eaff 20295 addr, Pmode,
eaf1bcf1 20296 GEN_INT (rs6000_trampoline_size ()), SImode,
9613eaff
SH
20297 fnaddr, Pmode,
20298 ctx_reg, Pmode);
b6c9286a 20299 break;
b6c9286a
MM
20300 }
20301
20302 return;
20303}
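
/* Illustrative layout (added; not part of the original source) of the
   AIX-style trampoline initialized above, one REGSIZE slot each
   (4 bytes with -m32, 8 with -m64):

     addr + 0*regsize : code address loaded from FNADDR's descriptor
     addr + 1*regsize : TOC pointer loaded from FNADDR's descriptor
     addr + 2*regsize : static chain value CXT

   The V.4/eabi/Darwin case instead hands everything to the
   __trampoline_setup library routine, as shown above.  */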
7509c759
MM
20304
20305\f
91d231cb 20306/* Table of valid machine attributes. */
a4f6c312 20307
91d231cb 20308const struct attribute_spec rs6000_attribute_table[] =
7509c759 20309{
91d231cb 20310 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
8bb418a3 20311 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
a5c76ee6
ZW
20312 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20313 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
77ccdfed
EC
20314 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20315 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
005c1a13
GK
20316#ifdef SUBTARGET_ATTRIBUTE_TABLE
20317 SUBTARGET_ATTRIBUTE_TABLE,
20318#endif
a5c76ee6 20319 { NULL, 0, 0, false, false, false, NULL }
91d231cb 20320};
7509c759 20321
8bb418a3
ZL
20322/* Handle the "altivec" attribute. The attribute may have
20323 arguments as follows:
f676971a 20324
8bb418a3
ZL
20325 __attribute__((altivec(vector__)))
20326 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20327 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20328
20329 and may appear more than once (e.g., 'vector bool char') in a
20330 given declaration. */
20331
20332static tree
f90ac3f0
UP
20333rs6000_handle_altivec_attribute (tree *node,
20334 tree name ATTRIBUTE_UNUSED,
20335 tree args,
8bb418a3
ZL
20336 int flags ATTRIBUTE_UNUSED,
20337 bool *no_add_attrs)
20338{
20339 tree type = *node, result = NULL_TREE;
20340 enum machine_mode mode;
20341 int unsigned_p;
20342 char altivec_type
20343 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20344 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20345 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
f676971a 20346 : '?');
8bb418a3
ZL
20347
20348 while (POINTER_TYPE_P (type)
20349 || TREE_CODE (type) == FUNCTION_TYPE
20350 || TREE_CODE (type) == METHOD_TYPE
20351 || TREE_CODE (type) == ARRAY_TYPE)
20352 type = TREE_TYPE (type);
20353
20354 mode = TYPE_MODE (type);
20355
f90ac3f0
UP
20356 /* Check for invalid AltiVec type qualifiers. */
20357 if (type == long_unsigned_type_node || type == long_integer_type_node)
20358 {
20359 if (TARGET_64BIT)
20360 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20361 else if (rs6000_warn_altivec_long)
d4ee4d25 20362 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
f90ac3f0
UP
20363 }
20364 else if (type == long_long_unsigned_type_node
20365 || type == long_long_integer_type_node)
20366 error ("use of %<long long%> in AltiVec types is invalid");
20367 else if (type == double_type_node)
20368 error ("use of %<double%> in AltiVec types is invalid");
20369 else if (type == long_double_type_node)
20370 error ("use of %<long double%> in AltiVec types is invalid");
20371 else if (type == boolean_type_node)
20372 error ("use of boolean types in AltiVec types is invalid");
20373 else if (TREE_CODE (type) == COMPLEX_TYPE)
20374 error ("use of %<complex%> in AltiVec types is invalid");
00b79d54
BE
20375 else if (DECIMAL_FLOAT_MODE_P (mode))
20376 error ("use of decimal floating point types in AltiVec types is invalid");
8bb418a3
ZL
20377
20378 switch (altivec_type)
20379 {
20380 case 'v':
8df83eae 20381 unsigned_p = TYPE_UNSIGNED (type);
8bb418a3
ZL
20382 switch (mode)
20383 {
c4ad648e
AM
20384 case SImode:
20385 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20386 break;
20387 case HImode:
20388 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20389 break;
20390 case QImode:
20391 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20392 break;
20393 case SFmode: result = V4SF_type_node; break;
20394 /* If the user says 'vector int bool', we may be handed the 'bool'
20395 attribute _before_ the 'vector' attribute, and so select the
20396 proper type in the 'b' case below. */
20397 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20398 result = type;
20399 default: break;
8bb418a3
ZL
20400 }
20401 break;
20402 case 'b':
20403 switch (mode)
20404 {
c4ad648e
AM
20405 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20406 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20407 case QImode: case V16QImode: result = bool_V16QI_type_node;
20408 default: break;
8bb418a3
ZL
20409 }
20410 break;
20411 case 'p':
20412 switch (mode)
20413 {
c4ad648e
AM
20414 case V8HImode: result = pixel_V8HI_type_node;
20415 default: break;
8bb418a3
ZL
20416 }
20417 default: break;
20418 }
20419
4f538d42
UW
20420 /* Propagate qualifiers attached to the element type
20421 onto the vector type. */
20422 if (result && result != type && TYPE_QUALS (type))
20423 result = build_qualified_type (result, TYPE_QUALS (type));
7958a2a6 20424
8bb418a3
ZL
20425 *no_add_attrs = true; /* No need to hang on to the attribute. */
20426
f90ac3f0 20427 if (result)
5dc11954 20428 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
8bb418a3
ZL
20429
20430 return NULL_TREE;
20431}
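
/* Illustrative summary (added; not part of the original source) of the
   mappings performed above, for the element modes handled in the switch:

     altivec(vector__) + int            -> V4SI  ("vector signed int")
     altivec(vector__) + unsigned short -> V8HI, unsigned
     altivec(vector__) + float          -> V4SF  ("vector float")
     altivec(bool__)   + int            -> bool_V4SI  ("vector bool int")
     altivec(pixel__)  + V8HI           -> pixel_V8HI ("vector pixel")

   Qualifiers on the element type are propagated onto the resulting
   vector type.  */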
20432
f18eca82
ZL
20433/* AltiVec defines four built-in scalar types that serve as vector
20434 elements; we must teach the compiler how to mangle them. */
20435
20436static const char *
3101faab 20437rs6000_mangle_type (const_tree type)
f18eca82 20438{
608063c3
JB
20439 type = TYPE_MAIN_VARIANT (type);
20440
20441 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20442 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20443 return NULL;
20444
f18eca82
ZL
20445 if (type == bool_char_type_node) return "U6__boolc";
20446 if (type == bool_short_type_node) return "U6__bools";
20447 if (type == pixel_type_node) return "u7__pixel";
20448 if (type == bool_int_type_node) return "U6__booli";
20449
337bde91
DE
20450 /* Mangle IBM extended float long double as `g' (__float128) on
20451 powerpc*-linux where long-double-64 previously was the default. */
20452 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20453 && TARGET_ELF
20454 && TARGET_LONG_DOUBLE_128
20455 && !TARGET_IEEEQUAD)
20456 return "g";
20457
f18eca82
ZL
20458 /* For all other types, use normal C++ mangling. */
20459 return NULL;
20460}
20461
a5c76ee6
ZW
20462/* Handle a "longcall" or "shortcall" attribute; arguments as in
20463 struct attribute_spec.handler. */
a4f6c312 20464
91d231cb 20465static tree
f676971a
EC
20466rs6000_handle_longcall_attribute (tree *node, tree name,
20467 tree args ATTRIBUTE_UNUSED,
20468 int flags ATTRIBUTE_UNUSED,
a2369ed3 20469 bool *no_add_attrs)
91d231cb
JM
20470{
20471 if (TREE_CODE (*node) != FUNCTION_TYPE
20472 && TREE_CODE (*node) != FIELD_DECL
20473 && TREE_CODE (*node) != TYPE_DECL)
20474 {
5c498b10 20475 warning (OPT_Wattributes, "%qs attribute only applies to functions",
91d231cb
JM
20476 IDENTIFIER_POINTER (name));
20477 *no_add_attrs = true;
20478 }
6a4cee5f 20479
91d231cb 20480 return NULL_TREE;
7509c759
MM
20481}
20482
a5c76ee6
ZW
20483/* Set longcall attributes on all functions declared when
20484 rs6000_default_long_calls is true. */
20485static void
a2369ed3 20486rs6000_set_default_type_attributes (tree type)
a5c76ee6
ZW
20487{
20488 if (rs6000_default_long_calls
20489 && (TREE_CODE (type) == FUNCTION_TYPE
20490 || TREE_CODE (type) == METHOD_TYPE))
20491 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20492 NULL_TREE,
20493 TYPE_ATTRIBUTES (type));
16d6f994
EC
20494
20495#if TARGET_MACHO
20496 darwin_set_default_type_attributes (type);
20497#endif
a5c76ee6
ZW
20498}
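
/* Illustrative usage (added; not part of the original source) of the
   attributes handled above:

     void far_target (void) __attribute__ ((longcall));
     void near_target (void) __attribute__ ((shortcall));

   With -mlongcall (rs6000_default_long_calls), every function type is
   tagged "longcall" by default, and rs6000_longcall_ref below then forces
   such call targets into a register.  */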
20499
3cb999d8
DE
20500/* Return a reference suitable for calling a function with the
20501 longcall attribute. */
a4f6c312 20502
9390387d 20503rtx
a2369ed3 20504rs6000_longcall_ref (rtx call_ref)
6a4cee5f 20505{
d330fd93 20506 const char *call_name;
6a4cee5f
MM
20507 tree node;
20508
20509 if (GET_CODE (call_ref) != SYMBOL_REF)
20510 return call_ref;
20511
 20512 /* System V adds '.' to the internal name, so skip any leading dots. */
20513 call_name = XSTR (call_ref, 0);
20514 if (*call_name == '.')
20515 {
20516 while (*call_name == '.')
20517 call_name++;
20518
20519 node = get_identifier (call_name);
39403d82 20520 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
6a4cee5f
MM
20521 }
20522
20523 return force_reg (Pmode, call_ref);
20524}
7509c759 20525\f
77ccdfed
EC
20526#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20527#define TARGET_USE_MS_BITFIELD_LAYOUT 0
20528#endif
20529
20530/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20531 struct attribute_spec.handler. */
20532static tree
20533rs6000_handle_struct_attribute (tree *node, tree name,
20534 tree args ATTRIBUTE_UNUSED,
20535 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20536{
20537 tree *type = NULL;
20538 if (DECL_P (*node))
20539 {
20540 if (TREE_CODE (*node) == TYPE_DECL)
20541 type = &TREE_TYPE (*node);
20542 }
20543 else
20544 type = node;
20545
20546 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20547 || TREE_CODE (*type) == UNION_TYPE)))
20548 {
20549 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20550 *no_add_attrs = true;
20551 }
20552
20553 else if ((is_attribute_p ("ms_struct", name)
20554 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20555 || ((is_attribute_p ("gcc_struct", name)
20556 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20557 {
20558 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20559 IDENTIFIER_POINTER (name));
20560 *no_add_attrs = true;
20561 }
20562
20563 return NULL_TREE;
20564}
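
/* Illustrative usage (added; not part of the original source):

     struct __attribute__ ((ms_struct))  A { char c; long long l; };
     struct __attribute__ ((gcc_struct)) B { char c; long long l; };

   The handler above rejects the attribute on anything other than a struct
   or union, and refuses to combine ms_struct with gcc_struct on the same
   type.  */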
20565
20566static bool
3101faab 20567rs6000_ms_bitfield_layout_p (const_tree record_type)
77ccdfed
EC
20568{
20569 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20570 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20571 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20572}
20573\f
b64a1b53
RH
20574#ifdef USING_ELFOS_H
20575
d6b5193b 20576/* A get_unnamed_section callback, used for switching to toc_section. */
7509c759 20577
d6b5193b
RS
20578static void
20579rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20580{
20581 if (DEFAULT_ABI == ABI_AIX
20582 && TARGET_MINIMAL_TOC
20583 && !TARGET_RELOCATABLE)
20584 {
20585 if (!toc_initialized)
20586 {
20587 toc_initialized = 1;
20588 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20589 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20590 fprintf (asm_out_file, "\t.tc ");
20591 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20592 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20593 fprintf (asm_out_file, "\n");
20594
20595 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20596 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20597 fprintf (asm_out_file, " = .+32768\n");
20598 }
20599 else
20600 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20601 }
20602 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20603 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20604 else
20605 {
20606 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20607 if (!toc_initialized)
20608 {
20609 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20610 fprintf (asm_out_file, " = .+32768\n");
20611 toc_initialized = 1;
20612 }
20613 }
20614}
20615
20616/* Implement TARGET_ASM_INIT_SECTIONS. */
7509c759 20617
b64a1b53 20618static void
d6b5193b
RS
20619rs6000_elf_asm_init_sections (void)
20620{
20621 toc_section
20622 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20623
20624 sdata2_section
20625 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20626 SDATA2_SECTION_ASM_OP);
20627}
20628
20629/* Implement TARGET_SELECT_RTX_SECTION. */
20630
20631static section *
f676971a 20632rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
a2369ed3 20633 unsigned HOST_WIDE_INT align)
7509c759 20634{
a9098fd0 20635 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 20636 return toc_section;
7509c759 20637 else
d6b5193b 20638 return default_elf_select_rtx_section (mode, x, align);
7509c759 20639}
d9407988 20640\f
d1908feb
JJ
20641/* For a SYMBOL_REF, set generic flags and then perform some
20642 target-specific processing.
20643
d1908feb
JJ
20644 When the AIX ABI is requested on a non-AIX system, replace the
20645 function name with the real name (with a leading .) rather than the
20646 function descriptor name. This saves a lot of overriding code to
20647 read the prefixes. */
d9407988 20648
fb49053f 20649static void
a2369ed3 20650rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
d9407988 20651{
d1908feb 20652 default_encode_section_info (decl, rtl, first);
b2003250 20653
d1908feb
JJ
20654 if (first
20655 && TREE_CODE (decl) == FUNCTION_DECL
20656 && !TARGET_AIX
20657 && DEFAULT_ABI == ABI_AIX)
d9407988 20658 {
c6a2438a 20659 rtx sym_ref = XEXP (rtl, 0);
d1908feb 20660 size_t len = strlen (XSTR (sym_ref, 0));
5ead67f6 20661 char *str = XALLOCAVEC (char, len + 2);
d1908feb
JJ
20662 str[0] = '.';
20663 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20664 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
d9407988 20665 }
d9407988
MM
20666}
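
/* Illustrative effect (added; not part of the original source): when the
   AIX ABI is selected on an ELF (non-AIX) target, a function symbol
   originally named "foo" is rewritten here to ".foo", so references go
   straight to the code entry point rather than to a function descriptor.  */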
20667
21d9bb3f 20668static inline bool
0a2aaacc 20669compare_section_name (const char *section, const char *templ)
21d9bb3f
PB
20670{
20671 int len;
20672
0a2aaacc
KG
20673 len = strlen (templ);
20674 return (strncmp (section, templ, len) == 0
21d9bb3f
PB
20675 && (section[len] == 0 || section[len] == '.'));
20676}
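
/* Illustrative behavior (added; not part of the original source):
   compare_section_name (".sdata.str1", ".sdata") and
   compare_section_name (".sdata", ".sdata") both return true, while
   compare_section_name (".sdata2", ".sdata") returns false, because the
   character after the prefix must be '\0' or '.'; that is why ".sdata2"
   is checked separately below.  (The section names here are examples
   only.)  */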
20677
c1b7d95a 20678bool
3101faab 20679rs6000_elf_in_small_data_p (const_tree decl)
0e5dbd9b
DE
20680{
20681 if (rs6000_sdata == SDATA_NONE)
20682 return false;
20683
7482ad25
AF
20684 /* We want to merge strings, so we never consider them small data. */
20685 if (TREE_CODE (decl) == STRING_CST)
20686 return false;
20687
20688 /* Functions are never in the small data area. */
20689 if (TREE_CODE (decl) == FUNCTION_DECL)
20690 return false;
20691
0e5dbd9b
DE
20692 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20693 {
20694 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
ca2ba153
JJ
20695 if (compare_section_name (section, ".sdata")
20696 || compare_section_name (section, ".sdata2")
20697 || compare_section_name (section, ".gnu.linkonce.s")
20698 || compare_section_name (section, ".sbss")
20699 || compare_section_name (section, ".sbss2")
20700 || compare_section_name (section, ".gnu.linkonce.sb")
20bfcd69
GK
20701 || strcmp (section, ".PPC.EMB.sdata0") == 0
20702 || strcmp (section, ".PPC.EMB.sbss0") == 0)
0e5dbd9b
DE
20703 return true;
20704 }
20705 else
20706 {
20707 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20708
20709 if (size > 0
307b599c 20710 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20bfcd69
GK
20711 /* If it's not public, and we're not going to reference it there,
20712 there's no need to put it in the small data section. */
0e5dbd9b
DE
20713 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20714 return true;
20715 }
20716
20717 return false;
20718}
20719
b91da81f 20720#endif /* USING_ELFOS_H */
aacd3885
RS
20721\f
20722/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
000034eb 20723
aacd3885 20724static bool
3101faab 20725rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
aacd3885
RS
20726{
20727 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20728}
a6c2a102 20729\f
000034eb 20730/* Return a REG that occurs in ADDR with coefficient 1.
02441cd6
JL
20731 ADDR can be effectively incremented by incrementing REG.
20732
20733 r0 is special and we must not select it as an address
20734 register by this routine since our caller will try to
20735 increment the returned register via an "la" instruction. */
000034eb 20736
9390387d 20737rtx
a2369ed3 20738find_addr_reg (rtx addr)
000034eb
DE
20739{
20740 while (GET_CODE (addr) == PLUS)
20741 {
02441cd6
JL
20742 if (GET_CODE (XEXP (addr, 0)) == REG
20743 && REGNO (XEXP (addr, 0)) != 0)
000034eb 20744 addr = XEXP (addr, 0);
02441cd6
JL
20745 else if (GET_CODE (XEXP (addr, 1)) == REG
20746 && REGNO (XEXP (addr, 1)) != 0)
000034eb
DE
20747 addr = XEXP (addr, 1);
20748 else if (CONSTANT_P (XEXP (addr, 0)))
20749 addr = XEXP (addr, 1);
20750 else if (CONSTANT_P (XEXP (addr, 1)))
20751 addr = XEXP (addr, 0);
20752 else
37409796 20753 gcc_unreachable ();
000034eb 20754 }
37409796
NS
20755 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20756 return addr;
000034eb
DE
20757}
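
/* Illustrative example (added; not part of the original source): for an
   address of the form (plus (reg 9) (const_int 16)), find_addr_reg returns
   (reg 9); the caller can then advance the address with an "la"
   instruction, which is also why r0 is never returned.  */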
20758
a6c2a102 20759void
a2369ed3 20760rs6000_fatal_bad_address (rtx op)
a6c2a102
DE
20761{
20762 fatal_insn ("bad address", op);
20763}
c8023011 20764
ee890fe2
SS
20765#if TARGET_MACHO
20766
efdba735 20767static tree branch_island_list = 0;
ee890fe2 20768
efdba735
SH
20769/* Remember to generate a branch island for far calls to the given
20770 function. */
ee890fe2 20771
f676971a 20772static void
c4ad648e
AM
20773add_compiler_branch_island (tree label_name, tree function_name,
20774 int line_number)
ee890fe2 20775{
efdba735 20776 tree branch_island = build_tree_list (function_name, label_name);
7d60be94 20777 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
efdba735
SH
20778 TREE_CHAIN (branch_island) = branch_island_list;
20779 branch_island_list = branch_island;
ee890fe2
SS
20780}
20781
efdba735
SH
20782#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20783#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20784#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20785 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
ee890fe2 20786
efdba735
SH
20787/* Generate far-jump branch islands for everything on the
20788 branch_island_list. Invoked immediately after the last instruction
20789 of the epilogue has been emitted; the branch-islands must be
20790 appended to, and contiguous with, the function body. Mach-O stubs
20791 are generated in machopic_output_stub(). */
ee890fe2 20792
efdba735
SH
20793static void
20794macho_branch_islands (void)
20795{
20796 char tmp_buf[512];
20797 tree branch_island;
20798
20799 for (branch_island = branch_island_list;
20800 branch_island;
20801 branch_island = TREE_CHAIN (branch_island))
20802 {
20803 const char *label =
20804 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20805 const char *name =
11abc112 20806 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
efdba735
SH
20807 char name_buf[512];
20808 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20809 if (name[0] == '*' || name[0] == '&')
20810 strcpy (name_buf, name+1);
20811 else
20812 {
20813 name_buf[0] = '_';
20814 strcpy (name_buf+1, name);
20815 }
20816 strcpy (tmp_buf, "\n");
20817 strcat (tmp_buf, label);
ee890fe2 20818#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20819 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20820 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20821#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735
SH
20822 if (flag_pic)
20823 {
20824 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
20825 strcat (tmp_buf, label);
20826 strcat (tmp_buf, "_pic\n");
20827 strcat (tmp_buf, label);
20828 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
f676971a 20829
efdba735
SH
20830 strcat (tmp_buf, "\taddis r11,r11,ha16(");
20831 strcat (tmp_buf, name_buf);
20832 strcat (tmp_buf, " - ");
20833 strcat (tmp_buf, label);
20834 strcat (tmp_buf, "_pic)\n");
f676971a 20835
efdba735 20836 strcat (tmp_buf, "\tmtlr r0\n");
f676971a 20837
efdba735
SH
20838 strcat (tmp_buf, "\taddi r12,r11,lo16(");
20839 strcat (tmp_buf, name_buf);
20840 strcat (tmp_buf, " - ");
20841 strcat (tmp_buf, label);
20842 strcat (tmp_buf, "_pic)\n");
f676971a 20843
efdba735
SH
20844 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
20845 }
20846 else
20847 {
20848 strcat (tmp_buf, ":\nlis r12,hi16(");
20849 strcat (tmp_buf, name_buf);
20850 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
20851 strcat (tmp_buf, name_buf);
20852 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
20853 }
20854 output_asm_insn (tmp_buf, 0);
ee890fe2 20855#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
efdba735 20856 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
93a27b7b 20857 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
ee890fe2 20858#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
efdba735 20859 }
ee890fe2 20860
efdba735 20861 branch_island_list = 0;
ee890fe2
SS
20862}
20863
 20864/* NO_PREVIOUS_DEF checks in the linked list whether the function name is
 20865 already there or not. */
20866
efdba735 20867static int
a2369ed3 20868no_previous_def (tree function_name)
ee890fe2 20869{
efdba735
SH
20870 tree branch_island;
20871 for (branch_island = branch_island_list;
20872 branch_island;
20873 branch_island = TREE_CHAIN (branch_island))
20874 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
ee890fe2
SS
20875 return 0;
20876 return 1;
20877}
20878
20879/* GET_PREV_LABEL gets the label name from the previous definition of
20880 the function. */
20881
efdba735 20882static tree
a2369ed3 20883get_prev_label (tree function_name)
ee890fe2 20884{
efdba735
SH
20885 tree branch_island;
20886 for (branch_island = branch_island_list;
20887 branch_island;
20888 branch_island = TREE_CHAIN (branch_island))
20889 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
20890 return BRANCH_ISLAND_LABEL_NAME (branch_island);
ee890fe2
SS
20891 return 0;
20892}
20893
75b1b789
MS
20894#ifndef DARWIN_LINKER_GENERATES_ISLANDS
20895#define DARWIN_LINKER_GENERATES_ISLANDS 0
20896#endif
20897
20898/* KEXTs still need branch islands. */
20899#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
20900 || flag_mkernel || flag_apple_kext)
20901
ee890fe2 20902/* INSN is either a function call or a millicode call. It may have an
f676971a 20903 unconditional jump in its delay slot.
ee890fe2
SS
20904
20905 CALL_DEST is the routine we are calling. */
20906
20907char *
c4ad648e
AM
20908output_call (rtx insn, rtx *operands, int dest_operand_number,
20909 int cookie_operand_number)
ee890fe2
SS
20910{
20911 static char buf[256];
75b1b789
MS
20912 if (DARWIN_GENERATE_ISLANDS
20913 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
efdba735 20914 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
ee890fe2
SS
20915 {
20916 tree labelname;
efdba735 20917 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
f676971a 20918
ee890fe2
SS
20919 if (no_previous_def (funname))
20920 {
ee890fe2
SS
20921 rtx label_rtx = gen_label_rtx ();
20922 char *label_buf, temp_buf[256];
20923 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
20924 CODE_LABEL_NUMBER (label_rtx));
20925 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
20926 labelname = get_identifier (label_buf);
a38e7aa5 20927 add_compiler_branch_island (labelname, funname, insn_line (insn));
ee890fe2
SS
20928 }
20929 else
20930 labelname = get_prev_label (funname);
20931
efdba735
SH
20932 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
20933 instruction will reach 'foo', otherwise link as 'bl L42'".
20934 "L42" should be a 'branch island', that will do a far jump to
20935 'foo'. Branch islands are generated in
20936 macho_branch_islands(). */
ee890fe2 20937 sprintf (buf, "jbsr %%z%d,%.246s",
efdba735 20938 dest_operand_number, IDENTIFIER_POINTER (labelname));
ee890fe2
SS
20939 }
20940 else
efdba735
SH
20941 sprintf (buf, "bl %%z%d", dest_operand_number);
20942 return buf;
ee890fe2
SS
20943}
20944
ee890fe2
SS
20945/* Generate PIC and indirect symbol stubs. */
20946
20947void
a2369ed3 20948machopic_output_stub (FILE *file, const char *symb, const char *stub)
ee890fe2
SS
20949{
20950 unsigned int length;
a4f6c312
SS
20951 char *symbol_name, *lazy_ptr_name;
20952 char *local_label_0;
ee890fe2
SS
20953 static int label = 0;
20954
df56a27f 20955 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
772c5265 20956 symb = (*targetm.strip_name_encoding) (symb);
df56a27f 20957
ee890fe2 20958
ee890fe2 20959 length = strlen (symb);
5ead67f6 20960 symbol_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20961 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
20962
5ead67f6 20963 lazy_ptr_name = XALLOCAVEC (char, length + 32);
ee890fe2
SS
20964 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
20965
ee890fe2 20966 if (flag_pic == 2)
56c779bc 20967 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
ee890fe2 20968 else
56c779bc 20969 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
ee890fe2
SS
20970
20971 if (flag_pic == 2)
20972 {
d974312d
DJ
20973 fprintf (file, "\t.align 5\n");
20974
20975 fprintf (file, "%s:\n", stub);
20976 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
20977
876455fa 20978 label++;
5ead67f6 20979 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
876455fa 20980 sprintf (local_label_0, "\"L%011d$spb\"", label);
f676971a 20981
ee890fe2
SS
20982 fprintf (file, "\tmflr r0\n");
20983 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
20984 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
20985 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
20986 lazy_ptr_name, local_label_0);
20987 fprintf (file, "\tmtlr r0\n");
3d0e2d58
SS
20988 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
20989 (TARGET_64BIT ? "ldu" : "lwzu"),
ee890fe2
SS
20990 lazy_ptr_name, local_label_0);
20991 fprintf (file, "\tmtctr r12\n");
ee890fe2
SS
20992 fprintf (file, "\tbctr\n");
20993 }
20994 else
d974312d
DJ
20995 {
20996 fprintf (file, "\t.align 4\n");
20997
20998 fprintf (file, "%s:\n", stub);
20999 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21000
21001 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
d9e4e4f5
SS
21002 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21003 (TARGET_64BIT ? "ldu" : "lwzu"),
21004 lazy_ptr_name);
d974312d
DJ
21005 fprintf (file, "\tmtctr r12\n");
21006 fprintf (file, "\tbctr\n");
21007 }
f676971a 21008
56c779bc 21009 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
ee890fe2
SS
21010 fprintf (file, "%s:\n", lazy_ptr_name);
21011 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
49bd1d27
SS
21012 fprintf (file, "%sdyld_stub_binding_helper\n",
21013 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
ee890fe2
SS
21014}
21015
21016/* Legitimize PIC addresses. If the address is already
21017 position-independent, we return ORIG. Newly generated
 21018 position-independent addresses go into a reg. This is REG if
 21019 nonzero; otherwise we allocate register(s) as necessary. */
21020
4fbbe694 21021#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
ee890fe2
SS
21022
21023rtx
f676971a 21024rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
a2369ed3 21025 rtx reg)
ee890fe2
SS
21026{
21027 rtx base, offset;
21028
21029 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21030 reg = gen_reg_rtx (Pmode);
21031
21032 if (GET_CODE (orig) == CONST)
21033 {
37409796
NS
21034 rtx reg_temp;
21035
ee890fe2
SS
21036 if (GET_CODE (XEXP (orig, 0)) == PLUS
21037 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21038 return orig;
21039
37409796 21040 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
bb8df8a6 21041
37409796
NS
21042 /* Use a different reg for the intermediate value, as
21043 it will be marked UNCHANGING. */
b3a13419 21044 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
37409796
NS
21045 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21046 Pmode, reg_temp);
21047 offset =
21048 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21049 Pmode, reg);
bb8df8a6 21050
ee890fe2
SS
21051 if (GET_CODE (offset) == CONST_INT)
21052 {
21053 if (SMALL_INT (offset))
ed8908e7 21054 return plus_constant (base, INTVAL (offset));
ee890fe2
SS
21055 else if (! reload_in_progress && ! reload_completed)
21056 offset = force_reg (Pmode, offset);
21057 else
c859cda6
DJ
21058 {
21059 rtx mem = force_const_mem (Pmode, orig);
21060 return machopic_legitimize_pic_address (mem, Pmode, reg);
21061 }
ee890fe2 21062 }
f1c25d3b 21063 return gen_rtx_PLUS (Pmode, base, offset);
ee890fe2
SS
21064 }
21065
21066 /* Fall back on generic machopic code. */
21067 return machopic_legitimize_pic_address (orig, mode, reg);
21068}
21069
c4e18b1c
GK
21070/* Output a .machine directive for the Darwin assembler, and call
21071 the generic start_file routine. */
21072
21073static void
21074rs6000_darwin_file_start (void)
21075{
94ff898d 21076 static const struct
c4e18b1c
GK
21077 {
21078 const char *arg;
21079 const char *name;
21080 int if_set;
21081 } mapping[] = {
55dbfb48 21082 { "ppc64", "ppc64", MASK_64BIT },
c4e18b1c
GK
21083 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21084 { "power4", "ppc970", 0 },
21085 { "G5", "ppc970", 0 },
21086 { "7450", "ppc7450", 0 },
21087 { "7400", "ppc7400", MASK_ALTIVEC },
21088 { "G4", "ppc7400", 0 },
21089 { "750", "ppc750", 0 },
21090 { "740", "ppc750", 0 },
21091 { "G3", "ppc750", 0 },
21092 { "604e", "ppc604e", 0 },
21093 { "604", "ppc604", 0 },
21094 { "603e", "ppc603", 0 },
21095 { "603", "ppc603", 0 },
21096 { "601", "ppc601", 0 },
21097 { NULL, "ppc", 0 } };
21098 const char *cpu_id = "";
21099 size_t i;
94ff898d 21100
9390387d 21101 rs6000_file_start ();
192d0f89 21102 darwin_file_start ();
c4e18b1c
GK
21103
21104 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21105 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21106 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21107 && rs6000_select[i].string[0] != '\0')
21108 cpu_id = rs6000_select[i].string;
21109
21110 /* Look through the mapping array. Pick the first name that either
21111 matches the argument, has a bit set in IF_SET that is also set
21112 in the target flags, or has a NULL name. */
21113
21114 i = 0;
21115 while (mapping[i].arg != NULL
21116 && strcmp (mapping[i].arg, cpu_id) != 0
21117 && (mapping[i].if_set & target_flags) == 0)
21118 i++;
21119
21120 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
21121}
21122
ee890fe2 21123#endif /* TARGET_MACHO */
7c262518
RH
21124
21125#if TARGET_ELF
9b580a0b
RH
21126static int
21127rs6000_elf_reloc_rw_mask (void)
7c262518 21128{
9b580a0b
RH
21129 if (flag_pic)
21130 return 3;
21131 else if (DEFAULT_ABI == ABI_AIX)
21132 return 2;
21133 else
21134 return 0;
7c262518 21135}
d9f6800d
RH
21136
21137/* Record an element in the table of global constructors. SYMBOL is
21138 a SYMBOL_REF of the function to be called; PRIORITY is a number
21139 between 0 and MAX_INIT_PRIORITY.
21140
21141 This differs from default_named_section_asm_out_constructor in
21142 that we have special handling for -mrelocatable. */
21143
21144static void
a2369ed3 21145rs6000_elf_asm_out_constructor (rtx symbol, int priority)
d9f6800d
RH
21146{
21147 const char *section = ".ctors";
21148 char buf[16];
21149
21150 if (priority != DEFAULT_INIT_PRIORITY)
21151 {
21152 sprintf (buf, ".ctors.%.5u",
c4ad648e
AM
21153 /* Invert the numbering so the linker puts us in the proper
21154 order; constructors are run from right to left, and the
21155 linker sorts in increasing order. */
21156 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21157 section = buf;
21158 }
21159
d6b5193b 21160 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21161 assemble_align (POINTER_SIZE);
d9f6800d
RH
21162
21163 if (TARGET_RELOCATABLE)
21164 {
21165 fputs ("\t.long (", asm_out_file);
21166 output_addr_const (asm_out_file, symbol);
21167 fputs (")@fixup\n", asm_out_file);
21168 }
21169 else
c8af3574 21170 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d
RH
21171}
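
/* Illustrative example (added; not part of the original source), assuming
   MAX_INIT_PRIORITY is 65535: a constructor with priority 65500 is emitted
   into section ".ctors.00035", so the linker's increasing sort of the
   ".ctors.*" sections produces the intended right-to-left execution
   order.  */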
21172
21173static void
a2369ed3 21174rs6000_elf_asm_out_destructor (rtx symbol, int priority)
d9f6800d
RH
21175{
21176 const char *section = ".dtors";
21177 char buf[16];
21178
21179 if (priority != DEFAULT_INIT_PRIORITY)
21180 {
21181 sprintf (buf, ".dtors.%.5u",
c4ad648e
AM
21182 /* Invert the numbering so the linker puts us in the proper
21183 order; constructors are run from right to left, and the
21184 linker sorts in increasing order. */
21185 MAX_INIT_PRIORITY - priority);
d9f6800d
RH
21186 section = buf;
21187 }
21188
d6b5193b 21189 switch_to_section (get_section (section, SECTION_WRITE, NULL));
715bdd29 21190 assemble_align (POINTER_SIZE);
d9f6800d
RH
21191
21192 if (TARGET_RELOCATABLE)
21193 {
21194 fputs ("\t.long (", asm_out_file);
21195 output_addr_const (asm_out_file, symbol);
21196 fputs (")@fixup\n", asm_out_file);
21197 }
21198 else
c8af3574 21199 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
d9f6800d 21200}
9739c90c
JJ
21201
21202void
a2369ed3 21203rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
9739c90c
JJ
21204{
21205 if (TARGET_64BIT)
21206 {
21207 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21208 ASM_OUTPUT_LABEL (file, name);
21209 fputs (DOUBLE_INT_ASM_OP, file);
85b776df
AM
21210 rs6000_output_function_entry (file, name);
21211 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21212 if (DOT_SYMBOLS)
9739c90c 21213 {
85b776df 21214 fputs ("\t.size\t", file);
9739c90c 21215 assemble_name (file, name);
85b776df
AM
21216 fputs (",24\n\t.type\t.", file);
21217 assemble_name (file, name);
21218 fputs (",@function\n", file);
21219 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21220 {
21221 fputs ("\t.globl\t.", file);
21222 assemble_name (file, name);
21223 putc ('\n', file);
21224 }
9739c90c 21225 }
85b776df
AM
21226 else
21227 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
9739c90c 21228 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
85b776df
AM
21229 rs6000_output_function_entry (file, name);
21230 fputs (":\n", file);
9739c90c
JJ
21231 return;
21232 }
21233
21234 if (TARGET_RELOCATABLE
7f970b70 21235 && !TARGET_SECURE_PLT
e3b5732b 21236 && (get_pool_size () != 0 || crtl->profile)
3c9eb5f4 21237 && uses_TOC ())
9739c90c
JJ
21238 {
21239 char buf[256];
21240
21241 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21242
21243 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21244 fprintf (file, "\t.long ");
21245 assemble_name (file, buf);
21246 putc ('-', file);
21247 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21248 assemble_name (file, buf);
21249 putc ('\n', file);
21250 }
21251
21252 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21253 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21254
21255 if (DEFAULT_ABI == ABI_AIX)
21256 {
21257 const char *desc_name, *orig_name;
21258
21259 orig_name = (*targetm.strip_name_encoding) (name);
21260 desc_name = orig_name;
21261 while (*desc_name == '.')
21262 desc_name++;
21263
21264 if (TREE_PUBLIC (decl))
21265 fprintf (file, "\t.globl %s\n", desc_name);
21266
21267 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21268 fprintf (file, "%s:\n", desc_name);
21269 fprintf (file, "\t.long %s\n", orig_name);
21270 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21271 if (DEFAULT_ABI == ABI_AIX)
21272 fputs ("\t.long 0\n", file);
21273 fprintf (file, "\t.previous\n");
21274 }
21275 ASM_OUTPUT_LABEL (file, name);
21276}
1334b570
AM
21277
21278static void
21279rs6000_elf_end_indicate_exec_stack (void)
21280{
21281 if (TARGET_32BIT)
21282 file_end_indicate_exec_stack ();
21283}
7c262518
RH
21284#endif
21285
cbaaba19 21286#if TARGET_XCOFF
0d5817b2
DE
21287static void
21288rs6000_xcoff_asm_output_anchor (rtx symbol)
21289{
21290 char buffer[100];
21291
21292 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21293 SYMBOL_REF_BLOCK_OFFSET (symbol));
21294 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21295}
21296
7c262518 21297static void
a2369ed3 21298rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
b275d088
DE
21299{
21300 fputs (GLOBAL_ASM_OP, stream);
21301 RS6000_OUTPUT_BASENAME (stream, name);
21302 putc ('\n', stream);
21303}
21304
d6b5193b
RS
21305/* A get_unnamed_decl callback, used for read-only sections. PTR
21306 points to the section string variable. */
21307
21308static void
21309rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21310{
890f9edf
OH
21311 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21312 *(const char *const *) directive,
21313 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21314}
21315
21316/* Likewise for read-write sections. */
21317
21318static void
21319rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21320{
890f9edf
OH
21321 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21322 *(const char *const *) directive,
21323 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
d6b5193b
RS
21324}
21325
21326/* A get_unnamed_section callback, used for switching to toc_section. */
21327
21328static void
21329rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21330{
21331 if (TARGET_MINIMAL_TOC)
21332 {
21333 /* toc_section is always selected at least once from
21334 rs6000_xcoff_file_start, so this is guaranteed to
21335 always be defined once and only once in each file. */
21336 if (!toc_initialized)
21337 {
21338 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21339 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21340 toc_initialized = 1;
21341 }
21342 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21343 (TARGET_32BIT ? "" : ",3"));
21344 }
21345 else
21346 fputs ("\t.toc\n", asm_out_file);
21347}
21348
21349/* Implement TARGET_ASM_INIT_SECTIONS. */
21350
21351static void
21352rs6000_xcoff_asm_init_sections (void)
21353{
21354 read_only_data_section
21355 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21356 &xcoff_read_only_section_name);
21357
21358 private_data_section
21359 = get_unnamed_section (SECTION_WRITE,
21360 rs6000_xcoff_output_readwrite_section_asm_op,
21361 &xcoff_private_data_section_name);
21362
21363 read_only_private_data_section
21364 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21365 &xcoff_private_data_section_name);
21366
21367 toc_section
21368 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21369
21370 readonly_data_section = read_only_data_section;
21371 exception_section = data_section;
21372}
21373
9b580a0b
RH
21374static int
21375rs6000_xcoff_reloc_rw_mask (void)
21376{
21377 return 3;
21378}
21379
b275d088 21380static void
c18a5b6c
MM
21381rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21382 tree decl ATTRIBUTE_UNUSED)
7c262518 21383{
0e5dbd9b
DE
21384 int smclass;
21385 static const char * const suffix[3] = { "PR", "RO", "RW" };
21386
21387 if (flags & SECTION_CODE)
21388 smclass = 0;
21389 else if (flags & SECTION_WRITE)
21390 smclass = 2;
21391 else
21392 smclass = 1;
21393
5b5198f7 21394 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
0e5dbd9b 21395 (flags & SECTION_CODE) ? "." : "",
5b5198f7 21396 name, suffix[smclass], flags & SECTION_ENTSIZE);
7c262518 21397}
ae46c4e0 21398
d6b5193b 21399static section *
f676971a 21400rs6000_xcoff_select_section (tree decl, int reloc,
c4ad648e 21401 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
ae46c4e0 21402{
9b580a0b 21403 if (decl_readonly_section (decl, reloc))
ae46c4e0 21404 {
0e5dbd9b 21405 if (TREE_PUBLIC (decl))
d6b5193b 21406 return read_only_data_section;
ae46c4e0 21407 else
d6b5193b 21408 return read_only_private_data_section;
ae46c4e0
RH
21409 }
21410 else
21411 {
0e5dbd9b 21412 if (TREE_PUBLIC (decl))
d6b5193b 21413 return data_section;
ae46c4e0 21414 else
d6b5193b 21415 return private_data_section;
ae46c4e0
RH
21416 }
21417}
21418
21419static void
a2369ed3 21420rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
ae46c4e0
RH
21421{
21422 const char *name;
ae46c4e0 21423
5b5198f7
DE
21424 /* Use select_section for private and uninitialized data. */
21425 if (!TREE_PUBLIC (decl)
21426 || DECL_COMMON (decl)
0e5dbd9b
DE
21427 || DECL_INITIAL (decl) == NULL_TREE
21428 || DECL_INITIAL (decl) == error_mark_node
21429 || (flag_zero_initialized_in_bss
21430 && initializer_zerop (DECL_INITIAL (decl))))
21431 return;
21432
21433 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21434 name = (*targetm.strip_name_encoding) (name);
21435 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
ae46c4e0 21436}
b64a1b53 21437
fb49053f
RH
21438/* Select section for constant in constant pool.
21439
21440 On RS/6000, all constants are in the private read-only data area.
21441 However, if this is being placed in the TOC it must be output as a
21442 toc entry. */
21443
d6b5193b 21444static section *
f676971a 21445rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
c4ad648e 21446 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53
RH
21447{
21448 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
d6b5193b 21449 return toc_section;
b64a1b53 21450 else
d6b5193b 21451 return read_only_private_data_section;
b64a1b53 21452}
772c5265
RH
21453
21454/* Remove any trailing [DS] or the like from the symbol name. */
21455
21456static const char *
a2369ed3 21457rs6000_xcoff_strip_name_encoding (const char *name)
772c5265
RH
21458{
21459 size_t len;
21460 if (*name == '*')
21461 name++;
21462 len = strlen (name);
21463 if (name[len - 1] == ']')
21464 return ggc_alloc_string (name, len - 4);
21465 else
21466 return name;
21467}
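
/* Illustrative behavior (added; not part of the original source):
   "foo[DS]" -> "foo", "*bar[RW]" -> "bar", "baz" -> "baz".  Note that the
   code assumes any trailing bracketed suffix is exactly four characters
   ("[XX]").  */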
21468
5add3202
DE
21469/* Section attributes. AIX is always PIC. */
21470
21471static unsigned int
a2369ed3 21472rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
5add3202 21473{
5b5198f7 21474 unsigned int align;
9b580a0b 21475 unsigned int flags = default_section_type_flags (decl, name, reloc);
5b5198f7
DE
21476
21477 /* Align to at least UNIT size. */
21478 if (flags & SECTION_CODE)
21479 align = MIN_UNITS_PER_WORD;
21480 else
21481 /* Increase alignment of large objects if not already stricter. */
21482 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21483 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21484 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21485
21486 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
5add3202 21487}
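
/* Illustrative sketch only (not part of the port): the alignment chosen
   above is stored as its base-2 logarithm in the SECTION_ENTSIZE bits of
   the returned flags, which is also the value rs6000_xcoff_asm_named_section
   prints as the .csect alignment operand.  The hypothetical helper below
   shows how the byte alignment would be recovered from such a flags word,
   assuming SECTION_ENTSIZE masks the low-order bits as the use of
   exact_log2 above relies on.  */
static unsigned int
xcoff_flags_to_alignment_sketch (unsigned int flags)
{
  /* The entity-size field holds exact_log2 (align), so shifting 1 left
     by that value yields the alignment in bytes.  */
  return 1U << (flags & SECTION_ENTSIZE);
}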
a5fe455b 21488
1bc7c5b6
ZW
21489/* Output at beginning of assembler file.
21490
21491 Initialize the section names for the RS/6000 at this point.
21492
21493 Specify filename, including full path, to assembler.
21494
21495 We want to go into the TOC section so at least one .toc will be emitted.
21496 Also, in order to output proper .bs/.es pairs, we need at least one static
21497 [RW] section emitted.
21498
21499 Finally, declare mcount when profiling to make the assembler happy. */
21500
21501static void
863d938c 21502rs6000_xcoff_file_start (void)
1bc7c5b6
ZW
21503{
21504 rs6000_gen_section_name (&xcoff_bss_section_name,
21505 main_input_filename, ".bss_");
21506 rs6000_gen_section_name (&xcoff_private_data_section_name,
21507 main_input_filename, ".rw_");
21508 rs6000_gen_section_name (&xcoff_read_only_section_name,
21509 main_input_filename, ".ro_");
21510
21511 fputs ("\t.file\t", asm_out_file);
21512 output_quoted_string (asm_out_file, main_input_filename);
21513 fputc ('\n', asm_out_file);
1bc7c5b6 21514 if (write_symbols != NO_DEBUG)
d6b5193b
RS
21515 switch_to_section (private_data_section);
21516 switch_to_section (text_section);
1bc7c5b6
ZW
21517 if (profile_flag)
21518 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21519 rs6000_file_start ();
21520}
21521
a5fe455b
ZW
21522/* Output at end of assembler file.
21523 On the RS/6000, referencing data should automatically pull in text. */
21524
21525static void
863d938c 21526rs6000_xcoff_file_end (void)
a5fe455b 21527{
d6b5193b 21528 switch_to_section (text_section);
a5fe455b 21529 fputs ("_section_.text:\n", asm_out_file);
d6b5193b 21530 switch_to_section (data_section);
a5fe455b
ZW
21531 fputs (TARGET_32BIT
21532 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21533 asm_out_file);
21534}
f1384257 21535#endif /* TARGET_XCOFF */
0e5dbd9b 21536
3c50106f
RH
21537/* Compute a (partial) cost for rtx X. Return true if the complete
21538 cost has been computed, and false if subexpressions should be
21539 scanned. In either case, *TOTAL contains the cost result. */
21540
21541static bool
f40751dd
JH
21542rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21543 bool speed)
3c50106f 21544{
f0517163
RS
21545 enum machine_mode mode = GET_MODE (x);
21546
3c50106f
RH
21547 switch (code)
21548 {
30a555d9 21549 /* On the RS/6000, if it is valid in the insn, it is free. */
3c50106f 21550 case CONST_INT:
066cd967
DE
21551 if (((outer_code == SET
21552 || outer_code == PLUS
21553 || outer_code == MINUS)
279bb624
DE
21554 && (satisfies_constraint_I (x)
21555 || satisfies_constraint_L (x)))
066cd967 21556 || (outer_code == AND
279bb624
DE
21557 && (satisfies_constraint_K (x)
21558 || (mode == SImode
21559 ? satisfies_constraint_L (x)
21560 : satisfies_constraint_J (x))
1990cd79
AM
21561 || mask_operand (x, mode)
21562 || (mode == DImode
21563 && mask64_operand (x, DImode))))
22e54023 21564 || ((outer_code == IOR || outer_code == XOR)
279bb624
DE
21565 && (satisfies_constraint_K (x)
21566 || (mode == SImode
21567 ? satisfies_constraint_L (x)
21568 : satisfies_constraint_J (x))))
066cd967
DE
21569 || outer_code == ASHIFT
21570 || outer_code == ASHIFTRT
21571 || outer_code == LSHIFTRT
21572 || outer_code == ROTATE
21573 || outer_code == ROTATERT
d5861a7a 21574 || outer_code == ZERO_EXTRACT
066cd967 21575 || (outer_code == MULT
279bb624 21576 && satisfies_constraint_I (x))
22e54023
DE
21577 || ((outer_code == DIV || outer_code == UDIV
21578 || outer_code == MOD || outer_code == UMOD)
21579 && exact_log2 (INTVAL (x)) >= 0)
066cd967 21580 || (outer_code == COMPARE
279bb624
DE
21581 && (satisfies_constraint_I (x)
21582 || satisfies_constraint_K (x)))
22e54023 21583 || (outer_code == EQ
279bb624
DE
21584 && (satisfies_constraint_I (x)
21585 || satisfies_constraint_K (x)
21586 || (mode == SImode
21587 ? satisfies_constraint_L (x)
21588 : satisfies_constraint_J (x))))
22e54023 21589 || (outer_code == GTU
279bb624 21590 && satisfies_constraint_I (x))
22e54023 21591 || (outer_code == LTU
279bb624 21592 && satisfies_constraint_P (x)))
066cd967
DE
21593 {
21594 *total = 0;
21595 return true;
21596 }
21597 else if ((outer_code == PLUS
4ae234b0 21598 && reg_or_add_cint_operand (x, VOIDmode))
066cd967 21599 || (outer_code == MINUS
4ae234b0 21600 && reg_or_sub_cint_operand (x, VOIDmode))
066cd967
DE
21601 || ((outer_code == SET
21602 || outer_code == IOR
21603 || outer_code == XOR)
21604 && (INTVAL (x)
21605 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21606 {
21607 *total = COSTS_N_INSNS (1);
21608 return true;
21609 }
21610 /* FALLTHRU */
21611
21612 case CONST_DOUBLE:
f6fe3a22 21613 if (mode == DImode && code == CONST_DOUBLE)
066cd967 21614 {
f6fe3a22
DE
21615 if ((outer_code == IOR || outer_code == XOR)
21616 && CONST_DOUBLE_HIGH (x) == 0
21617 && (CONST_DOUBLE_LOW (x)
21618 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21619 {
21620 *total = 0;
21621 return true;
21622 }
21623 else if ((outer_code == AND && and64_2_operand (x, DImode))
21624 || ((outer_code == SET
21625 || outer_code == IOR
21626 || outer_code == XOR)
21627 && CONST_DOUBLE_HIGH (x) == 0))
21628 {
21629 *total = COSTS_N_INSNS (1);
21630 return true;
21631 }
066cd967
DE
21632 }
21633 /* FALLTHRU */
21634
3c50106f 21635 case CONST:
066cd967 21636 case HIGH:
3c50106f 21637 case SYMBOL_REF:
066cd967
DE
21638 case MEM:
21639 /* When optimizing for size, MEM should be slightly more expensive
21640 than generating the address, e.g., (plus (reg) (const)).

c112cf2b 21641 L1 cache latency is about two instructions. */
f40751dd 21642 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
3c50106f
RH
21643 return true;
21644
30a555d9
DE
21645 case LABEL_REF:
21646 *total = 0;
21647 return true;
21648
3c50106f 21649 case PLUS:
f0517163 21650 if (mode == DFmode)
066cd967
DE
21651 {
21652 if (GET_CODE (XEXP (x, 0)) == MULT)
21653 {
21654 /* FNMA accounted in outer NEG. */
21655 if (outer_code == NEG)
21656 *total = rs6000_cost->dmul - rs6000_cost->fp;
21657 else
21658 *total = rs6000_cost->dmul;
21659 }
21660 else
21661 *total = rs6000_cost->fp;
21662 }
f0517163 21663 else if (mode == SFmode)
066cd967
DE
21664 {
21665 /* FNMA accounted in outer NEG. */
21666 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21667 *total = 0;
21668 else
21669 *total = rs6000_cost->fp;
21670 }
f0517163 21671 else
066cd967
DE
21672 *total = COSTS_N_INSNS (1);
21673 return false;
3c50106f 21674
52190329 21675 case MINUS:
f0517163 21676 if (mode == DFmode)
066cd967 21677 {
762c919f
JM
21678 if (GET_CODE (XEXP (x, 0)) == MULT
21679 || GET_CODE (XEXP (x, 1)) == MULT)
066cd967
DE
21680 {
21681 /* FNMA accounted in outer NEG. */
21682 if (outer_code == NEG)
762c919f 21683 *total = rs6000_cost->dmul - rs6000_cost->fp;
066cd967
DE
21684 else
21685 *total = rs6000_cost->dmul;
21686 }
21687 else
21688 *total = rs6000_cost->fp;
21689 }
f0517163 21690 else if (mode == SFmode)
066cd967
DE
21691 {
21692 /* FNMA accounted in outer NEG. */
21693 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21694 *total = 0;
21695 else
21696 *total = rs6000_cost->fp;
21697 }
f0517163 21698 else
c4ad648e 21699 *total = COSTS_N_INSNS (1);
066cd967 21700 return false;
3c50106f
RH
21701
21702 case MULT:
c9dbf840 21703 if (GET_CODE (XEXP (x, 1)) == CONST_INT
279bb624 21704 && satisfies_constraint_I (XEXP (x, 1)))
3c50106f 21705 {
8b897cfa
RS
21706 if (INTVAL (XEXP (x, 1)) >= -256
21707 && INTVAL (XEXP (x, 1)) <= 255)
06a67bdd 21708 *total = rs6000_cost->mulsi_const9;
8b897cfa 21709 else
06a67bdd 21710 *total = rs6000_cost->mulsi_const;
3c50106f 21711 }
066cd967
DE
21712 /* FMA accounted in outer PLUS/MINUS. */
21713 else if ((mode == DFmode || mode == SFmode)
21714 && (outer_code == PLUS || outer_code == MINUS))
21715 *total = 0;
f0517163 21716 else if (mode == DFmode)
06a67bdd 21717 *total = rs6000_cost->dmul;
f0517163 21718 else if (mode == SFmode)
06a67bdd 21719 *total = rs6000_cost->fp;
f0517163 21720 else if (mode == DImode)
06a67bdd 21721 *total = rs6000_cost->muldi;
8b897cfa 21722 else
06a67bdd 21723 *total = rs6000_cost->mulsi;
066cd967 21724 return false;
3c50106f
RH
21725
21726 case DIV:
21727 case MOD:
f0517163
RS
21728 if (FLOAT_MODE_P (mode))
21729 {
06a67bdd
RS
21730 *total = mode == DFmode ? rs6000_cost->ddiv
21731 : rs6000_cost->sdiv;
066cd967 21732 return false;
f0517163 21733 }
5efb1046 21734 /* FALLTHRU */
3c50106f
RH
21735
21736 case UDIV:
21737 case UMOD:
627b6fe2
DJ
21738 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21739 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21740 {
21741 if (code == DIV || code == MOD)
21742 /* Shift, addze */
21743 *total = COSTS_N_INSNS (2);
21744 else
21745 /* Shift */
21746 *total = COSTS_N_INSNS (1);
21747 }
c4ad648e 21748 else
627b6fe2
DJ
21749 {
21750 if (GET_MODE (XEXP (x, 1)) == DImode)
21751 *total = rs6000_cost->divdi;
21752 else
21753 *total = rs6000_cost->divsi;
21754 }
21755 /* Add in shift and subtract for MOD. */
21756 if (code == MOD || code == UMOD)
21757 *total += COSTS_N_INSNS (2);
066cd967 21758 return false;
3c50106f 21759
32f56aad 21760 case CTZ:
3c50106f
RH
21761 case FFS:
21762 *total = COSTS_N_INSNS (4);
066cd967 21763 return false;
3c50106f 21764
32f56aad
DE
21765 case POPCOUNT:
21766 *total = COSTS_N_INSNS (6);
21767 return false;
21768
06a67bdd 21769 case NOT:
066cd967
DE
21770 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21771 {
21772 *total = 0;
21773 return false;
21774 }
21775 /* FALLTHRU */
21776
21777 case AND:
32f56aad 21778 case CLZ:
066cd967
DE
21779 case IOR:
21780 case XOR:
d5861a7a
DE
21781 case ZERO_EXTRACT:
21782 *total = COSTS_N_INSNS (1);
21783 return false;
21784
066cd967
DE
21785 case ASHIFT:
21786 case ASHIFTRT:
21787 case LSHIFTRT:
21788 case ROTATE:
21789 case ROTATERT:
d5861a7a 21790 /* Handle mul_highpart. */
066cd967
DE
21791 if (outer_code == TRUNCATE
21792 && GET_CODE (XEXP (x, 0)) == MULT)
21793 {
21794 if (mode == DImode)
21795 *total = rs6000_cost->muldi;
21796 else
21797 *total = rs6000_cost->mulsi;
21798 return true;
21799 }
d5861a7a
DE
21800 else if (outer_code == AND)
21801 *total = 0;
21802 else
21803 *total = COSTS_N_INSNS (1);
21804 return false;
21805
21806 case SIGN_EXTEND:
21807 case ZERO_EXTEND:
21808 if (GET_CODE (XEXP (x, 0)) == MEM)
21809 *total = 0;
21810 else
21811 *total = COSTS_N_INSNS (1);
066cd967 21812 return false;
06a67bdd 21813
066cd967
DE
21814 case COMPARE:
21815 case NEG:
21816 case ABS:
21817 if (!FLOAT_MODE_P (mode))
21818 {
21819 *total = COSTS_N_INSNS (1);
21820 return false;
21821 }
21822 /* FALLTHRU */
21823
21824 case FLOAT:
21825 case UNSIGNED_FLOAT:
21826 case FIX:
21827 case UNSIGNED_FIX:
06a67bdd
RS
21828 case FLOAT_TRUNCATE:
21829 *total = rs6000_cost->fp;
066cd967 21830 return false;
06a67bdd 21831
a2af5043
DJ
21832 case FLOAT_EXTEND:
21833 if (mode == DFmode)
21834 *total = 0;
21835 else
21836 *total = rs6000_cost->fp;
21837 return false;
21838
06a67bdd
RS
21839 case UNSPEC:
21840 switch (XINT (x, 1))
21841 {
21842 case UNSPEC_FRSP:
21843 *total = rs6000_cost->fp;
21844 return true;
21845
21846 default:
21847 break;
21848 }
21849 break;
21850
21851 case CALL:
21852 case IF_THEN_ELSE:
f40751dd 21853 if (!speed)
06a67bdd
RS
21854 {
21855 *total = COSTS_N_INSNS (1);
21856 return true;
21857 }
066cd967
DE
21858 else if (FLOAT_MODE_P (mode)
21859 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
21860 {
21861 *total = rs6000_cost->fp;
21862 return false;
21863 }
06a67bdd
RS
21864 break;
21865
c0600ecd
DE
21866 case EQ:
21867 case GTU:
21868 case LTU:
22e54023
DE
21869 /* Carry bit requires mode == Pmode.
21870 NEG or PLUS already counted so only add one. */
21871 if (mode == Pmode
21872 && (outer_code == NEG || outer_code == PLUS))
c0600ecd 21873 {
22e54023
DE
21874 *total = COSTS_N_INSNS (1);
21875 return true;
21876 }
21877 if (outer_code == SET)
21878 {
21879 if (XEXP (x, 1) == const0_rtx)
c0600ecd 21880 {
22e54023 21881 *total = COSTS_N_INSNS (2);
c0600ecd 21882 return true;
c0600ecd 21883 }
22e54023
DE
21884 else if (mode == Pmode)
21885 {
21886 *total = COSTS_N_INSNS (3);
21887 return false;
21888 }
21889 }
21890 /* FALLTHRU */
21891
21892 case GT:
21893 case LT:
21894 case UNORDERED:
21895 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
21896 {
21897 *total = COSTS_N_INSNS (2);
21898 return true;
c0600ecd 21899 }
22e54023
DE
21900 /* CC COMPARE. */
21901 if (outer_code == COMPARE)
21902 {
21903 *total = 0;
21904 return true;
21905 }
21906 break;
c0600ecd 21907
3c50106f 21908 default:
06a67bdd 21909 break;
3c50106f 21910 }
06a67bdd
RS
21911
21912 return false;
3c50106f
RH
21913}
21914
34bb030a
DE
21915/* A C expression returning the cost of moving data from a register of class
21916 CLASS1 to one of CLASS2. */
21917
21918int
f676971a 21919rs6000_register_move_cost (enum machine_mode mode,
a2369ed3 21920 enum reg_class from, enum reg_class to)
34bb030a
DE
21921{
21922 /* Moves from/to GENERAL_REGS. */
21923 if (reg_classes_intersect_p (to, GENERAL_REGS)
21924 || reg_classes_intersect_p (from, GENERAL_REGS))
21925 {
21926 if (! reg_classes_intersect_p (to, GENERAL_REGS))
21927 from = to;
21928
21929 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
21930 return (rs6000_memory_move_cost (mode, from, 0)
21931 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
21932
c4ad648e
AM
21933 /* It's more expensive to move CR_REGS than CR0_REGS because of the
21934 shift. */
34bb030a
DE
21935 else if (from == CR_REGS)
21936 return 4;
21937
aafc759a
PH
21938 /* Power6 has slower LR/CTR moves so make them more expensive than
21939 memory in order to bias spills to memory.  */
21940 else if (rs6000_cpu == PROCESSOR_POWER6
21941 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
21942 return 6 * hard_regno_nregs[0][mode];
21943
34bb030a 21944 else
c4ad648e 21945 /* A move will cost one instruction per GPR moved. */
c8b622ff 21946 return 2 * hard_regno_nregs[0][mode];
34bb030a
DE
21947 }
21948
c4ad648e 21949 /* Moving between two similar registers is just one instruction. */
34bb030a 21950 else if (reg_classes_intersect_p (to, from))
7393f7f8 21951 return (mode == TFmode || mode == TDmode) ? 4 : 2;
34bb030a 21952
c4ad648e 21953 /* Everything else has to go through GENERAL_REGS. */
34bb030a 21954 else
f676971a 21955 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
34bb030a
DE
21956 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
21957}
21958
21959/* A C expression returning the cost of moving data of MODE from a register to
21960 or from memory. */
21961
21962int
0a2aaacc 21963rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
a2369ed3 21964 int in ATTRIBUTE_UNUSED)
34bb030a 21965{
0a2aaacc 21966 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
c8b622ff 21967 return 4 * hard_regno_nregs[0][mode];
0a2aaacc 21968 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
c8b622ff 21969 return 4 * hard_regno_nregs[32][mode];
0a2aaacc 21970 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
c8b622ff 21971 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
34bb030a 21972 else
0a2aaacc 21973 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
34bb030a
DE
21974}
21975
9c78b944
DE
21976/* Returns a code for a target-specific builtin that implements
21977 reciprocal of the function, or NULL_TREE if not available. */
21978
21979static tree
21980rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
21981 bool sqrt ATTRIBUTE_UNUSED)
21982{
21983 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
21984 && flag_finite_math_only && !flag_trapping_math
21985 && flag_unsafe_math_optimizations))
21986 return NULL_TREE;
21987
21988 if (md_fn)
21989 return NULL_TREE;
21990 else
21991 switch (fn)
21992 {
21993 case BUILT_IN_SQRTF:
21994 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
21995
21996 default:
21997 return NULL_TREE;
21998 }
21999}
22000
ef765ea9
DE
22001/* Newton-Raphson approximation of single-precision floating point divide n/d.
22002 Assumes no trapping math and finite arguments. */
22003
22004void
9c78b944 22005rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22006{
22007 rtx x0, e0, e1, y1, u0, v0, one;
22008
22009 x0 = gen_reg_rtx (SFmode);
22010 e0 = gen_reg_rtx (SFmode);
22011 e1 = gen_reg_rtx (SFmode);
22012 y1 = gen_reg_rtx (SFmode);
22013 u0 = gen_reg_rtx (SFmode);
22014 v0 = gen_reg_rtx (SFmode);
22015 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22016
22017 /* x0 = 1./d estimate */
22018 emit_insn (gen_rtx_SET (VOIDmode, x0,
22019 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22020 UNSPEC_FRES)));
22021 /* e0 = 1. - d * x0 */
22022 emit_insn (gen_rtx_SET (VOIDmode, e0,
22023 gen_rtx_MINUS (SFmode, one,
22024 gen_rtx_MULT (SFmode, d, x0))));
22025 /* e1 = e0 + e0 * e0 */
22026 emit_insn (gen_rtx_SET (VOIDmode, e1,
22027 gen_rtx_PLUS (SFmode,
22028 gen_rtx_MULT (SFmode, e0, e0), e0)));
22029 /* y1 = x0 + e1 * x0 */
22030 emit_insn (gen_rtx_SET (VOIDmode, y1,
22031 gen_rtx_PLUS (SFmode,
22032 gen_rtx_MULT (SFmode, e1, x0), x0)));
22033 /* u0 = n * y1 */
22034 emit_insn (gen_rtx_SET (VOIDmode, u0,
22035 gen_rtx_MULT (SFmode, n, y1)));
22036 /* v0 = n - d * u0 */
22037 emit_insn (gen_rtx_SET (VOIDmode, v0,
22038 gen_rtx_MINUS (SFmode, n,
22039 gen_rtx_MULT (SFmode, d, u0))));
9c78b944
DE
22040 /* dst = u0 + v0 * y1 */
22041 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22042 gen_rtx_PLUS (SFmode,
22043 gen_rtx_MULT (SFmode, v0, y1), u0)));
22044}
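
/* Illustrative sketch only (not part of the port): the same refinement
   sequence in plain C float arithmetic.  The function name is hypothetical,
   and the initial estimate uses an ordinary divide as a stand-in for the
   fres reciprocal-estimate instruction, so the numerical behaviour only
   approximates what the emitted RTL computes.  */
static float
swdivsf_sketch (float n, float d)
{
  float x0 = 1.0f / d;            /* x0 = 1/d estimate (fres stand-in) */
  float e0 = 1.0f - d * x0;       /* e0 = 1 - d*x0 */
  float e1 = e0 + e0 * e0;        /* e1 = e0 + e0*e0 */
  float y1 = x0 + e1 * x0;        /* y1 = x0 + e1*x0  (refined 1/d) */
  float u0 = n * y1;              /* u0 = n*y1        (first quotient) */
  float v0 = n - d * u0;          /* v0 = n - d*u0    (residual) */
  return u0 + v0 * y1;            /* correct the quotient by the residual */
}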
22045
22046/* Newton-Raphson approximation of double-precision floating point divide n/d.
22047 Assumes no trapping math and finite arguments. */
22048
22049void
9c78b944 22050rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
ef765ea9
DE
22051{
22052 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22053
22054 x0 = gen_reg_rtx (DFmode);
22055 e0 = gen_reg_rtx (DFmode);
22056 e1 = gen_reg_rtx (DFmode);
22057 e2 = gen_reg_rtx (DFmode);
22058 y1 = gen_reg_rtx (DFmode);
22059 y2 = gen_reg_rtx (DFmode);
22060 y3 = gen_reg_rtx (DFmode);
22061 u0 = gen_reg_rtx (DFmode);
22062 v0 = gen_reg_rtx (DFmode);
22063 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22064
22065 /* x0 = 1./d estimate */
22066 emit_insn (gen_rtx_SET (VOIDmode, x0,
22067 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22068 UNSPEC_FRES)));
22069 /* e0 = 1. - d * x0 */
22070 emit_insn (gen_rtx_SET (VOIDmode, e0,
22071 gen_rtx_MINUS (DFmode, one,
22072 gen_rtx_MULT (DFmode, d, x0))));
22073 /* y1 = x0 + e0 * x0 */
22074 emit_insn (gen_rtx_SET (VOIDmode, y1,
22075 gen_rtx_PLUS (DFmode,
22076 gen_rtx_MULT (DFmode, e0, x0), x0)));
22077 /* e1 = e0 * e0 */
22078 emit_insn (gen_rtx_SET (VOIDmode, e1,
22079 gen_rtx_MULT (DFmode, e0, e0)));
22080 /* y2 = y1 + e1 * y1 */
22081 emit_insn (gen_rtx_SET (VOIDmode, y2,
22082 gen_rtx_PLUS (DFmode,
22083 gen_rtx_MULT (DFmode, e1, y1), y1)));
22084 /* e2 = e1 * e1 */
22085 emit_insn (gen_rtx_SET (VOIDmode, e2,
22086 gen_rtx_MULT (DFmode, e1, e1)));
22087 /* y3 = y2 + e2 * y2 */
22088 emit_insn (gen_rtx_SET (VOIDmode, y3,
22089 gen_rtx_PLUS (DFmode,
22090 gen_rtx_MULT (DFmode, e2, y2), y2)));
22091 /* u0 = n * y3 */
22092 emit_insn (gen_rtx_SET (VOIDmode, u0,
22093 gen_rtx_MULT (DFmode, n, y3)));
22094 /* v0 = n - d * u0 */
22095 emit_insn (gen_rtx_SET (VOIDmode, v0,
22096 gen_rtx_MINUS (DFmode, n,
22097 gen_rtx_MULT (DFmode, d, u0))));
9c78b944
DE
22098 /* dst = u0 + v0 * y3 */
22099 emit_insn (gen_rtx_SET (VOIDmode, dst,
ef765ea9
DE
22100 gen_rtx_PLUS (DFmode,
22101 gen_rtx_MULT (DFmode, v0, y3), u0)));
22102}
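
/* Illustrative sketch only (not part of the port): the double-precision
   sequence in plain C arithmetic.  Compared with the single-precision
   version it squares the error term twice (e1 = e0*e0, e2 = e1*e1), giving
   two further quadratically converging refinements of the reciprocal before
   the quotient is formed and corrected.  The function name is hypothetical
   and an ordinary divide stands in for the hardware reciprocal estimate.  */
static double
swdivdf_sketch (double n, double d)
{
  double x0 = 1.0 / d;            /* x0 = 1/d estimate */
  double e0 = 1.0 - d * x0;       /* e0 = 1 - d*x0 */
  double y1 = x0 + e0 * x0;       /* y1 = x0 + e0*x0 */
  double e1 = e0 * e0;            /* e1 = e0*e0 */
  double y2 = y1 + e1 * y1;       /* y2 = y1 + e1*y1 */
  double e2 = e1 * e1;            /* e2 = e1*e1 */
  double y3 = y2 + e2 * y2;       /* y3 = y2 + e2*y2  (refined 1/d) */
  double u0 = n * y3;             /* u0 = n*y3        (first quotient) */
  double v0 = n - d * u0;         /* v0 = n - d*u0    (residual) */
  return u0 + v0 * y3;            /* dst = u0 + v0*y3 */
}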
22103
565ef4ba 22104
9c78b944
DE
22105/* Newton-Raphson approximation of single-precision floating point rsqrt.
22106 Assumes no trapping math and finite arguments. */
22107
22108void
22109rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22110{
22111 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22112 half, one, halfthree, c1, cond, label;
22113
22114 x0 = gen_reg_rtx (SFmode);
22115 x1 = gen_reg_rtx (SFmode);
22116 x2 = gen_reg_rtx (SFmode);
22117 y1 = gen_reg_rtx (SFmode);
22118 u0 = gen_reg_rtx (SFmode);
22119 u1 = gen_reg_rtx (SFmode);
22120 u2 = gen_reg_rtx (SFmode);
22121 v0 = gen_reg_rtx (SFmode);
22122 v1 = gen_reg_rtx (SFmode);
22123 v2 = gen_reg_rtx (SFmode);
22124 t0 = gen_reg_rtx (SFmode);
22125 halfthree = gen_reg_rtx (SFmode);
22126 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22127 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22128
22129 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
22130 emit_insn (gen_rtx_SET (VOIDmode, t0,
22131 gen_rtx_MULT (SFmode, src, src)));
22132
22133 emit_insn (gen_rtx_SET (VOIDmode, cond,
22134 gen_rtx_COMPARE (CCFPmode, t0, src)));
22135 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22136 emit_unlikely_jump (c1, label);
22137
22138 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22139 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22140
22141 /* halfthree = 1.5 = 1.0 + 0.5 */
22142 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22143 gen_rtx_PLUS (SFmode, one, half)));
22144
22145 /* x0 = rsqrt estimate */
22146 emit_insn (gen_rtx_SET (VOIDmode, x0,
22147 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22148 UNSPEC_RSQRT)));
22149
22150 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22151 emit_insn (gen_rtx_SET (VOIDmode, y1,
22152 gen_rtx_MINUS (SFmode,
22153 gen_rtx_MULT (SFmode, src, halfthree),
22154 src)));
22155
22156 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22157 emit_insn (gen_rtx_SET (VOIDmode, u0,
22158 gen_rtx_MULT (SFmode, x0, x0)));
22159 emit_insn (gen_rtx_SET (VOIDmode, v0,
22160 gen_rtx_MINUS (SFmode,
22161 halfthree,
22162 gen_rtx_MULT (SFmode, y1, u0))));
22163 emit_insn (gen_rtx_SET (VOIDmode, x1,
22164 gen_rtx_MULT (SFmode, x0, v0)));
22165
22166 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22167 emit_insn (gen_rtx_SET (VOIDmode, u1,
22168 gen_rtx_MULT (SFmode, x1, x1)));
22169 emit_insn (gen_rtx_SET (VOIDmode, v1,
22170 gen_rtx_MINUS (SFmode,
22171 halfthree,
22172 gen_rtx_MULT (SFmode, y1, u1))));
22173 emit_insn (gen_rtx_SET (VOIDmode, x2,
22174 gen_rtx_MULT (SFmode, x1, v1)));
22175
22176 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22177 emit_insn (gen_rtx_SET (VOIDmode, u2,
22178 gen_rtx_MULT (SFmode, x2, x2)));
22179 emit_insn (gen_rtx_SET (VOIDmode, v2,
22180 gen_rtx_MINUS (SFmode,
22181 halfthree,
22182 gen_rtx_MULT (SFmode, y1, u2))));
22183 emit_insn (gen_rtx_SET (VOIDmode, dst,
22184 gen_rtx_MULT (SFmode, x2, v2)));
22185
22186 emit_label (XEXP (label, 0));
22187}
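
/* Illustrative sketch only (not part of the port): the three Newton-Raphson
   steps above in plain C float arithmetic.  The function name is
   hypothetical, the frsqrte estimate is passed in as X0, and the early-out
   comparison that guards 0.0, 1.0, NaN and Inf in the emitted RTL is
   omitted.  */
static float
swrsqrtsf_sketch (float src, float x0)
{
  float halfthree = 1.5f;                        /* 1.0 + 0.5 */
  float y1 = src * halfthree - src;              /* 0.5*src, saving a constant */
  float x1 = x0 * (halfthree - y1 * (x0 * x0));
  float x2 = x1 * (halfthree - y1 * (x1 * x1));
  return x2 * (halfthree - y1 * (x2 * x2));      /* refined 1/sqrt(src) */
}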
22188
565ef4ba
RS
22189/* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22190 target, and SRC is the argument operand. */
22191
22192void
22193rs6000_emit_popcount (rtx dst, rtx src)
22194{
22195 enum machine_mode mode = GET_MODE (dst);
22196 rtx tmp1, tmp2;
22197
22198 tmp1 = gen_reg_rtx (mode);
22199
22200 if (mode == SImode)
22201 {
22202 emit_insn (gen_popcntbsi2 (tmp1, src));
22203 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22204 NULL_RTX, 0);
22205 tmp2 = force_reg (SImode, tmp2);
22206 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22207 }
22208 else
22209 {
22210 emit_insn (gen_popcntbdi2 (tmp1, src));
22211 tmp2 = expand_mult (DImode, tmp1,
22212 GEN_INT ((HOST_WIDE_INT)
22213 0x01010101 << 32 | 0x01010101),
22214 NULL_RTX, 0);
22215 tmp2 = force_reg (DImode, tmp2);
22216 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22217 }
22218}
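
/* Illustrative sketch only (not part of the port): the 32-bit arithmetic
   performed after popcntb.  BYTE_COUNTS stands for the popcntb result,
   i.e. each byte already holds the population count (0..8) of the
   corresponding source byte; the helper name is hypothetical.  */
static unsigned int
popcount_from_byte_counts_sketch (unsigned int byte_counts)
{
  /* Each byte is at most 8, so the per-byte sums formed by the
     multiplication never carry into the top byte; the most significant
     byte of the product therefore holds the total, which the shift by 24
     extracts (the DImode variant shifts by 56 instead).  */
  return (byte_counts * 0x01010101u) >> 24;
}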
22219
22220
22221/* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22222 target, and SRC is the argument operand. */
22223
22224void
22225rs6000_emit_parity (rtx dst, rtx src)
22226{
22227 enum machine_mode mode = GET_MODE (dst);
22228 rtx tmp;
22229
22230 tmp = gen_reg_rtx (mode);
22231 if (mode == SImode)
22232 {
22233 /* Is mult+shift >= shift+xor+shift+xor? */
22234 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22235 {
22236 rtx tmp1, tmp2, tmp3, tmp4;
22237
22238 tmp1 = gen_reg_rtx (SImode);
22239 emit_insn (gen_popcntbsi2 (tmp1, src));
22240
22241 tmp2 = gen_reg_rtx (SImode);
22242 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22243 tmp3 = gen_reg_rtx (SImode);
22244 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22245
22246 tmp4 = gen_reg_rtx (SImode);
22247 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22248 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22249 }
22250 else
22251 rs6000_emit_popcount (tmp, src);
22252 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22253 }
22254 else
22255 {
22256 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22257 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22258 {
22259 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22260
22261 tmp1 = gen_reg_rtx (DImode);
22262 emit_insn (gen_popcntbdi2 (tmp1, src));
22263
22264 tmp2 = gen_reg_rtx (DImode);
22265 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22266 tmp3 = gen_reg_rtx (DImode);
22267 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22268
22269 tmp4 = gen_reg_rtx (DImode);
22270 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22271 tmp5 = gen_reg_rtx (DImode);
22272 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22273
22274 tmp6 = gen_reg_rtx (DImode);
22275 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22276 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22277 }
22278 else
22279 rs6000_emit_popcount (tmp, src);
22280 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22281 }
22282}
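
/* Illustrative sketch only (not part of the port): the shift/xor reduction
   used above when the multiply would be too expensive.  BYTE_COUNTS again
   stands for the popcntb result and the helper name is hypothetical.
   Since parity (a + b) equals parity (a) xor parity (b), xor-folding the
   word onto itself preserves the parity of the total count, and the final
   mask extracts it.  */
static unsigned int
parity_from_byte_counts_sketch (unsigned int byte_counts)
{
  unsigned int t;

  t = byte_counts ^ (byte_counts >> 16);  /* fold the upper half onto the lower */
  t ^= t >> 8;                            /* fold the remaining two bytes */
  return t & 1;                           /* low bit is the parity */
}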
22283
ded9bf77
AH
22284/* Return an RTX representing where to find the function value of a
22285 function returning MODE. */
22286static rtx
22287rs6000_complex_function_value (enum machine_mode mode)
22288{
22289 unsigned int regno;
22290 rtx r1, r2;
22291 enum machine_mode inner = GET_MODE_INNER (mode);
fb7e4164 22292 unsigned int inner_bytes = GET_MODE_SIZE (inner);
ded9bf77 22293
18f63bfa
AH
22294 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22295 regno = FP_ARG_RETURN;
354ed18f
AH
22296 else
22297 {
18f63bfa 22298 regno = GP_ARG_RETURN;
ded9bf77 22299
18f63bfa
AH
22300 /* 32-bit is OK since it'll go in r3/r4. */
22301 if (TARGET_32BIT && inner_bytes >= 4)
ded9bf77
AH
22302 return gen_rtx_REG (mode, regno);
22303 }
22304
18f63bfa
AH
22305 if (inner_bytes >= 8)
22306 return gen_rtx_REG (mode, regno);
22307
ded9bf77
AH
22308 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22309 const0_rtx);
22310 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
fb7e4164 22311 GEN_INT (inner_bytes));
ded9bf77
AH
22312 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22313}
22314
a6ebc39a
AH
22315/* Define how to find the value returned by a function.
22316 VALTYPE is the data type of the value (as a tree).
22317 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22318 otherwise, FUNC is 0.
22319
22320 On the SPE, both FPs and vectors are returned in r3.
22321
22322 On RS/6000 an integer value is in r3 and a floating-point value is in
22323 fp1, unless -msoft-float. */
22324
22325rtx
586de218 22326rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
a6ebc39a
AH
22327{
22328 enum machine_mode mode;
2a8fa26c 22329 unsigned int regno;
a6ebc39a 22330
594a51fe
SS
22331 /* Special handling for structs in darwin64. */
22332 if (rs6000_darwin64_abi
22333 && TYPE_MODE (valtype) == BLKmode
0b5383eb
DJ
22334 && TREE_CODE (valtype) == RECORD_TYPE
22335 && int_size_in_bytes (valtype) > 0)
594a51fe
SS
22336 {
22337 CUMULATIVE_ARGS valcum;
22338 rtx valret;
22339
0b5383eb 22340 valcum.words = 0;
594a51fe
SS
22341 valcum.fregno = FP_ARG_MIN_REG;
22342 valcum.vregno = ALTIVEC_ARG_MIN_REG;
0b5383eb
DJ
22343 /* Do a trial code generation as if this were going to be passed as
22344 an argument; if any part goes in memory, we return NULL. */
22345 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
594a51fe
SS
22346 if (valret)
22347 return valret;
22348 /* Otherwise fall through to standard ABI rules. */
22349 }
22350
0e67400a
FJ
22351 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22352 {
22353 /* Long long return values need to be split in the -mpowerpc64, 32-bit ABI. */
22354 return gen_rtx_PARALLEL (DImode,
22355 gen_rtvec (2,
22356 gen_rtx_EXPR_LIST (VOIDmode,
22357 gen_rtx_REG (SImode, GP_ARG_RETURN),
22358 const0_rtx),
22359 gen_rtx_EXPR_LIST (VOIDmode,
22360 gen_rtx_REG (SImode,
22361 GP_ARG_RETURN + 1),
22362 GEN_INT (4))));
22363 }
0f086e42
FJ
22364 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22365 {
22366 return gen_rtx_PARALLEL (DCmode,
22367 gen_rtvec (4,
22368 gen_rtx_EXPR_LIST (VOIDmode,
22369 gen_rtx_REG (SImode, GP_ARG_RETURN),
22370 const0_rtx),
22371 gen_rtx_EXPR_LIST (VOIDmode,
22372 gen_rtx_REG (SImode,
22373 GP_ARG_RETURN + 1),
22374 GEN_INT (4)),
22375 gen_rtx_EXPR_LIST (VOIDmode,
22376 gen_rtx_REG (SImode,
22377 GP_ARG_RETURN + 2),
22378 GEN_INT (8)),
22379 gen_rtx_EXPR_LIST (VOIDmode,
22380 gen_rtx_REG (SImode,
22381 GP_ARG_RETURN + 3),
22382 GEN_INT (12))));
22383 }
602ea4d3 22384
7348aa7f
FXC
22385 mode = TYPE_MODE (valtype);
22386 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
a6ebc39a 22387 || POINTER_TYPE_P (valtype))
b78d48dd 22388 mode = TARGET_32BIT ? SImode : DImode;
a6ebc39a 22389
e41b2a33
PB
22390 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22391 /* _Decimal128 must use an even/odd register pair. */
22392 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22393 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
2a8fa26c 22394 regno = FP_ARG_RETURN;
ded9bf77 22395 else if (TREE_CODE (valtype) == COMPLEX_TYPE
42ba5130 22396 && targetm.calls.split_complex_arg)
ded9bf77 22397 return rs6000_complex_function_value (mode);
44688022 22398 else if (TREE_CODE (valtype) == VECTOR_TYPE
d0b2079e 22399 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
23ba09f0 22400 && ALTIVEC_VECTOR_MODE (mode))
a6ebc39a 22401 regno = ALTIVEC_ARG_RETURN;
18f63bfa 22402 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22403 && (mode == DFmode || mode == DCmode
22404 || mode == TFmode || mode == TCmode))
18f63bfa 22405 return spe_build_register_parallel (mode, GP_ARG_RETURN);
a6ebc39a
AH
22406 else
22407 regno = GP_ARG_RETURN;
22408
22409 return gen_rtx_REG (mode, regno);
22410}
22411
ded9bf77
AH
22412/* Define how to find the value returned by a library function
22413 assuming the value has mode MODE. */
22414rtx
22415rs6000_libcall_value (enum machine_mode mode)
22416{
22417 unsigned int regno;
22418
2e6c9641
FJ
22419 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22420 {
22421 /* Long long return values need to be split in the -mpowerpc64, 32-bit ABI. */
22422 return gen_rtx_PARALLEL (DImode,
22423 gen_rtvec (2,
22424 gen_rtx_EXPR_LIST (VOIDmode,
22425 gen_rtx_REG (SImode, GP_ARG_RETURN),
22426 const0_rtx),
22427 gen_rtx_EXPR_LIST (VOIDmode,
22428 gen_rtx_REG (SImode,
22429 GP_ARG_RETURN + 1),
22430 GEN_INT (4))));
22431 }
22432
e41b2a33
PB
22433 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22434 /* _Decimal128 must use an even/odd register pair. */
22435 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
00b79d54 22436 else if (SCALAR_FLOAT_MODE_P (mode)
ded9bf77
AH
22437 && TARGET_HARD_FLOAT && TARGET_FPRS)
22438 regno = FP_ARG_RETURN;
44688022
AM
22439 else if (ALTIVEC_VECTOR_MODE (mode)
22440 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
ded9bf77 22441 regno = ALTIVEC_ARG_RETURN;
42ba5130 22442 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
ded9bf77 22443 return rs6000_complex_function_value (mode);
18f63bfa 22444 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
4f011e1e
JM
22445 && (mode == DFmode || mode == DCmode
22446 || mode == TFmode || mode == TCmode))
18f63bfa 22447 return spe_build_register_parallel (mode, GP_ARG_RETURN);
ded9bf77
AH
22448 else
22449 regno = GP_ARG_RETURN;
22450
22451 return gen_rtx_REG (mode, regno);
22452}
22453
d1d0c603
JJ
22454/* Define the offset between two registers, FROM to be eliminated and its
22455 replacement TO, at the start of a routine. */
22456HOST_WIDE_INT
22457rs6000_initial_elimination_offset (int from, int to)
22458{
22459 rs6000_stack_t *info = rs6000_stack_info ();
22460 HOST_WIDE_INT offset;
22461
7d5175e1 22462 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
d1d0c603 22463 offset = info->push_p ? 0 : -info->total_size;
7d5175e1
JJ
22464 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22465 {
22466 offset = info->push_p ? 0 : -info->total_size;
22467 if (FRAME_GROWS_DOWNWARD)
5b667039 22468 offset += info->fixed_size + info->vars_size + info->parm_size;
7d5175e1
JJ
22469 }
22470 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22471 offset = FRAME_GROWS_DOWNWARD
5b667039 22472 ? info->fixed_size + info->vars_size + info->parm_size
7d5175e1
JJ
22473 : 0;
22474 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
d1d0c603
JJ
22475 offset = info->total_size;
22476 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22477 offset = info->push_p ? info->total_size : 0;
22478 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22479 offset = 0;
22480 else
37409796 22481 gcc_unreachable ();
d1d0c603
JJ
22482
22483 return offset;
22484}
22485
58646b77 22486/* Return true if TYPE is a SPE or AltiVec opaque type. */
62e1dfcf 22487
c8e4f0e9 22488static bool
3101faab 22489rs6000_is_opaque_type (const_tree type)
62e1dfcf 22490{
58646b77 22491 return (type == opaque_V2SI_type_node
2abe3e28 22492 || type == opaque_V2SF_type_node
58646b77 22493 || type == opaque_V4SI_type_node);
62e1dfcf
NC
22494}
22495
96714395 22496static rtx
a2369ed3 22497rs6000_dwarf_register_span (rtx reg)
96714395
AH
22498{
22499 unsigned regno;
22500
4d4cbc0e
AH
22501 if (TARGET_SPE
22502 && (SPE_VECTOR_MODE (GET_MODE (reg))
4d4447b5
PB
22503 || (TARGET_E500_DOUBLE
22504 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
4d4cbc0e
AH
22505 ;
22506 else
96714395
AH
22507 return NULL_RTX;
22508
22509 regno = REGNO (reg);
22510
22511 /* The duality of the SPE register size wreaks all kinds of havoc.
22512 This is a way of distinguishing r0 in 32-bits from r0 in
22513 64-bits. */
22514 return
22515 gen_rtx_PARALLEL (VOIDmode,
3bd104d1
AH
22516 BYTES_BIG_ENDIAN
22517 ? gen_rtvec (2,
22518 gen_rtx_REG (SImode, regno + 1200),
22519 gen_rtx_REG (SImode, regno))
22520 : gen_rtvec (2,
22521 gen_rtx_REG (SImode, regno),
22522 gen_rtx_REG (SImode, regno + 1200)));
96714395
AH
22523}
22524
37ea0b7e
JM
22525/* Fill in sizes for SPE register high parts in table used by unwinder. */
22526
22527static void
22528rs6000_init_dwarf_reg_sizes_extra (tree address)
22529{
22530 if (TARGET_SPE)
22531 {
22532 int i;
22533 enum machine_mode mode = TYPE_MODE (char_type_node);
22534 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
22535 rtx mem = gen_rtx_MEM (BLKmode, addr);
22536 rtx value = gen_int_mode (4, mode);
22537
22538 for (i = 1201; i < 1232; i++)
22539 {
22540 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22541 HOST_WIDE_INT offset
22542 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22543
22544 emit_move_insn (adjust_address (mem, mode, offset), value);
22545 }
22546 }
22547}
22548
93c9d1ba
AM
22549/* Map internal gcc register numbers to DWARF2 register numbers. */
22550
22551unsigned int
22552rs6000_dbx_register_number (unsigned int regno)
22553{
22554 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22555 return regno;
22556 if (regno == MQ_REGNO)
22557 return 100;
1de43f85 22558 if (regno == LR_REGNO)
93c9d1ba 22559 return 108;
1de43f85 22560 if (regno == CTR_REGNO)
93c9d1ba
AM
22561 return 109;
22562 if (CR_REGNO_P (regno))
22563 return regno - CR0_REGNO + 86;
22564 if (regno == XER_REGNO)
22565 return 101;
22566 if (ALTIVEC_REGNO_P (regno))
22567 return regno - FIRST_ALTIVEC_REGNO + 1124;
22568 if (regno == VRSAVE_REGNO)
22569 return 356;
22570 if (regno == VSCR_REGNO)
22571 return 67;
22572 if (regno == SPE_ACC_REGNO)
22573 return 99;
22574 if (regno == SPEFSCR_REGNO)
22575 return 612;
22576 /* SPE high reg number. We get these values of regno from
22577 rs6000_dwarf_register_span. */
37409796
NS
22578 gcc_assert (regno >= 1200 && regno < 1232);
22579 return regno;
93c9d1ba
AM
22580}
22581
93f90be6 22582/* target hook eh_return_filter_mode */
f676971a 22583static enum machine_mode
93f90be6
FJ
22584rs6000_eh_return_filter_mode (void)
22585{
22586 return TARGET_32BIT ? SImode : word_mode;
22587}
22588
00b79d54
BE
22589/* Target hook for scalar_mode_supported_p. */
22590static bool
22591rs6000_scalar_mode_supported_p (enum machine_mode mode)
22592{
22593 if (DECIMAL_FLOAT_MODE_P (mode))
22594 return true;
22595 else
22596 return default_scalar_mode_supported_p (mode);
22597}
22598
f676971a
EC
22599/* Target hook for vector_mode_supported_p. */
22600static bool
22601rs6000_vector_mode_supported_p (enum machine_mode mode)
22602{
22603
96038623
DE
22604 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22605 return true;
22606
f676971a
EC
22607 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22608 return true;
22609
22610 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22611 return true;
22612
22613 else
22614 return false;
22615}
22616
bb8df8a6
EC
22617/* Target hook for invalid_arg_for_unprototyped_fn. */
22618static const char *
3101faab 22619invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
4d3e6fae
FJ
22620{
22621 return (!rs6000_darwin64_abi
22622 && typelist == 0
22623 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22624 && (funcdecl == NULL_TREE
22625 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22626 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22627 ? N_("AltiVec argument passed to unprototyped function")
22628 : NULL;
22629}
22630
3aebbe5f
JJ
22631/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
22632 setup by using __stack_chk_fail_local hidden function instead of
22633 calling __stack_chk_fail directly. Otherwise it is better to call
22634 __stack_chk_fail directly. */
22635
22636static tree
22637rs6000_stack_protect_fail (void)
22638{
22639 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22640 ? default_hidden_stack_protect_fail ()
22641 : default_external_stack_protect_fail ();
22642}
22643
17211ab5 22644#include "gt-rs6000.h"