/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))
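
/* Note (added for clarity): as function-like macros, min and max evaluate
   an argument more than once, so they must only be given side-effect-free
   expressions; min (x++, y), for example, could increment x twice.  */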
/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs  */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
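
/* Illustrative sketch (not original code): passes that need frame facts
   query the descriptor computed by rs6000_stack_info () rather than
   recomputing it, e.g.

     rs6000_stack_t *info = rs6000_stack_info ();
     if (info->lr_save_p)
       ... the link register is saved at info->lr_save_offset
	   from the initial SP ...

   with the exact layout depending on info->abi.  */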
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

static int rs6000_sched_restricted_insns_priority;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;
/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Whether -mabi=altivec has appeared.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;
/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;
/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */
/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
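
/* For illustration: a register/mode validity query elsewhere in the
   backend reduces to a single lookup in this table, e.g.
   rs6000_hard_regno_mode_ok_p[(int) mode][regno]; it is populated by
   rs6000_init_hard_regno_mode_ok below.  */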
/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];
/* Cached value of rs6000_variable_issue.  This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;
/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool abi;			/* True if -mabi=spe/nospe was used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};

const struct processor_costs *rs6000_cost;
/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};
/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};
/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};
/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};
/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};
/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};
/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};
/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
};
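
/* Note on the Cell entries above: expressions such as 9/2 and 74/2 use C
   integer division, so COSTS_N_INSNS (9/2) is simply COSTS_N_INSNS (4);
   the "+2" terms then add a fixed extra cost on top of that.  */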
/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};
/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
};
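
/* One of the cost tables above is selected into rs6000_cost by
   rs6000_override_options below; cost queries such as rs6000_rtx_costs
   then read individual fields, e.g. rs6000_cost->mulsi when pricing an
   SImode multiply.  */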
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_fundamental_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static bool rs6000_reg_live_or_pic_offset_p (int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx, enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);

static void def_builtin (int, const char *, tree, int);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);
static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT, rtx *, int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						tree, HOST_WIDE_INT,
						rtx *, int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif
static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};
#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif
/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
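
/* Worked example: ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) yields 0x80000000
   (%v0 in the most significant bit), and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) yields 0x00000001 (%v31).  */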
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer
#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra
/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT | MASK_SCHED_PROLOG)
#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif
/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
struct gcc_target targetm = TARGET_INITIALIZER;
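
/* Note: TARGET_INITIALIZER expands to an aggregate initializer built from
   the current values of the TARGET_* macros, so each #undef/#define pair
   above customizes exactly one hook in this single targetm object.  */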
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && (mode != TDmode || (regno % 2) == 0)
       && mode != SDmode
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
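
/* Worked example (illustrative): on a 32-bit target, DImode occupies two
   GPRs, so rs6000_hard_regno_mode_ok (30, DImode) is true while
   rs6000_hard_regno_mode_ok (31, DImode) is false, because the second
   register of the pair would fall outside r0-r31.  */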
/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */
#if TARGET_MACHO
static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
	{
	  if (flag_pic)
	    warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
	  flag_pic = 0;
	}
      else if (flag_pic == 1)
	flag_pic = 2;
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targetting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    target_flags |= MASK_ALTIVEC;
}
#endif
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };
  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
    } const processor_target_table[]
    = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"403", PROCESSOR_PPC403,
	POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
       {"405", PROCESSOR_PPC405,
	POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
       {"405fp", PROCESSOR_PPC405,
	POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
       {"440", PROCESSOR_PPC440,
	POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
       {"440fp", PROCESSOR_PPC440,
	POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
       {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
       {"601", PROCESSOR_PPC601,
	MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
       {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"620", PROCESSOR_PPC620,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
       {"630", PROCESSOR_PPC630,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
       {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
       {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
       {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
       /* 8548 has a dummy entry for now.  */
       {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
       {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"970", PROCESSOR_POWER4,
	POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
       {"cell", PROCESSOR_CELL,
	POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
       {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
       {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
       {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
       {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
       {"G5", PROCESSOR_POWER4,
	POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
       {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
       {"power2", PROCESSOR_POWER,
	MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
       {"power3", PROCESSOR_PPC630,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
       {"power4", PROCESSOR_POWER4,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
       {"power5", PROCESSOR_POWER5,
	POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	| MASK_MFCRF | MASK_POPCNTB},
       {"power5+", PROCESSOR_POWER5,
	POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
	| MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
       {"power6", PROCESSOR_POWER6,
	POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
	| MASK_FPRND | MASK_CMPB | MASK_DFP},
       {"power6x", PROCESSOR_POWER6,
	POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
	| MASK_FPRND | MASK_CMPB | MASK_MFPGPR | MASK_DFP},
       {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
       {"powerpc64", PROCESSOR_POWERPC64,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
       {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
       {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
       {"rios2", PROCESSOR_RIOS2,
	MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
       {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
       {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
       {"rs64", PROCESSOR_RS64A,
	POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
    };
  const size_t ptt_size = ARRAY_SIZE (processor_target_table);
  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
		     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
		     | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
  };
  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;
  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags &= ~set_masks;
		    target_flags |= (processor_target_table[j].target_enable
				     & set_masks);
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }
  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning (0, "-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning (0, "-mstring is not supported on little endian systems");
	}
    }
  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
	       rs6000_traceback_name);
    }
  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

#ifndef POWERPC_LINUX
  if (!rs6000_explicit_options.ieee)
    rs6000_ieeequad = 1;
#endif
  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }
  /* Place FP constants in the constant pool instead of TOC
     if section anchors enabled.  */
  if (flag_section_anchors)
    TARGET_NO_FP_IN_TOC = 1;

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif
  if (TARGET_E500)
    {
      /* The e500 does not have string instructions, and we set
	 MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
	target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
	 default, so let's unset them if we manually set another
	 CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
	rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
	rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
	rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
	rs6000_isel = 0;
    }

  /* Detect invalid option combinations with E500.  */
  CHECK_E500_OPTIONS;
  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
			&& rs6000_cpu != PROCESSOR_POWER5
			&& rs6000_cpu != PROCESSOR_POWER6
			&& rs6000_cpu != PROCESSOR_CELL);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
			 || rs6000_cpu == PROCESSOR_POWER5);
  rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
				 || rs6000_cpu == PROCESSOR_POWER5
				 || rs6000_cpu == PROCESSOR_POWER6);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);
  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
	rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
	rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
	rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
	rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
	rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }
  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
	rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
	rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
	rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
	rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }
#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif
  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1596 /* We can only guarantee the availability of DI pseudo-ops when
1597 assembling for 64-bit targets. */
1600 targetm
.asm_out
.aligned_op
.di
= NULL
;
1601 targetm
.asm_out
.unaligned_op
.di
= NULL
;
  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      /* Cell wants to be aligned 8byte for dual issue.  */
      if (rs6000_cpu == PROCESSOR_CELL)
	{
	  if (align_functions <= 0)
	    align_functions = 8;
	  if (align_jumps <= 0)
	    align_jumps = 8;
	  if (align_loops <= 0)
	    align_loops = 8;
	}
      if (rs6000_align_branch_targets)
	{
	  if (align_functions <= 0)
	    align_functions = 16;
	  if (align_jumps <= 0)
	    align_jumps = 16;
	  if (align_loops <= 0)
	    align_loops = 16;
	}
      if (align_jumps_max_skip <= 0)
	align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
	align_loops_max_skip = 15;
    }
  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;

  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
	rs6000_cost = &rios1_cost;
	break;

      case PROCESSOR_RIOS2:
	rs6000_cost = &rios2_cost;
	break;

      case PROCESSOR_RS64A:
	rs6000_cost = &rs64a_cost;
	break;

      case PROCESSOR_MPCCORE:
	rs6000_cost = &mpccore_cost;
	break;

      case PROCESSOR_PPC403:
	rs6000_cost = &ppc403_cost;
	break;

      case PROCESSOR_PPC405:
	rs6000_cost = &ppc405_cost;
	break;

      case PROCESSOR_PPC440:
	rs6000_cost = &ppc440_cost;
	break;

      case PROCESSOR_PPC601:
	rs6000_cost = &ppc601_cost;
	break;

      case PROCESSOR_PPC603:
	rs6000_cost = &ppc603_cost;
	break;

      case PROCESSOR_PPC604:
	rs6000_cost = &ppc604_cost;
	break;

      case PROCESSOR_PPC604e:
	rs6000_cost = &ppc604e_cost;
	break;

      case PROCESSOR_PPC620:
	rs6000_cost = &ppc620_cost;
	break;

      case PROCESSOR_PPC630:
	rs6000_cost = &ppc630_cost;
	break;

      case PROCESSOR_CELL:
	rs6000_cost = &ppccell_cost;
	break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
	rs6000_cost = &ppc750_cost;
	break;

      case PROCESSOR_PPC7450:
	rs6000_cost = &ppc7450_cost;
	break;

      case PROCESSOR_PPC8540:
	rs6000_cost = &ppc8540_cost;
	break;

      case PROCESSOR_POWER4:
      case PROCESSOR_POWER5:
	rs6000_cost = &power4_cost;
	break;

      case PROCESSOR_POWER6:
	rs6000_cost = &power6_cost;
	break;

      default:
	gcc_unreachable ();
      }
}
/* Implement targetm.vectorize.builtin_mask_for_load.  */
static tree
rs6000_builtin_mask_for_load (void)
{
  if (TARGET_ALTIVEC)
    return altivec_builtin_mask_for_load;
  else
    return 0;
}
/* Implement targetm.vectorize.builtin_conversion.  */
static tree
rs6000_builtin_conversion (enum tree_code code, tree type)
{
  if (!TARGET_ALTIVEC)
    return NULL_TREE;

  switch (code)
    {
    case FLOAT_EXPR:
      switch (TYPE_MODE (type))
	{
	case V4SImode:
	  return TYPE_UNSIGNED (type) ?
	    rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX] :
	    rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
	default:
	  return NULL_TREE;
	}
    default:
      return NULL_TREE;
    }
}
/* Implement targetm.vectorize.builtin_mul_widen_even.  */
static tree
rs6000_builtin_mul_widen_even (tree type)
{
  if (!TARGET_ALTIVEC)
    return NULL_TREE;

  switch (TYPE_MODE (type))
    {
    case V8HImode:
      return TYPE_UNSIGNED (type) ?
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH] :
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];

    case V16QImode:
      return TYPE_UNSIGNED (type) ?
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB] :
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];

    default:
      return NULL_TREE;
    }
}

/* Implement targetm.vectorize.builtin_mul_widen_odd.  */
static tree
rs6000_builtin_mul_widen_odd (tree type)
{
  if (!TARGET_ALTIVEC)
    return NULL_TREE;

  switch (TYPE_MODE (type))
    {
    case V8HImode:
      return TYPE_UNSIGNED (type) ?
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH] :
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];

    case V16QImode:
      return TYPE_UNSIGNED (type) ?
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB] :
	rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];

    default:
      return NULL_TREE;
    }
}
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
/* Validate and record the size specified with the -mtls-size option.  */
static void
rs6000_parse_tls_size_option (void)
{
  if (rs6000_tls_size_string == 0)
    return;
  else if (strcmp (rs6000_tls_size_string, "16") == 0)
    rs6000_tls_size = 16;
  else if (strcmp (rs6000_tls_size_string, "32") == 0)
    rs6000_tls_size = 32;
  else if (strcmp (rs6000_tls_size_string, "64") == 0)
    rs6000_tls_size = 64;
  else
    error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
}
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_DARWIN)
    /* The Darwin libraries never set errno, so we might as well
       avoid calling them when that's the only reason we would.  */
    flag_errno_math = 0;

  /* Double growth factor to counter reduced min jump length.  */
  set_param_value ("max-grow-copy-bb-insns", 16);

  /* Enable section anchors by default.
     Skip section anchors for Objective C and Objective C++
     until front-ends fixed.  */
  if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
    flag_section_anchors = 1;
}
/* Implement TARGET_HANDLE_OPTION.  */
static bool
rs6000_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mno_power:
      target_flags &= ~(MASK_POWER | MASK_POWER2
			| MASK_MULTIPLE | MASK_STRING);
      target_flags_explicit |= (MASK_POWER | MASK_POWER2
				| MASK_MULTIPLE | MASK_STRING);
      break;

    case OPT_mno_powerpc:
      target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
			| MASK_PPC_GFXOPT | MASK_POWERPC64);
      target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
				| MASK_PPC_GFXOPT | MASK_POWERPC64);
      break;

    case OPT_mfull_toc:
      target_flags &= ~MASK_MINIMAL_TOC;
      TARGET_NO_FP_IN_TOC = 0;
      TARGET_NO_SUM_IN_TOC = 0;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#ifdef TARGET_USES_SYSV4_OPT
      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
	 just the same as -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#endif
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mtoc:
      /* Make -mtoc behave like -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
      break;
#endif

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix64:
#else
    case OPT_m64:
#endif
      target_flags |= MASK_POWERPC64 | MASK_POWERPC;
      target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
      target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
      break;

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix32:
#else
    case OPT_m32:
#endif
      target_flags &= ~MASK_POWERPC64;
      target_flags_explicit |= MASK_POWERPC64;
      break;

    case OPT_minsert_sched_nops_:
      rs6000_sched_insert_nops_str = arg;
      break;

    case OPT_mminimal_toc:
      if (value == 1)
	{
	  TARGET_NO_FP_IN_TOC = 0;
	  TARGET_NO_SUM_IN_TOC = 0;
	}
      break;

    case OPT_mpower:
      if (value == 1)
	{
	  target_flags |= (MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpower2:
      if (value == 1)
	{
	  target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpowerpc_gpopt:
    case OPT_mpowerpc_gfxopt:
      if (value == 1)
	{
	  target_flags |= MASK_POWERPC;
	  target_flags_explicit |= MASK_POWERPC;
	}
      break;

    case OPT_maix_struct_return:
    case OPT_msvr4_struct_return:
      rs6000_explicit_options.aix_struct_ret = true;
      break;

    case OPT_mvrsave_:
      rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
      break;

    case OPT_misel_:
      rs6000_explicit_options.isel = true;
      rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
      break;

    case OPT_mspe_:
      rs6000_explicit_options.spe = true;
      rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
      break;

    case OPT_mdebug_:
      rs6000_debug_name = arg;
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mcall_:
      rs6000_abi_name = arg;
      break;

    case OPT_msdata_:
      rs6000_sdata_name = arg;
      break;

    case OPT_mtls_size_:
      rs6000_tls_size_string = arg;
      break;

    case OPT_mrelocatable:
      if (value == 1)
	{
	  target_flags |= MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      break;

    case OPT_mrelocatable_lib:
      if (value == 1)
	{
	  target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      else
	{
	  target_flags &= ~MASK_RELOCATABLE;
	  target_flags_explicit |= MASK_RELOCATABLE;
	}
      break;
#endif

    case OPT_mabi_:
      if (!strcmp (arg, "altivec"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_altivec_abi = 1;
	  rs6000_spe_abi = 0;
	}
      else if (! strcmp (arg, "no-altivec"))
	{
	  /* ??? Don't set rs6000_explicit_options.abi here, to allow
	     the default for rs6000_spe_abi to be chosen later.  */
	  rs6000_altivec_abi = 0;
	}
      else if (! strcmp (arg, "spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 1;
	  rs6000_altivec_abi = 0;
	  if (!TARGET_SPE_ABI)
	    error ("not configured for ABI: '%s'", arg);
	}
      else if (! strcmp (arg, "no-spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 0;
	}

      /* These are here for testing during development only, do not
	 document in the manual please.  */
      else if (! strcmp (arg, "d64"))
	{
	  rs6000_darwin64_abi = 1;
	  warning (0, "Using darwin64 ABI");
	}
      else if (! strcmp (arg, "d32"))
	{
	  rs6000_darwin64_abi = 0;
	  warning (0, "Using old darwin ABI");
	}

      else if (! strcmp (arg, "ibmlongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 0;
	  warning (0, "Using IBM extended precision long double");
	}
      else if (! strcmp (arg, "ieeelongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 1;
	  warning (0, "Using IEEE extended precision long double");
	}
      else
	{
	  error ("unknown ABI specified: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mcpu_:
      rs6000_select[1].string = arg;
      break;

    case OPT_mtune_:
      rs6000_select[2].string = arg;
      break;

    case OPT_mtraceback_:
      rs6000_traceback_name = arg;
      break;

    case OPT_mfloat_gprs_:
      rs6000_explicit_options.float_gprs = true;
      if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
	rs6000_float_gprs = 1;
      else if (! strcmp (arg, "double"))
	rs6000_float_gprs = 2;
      else if (! strcmp (arg, "no"))
	rs6000_float_gprs = 0;
      else
	{
	  error ("invalid option for -mfloat-gprs: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mlong_double_:
      rs6000_explicit_options.long_double = true;
      rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
      if (value != 64 && value != 128)
	{
	  error ("Unknown switch -mlong-double-%s", arg);
	  rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
	  return false;
	}
      else
	rs6000_long_double_type_size = value;
      break;

    case OPT_msched_costly_dep_:
      rs6000_sched_costly_dep_str = arg;
      break;

    case OPT_malign_:
      rs6000_explicit_options.alignment = true;
      if (! strcmp (arg, "power"))
	{
	  /* On 64-bit Darwin, power alignment is ABI-incompatible with
	     some C library functions, so warn about it.  The flag may be
	     useful for performance studies from time to time though, so
	     don't disable it entirely.  */
	  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
	    warning (0, "-malign-power is not supported for 64-bit Darwin;"
		     " it is incompatible with the installed C and C++ libraries");
	  rs6000_alignment_flags = MASK_ALIGN_POWER;
	}
      else if (! strcmp (arg, "natural"))
	rs6000_alignment_flags = MASK_ALIGN_NATURAL;
      else
	{
	  error ("unknown -malign-XXXXX option specified: '%s'", arg);
	  return false;
	}
      break;
    }
  return true;
}
/* Do anything needed at the start of the asm file.  */
static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

      if (PPC405_ERRATUM77)
	{
	  fprintf (file, "%s PPC405CR_ERRATUM77", start);
	  start = "";
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      if (*start == '\0')
	putc ('\n', file);
    }

  if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
    {
      switch_to_section (toc_section);
      switch_to_section (text_section);
    }
}
/* Return nonzero if this function is known to have a null epilogue.  */
int
direct_return (void)
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
/* Return the number of instructions it takes to form a constant in an
   integer register.  */
static int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* signed constant loadable with {cal|addi} */
  if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
    return 1;

  /* constant loadable with {cau|addis} */
  else if ((value & 0xffff) == 0
	   && (value >> 31 == -1 || value >> 31 == 0))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
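/* Editor's worked example (not part of the original source): a value
   such as -5 passes the addi test above and needs one instruction
   ("li 3,-5"); 0x12340000 passes the addis test and also needs one;
   while 0x12345678 needs the classic two-instruction sequence
   "lis 3,0x1234" followed by "ori 3,3,0x5678", so the function
   returns 2 for it.  */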
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT low, high;

  switch (GET_CODE (op))
    {
    case CONST_INT:
#if HOST_BITS_PER_WIDE_INT == 64
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));

    case CONST_DOUBLE:
      if (mode == SFmode)
	{
	  long l;
	  REAL_VALUE_TYPE rv;

	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, l);
	  return num_insns_constant_wide ((HOST_WIDE_INT) l);
	}

      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  long l[2];
	  REAL_VALUE_TYPE rv;

	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  if (DECIMAL_FLOAT_MODE_P (mode))
	    REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
	  else
	    REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[WORDS_BIG_ENDIAN == 0];
	  low  = l[WORDS_BIG_ENDIAN != 0];
	}

      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));
      else
	{
	  if ((high == 0 && low >= 0)
	      || (high == -1 && low < 0))
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}

    default:
      gcc_unreachable ();
    }
}
/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
   If the mode of OP is MODE_VECTOR_INT, this simply returns the
   corresponding element of the vector, but for V4SFmode and V2SFmode,
   the corresponding "float" is interpreted as an SImode integer.  */

static HOST_WIDE_INT
const_vector_elt_as_int (rtx op, unsigned int elt)
{
  rtx tmp = CONST_VECTOR_ELT (op, elt);
  if (GET_MODE (op) == V4SFmode
      || GET_MODE (op) == V2SFmode)
    tmp = gen_lowpart (SImode, tmp);
  return INTVAL (tmp);
}
/* Return true if OP can be synthesized with a particular vspltisb, vspltish
   or vspltisw instruction.  OP is a CONST_VECTOR.  Which instruction is used
   depends on STEP and COPIES, one of which will be 1.  If COPIES > 1,
   all items are set to the same value and contain COPIES replicas of the
   vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
   operand and the others are set to the value of the operand's msb.  */

static bool
vspltis_constant (rtx op, unsigned step, unsigned copies)
{
  enum machine_mode mode = GET_MODE (op);
  enum machine_mode inner = GET_MODE_INNER (mode);

  unsigned i;
  unsigned nunits = GET_MODE_NUNITS (mode);
  unsigned bitsize = GET_MODE_BITSIZE (inner);
  unsigned mask = GET_MODE_MASK (inner);

  HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
  HOST_WIDE_INT splat_val = val;
  HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;

  /* Construct the value to be splatted, if possible.  If not, return 0.  */
  for (i = 2; i <= copies; i *= 2)
    {
      HOST_WIDE_INT small_val;
      bitsize /= 2;
      small_val = splat_val >> bitsize;
      mask >>= bitsize;
      if (splat_val != ((small_val << bitsize) | (small_val & mask)))
	return false;
      splat_val = small_val;
    }

  /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw].  */
  if (EASY_VECTOR_15 (splat_val))
    ;

  /* Also check if we can splat, and then add the result to itself.  Do so if
     the value is positive, of if the splat instruction is using OP's mode;
     for splat_val < 0, the splat and the add should use the same mode.  */
  else if (EASY_VECTOR_15_ADD_SELF (splat_val)
	   && (splat_val >= 0 || (step == 1 && copies == 1)))
    ;

  else
    return false;

  /* Check if VAL is present in every STEP-th element, and the
     other elements are filled with its most significant bit.  */
  for (i = 0; i < nunits - 1; ++i)
    {
      HOST_WIDE_INT desired_val;
      if (((i + 1) & (step - 1)) == 0)
	desired_val = val;
      else
	desired_val = msb_val;

      if (desired_val != const_vector_elt_as_int (op, i))
	return false;
    }

  return true;
}
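/* Editor's worked example (not part of the original source, assuming
   big-endian element order): a vspltisw of 5 viewed as V8HImode is
   { 0, 5, 0, 5, 0, 5, 0, 5 }.  Called with STEP == 2 and COPIES == 1,
   the checks above accept it: the last element supplies VAL == 5,
   every second element matches VAL, and the elements in between hold
   its msb fill value 0.  */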
/* Return true if OP is of the given MODE and can be synthesized
   with a vspltisb, vspltish or vspltisw.  */

bool
easy_altivec_constant (rtx op, enum machine_mode mode)
{
  unsigned step, copies;

  if (mode == VOIDmode)
    mode = GET_MODE (op);
  else if (mode != GET_MODE (op))
    return false;

  /* Start with a vspltisw.  */
  step = GET_MODE_NUNITS (mode) / 4;
  copies = 1;

  if (vspltis_constant (op, step, copies))
    return true;

  /* Then try with a vspltish.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return true;

  /* And finally a vspltisb.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return true;

  return false;
}
/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
   result is OP.  Abort if it is not possible.  */

rtx
gen_easy_altivec_constant (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  int nunits = GET_MODE_NUNITS (mode);
  rtx last = CONST_VECTOR_ELT (op, nunits - 1);
  unsigned step = nunits / 4;
  unsigned copies = 1;

  /* Start with a vspltisw.  */
  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));

  /* Then try with a vspltish.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));

  /* And finally a vspltisb.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));

  gcc_unreachable ();
}
const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      rtx splat_vec;
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";

      splat_vec = gen_easy_altivec_constant (vec);
      gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
      operands[1] = XEXP (splat_vec, 0);
      if (!EASY_VECTOR_15 (INTVAL (operands[1])))
	return "#";

      switch (GET_MODE (splat_vec))
	{
	case V4SImode:
	  return "vspltisw %0,%1";

	case V8HImode:
	  return "vspltish %0,%1";

	case V16QImode:
	  return "vspltisb %0,%1";

	default:
	  gcc_unreachable ();
	}
    }

  gcc_assert (TARGET_SPE);

  /* Vector constant 0 is handled as a splitter of V2SI, and in the
     pattern of V1DI, V4HI, and V2SF.

     FIXME: We should probably return # and add post reload
     splitters for these, but this way is so easy ;-).  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  operands[1] = CONST_VECTOR_ELT (vec, 0);
  operands[2] = CONST_VECTOR_ELT (vec, 1);
  if (cst == cst2)
    return "li %0,%1\n\tevmergelo %0,%0,%0";
  else
    return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
}
/* Initialize vector TARGET to VALS.  */

void
rs6000_expand_vector_init (rtx target, rtx vals)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  int n_elts = GET_MODE_NUNITS (mode);
  int n_var = 0, one_var = -1;
  bool all_same = true, all_const_zero = true;
  rtx x, mem;
  int i;

  for (i = 0; i < n_elts; ++i)
    {
      x = XVECEXP (vals, 0, i);
      if (!CONSTANT_P (x))
	++n_var, one_var = i;
      else if (x != CONST0_RTX (inner_mode))
	all_const_zero = false;

      if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
	all_same = false;
    }

  if (n_var == 0)
    {
      if (mode != V4SFmode && all_const_zero)
	{
	  /* Zero register.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target,
				  gen_rtx_XOR (mode, target, target)));
	  return;
	}
      else if (mode != V4SFmode && easy_vector_constant (vals, mode))
	{
	  /* Splat immediate.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target, vals));
	  return;
	}
      else if (all_same)
	;	/* Splat vector element.  */
      else
	{
	  /* Load from constant pool.  */
	  emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
	  return;
	}
    }

  /* Store value to stack temp.  Load vector element.  Splat.  */
  if (all_same)
    {
      mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
      emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
		      XVECEXP (vals, 0, 0));
      x = gen_rtx_UNSPEC (VOIDmode,
			  gen_rtvec (1, const0_rtx), UNSPEC_LVE);
      emit_insn (gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec (2,
					      gen_rtx_SET (VOIDmode,
							   target, mem),
					      x)));
      x = gen_rtx_VEC_SELECT (inner_mode, target,
			      gen_rtx_PARALLEL (VOIDmode,
						gen_rtvec (1, const0_rtx)));
      emit_insn (gen_rtx_SET (VOIDmode, target,
			      gen_rtx_VEC_DUPLICATE (mode, x)));
      return;
    }

  /* One field is non-constant.  Load constant then overwrite
     varying field.  */
  if (n_var == 1)
    {
      rtx copy = copy_rtx (vals);

      /* Load constant part of vector, substitute neighboring value for
	 varying element.  */
      XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
      rs6000_expand_vector_init (target, copy);

      /* Insert variable.  */
      rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
      return;
    }

  /* Construct the vector in memory one field at a time
     and load the whole vector.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
  for (i = 0; i < n_elts; i++)
    emit_move_insn (adjust_address_nv (mem, inner_mode,
				       i * GET_MODE_SIZE (inner_mode)),
		    XVECEXP (vals, 0, i));
  emit_move_insn (target, mem);
}
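/* Editor's usage sketch (not part of the original source): expanding
   (vec_init:V4SI 1 1 1 1) takes the easy-constant splat path above
   (a single vspltisw), while a vector with one non-constant element,
   say { x, 0, 0, 0 }, first builds the all-constant variant and then
   calls rs6000_expand_vector_set to insert x into element 0.  */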
/* Set field ELT of TARGET to VAL.  */

void
rs6000_expand_vector_set (rtx target, rtx val, int elt)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx reg = gen_reg_rtx (mode);
  rtx mask, mem, x;
  int width = GET_MODE_SIZE (inner_mode);
  int i;

  /* Load single variable value.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
  emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_LVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       reg, mem),
					  x)));

  /* Linear sequence.  */
  mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
  for (i = 0; i < 16; ++i)
    XVECEXP (mask, 0, i) = GEN_INT (i);

  /* Set permute mask to insert element into target.  */
  for (i = 0; i < width; ++i)
    XVECEXP (mask, 0, elt*width + i)
      = GEN_INT (i + 0x10);
  x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
  x = gen_rtx_UNSPEC (mode,
		      gen_rtvec (3, target, reg,
				 force_reg (V16QImode, x)),
		      UNSPEC_VPERM);
  emit_insn (gen_rtx_SET (VOIDmode, target, x));
}
/* Extract field ELT from VEC into TARGET.  */

void
rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
{
  enum machine_mode mode = GET_MODE (vec);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx mem, x;

  /* Allocate mode-sized buffer.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);

  /* Add offset to field within buffer matching vector element.  */
  mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));

  /* Store single field into mode-sized buffer.  */
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_STVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       mem, vec),
					  x)));
  emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
}
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  gcc_assert (GET_CODE (in) == CONST_INT);

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS       ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  gcc_unreachable ();
#endif
}
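/* Editor's usage note (not part of the original source): the four
   values stored in OUT feed a pair of rotate-and-mask instructions;
   OUT[0]/OUT[1] give the first rotate count and mask and OUT[2]/OUT[3]
   the second, matching the rs6000.md patterns that split an AND with
   such a mask into two rldicl/rldicr insns.  */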
/* Return TRUE if OP is an invalid SUBREG operation on the e500.  */

bool
invalid_e500_subreg (rtx op, enum machine_mode mode)
{
  if (TARGET_E500_DOUBLE)
    {
      /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
	 subreg:TI and reg:TF.  */
      if (GET_CODE (op) == SUBREG
	  && (mode == SImode || mode == DImode || mode == TImode)
	  && REG_P (SUBREG_REG (op))
	  && (GET_MODE (SUBREG_REG (op)) == DFmode
	      || GET_MODE (SUBREG_REG (op)) == TFmode))
	return true;

      /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
	 reg:TI.  */
      if (GET_CODE (op) == SUBREG
	  && (mode == DFmode || mode == TFmode)
	  && REG_P (SUBREG_REG (op))
	  && (GET_MODE (SUBREG_REG (op)) == DImode
	      || GET_MODE (SUBREG_REG (op)) == TImode))
	return true;
    }

  if (TARGET_SPE
      && GET_CODE (op) == SUBREG
      && mode == SImode
      && REG_P (SUBREG_REG (op))
      && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
    return true;

  return false;
}
/* AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.  */

unsigned int
rs6000_special_round_type_align (tree type, unsigned int computed,
				 unsigned int specified)
{
  unsigned int align = MAX (computed, specified);
  tree field = TYPE_FIELDS (type);

  /* Skip all non field decls */
  while (field != NULL && TREE_CODE (field) != FIELD_DECL)
    field = TREE_CHAIN (field);

  if (field != NULL && field != type)
    {
      type = TREE_TYPE (field);
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);

      if (type != error_mark_node && TYPE_MODE (type) == DFmode)
	align = MAX (align, 64);
    }

  return align;
}
/* Darwin increases record alignment to the natural alignment of
   the first field.  */

unsigned int
darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
					unsigned int specified)
{
  unsigned int align = MAX (computed, specified);

  if (TYPE_PACKED (type))
    return align;

  /* Find the first field, looking down into aggregates.  */
  do {
    tree field = TYPE_FIELDS (type);
    /* Skip all non field decls */
    while (field != NULL && TREE_CODE (field) != FIELD_DECL)
      field = TREE_CHAIN (field);
    if (! field)
      break;
    type = TREE_TYPE (field);
    while (TREE_CODE (type) == ARRAY_TYPE)
      type = TREE_TYPE (type);
  } while (AGGREGATE_TYPE_P (type));

  if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
    align = MAX (align, TYPE_ALIGN (type));

  return align;
}
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}

/* Return true if either operand is a general purpose register.  */

bool
gpr_or_gpr_p (rtx op0, rtx op1)
{
  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
}
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */

static int
constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}

static bool
constant_pool_expr_p (rtx op)
{
  int have_sym = 0;
  int have_toc = 0;
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
}

bool
toc_relative_expr_p (rtx op)
{
  int have_sym = 0;
  int have_toc = 0;
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
}
bool
legitimate_constant_pool_address_p (rtx x)
{
  return (TARGET_TOC
	  && GET_CODE (x) == PLUS
	  && GET_CODE (XEXP (x, 0)) == REG
	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
	  && constant_pool_expr_p (XEXP (x, 1)));
}

static bool
legitimate_small_data_p (enum machine_mode mode, rtx x)
{
  return (DEFAULT_ABI == ABI_V4
	  && !flag_pic && !TARGET_TOC
	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
	  && small_data_operand (x, mode));
}
/* SPE offset addressing is limited to 5-bits worth of double words.  */
#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
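/* Editor's examples (not part of the original source): offsets 0, 8,
   16, ..., 248 satisfy SPE_CONST_OFFSET_OK, since only bits 3-7 may
   be set; an unaligned offset such as 4, or anything 256 and above,
   fails the (x & ~0xf8) == 0 test.  */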
bool
rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (legitimate_constant_pool_address_p (x))
    return true;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid and
	 constant offset zero should not occur due to canonicalization.
	 Allow any offset when not strict before reload.  */
      return !strict;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DDmode:
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

    case DImode:
      /* On e500v2, we may have:

	   (subreg:DF (mem:DI (plus (reg) (const_int))) 0).

	 Which gets addressed with evldd instructions.  */
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

      if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TDmode:
      if (TARGET_E500_DOUBLE)
	return (SPE_CONST_OFFSET_OK (offset)
		&& SPE_CONST_OFFSET_OK (offset + 8));

    case TImode:
      if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
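/* Editor's worked example (not part of the original source): in 32-bit
   mode a DImode access at (plus (reg) (const_int 32764)) is rejected,
   because EXTRA == 4 accounts for the second word at offset 32768,
   which falls outside the signed 16-bit displacement range.  */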
bool
legitimate_indexed_address_p (rtx x, int strict)
{
  rtx op0, op1;

  if (GET_CODE (x) != PLUS)
    return false;

  op0 = XEXP (x, 0);
  op1 = XEXP (x, 1);

  /* Recognize the rtl generated by reload which we know will later be
     replaced with proper base and index regs.  */
  if (!strict
      && reload_in_progress
      && (REG_P (op0) || GET_CODE (op0) == PLUS)
      && REG_P (op1))
    return true;

  return (REG_P (op0) && REG_P (op1)
	  && ((INT_REG_OK_FOR_BASE_P (op0, strict)
	       && INT_REG_OK_FOR_INDEX_P (op1, strict))
	      || (INT_REG_OK_FOR_BASE_P (op1, strict)
		  && INT_REG_OK_FOR_INDEX_P (op0, strict))));
}
inline bool
legitimate_indirect_address_p (rtx x, int strict)
{
  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
}

bool
macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
{
  if (!TARGET_MACHO || !flag_pic
      || mode != SImode || GET_CODE (x) != MEM)
    return false;
  x = XEXP (x, 0);

  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
    return false;
  x = XEXP (x, 1);

  return CONSTANT_P (x);
}
static bool
legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
{
  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* Restrict addressing for DI because of our SUBREG hackery.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
			     || mode == DImode))
    return false;
  x = XEXP (x, 1);

  if (TARGET_ELF || TARGET_MACHO)
    {
      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
	return false;
      if (TARGET_TOC)
	return false;
      if (GET_MODE_NUNITS (mode) != 1)
	return false;
      if (GET_MODE_BITSIZE (mode) > 64
	  || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
	      && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
	return false;

      return CONSTANT_P (x);
    }

  return false;
}
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (((mode != DImode && mode != DFmode && mode != DDmode)
		    || TARGET_E500_DOUBLE)
		   && mode != TFmode && mode != TDmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode)
	   || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				      || mode == DDmode || mode == TDmode
				      || mode == DImode)))
    {
      if (mode == DImode)
	return NULL_RTX;
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
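/* Editor's worked example (not part of the original source): for
   (plus (reg) (const_int 0x12348)), which is out of 16-bit range,
   the split above yields low_int == 0x2348 and high_int == 0x10000,
   so an addis forms the high part and the resulting address keeps
   the small 0x2348 displacement.  */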
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void
rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs (DOUBLE_INT_ASM_OP, file);
      break;
    default:
      gcc_unreachable ();
    }
  output_addr_const (file, x);
  fputs ("@dtprel+0x8000", file);
}
/* Construct the SYMBOL_REF for the tls_get_addr function.  */

static GTY(()) rtx rs6000_tls_symbol;
static rtx
rs6000_tls_get_addr (void)
{
  if (!rs6000_tls_symbol)
    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");

  return rs6000_tls_symbol;
}

/* Construct the SYMBOL_REF for TLS GOT references.  */

static GTY(()) rtx rs6000_got_symbol;
static rtx
rs6000_got_sym (void)
{
  if (!rs6000_got_symbol)
    {
      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
    }

  return rs6000_got_symbol;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* We currently use relocations like @got@tlsgd for tls, which
	 means the linker will handle allocation of tls entries, placing
	 them in the .got section.  So use a pointer to the .got section,
	 not one to secondary TOC sections used by 64-bit -mminimal-toc,
	 or to secondary GOT sections used by 32-bit -fPIC.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, 2);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  rtx tmp3, mem;
		  rtx first, last;

		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_const_mem (Pmode, tmp1);

		  first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
		  emit_move_insn (tmp1,
				  gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  set_unique_reg_note (last, REG_EQUAL, gsym);
		  maybe_encapsulate_block (first, last, gsym);
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64-bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
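/* Editor's usage note (not part of the original source): with
   -mtls-size=32 on a 32-bit target, local-exec forms the address
   with the tprel_ha/tprel_lo (addis/addi) pair off register 2 above,
   whereas the global-dynamic path funnels through a call to
   __tls_get_addr with the argument assembled in r3.  */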
/* Return 1 if X contains a thread-local symbol.  */

bool
rs6000_tls_referenced_p (rtx x)
{
  if (! TARGET_HAVE_TLS)
    return false;

  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
}

/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument.  */

static int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}

/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
				  int opnum, int type,
				  int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Force ld/std non-word aligned offset into base register by wrapping
     in offset 0.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < 32
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) & 3) != 0
      && !ALTIVEC_VECTOR_MODE (mode)
      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && TARGET_POWERPC64)
    {
      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      *win = 1;
      return x;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				  || mode == DImode))
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  if (GET_CODE (x) == SYMBOL_REF
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
#if TARGET_MACHO
      && DEFAULT_ABI == ABI_DARWIN
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
#else
      && DEFAULT_ABI == ABI_V4
      && !flag_pic
#endif
      /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
	 The same goes for DImode without 64-bit gprs and DFmode
	 without fprs.  */
      && mode != TFmode
      && mode != TDmode
      && (mode != DImode || TARGET_POWERPC64)
      && (mode != DFmode || TARGET_POWERPC64
	  || (TARGET_FPRS && TARGET_HARD_FLOAT)))
    {
#if TARGET_MACHO
      if (flag_pic)
	{
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
					machopic_function_base_sym ()));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			      gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
#endif
	x = gen_rtx_LO_SUM (GET_MODE (x),
	      gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* Reload an offset address wrapped by an AND that represents the
     masking of the lower bits.  Strip the outer AND and let reload
     convert the offset address into an indirect address.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    {
      x = XEXP (x, 0);
      *win = 1;
      return x;
    }

  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      x = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
   because adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* If this is an unaligned stvx/ldvx type address, discard the outer AND.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    x = XEXP (x, 0);

  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && mode != TFmode
      && mode != TDmode
      /* Restrict addressing for DI because of our SUBREG hackery.  */
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				  || mode == DImode))
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
	  || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  if (mode != TImode
      && mode != TFmode
      && mode != TDmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (GET_CODE (x) == PRE_MODIFY
      && mode != TImode
      && mode != TFmode
      && mode != TDmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      /* Restrict addressing for DI because of our SUBREG hackery.  */
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
      && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
	  || legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict))
      && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
/* Go to LABEL if ADDR (a legitimate address expression)
   has an effect that depends on the machine mode it is used for.

   On the RS/6000 this is true of all integral offsets (since AltiVec
   modes don't allow them) or is a pre-increment or decrement.

   ??? Except that due to conceptual problems in offsettable_address_p
   we can't really report the problems of integral offsets.  So leave
   this assuming that the adjustable offset must be valid for the
   sub-words of a TFmode operand, which is what we had before.  */

bool
rs6000_mode_dependent_address (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case PLUS:
      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
	  return val + 12 + 0x8000 >= 0x10000;
	}
      break;

    case LO_SUM:
      return true;

    case PRE_MODIFY:
      return TARGET_UPDATE;

    default:
      break;
    }

  return false;
}
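/* Editor's worked example (not part of the original source): an offset
   of 32752 is still mode independent (32752 + 12 + 0x8000 < 0x10000),
   but 32756 is not, since accessing the last word of a 16-byte operand
   would overflow the signed 16-bit displacement.  */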
/* More elaborate version of recog's offsettable_memref_p predicate
   that works around the ??? note of rs6000_mode_dependent_address.
   In particular it accepts

     (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))

   in 32-bit mode, that the recog predicate rejects.  */

bool
rs6000_offsettable_memref_p (rtx op)
{
  if (!MEM_P (op))
    return false;

  /* First mimic offsettable_memref_p.  */
  if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
    return true;

  /* offsettable_address_p invokes rs6000_mode_dependent_address, but
     the latter predicate knows nothing about the mode of the memory
     reference and, therefore, assumes that it is the largest supported
     mode (TFmode).  As a consequence, legitimate offsettable memory
     references are rejected.  rs6000_legitimate_offset_address_p contains
     the correct logic for the PLUS case of rs6000_mode_dependent_address.  */
  return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
}
/* Return number of consecutive hard regs needed starting at reg REGNO
   to hold something of mode MODE.
   This is ordinarily the length in words of a value of mode MODE
   but can be less for certain modes in special long registers.

   For the SPE, GPRs are 64 bits but only 32 bits are visible in
   scalar instructions.  The upper 32 bits are only available to the
   SIMD instructions.

   POWER and PowerPC GPRs hold 32 bits worth;
   PowerPC64 GPRs and FPRs point register holds 64 bits worth.  */

int
rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
{
  if (FP_REGNO_P (regno))
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;

  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;

  if (ALTIVEC_REGNO_P (regno))
    return
      (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;

  /* The value returned for SCmode in the E500 double case is 2 for
     ABI compatibility; storing an SCmode value in a single register
     would require function_arg and rs6000_spe_function_arg to handle
     SCmode so as to pass the value correctly in a pair of
     registers.  */
  if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;

  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
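/* Editor's worked examples (not part of the original source): with
   32-bit GPRs a DFmode value occupies (8 + 4 - 1) / 4 == 2 registers
   and a TImode value needs 4, while the same DFmode value fits a
   single 64-bit FPR, since UNITS_PER_FP_WORD is 8 there.  */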
/* Change register usage conditional on target flags.  */
void
rs6000_conditional_register_usage (void)
{
  int i;

  /* Set MQ register fixed (already call_used) if not POWER
     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
     be allocated.  */
  if (! TARGET_POWER)
    fixed_regs[64] = 1;

  /* 64-bit AIX and Linux reserve GPR13 for thread-private data.  */
  if (TARGET_64BIT)
    fixed_regs[13] = call_used_regs[13]
      = call_really_used_regs[13] = 1;

  /* Conditionally disable FPRs.  */
  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
    for (i = 32; i < 64; i++)
      fixed_regs[i] = call_used_regs[i]
	= call_really_used_regs[i] = 1;

  /* The TOC register is not killed across calls in a way that is
     visible to the compiler.  */
  if (DEFAULT_ABI == ABI_AIX)
    call_really_used_regs[2] = 0;

  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 2)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 1)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_DARWIN
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_TOC && TARGET_MINIMAL_TOC)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_ALTIVEC)
    global_regs[VSCR_REGNO] = 1;

  if (TARGET_SPE)
    {
      global_regs[SPEFSCR_REGNO] = 1;
      /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
	 registers in prologues and epilogues.  We no longer use r14
	 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
	 pool for link-compatibility with older versions of GCC.  Once
	 "old" code has died out, we can return r14 to the allocation
	 pool.  */
      fixed_regs[14]
	= call_used_regs[14]
	= call_really_used_regs[14] = 1;
    }

  if (! TARGET_ALTIVEC)
    {
      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
	fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
      call_really_used_regs[VRSAVE_REGNO] = 1;
    }

  if (TARGET_ALTIVEC_ABI)
    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
      call_used_regs[i] = call_really_used_regs[i] = 1;
}
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */

rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
                       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  switch (mode)
    {
    case QImode:
    case HImode:
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;

    case SImode:
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
                              GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
                              gen_rtx_IOR (SImode, copy_rtx (result),
                                           GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
      break;

    case DImode:
      switch (GET_CODE (source))
        {
        case CONST_INT:
          c0 = INTVAL (source);
          c1 = -(c0 < 0);
          break;

        case CONST_DOUBLE:
#if HOST_BITS_PER_WIDE_INT >= 64
          c0 = CONST_DOUBLE_LOW (source);
          c1 = -(c0 < 0);
#else
          c0 = CONST_DOUBLE_LOW (source);
          c1 = CONST_DOUBLE_HIGH (source);
#endif
          break;

        default:
          gcc_unreachable ();
        }

      result = rs6000_emit_set_long_const (dest, c0, c1);
      break;

    default:
      gcc_unreachable ();
    }

  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}

/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
                                        DImode);
      operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
                                        DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
          || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
        {
          if (ud1 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
          else
            emit_move_insn (dest, GEN_INT (ud1));
        }

      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
               || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
        {
          if (ud2 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud2 << 16));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud1)));
        }
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
               || (ud4 == 0 && ! (ud3 & 0x8000)))
        {
          if (ud3 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud3 << 16));

          if (ud2 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud2)));
          emit_move_insn (copy_rtx (dest),
                          gen_rtx_ASHIFT (DImode, copy_rtx (dest),
                                          GEN_INT (16)));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud1)));
        }
      else
        {
          if (ud4 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud4 << 16));

          if (ud3 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud3)));

          emit_move_insn (copy_rtx (dest),
                          gen_rtx_ASHIFT (DImode, copy_rtx (dest),
                                          GEN_INT (32)));
          if (ud2 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud2 << 16)));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
        }
    }
  return dest;
}

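/* Illustrative sketch (not part of the original file; the helper name
   is made up): the decomposition above treats the 64-bit constant as
   four 16-bit chunks ud4:ud3:ud2:ud1 and emits only the insns the
   leading chunks require -- e.g. lis/ori to build the high half, a
   32-bit shift, then oris/ori for the low half.  The helper below
   extracts the chunks the same way with plain C shifts and masks,
   assuming a 64-bit unsigned long long rather than the HOST_WIDE_INT
   juggling the real code must do for 32-bit hosts.  */

static void
illustrate_split_into_chunks (unsigned long long c, unsigned int ud[4])
{
  ud[0] = c & 0xffff;            /* ud1: bits 0..15 (ori material)  */
  ud[1] = (c >> 16) & 0xffff;    /* ud2: bits 16..31 (oris/lis)  */
  ud[2] = (c >> 32) & 0xffff;    /* ud3: bits 32..47  */
  ud[3] = (c >> 48) & 0xffff;    /* ud4: bits 48..63  */
}
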
/* Helper for the following.  Get rid of [r+r] memory refs
   in cases where it won't work (TImode, TFmode, TDmode).  */

static void
rs6000_eliminate_indexed_memrefs (rtx operands[2])
{
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (XEXP (operands[0], 0)) != REG
      && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
      && ! reload_in_progress)
    operands[0]
      = replace_equiv_address (operands[0],
                               copy_addr_to_reg (XEXP (operands[0], 0)));

  if (GET_CODE (operands[1]) == MEM
      && GET_CODE (XEXP (operands[1], 0)) != REG
      && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1]
      = replace_equiv_address (operands[1],
                               copy_addr_to_reg (XEXP (operands[1], 0)));
}

/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
         to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
              || FLOAT_MODE_P (mode)
              || ((CONST_DOUBLE_HIGH (operands[1]) != 0
                   || CONST_DOUBLE_LOW (operands[1]) < 0)
                  && (CONST_DOUBLE_HIGH (operands[1]) != -1
                      || CONST_DOUBLE_LOW (operands[1]) >= 0)));

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
          || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
                                            ? 32 : MEM_ALIGN (operands[0])))
            || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
                                               ? 32
                                               : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands[0])
      && ! MEM_VOLATILE_P (operands[1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
                      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
                      adjust_address (copy_rtx (operands[1]), SImode, 4));
      return;
    }

  if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
      && !gpc_reg_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
        regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
        regnum = REGNO (operands[1]);
      else
        regnum = -1;

      /* If operands[1] is a register, on POWER it may have
         double-precision data in it, so truncate it to single
         precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
        {
          rtx newreg;
          newreg = (no_new_pseudos ? copy_rtx (operands[1])
                    : gen_reg_rtx (mode));
          emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
          operands[1] = newreg;
        }
    }

  /* Recognize the case where operand[1] is a reference to thread-local
     data and load its address to a register.  */
  if (rs6000_tls_referenced_p (operands[1]))
    {
      enum tls_model model;
      rtx tmp = operands[1];
      rtx addend = NULL;

      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
        {
          addend = XEXP (XEXP (tmp, 0), 1);
          tmp = XEXP (XEXP (tmp, 0), 0);
        }

      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
      model = SYMBOL_REF_TLS_MODEL (tmp);
      gcc_assert (model != 0);

      tmp = rs6000_legitimize_tls_address (tmp, model);
      if (addend)
        {
          tmp = gen_rtx_PLUS (mode, tmp, addend);
          tmp = force_operand (tmp, operands[0]);
        }
      operands[1] = tmp;
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
          || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* 128-bit constant floating-point values on Darwin should really be
     loaded as two parts.  */
  if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
         know how to get a DFmode SUBREG of a TFmode.  */
      enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
      rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
                        simplify_gen_subreg (imode, operands[1], mode, 0),
                        imode);
      rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
                                             GET_MODE_SIZE (imode)),
                        simplify_gen_subreg (imode, operands[1], mode,
                                             GET_MODE_SIZE (imode)),
                        imode);
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      if (CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != CONST_INT)
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case TDmode:
      rs6000_eliminate_indexed_memrefs (operands);
      /* fall through */

    case DFmode:
    case DDmode:
    case SFmode:
      if (CONSTANT_P (operands[1])
          && ! easy_fp_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      if (CONSTANT_P (operands[1])
          && !easy_vector_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data.  */
      if (TARGET_ELF
          && mode == Pmode
          && DEFAULT_ABI == ABI_V4
          && (GET_CODE (operands[1]) == SYMBOL_REF
              || GET_CODE (operands[1]) == CONST)
          && small_data_operand (operands[1], mode))
        {
          emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
          return;
        }

      if (DEFAULT_ABI == ABI_V4
          && mode == Pmode && mode == SImode
          && flag_pic == 1 && got_operand (operands[1], mode))
        {
          emit_insn (gen_movsi_got (operands[0], operands[1]));
          return;
        }

      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
          && TARGET_NO_TOC
          && ! flag_pic
          && mode == Pmode
          && CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != HIGH
          && GET_CODE (operands[1]) != CONST_INT)
        {
          rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

          /* If this is a function address on -mcall-aixdesc,
             convert it to the address of the descriptor.  */
          if (DEFAULT_ABI == ABI_AIX
              && GET_CODE (operands[1]) == SYMBOL_REF
              && XSTR (operands[1], 0)[0] == '.')
            {
              const char *name = XSTR (operands[1], 0);
              rtx new_ref;
              while (*name == '.')
                name++;
              new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
              CONSTANT_POOL_ADDRESS_P (new_ref)
                = CONSTANT_POOL_ADDRESS_P (operands[1]);
              SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
              SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
              SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
              operands[1] = new_ref;
            }

          if (DEFAULT_ABI == ABI_DARWIN)
            {
#if TARGET_MACHO
              if (MACHO_DYNAMIC_NO_PIC_P)
                {
                  /* Take care of any required data indirection.  */
                  operands[1] = rs6000_machopic_legitimize_pic_address (
                                  operands[1], mode, operands[0]);
                  if (operands[0] != operands[1])
                    emit_insn (gen_rtx_SET (VOIDmode,
                                            operands[0], operands[1]));
                  return;
                }
#endif
              emit_insn (gen_macho_high (target, operands[1]));
              emit_insn (gen_macho_low (operands[0], target, operands[1]));
              return;
            }

          emit_insn (gen_elf_high (target, operands[1]));
          emit_insn (gen_elf_low (operands[0], target, operands[1]));
          return;
        }

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
         and we have put it in the TOC, we just need to make a TOC-relative
         reference to it.  */
      if (TARGET_TOC
          && GET_CODE (operands[1]) == SYMBOL_REF
          && constant_pool_expr_p (operands[1])
          && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
                                              get_pool_mode (operands[1])))
        {
          operands[1] = create_TOC_reference (operands[1]);
        }
      else if (mode == Pmode
               && CONSTANT_P (operands[1])
               && ((GET_CODE (operands[1]) != CONST_INT
                    && ! easy_fp_constant (operands[1], mode))
                   || (GET_CODE (operands[1]) == CONST_INT
                       && num_insns_constant (operands[1], mode) > 2)
                   || (GET_CODE (operands[0]) == REG
                       && FP_REGNO_P (REGNO (operands[0]))))
               && GET_CODE (operands[1]) != HIGH
               && ! legitimate_constant_pool_address_p (operands[1])
               && ! toc_relative_expr_p (operands[1]))
        {
          /* Emit a USE operation so that the constant isn't deleted if
             expensive optimizations are turned on because nobody
             references it.  This should only be done for operands that
             contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
             This should not be done for operands that contain LABEL_REFs.
             For now, we just handle the obvious case.  */
          if (GET_CODE (operands[1]) != LABEL_REF)
            emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
          /* Darwin uses a special PIC legitimizer.  */
          if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
            {
              operands[1] =
                rs6000_machopic_legitimize_pic_address (operands[1], mode,
                                                        operands[0]);
              if (operands[0] != operands[1])
                emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
              return;
            }
#endif

          /* If we are to limit the number of things we put in the TOC and
             this is a symbol plus a constant we can add in one insn,
             just put the symbol in the TOC and add the constant.  Don't do
             this if reload is in progress.  */
          if (GET_CODE (operands[1]) == CONST
              && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
              && GET_CODE (XEXP (operands[1], 0)) == PLUS
              && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
              && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
                  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
              && ! side_effects_p (operands[0]))
            {
              rtx sym =
                force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
              rtx other = XEXP (XEXP (operands[1], 0), 1);

              sym = force_reg (mode, sym);
              if (mode == SImode)
                emit_insn (gen_addsi3 (operands[0], sym, other));
              else
                emit_insn (gen_adddi3 (operands[0], sym, other));
              return;
            }

          operands[1] = force_const_mem (mode, operands[1]);

          if (TARGET_TOC
              && constant_pool_expr_p (XEXP (operands[1], 0))
              && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
                        get_pool_constant (XEXP (operands[1], 0)),
                        get_pool_mode (XEXP (operands[1], 0))))
            {
              operands[1]
                = gen_const_mem (mode,
                                 create_TOC_reference (XEXP (operands[1], 0)));
              set_mem_alias_set (operands[1], get_TOC_alias_set ());
            }
        }
      break;

    case TImode:
      rs6000_eliminate_indexed_memrefs (operands);

      if (TARGET_POWER)
        {
          emit_insn (gen_rtx_PARALLEL (VOIDmode,
                       gen_rtvec (2,
                                  gen_rtx_SET (VOIDmode,
                                               operands[0], operands[1]),
                                  gen_rtx_CLOBBER (VOIDmode,
                                                   gen_rtx_SCRATCH (SImode)))));
          return;
        }
      break;

    default:
      gcc_unreachable ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}

/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (SCALAR_FLOAT_MODE_P (MODE)			\
   && (MODE) != SDmode				\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))

/* Return a nonzero value to say to return the function value in
   memory, just as large structures are always returned.  TYPE will be
   the data type of the value, and FNTYPE will be the type of the
   function doing the returning, or @code{NULL} for libcalls.

   The AIX ABI for the RS/6000 specifies that all structures are
   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
   specifies that structures <= 8 bytes are returned in r3/r4, but a
   draft put them in memory, and GCC used to implement the draft
   instead of the final standard.  Therefore, aix_struct_return
   controls this instead of DEFAULT_ABI; V.4 targets needing backward
   compatibility can change DRAFT_V4_STRUCT_RET to override the
   default, and -m switches get the final word.  See
   rs6000_override_options for more details.

   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
   long double support is enabled.  These values are returned in memory.

   int_size_in_bytes returns -1 for variable size objects, which go in
   memory always.  The cast to unsigned makes -1 > 8.  */

static bool
rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  /* In the darwin64 abi, try to use registers for larger structs
     if possible.  */
  if (rs6000_darwin64_abi
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed
         as an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
      if (valret)
        return false;
      /* Otherwise fall through to more conventional ABI rules.  */
    }

  if (AGGREGATE_TYPE_P (type)
      && (aix_struct_return
          || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
    return true;

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    return false;

  /* Return synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      static bool warned_for_return_big_vectors = false;
      if (!warned_for_return_big_vectors)
        {
          warning (0, "GCC vector returned by reference: "
                   "non-standard ABI extension with no compatibility guarantee");
          warned_for_return_big_vectors = true;
        }
      return true;
    }

  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
    return true;

  return false;
}

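/* Illustrative sketch (not part of the original file; the helper name
   is made up): the unsigned cast used above.  int_size_in_bytes
   returns -1 for variable sized types; casting to unsigned turns -1
   into the largest value, so a single comparison covers both "bigger
   than 8 bytes" and "variable size".  */

static int
illustrate_aggregate_needs_memory (long size_in_bytes)
{
  return (unsigned long) size_in_bytes > 8;
}
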
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
                      rtx libname ATTRIBUTE_UNUSED, int incoming,
                      int libcall, int n_named_args)
{
  static CUMULATIVE_ARGS zero_cumulative;

  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
                      ? CALL_LIBCALL : CALL_NORMAL);
  cum->sysv_gregno = GP_ARG_MIN_REG;
  cum->stdarg = fntype
    && (TYPE_ARG_TYPES (fntype) != 0
        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
            != void_type_node));

  cum->nargs_prototype = 0;
  if (incoming || cum->prototype)
    cum->nargs_prototype = n_named_args;

  /* Check for a longcall attribute.  */
  if ((!fntype && rs6000_default_long_calls)
      || (fntype
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
          && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
    cum->call_cookie |= CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, " ret code = %s,",
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }

      if (cum->call_cookie & CALL_LONG)
        fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
               cum->prototype, cum->nargs_prototype);
    }

  if (fntype
      && !TARGET_ALTIVEC
      && TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
    {
      error ("cannot return value in vector register because"
             " altivec instructions are disabled, use -maltivec"
             " to enable them");
    }
}

/* Return true if TYPE must be passed on the stack and not in registers.  */

static bool
rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
{
  if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
    return must_pass_in_stack_var_size (mode, type);
  else
    return must_pass_in_stack_var_size_or_pad (mode, type);
}

/* If defined, a C expression which determines whether, and in which
   direction, to pad out an argument with extra space.  The value
   should be of type `enum direction': either `upward' to pad above
   the argument, `downward' to pad below, or `none' to inhibit
   padding.

   For the AIX ABI structs are always stored left shifted in their
   argument slot.  */

enum direction
function_arg_padding (enum machine_mode mode, tree type)
{
#ifndef AGGREGATE_PADDING_FIXED
#define AGGREGATE_PADDING_FIXED 0
#endif
#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
#define AGGREGATES_PAD_UPWARD_ALWAYS 0
#endif

  if (!AGGREGATE_PADDING_FIXED)
    {
      /* GCC used to pass structures of the same size as integer types as
         if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
         i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
         passed padded downward, except that -mstrict-align further
         muddied the water in that multi-component structures of 2 and 4
         bytes in size were passed padded upward.

         The following arranges for best compatibility with previous
         versions of gcc, but removes the -mstrict-align dependency.  */
      if (BYTES_BIG_ENDIAN)
        {
          HOST_WIDE_INT size = 0;

          if (mode == BLKmode)
            {
              if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
                size = int_size_in_bytes (type);
            }
          else
            size = GET_MODE_SIZE (mode);

          if (size == 1 || size == 2 || size == 4)
            return downward;
        }
      return upward;
    }

  if (AGGREGATES_PAD_UPWARD_ALWAYS)
    {
      if (type != 0 && AGGREGATE_TYPE_P (type))
        return upward;
    }

  /* Fall back to the default.  */
  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
}

/* If defined, a C expression that gives the alignment boundary, in bits,
   of an argument with the specified mode and type.  If it is not defined,
   PARM_BOUNDARY is used for all arguments.

   V.4 wants long longs and doubles to be double word aligned.  Just
   testing the mode size is a boneheaded way to do this as it means
   that other types such as complex int are also double word aligned.
   However, we're stuck with this because changing the ABI might break
   existing library interfaces.

   Doubleword align SPE vectors.
   Quadword align Altivec vectors.
   Quadword align large synthetic vector types.  */

int
function_arg_boundary (enum machine_mode mode, tree type)
{
  if (DEFAULT_ABI == ABI_V4
      && (GET_MODE_SIZE (mode) == 8
          || (TARGET_HARD_FLOAT
              && TARGET_FPRS
              && (mode == TFmode || mode == TDmode))))
    return 64;
  else if (SPE_VECTOR_MODE (mode)
           || (type && TREE_CODE (type) == VECTOR_TYPE
               && int_size_in_bytes (type) >= 8
               && int_size_in_bytes (type) < 16))
    return 64;
  else if (ALTIVEC_VECTOR_MODE (mode)
           || (type && TREE_CODE (type) == VECTOR_TYPE
               && int_size_in_bytes (type) >= 16))
    return 128;
  else if (rs6000_darwin64_abi && mode == BLKmode
           && type && TYPE_ALIGN (type) > 64)
    return 128;
  else
    return PARM_BOUNDARY;
}

/* For a function parm of MODE and TYPE, return the starting word in
   the parameter area.  NWORDS of the parameter area are already used.  */

static unsigned int
rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
{
  unsigned int align;
  unsigned int parm_offset;

  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
  parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
  return nwords + (-(parm_offset + nwords) & align);
}

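/* Illustrative sketch (not part of the original file; the helper name
   is made up): the return expression above rounds NWORDS up until word
   parm_offset + nwords is aligned.  ALIGN is a mask of low bits (0, 1
   or 3); parm_offset (2 for V.4, 6 for AIX) accounts for the parameter
   save area starting 8 or 24 bytes into the frame.  For example, with
   parm_offset = 6, nwords = 1 and a doubleword argument (mask 1),
   -(6 + 1) & 1 == 1, so the argument starts at word 2.  */

static unsigned int
illustrate_parm_start (unsigned int nwords, unsigned int parm_offset,
                       unsigned int align_mask)
{
  return nwords + (-(parm_offset + nwords) & align_mask);
}
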
/* Compute the size (in words) of a function argument.  */

static unsigned long
rs6000_arg_size (enum machine_mode mode, tree type)
{
  unsigned long size;

  if (mode != BLKmode)
    size = GET_MODE_SIZE (mode);
  else
    size = int_size_in_bytes (type);

  if (TARGET_32BIT)
    return (size + 3) >> 2;
  else
    return (size + 7) >> 3;
}

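/* Illustrative sketch (not part of the original file; the helper name
   is made up): the shifts above are ceiling division by the word size,
   so a 13-byte BLKmode struct occupies (13 + 3) >> 2 = 4 words under a
   32-bit ABI and (13 + 7) >> 3 = 2 words under a 64-bit ABI.  */

static unsigned long
illustrate_arg_words (unsigned long size_in_bytes, int is_32bit)
{
  return is_32bit ? (size_in_bytes + 3) >> 2 : (size_in_bytes + 7) >> 3;
}
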
/* Use this to flush pending int fields.  */

static void
rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
                                          HOST_WIDE_INT bitpos)
{
  unsigned int startbit, endbit;
  int intregs, intoffset;
  enum machine_mode mode;

  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                            MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset up to the beginning of the word in this
             case.  */
          intoffset = intoffset & -BITS_PER_WORD;
        }
    }

  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  cum->words += intregs;
}

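/* Illustrative sketch (not part of the original file; the helper name
   is made up): the flush rounds the pending [intoffset, bitpos) bit
   range outward to word boundaries and charges one GPR per covered
   word.  With 64-bit words, pending ints from bit 16 up to bit 96 give
   startbit 0 and endbit 128, i.e. two registers.  */

static int
illustrate_int_regs_flushed (unsigned int intoffset, unsigned int bitpos,
                             unsigned int bits_per_word)
{
  unsigned int startbit = intoffset & -bits_per_word;
  unsigned int endbit = (bitpos + bits_per_word - 1) & -bits_per_word;
  return (endbit - startbit) / bits_per_word;
}
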
/* The darwin64 ABI calls for us to recurse down through structs,
   looking for elements passed in registers.  Unfortunately, we have
   to track int register count here also because of misalignments
   in powerpc alignment mode.  */

static void
rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
                                            tree type,
                                            HOST_WIDE_INT startbitpos)
{
  tree f;

  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        HOST_WIDE_INT bitpos = startbitpos;
        tree ftype = TREE_TYPE (f);
        enum machine_mode mode;
        if (ftype == error_mark_node)
          continue;
        mode = TYPE_MODE (ftype);

        if (DECL_SIZE (f) != 0
            && host_integerp (bit_position (f), 1))
          bitpos += int_bit_position (f);

        /* ??? FIXME: else assume zero offset.  */

        if (TREE_CODE (ftype) == RECORD_TYPE)
          rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
        else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
          {
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
            cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
          }
        else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
          {
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
            cum->vregno++;
            cum->words += 2;
          }
        else if (cum->intoffset == -1)
          cum->intoffset = bitpos;
      }
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                      tree type, int named, int depth)
{
  int size;

  /* Only tick off an argument if we're not recursing.  */
  if (depth == 0)
    cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI
      && (ALTIVEC_VECTOR_MODE (mode)
          || (type && TREE_CODE (type) == VECTOR_TYPE
              && int_size_in_bytes (type) == 16)))
    {
      bool stack = false;

      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
        {
          cum->vregno++;
          if (!TARGET_ALTIVEC)
            error ("cannot pass argument in vector register because"
                   " altivec instructions are disabled, use -maltivec"
                   " to enable them");

          /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
             even if it is going to be passed in a vector register.
             Darwin does the same for variable-argument functions.  */
          if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
              || (cum->stdarg && DEFAULT_ABI != ABI_V4))
            stack = true;
        }
      else
        stack = true;

      if (stack)
        {
          int align;

          /* Vector parameters must be 16-byte aligned.  This places
             them at 2 mod 4 in terms of words in 32-bit mode, since
             the parameter save area starts at offset 24 from the
             stack.  In 64-bit mode, they just have to start on an
             even word, since the parameter save area is 16-byte
             aligned.  Space for GPRs is reserved even if the argument
             will be passed in memory.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          cum->words += align + rs6000_arg_size (mode, type);

          if (TARGET_DEBUG_ARG)
            {
              fprintf (stderr, "function_adv: words = %2d, align=%d, ",
                       cum->words, align);
              fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
                       cum->nargs_prototype, cum->prototype,
                       GET_MODE_NAME (mode));
            }
        }
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
           && !cum->stdarg
           && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;

  else if (rs6000_darwin64_abi
           && mode == BLKmode
           && TREE_CODE (type) == RECORD_TYPE
           && (size = int_size_in_bytes (type)) > 0)
    {
      /* Variable sized types have size == -1 and are
         treated as if consisting entirely of ints.
         Pad to 16 byte boundary if needed.  */
      if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
          && (cum->words % 2) != 0)
        cum->words++;
      /* For varargs, we can just go up by the size of the struct.  */
      if (!named)
        cum->words += (size + 7) / 8;
      else
        {
          /* It is tempting to say int register count just goes up by
             sizeof(type)/8, but this is wrong in a case such as
             { int; double; int; } [powerpc alignment].  We have to
             grovel through the fields for these too.  */
          cum->intoffset = 0;
          rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
          rs6000_darwin64_record_arg_advance_flush (cum,
                                                    size * BITS_PER_UNIT);
        }
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || mode == DDmode || mode == TDmode
              || (mode == TFmode && !TARGET_IEEEQUAD)))
        {
          /* _Decimal128 must use an even/odd register pair.  This assumes
             that the register number is odd when fregno is odd.  */
          if (mode == TDmode && (cum->fregno % 2) == 1)
            cum->fregno++;

          if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
              <= FP_ARG_V4_MAX_REG)
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
          else
            {
              cum->fregno = FP_ARG_V4_MAX_REG + 1;
              if (mode == DFmode || mode == TFmode
                  || mode == DDmode || mode == TDmode)
                cum->words += cum->words & 1;
              cum->words += rs6000_arg_size (mode, type);
            }
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            {
              /* Long long and SPE vectors are aligned on the stack.
                 So are other 2 word items such as complex int due to
                 a historical mistake.  */
              if (n_words == 2)
                cum->words += cum->words & 1;
              cum->words += n_words;
            }

          /* Note: continuing to accumulate gregno past the point where
             we start spilling to the stack tells
             expand_builtin_saveregs that we have started spilling.  */
          cum->sysv_gregno = gregno + n_words;
        }

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
                   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
          fprintf (stderr, "mode = %4s, named = %d\n",
                   GET_MODE_NAME (mode), named);
        }
    }
  else
    {
      int n_words = rs6000_arg_size (mode, type);
      int start_words = cum->words;
      int align_words = rs6000_parm_start (mode, type, start_words);

      cum->words = align_words + n_words;

      if (SCALAR_FLOAT_MODE_P (mode)
          && mode != SDmode
          && TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          /* _Decimal128 must be passed in an even/odd float register pair.
             This assumes that the register number is odd when fregno is
             odd.  */
          if (mode == TDmode && (cum->fregno % 2) == 1)
            cum->fregno++;
          cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
        }

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
                   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
          fprintf (stderr, "named = %d, align = %d, depth = %d\n",
                   named, align_words - start_words, depth);
        }
    }
}

static rtx
spe_build_register_parallel (enum machine_mode mode, int gregno)
{
  rtx r1, r3, r5, r7;

  switch (mode)
    {
    case DFmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));

    case DCmode:
    case TFmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      r3 = gen_rtx_REG (DImode, gregno + 2);
      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
      return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));

    case TCmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      r3 = gen_rtx_REG (DImode, gregno + 2);
      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
      r5 = gen_rtx_REG (DImode, gregno + 4);
      r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
      r7 = gen_rtx_REG (DImode, gregno + 6);
      r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
      return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));

    default:
      gcc_unreachable ();
    }
}

/* Determine where to put a SIMD argument on the SPE.  */
static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                         tree type)
{
  int gregno = cum->sysv_gregno;

  /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
     doubles are passed and returned in a pair of GPRs for ABI
     compatibility.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
                             || mode == TFmode || mode == TCmode))
    {
      int n_words = rs6000_arg_size (mode, type);

      /* Doubles go in an odd/even register pair (r5/r6, etc).  */
      if (mode == DFmode)
        gregno += (1 - gregno) & 1;

      /* Multi-reg args are not split between registers and stack.  */
      if (gregno + n_words - 1 > GP_ARG_MAX_REG)
        return NULL_RTX;

      return spe_build_register_parallel (mode, gregno);
    }
  if (cum->stdarg)
    {
      int n_words = rs6000_arg_size (mode, type);

      /* SPE vectors are put in odd registers.  */
      if (n_words == 2 && (gregno & 1) == 0)
        gregno += 1;

      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
        {
          rtx r1, r2;
          enum machine_mode m = SImode;

          r1 = gen_rtx_REG (m, gregno);
          r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
          r2 = gen_rtx_REG (m, gregno + 1);
          r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
          return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
        }
      else
        return NULL_RTX;
    }
  else
    {
      if (gregno <= GP_ARG_MAX_REG)
        return gen_rtx_REG (mode, gregno);
      else
        return NULL_RTX;
    }
}

/* A subroutine of rs6000_darwin64_record_arg.  Assign the bits of the
   structure between cum->intoffset and bitpos to integer registers.  */

static void
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
                                  HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
  enum machine_mode mode;
  unsigned int regno;
  unsigned int startbit, endbit;
  int this_regno, intregs, intoffset;
  rtx reg;

  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  /* If this is the trailing part of a word, try to only load that
     much into the register.  Otherwise load the whole register.  Note
     that in the latter case we may pick up unwanted bits.  It's not a
     problem at the moment but we may wish to revisit this.  */

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                            MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset up to the beginning of the word in this
             case.  */
          intoffset = intoffset & -BITS_PER_WORD;
          mode = word_mode;
        }
    }
  else
    mode = word_mode;

  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  this_regno = cum->words + intoffset / BITS_PER_WORD;

  if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
    cum->use_stack = 1;

  intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
  if (intregs <= 0)
    return;

  intoffset /= BITS_PER_UNIT;
  do
    {
      regno = GP_ARG_MIN_REG + this_regno;
      reg = gen_rtx_REG (mode, regno);
      rvec[(*k)++] =
        gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));

      this_regno += 1;
      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
      mode = word_mode;
      intregs -= 1;
    }
  while (intregs > 0);
}

/* Recursive workhorse for the following.  */

static void
rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
                                    HOST_WIDE_INT startbitpos, rtx rvec[],
                                    int *k)
{
  tree f;

  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        HOST_WIDE_INT bitpos = startbitpos;
        tree ftype = TREE_TYPE (f);
        enum machine_mode mode;
        if (ftype == error_mark_node)
          continue;
        mode = TYPE_MODE (ftype);

        if (DECL_SIZE (f) != 0
            && host_integerp (bit_position (f), 1))
          bitpos += int_bit_position (f);

        /* ??? FIXME: else assume zero offset.  */

        if (TREE_CODE (ftype) == RECORD_TYPE)
          rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
        else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
          {
#if 0
            switch (mode)
              {
              case SCmode: mode = SFmode; break;
              case DCmode: mode = DFmode; break;
              case TCmode: mode = TFmode; break;
              default: break;
              }
#endif
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->fregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
            if (mode == TFmode || mode == TDmode)
              cum->fregno++;
          }
        else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
          {
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->vregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
          }
        else if (cum->intoffset == -1)
          cum->intoffset = bitpos;
      }
}

/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
   the register(s) to be used for each field and subfield of a struct
   being passed by value, along with the offset of where the
   register's value may be found in the block.  FP fields go in FP
   registers, vector fields go in vector registers, and everything
   else goes in int registers, packed as in memory.

   This code is also used for function return values.  RETVAL indicates
   whether this is the case.

   Much of this is taken from the SPARC V9 port, which has a similar
   calling convention.  */

static rtx
rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
                            int named, bool retval)
{
  rtx rvec[FIRST_PSEUDO_REGISTER];
  int k = 1, kbase = 1;
  HOST_WIDE_INT typesize = int_size_in_bytes (type);
  /* This is a copy; modifications are not visible to our caller.  */
  CUMULATIVE_ARGS copy_cum = *orig_cum;
  CUMULATIVE_ARGS *cum = &copy_cum;

  /* Pad to 16 byte boundary if needed.  */
  if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
      && (cum->words % 2) != 0)
    cum->words++;

  cum->intoffset = 0;
  cum->use_stack = 0;
  cum->named = named;

  /* Put entries into rvec[] for individual FP and vector fields, and
     for the chunks of memory that go in int regs.  Note we start at
     element 1; 0 is reserved for an indication of using memory, and
     may or may not be filled in below.  */
  rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
  rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);

  /* If any part of the struct went on the stack put all of it there.
     This hack is because the generic code for
     FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
     parts of the struct are not at the beginning.  */
  if (cum->use_stack)
    {
      if (retval)
        return NULL_RTX;    /* doesn't go in registers at all */
      kbase = 0;
      rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
    }
  if (k > 1 || cum->use_stack)
    return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
  else
    return NULL_RTX;
}

/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */

static rtx
rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
{
  int n_units;
  int i, k;
  rtx rvec[GP_ARG_NUM_REG + 1];

  if (align_words >= GP_ARG_NUM_REG)
    return NULL_RTX;

  n_units = rs6000_arg_size (mode, type);

  /* Optimize the simple case where the arg fits in one gpr, except in
     the case of BLKmode due to assign_parms assuming that registers are
     BITS_PER_WORD wide.  */
  if (n_units == 0
      || (n_units == 1 && mode != BLKmode))
    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);

  k = 0;
  if (align_words + n_units > GP_ARG_NUM_REG)
    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
       using a magic NULL_RTX component.
       This is not strictly correct.  Only some of the arg belongs in
       memory, not all of it.  However, the normal scheme using
       function_arg_partial_nregs can result in unusual subregs, eg.
       (subreg:SI (reg:DF) 4), which are not handled well.  The code to
       store the whole arg to memory is often more efficient than code
       to store pieces, and we know that space is available in the right
       place for the whole arg.  */
    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);

  i = 0;
  do
    {
      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
      rtx off = GEN_INT (i++ * 4);
      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
    }
  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);

  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.  It is
    not modified in this routine.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when CALL_LIBCALL is set) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
              tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
          && (cum->call_cookie & CALL_LIBCALL) == 0
          && (cum->stdarg
              || (cum->nargs_prototype < 0
                  && (cum->prototype || TARGET_NO_PROTOTYPE))))
        {
          /* For the SPE, we need to crxor CR6 always.  */
          if (TARGET_SPE_ABI)
            return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
          else if (TARGET_HARD_FLOAT && TARGET_FPRS)
            return GEN_INT (cum->call_cookie
                            | ((cum->fregno == FP_ARG_MIN_REG)
                               ? CALL_V4_SET_FP_ARGS
                               : CALL_V4_CLEAR_FP_ARGS));
        }

      return GEN_INT (cum->call_cookie);
    }

  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE)
    {
      rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
      if (rslt != NULL_RTX)
        return rslt;
      /* Else fall through to usual handling.  */
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
        /* Vector parameters get passed in vector register
           and also in GPRs or memory, in absence of prototype.  */
        int align_words;
        rtx slot;
        align_words = (cum->words + 1) & ~1;

        if (align_words >= GP_ARG_NUM_REG)
          {
            slot = NULL_RTX;
          }
        else
          {
            slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
          }
        return gen_rtx_PARALLEL (mode,
                 gen_rtvec (2,
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               slot, const0_rtx),
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_REG (mode, cum->vregno),
                                               const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI
           && (ALTIVEC_VECTOR_MODE (mode)
               || (type && TREE_CODE (type) == VECTOR_TYPE
                   && int_size_in_bytes (type) == 16)))
    {
      if (named || abi == ABI_V4)
        return NULL_RTX;
      else
        {
          /* Vector parameters to varargs functions under AIX or Darwin
             get passed in memory and possibly also in GPRs.  */
          int align, align_words, n_words;
          enum machine_mode part_mode;

          /* Vector parameters must be 16-byte aligned.  This places them at
             2 mod 4 in terms of words in 32-bit mode, since the parameter
             save area starts at offset 24 from the stack.  In 64-bit mode,
             they just have to start on an even word, since the parameter
             save area is 16-byte aligned.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          align_words = cum->words + align;

          /* Out of registers?  Memory, then.  */
          if (align_words >= GP_ARG_NUM_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          /* The vector value goes in GPRs.  Only the part of the
             value in GPRs is reported here.  */
          part_mode = mode;
          n_words = rs6000_arg_size (mode, type);
          if (align_words + n_words > GP_ARG_NUM_REG)
            /* Fortunately, there are only two possibilities, the value
               is either wholly in GPRs or half in GPRs and half not.  */
            part_mode = DImode;

          return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
        }
    }
  else if (TARGET_SPE_ABI && TARGET_SPE
           && (SPE_VECTOR_MODE (mode)
               || (TARGET_E500_DOUBLE && (mode == DFmode
                                          || mode == DCmode
                                          || mode == TFmode
                                          || mode == TCmode))))
    return rs6000_spe_function_arg (cum, mode, type);

  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || (mode == TFmode && !TARGET_IEEEQUAD)
              || mode == DDmode || mode == TDmode))
        {
          /* _Decimal128 must use an even/odd register pair.  This assumes
             that the register number is odd when fregno is odd.  */
          if (mode == TDmode && (cum->fregno % 2) == 1)
            cum->fregno++;

          if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
              <= FP_ARG_V4_MAX_REG)
            return gen_rtx_REG (mode, cum->fregno);
          else
            return NULL_RTX;
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type,
                                              gregno - GP_ARG_MIN_REG);
          return gen_rtx_REG (mode, gregno);
        }
    }
  else
    {
      int align_words = rs6000_parm_start (mode, type, cum->words);

      /* _Decimal128 must be passed in an even/odd float register pair.
         This assumes that the register number is odd when fregno is odd.  */
      if (mode == TDmode && (cum->fregno % 2) == 1)
        cum->fregno++;

      if (USE_FP_FOR_ARG_P (cum, mode, type))
        {
          rtx rvec[GP_ARG_NUM_REG + 1];
          rtx r;
          int k;
          bool needs_psave;
          enum machine_mode fmode = mode;
          unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

          if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
            {
              /* Currently, we only ever need one reg here because complex
                 doubles are split.  */
              gcc_assert (cum->fregno == FP_ARG_MAX_REG
                          && (fmode == TFmode || fmode == TDmode));

              /* Long double or _Decimal128 split over regs and memory.  */
              fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
            }

          /* Do we also need to pass this arg in the parameter save
             area?  */
          needs_psave = (type
                         && (cum->nargs_prototype <= 0
                             || (DEFAULT_ABI == ABI_AIX
                                 && TARGET_XL_COMPAT
                                 && align_words >= GP_ARG_NUM_REG)));

          if (!needs_psave && mode == fmode)
            return gen_rtx_REG (fmode, cum->fregno);

          k = 0;
          if (needs_psave)
            {
              /* Describe the part that goes in gprs or the stack.
                 This piece must come first, before the fprs.  */
              if (align_words < GP_ARG_NUM_REG)
                {
                  unsigned long n_words = rs6000_arg_size (mode, type);

                  if (align_words + n_words > GP_ARG_NUM_REG
                      || (TARGET_32BIT && TARGET_POWERPC64))
                    {
                      /* If this is partially on the stack, then we only
                         include the portion actually in registers here.  */
                      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
                      rtx off;
                      int i = 0;
                      if (align_words + n_words > GP_ARG_NUM_REG)
                        /* Not all of the arg fits in gprs.  Say that it
                           goes in memory too, using a magic NULL_RTX
                           component.  Also see comment in
                           rs6000_mixed_function_arg for why the normal
                           function_arg_partial_nregs scheme doesn't work
                           in this case.  */
                        rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
                                                       const0_rtx);
                      do
                        {
                          r = gen_rtx_REG (rmode,
                                           GP_ARG_MIN_REG + align_words);
                          off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
                          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
                        }
                      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
                    }
                  else
                    {
                      /* The whole arg fits in gprs.  */
                      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
                      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
                    }
                }
              else
                /* It's entirely in memory.  */
                rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
            }

          /* Describe where this piece goes in the fprs.  */
          r = gen_rtx_REG (fmode, cum->fregno);
          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

          return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
        }
      else if (align_words < GP_ARG_NUM_REG)
        {
          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          if (mode == BLKmode)
            mode = Pmode;

          return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
        }
      else
        return NULL_RTX;
    }
}

/* For an arg passed partly in registers and partly in memory, this is
   the number of bytes passed in registers.  For args passed entirely in
   registers or entirely in memory, zero.  When an arg is described by a
   PARALLEL, perhaps using more than one register type, this function
   returns the number of bytes used by the first element of the PARALLEL.  */

static int
rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, bool named)
{
  int ret = 0;
  int align_words;

  if (DEFAULT_ABI == ABI_V4)
    return 0;

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  /* In this complicated case we just disable the partial_nregs code.  */
  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    return 0;

  align_words = rs6000_parm_start (mode, type, cum->words);

  if (USE_FP_FOR_ARG_P (cum, mode, type))
    {
      /* If we are passing this arg in the fixed parameter save area
         (gprs or memory) as well as fprs, then this function should
         return the number of partial bytes passed in the parameter
         save area rather than partial bytes passed in fprs.  */
      if (type
          && (cum->nargs_prototype <= 0
              || (DEFAULT_ABI == ABI_AIX
                  && TARGET_XL_COMPAT
                  && align_words >= GP_ARG_NUM_REG)))
        return 0;
      else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
               > FP_ARG_MAX_REG + 1)
        ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
      else if (cum->nargs_prototype >= 0)
        return 0;
    }

  if (align_words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
    ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);

  return ret;
}

/* A C expression that indicates when an argument must be passed by
   reference.  If nonzero for an argument, a copy of that argument is
   made in memory and a pointer to the argument is passed instead of
   the argument itself.  The pointer is passed in whatever way is
   appropriate for passing a pointer to that type.

   Under V.4, aggregates and long double are passed by reference.

   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
   reference unless the AltiVec vector extension ABI is in force.

   As an extension to all ABIs, variable sized types are passed by
   reference.  */

static bool
rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                          enum machine_mode mode, tree type,
                          bool named ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
      return 1;
    }

  if (!type)
    return 0;

  if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
      return 1;
    }

  if (int_size_in_bytes (type) < 0)
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
      return 1;
    }

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
      return 1;
    }

  /* Pass synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      static bool warned_for_pass_big_vectors = false;
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
      if (!warned_for_pass_big_vectors)
        {
          warning (0, "GCC vector passed by reference: "
                   "non-standard ABI extension with no compatibility guarantee");
          warned_for_pass_big_vectors = true;
        }
      return 1;
    }

  return 0;
}

static void
rs6000_move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;
  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;

  if (nregs == 0)
    return;

  for (i = 0; i < nregs; i++)
    {
      rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
      if (reload_completed)
        {
          if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
            tem = NULL_RTX;
          else
            tem = simplify_gen_subreg (reg_mode, x, BLKmode,
                                       i * GET_MODE_SIZE (reg_mode));
        }
      else
        tem = replace_equiv_address (tem, XEXP (tem, 0));

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
    }
}

/* Perform any needed actions for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type, int *pretend_size ATTRIBUTE_UNUSED,
			int no_rtl)
{
  CUMULATIVE_ARGS next_cum;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx save_area = NULL_RTX, mem;
  int first_reg_offset, set;

  /* Skip the last named argument.  */
  next_cum = *cum;
  function_arg_advance (&next_cum, mode, type, 1, 0);

  if (DEFAULT_ABI == ABI_V4)
    {
      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;

      if (! no_rtl)
	{
	  int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
	  HOST_WIDE_INT offset = 0;

	  /* Try to optimize the size of the varargs save area.
	     The ABI requires that ap.reg_save_area is doubleword
	     aligned, but we don't need to allocate space for all
	     the bytes, only those to which we actually will save
	     anything.  */
	  if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
	    gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
	  if (TARGET_HARD_FLOAT && TARGET_FPRS
	      && next_cum.fregno <= FP_ARG_V4_MAX_REG
	      && cfun->va_list_fpr_size)
	    {
	      if (gpr_reg_num)
		fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
			   * UNITS_PER_FP_WORD;
	      if (cfun->va_list_fpr_size
		  < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
		fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
	      else
		fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
			    * UNITS_PER_FP_WORD;
	    }
	  if (gpr_reg_num)
	    {
	      offset = -((first_reg_offset * reg_size) & ~7);
	      if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
		{
		  gpr_reg_num = cfun->va_list_gpr_size;
		  if (reg_size == 4 && (first_reg_offset & 1))
		    gpr_reg_num++;
		}
	      gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
	    }
	  else if (fpr_size)
	    offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
		       * UNITS_PER_FP_WORD
		     - (int) (GP_ARG_NUM_REG * reg_size);

	  if (gpr_size + fpr_size)
	    {
	      rtx reg_save_area
		= assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
	      gcc_assert (GET_CODE (reg_save_area) == MEM);
	      reg_save_area = XEXP (reg_save_area, 0);
	      if (GET_CODE (reg_save_area) == PLUS)
		{
		  gcc_assert (XEXP (reg_save_area, 0)
			      == virtual_stack_vars_rtx);
		  gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
		  offset += INTVAL (XEXP (reg_save_area, 1));
		}
	      else
		gcc_assert (reg_save_area == virtual_stack_vars_rtx);
	    }

	  cfun->machine->varargs_save_offset = offset;
	  save_area = plus_constant (virtual_stack_vars_rtx, offset);
	}
    }
  else
    {
      first_reg_offset = next_cum.words;
      save_area = virtual_incoming_args_rtx;

      if (targetm.calls.must_pass_in_stack (mode, type))
	first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
    }

  set = get_varargs_alias_set ();
  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
      && cfun->va_list_gpr_size)
    {
      int nregs = GP_ARG_NUM_REG - first_reg_offset;

      if (va_list_gpr_counter_field)
	{
	  /* V4 va_list_gpr_size counts number of registers needed.  */
	  if (nregs > cfun->va_list_gpr_size)
	    nregs = cfun->va_list_gpr_size;
	}
      else
	{
	  /* char * va_list instead counts number of bytes needed.  */
	  if (nregs > cfun->va_list_gpr_size / reg_size)
	    nregs = cfun->va_list_gpr_size / reg_size;
	}

      mem = gen_rtx_MEM (BLKmode,
			 plus_constant (save_area,
					first_reg_offset * reg_size));
      MEM_NOTRAP_P (mem) = 1;
      set_mem_alias_set (mem, set);
      set_mem_align (mem, BITS_PER_WORD);

      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
				  nregs);
    }

  /* Save FP registers if needed.  */
  if (DEFAULT_ABI == ABI_V4
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && ! no_rtl
      && next_cum.fregno <= FP_ARG_V4_MAX_REG
      && cfun->va_list_fpr_size)
    {
      int fregno = next_cum.fregno, nregs;
      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
      rtx lab = gen_label_rtx ();
      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
					       * UNITS_PER_FP_WORD);

      emit_jump_insn
	(gen_rtx_SET (VOIDmode,
		      pc_rtx,
		      gen_rtx_IF_THEN_ELSE (VOIDmode,
					    gen_rtx_NE (VOIDmode, cr1,
							const0_rtx),
					    gen_rtx_LABEL_REF (VOIDmode, lab),
					    pc_rtx)));

      for (nregs = 0;
	   fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
	   fregno++, off += UNITS_PER_FP_WORD, nregs++)
	{
	  mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
	  MEM_NOTRAP_P (mem) = 1;
	  set_mem_alias_set (mem, set);
	  set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
	  emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
	}

      emit_label (lab);
    }
}
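
/* Illustrative sketch, not part of the compiler (kept out of the build
   with #if 0): for a V.4 variadic function such as the one below, the
   code above spills the unnamed GPR arguments (and, behind a runtime
   check of CR1, the unnamed FPR arguments) into the register save area
   that va_arg will later walk.  */
#if 0
#include <stdarg.h>

int
sum (int count, ...)		/* COUNT is the last named argument */
{
  va_list ap;
  int i, total = 0;

  va_start (ap, count);		/* reads the save area set up above */
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}
#endif
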
/* Create the va_list data type.  */

static tree
rs6000_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
     every user file.  */
  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
		      short_unsigned_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_res) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_res;
  TREE_CHAIN (f_res) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
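
/* For reference, the record built above is the tree-level equivalent of
   this C declaration from the SVR4 PowerPC ABI (documentation only,
   kept out of the build with #if 0).  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;		/* GPR arguments consumed so far */
  unsigned char fpr;		/* FPR arguments consumed so far */
  unsigned short reserved;	/* named padding; keeps -Wpadded quiet */
  void *overflow_arg_area;	/* next argument in the overflow (stack) area */
  void *reg_save_area;		/* base of the register save area */
} va_list[1];			/* array of one element, as returned above */
#endif
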
/* Implement va_start.  */

void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
	       GP_ARG_NUM_REG);
  n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
	       FP_ARG_NUM_REG);

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  if (cfun->va_list_gpr_size)
    {
      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
		  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
		  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
		size_int (words * UNITS_PER_WORD));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If there were no va_arg invocations, don't set up the register
     save area.  */
  if (!cfun->va_list_gpr_size
      && !cfun->va_list_fpr_size
      && n_gpr < GP_ARG_NUM_REG
      && n_fpr < FP_ARG_V4_MAX_REG)
    return;

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  if (cfun->machine->varargs_save_offset)
    t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
		size_int (cfun->machine->varargs_save_offset));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
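
/* In C terms, the expansion above behaves roughly like this hedged
   sketch, using the field names of the va_list record built earlier
   (N_GPR, N_FPR and WORDS are the counts computed above):

     ap->gpr = n_gpr;
     ap->fpr = n_fpr;
     ap->overflow_arg_area = incoming_args + words * UNITS_PER_WORD;
     ap->reg_save_area = stack_vars + cfun->machine->varargs_save_offset;  */
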
/* Implement va_arg.  */

tree
rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int size, rsize, n_reg, sav_ofs, sav_scale;
  tree lab_false, lab_over, addr;
  int align;
  tree ptrtype = build_pointer_type (type);
  int regalign = 0;

  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
      return build_va_arg_indirect_ref (t);
    }

  if (DEFAULT_ABI != ABI_V4)
    {
      if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      tree real_part, imag_part;
	      tree post = NULL_TREE;

	      real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  &post);
	      /* Copy the value into a temporary, lest the formal temporary
		 be reused out from under us.  */
	      real_part = get_initialized_tmp_var (real_part, pre_p, &post);
	      append_to_statement_list (post, pre_p);

	      imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  post_p);

	      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
	    }
	}

      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
    }

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;
  align = 1;

  if (TARGET_HARD_FLOAT && TARGET_FPRS
      && (TYPE_MODE (type) == SFmode
	  || TYPE_MODE (type) == DFmode
	  || TYPE_MODE (type) == TFmode
	  || TYPE_MODE (type) == DDmode
	  || TYPE_MODE (type) == TDmode))
    {
      /* FP args go in FP registers, if present.  */
      reg = fpr;
      n_reg = (size + 7) / 8;
      sav_ofs = 8*4;
      sav_scale = 8;
      if (TYPE_MODE (type) != SFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL;
  addr = create_tmp_var (ptr_type_node, "addr");
  DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = create_artificial_label ();
      lab_over = create_artificial_label ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2 && reg == gpr)
	{
	  regalign = 1;
	  u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		      size_int (n_reg - 1));
	  u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	}
      /* _Decimal128 is passed in even/odd fpr pairs; the stored
	 reg number is 0 for f1, so we want to make it odd.  */
      else if (reg == fpr && TYPE_MODE (type) == TDmode)
	{
	  regalign = 1;
	  t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg, size_int (1));
	  u = build2 (MODIFY_EXPR, void_type_node, reg, t);
	}

      t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
      t = build2 (GE_EXPR, boolean_type_node, u, t);
      u = build1 (GOTO_EXPR, void_type_node, lab_false);
      t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = sav;
      if (sav_ofs)
	t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));

      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
      u = fold_convert (sizetype, u);
      u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      append_to_statement_list (t, pre_p);

      if ((n_reg == 2 && !regalign) || n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of for special cases.  */
	  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
	  gimplify_and_add (t, pre_p);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
      t = fold_convert (sizetype, t);
      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
		  size_int (-align));
      t = fold_convert (TREE_TYPE (ovf), t);
    }
  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
  gimplify_and_add (u, pre_p);

  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
  gimplify_and_add (t, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      append_to_statement_list (t, pre_p);
    }

  if (STRICT_ALIGNMENT
      && (TYPE_ALIGN (type)
	  > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
    {
      /* The value (of type complex double, for example) may not be
	 aligned in memory in the saved registers, so copy via a
	 temporary.  (This is the same code as used for SPARC.)  */
      tree tmp = create_tmp_var (type, "va_arg_tmp");
      tree dest_addr = build_fold_addr_expr (tmp);

      tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
				   3, dest_addr, addr, size_int (rsize * 4));

      gimplify_and_add (copy, pre_p);
      addr = dest_addr;
    }

  addr = fold_convert (ptrtype, addr);
  return build_va_arg_indirect_ref (addr);
}
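
/* Rough sketch of the GIMPLE built above, for a one-register GPR
   argument (n_reg == 1, sav_scale == 4; the 8 is the number of GPR
   argument registers).  Hedged pseudo-C, not compiled:

       if (ap->gpr >= 8)
	 goto lab_false;
       addr = ap->reg_save_area + sav_ofs + ap->gpr++ * sav_scale;
       goto lab_over;
     lab_false:
       ap->overflow_arg_area = align_up (ap->overflow_arg_area, align);
       addr = ap->overflow_arg_area;
       ap->overflow_arg_area += size;
     lab_over:
       result = *(TYPE *) addr;  */
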
static void
def_builtin (int mask, const char *name, tree type, int code)
{
  if (mask & target_flags)
    {
      if (rs6000_builtin_decls[code])
	abort ();

      rs6000_builtin_decls[code] =
	add_builtin_function (name, type, code, BUILT_IN_MD,
			      NULL, NULL_TREE);
    }
}
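
/* A typical use later in this file registers one AltiVec builtin, e.g.
   (the function-type node name follows the convention used by the
   AltiVec init code in this file):

     def_builtin (MASK_ALTIVEC, "__builtin_altivec_vmaddfp",
		  v4sf_ftype_v4sf_v4sf_v4sf, ALTIVEC_BUILTIN_VMADDFP);

   so the builtin only comes into existence when -maltivec has put
   MASK_ALTIVEC into target_flags.  */
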
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
};
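
/* Illustrative user-level view of the first table entry (requires
   -maltivec; kept out of the build with #if 0).  */
#if 0
__vector float
fused_madd (__vector float a, __vector float b, __vector float c)
{
  /* vmaddfp: per-element a*b + c, matching CODE_FOR_altivec_vmaddfp.  */
  return __builtin_altivec_vmaddfp (a, b, c);
}
#endif
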
/* DST operations: void foo (void *, const int, const char).  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
};
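
/* Illustrative only (kept out of the build with #if 0): a data-stream
   touch hint on channel 0.  The control-word encoding sketched here
   (block size, count, stride) is an assumption; see the AltiVec PEM
   for the exact layout.  */
#if 0
void
prefetch_rows (const void *src)
{
  const int ctl = (4 << 24) | (8 << 16) | 64;	/* assumed encoding */
  __builtin_altivec_dst (src, ctl, 0);		/* dst on stream channel 0 */
}
#endif
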
/* Simple binary operations: VECc = foo (VECa, VECb).  */

static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },

  /* Place-holder.  Leave as first SPE builtin.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },

  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
};
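
/* Illustrative pairing (kept out of the build with #if 0): overloaded
   entries such as __builtin_vec_add carry CODE_FOR_nothing and are
   resolved by the front end to a type-specific entry like vaddfp, which
   has a real insn code (CODE_FOR_addv4sf3).  Requires -maltivec.  */
#if 0
__vector float
vec_sum_example (__vector float a, __vector float b)
{
  return __builtin_altivec_vaddfp (a, b);	/* emits vaddfp */
}
#endif
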
6908 /* AltiVec predicates. */
6910 struct builtin_description_predicates
6912 const unsigned int mask
;
6913 const enum insn_code icode
;
6915 const char *const name
;
6916 const enum rs6000_builtins code
;
6919 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },

  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
};
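/* Illustrative note (a usage sketch, not part of the original table):
   each entry above ties a user-callable predicate builtin to a compare
   pattern.  With AltiVec enabled, a call such as

     int all_equal = __builtin_altivec_vcmpequw_p (2, a, b);

   is routed through CODE_FOR_altivec_predicate_v4si; the leading integer
   selects which CR6 bit to test (see altivec_expand_predicate_builtin
   below).  The concrete selector values users see are the __CR6_*
   macros from altivec.h.  */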
/* SPE predicates.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
/* SPE evsel predicates.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
/* ABS* operations.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW }
};
static rtx
rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vspltisb
      || icode == CODE_FOR_altivec_vspltish
      || icode == CODE_FOR_altivec_vspltisw
      || icode == CODE_FOR_spe_evsplatfi
      || icode == CODE_FOR_spe_evsplati)
    {
      /* Only allow 5-bit *signed* literals.  */
      if (GET_CODE (op0) != CONST_INT
          || INTVAL (op0) > 15
          || INTVAL (op0) < -16)
        {
          error ("argument 1 must be a 5-bit signed literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
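/* Illustrative note (a usage sketch, not part of the compiler itself):
   the splat-immediate builtins checked above accept only a literal in
   the range -16..15, so

     vector signed char ok  = __builtin_altivec_vspltisb (7);
     vector signed char bad = __builtin_altivec_vspltisb (99);

   the second call is rejected at expansion time with
   "argument 1 must be a 5-bit signed literal".  */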
static rtx
altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, scratch1, scratch2;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  scratch1 = gen_reg_rtx (mode0);
  scratch2 = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
static rtx
rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vcfux
      || icode == CODE_FOR_altivec_vcfsx
      || icode == CODE_FOR_altivec_vctsxs
      || icode == CODE_FOR_altivec_vctuxs
      || icode == CODE_FOR_altivec_vspltb
      || icode == CODE_FOR_altivec_vsplth
      || icode == CODE_FOR_altivec_vspltw
      || icode == CODE_FOR_spe_evaddiw
      || icode == CODE_FOR_spe_evldd
      || icode == CODE_FOR_spe_evldh
      || icode == CODE_FOR_spe_evldw
      || icode == CODE_FOR_spe_evlhhesplat
      || icode == CODE_FOR_spe_evlhhossplat
      || icode == CODE_FOR_spe_evlhhousplat
      || icode == CODE_FOR_spe_evlwhe
      || icode == CODE_FOR_spe_evlwhos
      || icode == CODE_FOR_spe_evlwhou
      || icode == CODE_FOR_spe_evlwhsplat
      || icode == CODE_FOR_spe_evlwwsplat
      || icode == CODE_FOR_spe_evrlwi
      || icode == CODE_FOR_spe_evslwi
      || icode == CODE_FOR_spe_evsrwis
      || icode == CODE_FOR_spe_evsubifw
      || icode == CODE_FOR_spe_evsrwiu)
    {
      /* Only allow 5-bit unsigned literals.  */
      STRIP_NOPS (arg1);
      if (TREE_CODE (arg1) != INTEGER_CST
          || TREE_INT_CST_LOW (arg1) & ~0x1f)
        {
          error ("argument 2 must be a 5-bit unsigned literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
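/* Illustrative note (a usage sketch, not part of the compiler itself):
   for the builtins listed in the icode check above, the second argument
   must be a compile-time literal that fits in 5 unsigned bits (0..31):

     vector int r = __builtin_altivec_vspltw (v, 2);   -- accepted
     vector int s = __builtin_altivec_vspltw (v, n);   -- rejected unless
                                                          n folds to a literal

   Anything else is diagnosed here rather than producing bad rtl.  */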
static rtx
altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
                                  tree exp, rtx target)
{
  rtx pat, scratch;
  tree cr6_form = CALL_EXPR_ARG (exp, 0);
  tree arg0 = CALL_EXPR_ARG (exp, 1);
  tree arg1 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  gcc_assert (mode0 == mode1);

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
                         gen_rtx_SYMBOL_REF (Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
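/* Illustrative note (an assumption about altivec.h, not stated in this
   file): the cr6_form values 0..3 handled above line up with the
   __CR6_EQ, __CR6_EQ_REV, __CR6_LT and __CR6_LT_REV selectors that the
   vec_all_* / vec_any_* macros pass in, roughly:

     vec_all_eq (a, b)  ->  __builtin_vec_vcmpeq_p (__CR6_LT, a, b)
     vec_any_eq (a, b)  ->  __builtin_vec_vcmpeq_p (__CR6_EQ_REV, a, b)

   i.e. "all true" tests the LT bit of CR6 and "any true" tests the
   complement of the EQ bit.  */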
static rtx
altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, addr;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = Pmode;
  enum machine_mode mode1 = Pmode;
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  op1 = copy_to_mode_reg (mode1, op1);

  if (op0 == const0_rtx)
    {
      addr = gen_rtx_MEM (tmode, op1);
    }
  else
    {
      op0 = copy_to_mode_reg (mode0, op0);
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
    }

  pat = GEN_FCN (icode) (target, addr);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree exp)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
static rtx
altivec_expand_stv_builtin (enum insn_code icode, tree exp)
{
  rtx pat, addr;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = Pmode;
  enum machine_mode mode2 = Pmode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
    op0 = copy_to_mode_reg (tmode, op0);

  op2 = copy_to_mode_reg (mode2, op2);

  if (op1 == const0_rtx)
    {
      addr = gen_rtx_MEM (tmode, op2);
    }
  else
    {
      op1 = copy_to_mode_reg (mode1, op1);
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
    }

  pat = GEN_FCN (icode) (addr, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
static rtx
rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vsldoi_v4sf
      || icode == CODE_FOR_altivec_vsldoi_v4si
      || icode == CODE_FOR_altivec_vsldoi_v8hi
      || icode == CODE_FOR_altivec_vsldoi_v16qi)
    {
      /* Only allow 4-bit unsigned literals.  */
      STRIP_NOPS (arg2);
      if (TREE_CODE (arg2) != INTEGER_CST
          || TREE_INT_CST_LOW (arg2) & ~0xf)
        {
          error ("argument 3 must be a 4-bit unsigned literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
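/* Illustrative note (a usage sketch, not part of the compiler itself):
   the vsldoi check above is what makes the shift count of vec_sld a
   literal in 0..15, e.g.

     vector signed int r = vec_sld (a, b, 4);

   A non-literal or out-of-range third argument is diagnosed here
   instead of reaching the insn pattern.  */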
/* Expand the lvx builtins.  */
static rtx
altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  enum insn_code icode;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
      icode = CODE_FOR_altivec_lvx_v16qi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
      icode = CODE_FOR_altivec_lvx_v8hi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
      icode = CODE_FOR_altivec_lvx_v4si;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
      icode = CODE_FOR_altivec_lvx_v4sf;
      break;
    default:
      *expandedp = false;
      return NULL_RTX;
    }

  *expandedp = true;

  arg0 = CALL_EXPR_ARG (exp, 0);
  op0 = expand_normal (arg0);
  tmode = insn_data[icode].operand[0].mode;
  mode0 = insn_data[icode].operand[1].mode;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
/* Expand the stvx builtins.  */
static rtx
altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                           bool *expandedp)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1;
  enum machine_mode mode0, mode1;
  rtx pat, op0, op1;
  enum insn_code icode;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
      icode = CODE_FOR_altivec_stvx_v16qi;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
      icode = CODE_FOR_altivec_stvx_v8hi;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
      icode = CODE_FOR_altivec_stvx_v4si;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
      icode = CODE_FOR_altivec_stvx_v4sf;
      break;
    default:
      *expandedp = false;
      return NULL_RTX;
    }

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);
  mode0 = insn_data[icode].operand[0].mode;
  mode1 = insn_data[icode].operand[1].mode;

  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (op0, op1);
  if (pat)
    emit_insn (pat);

  *expandedp = true;
  return NULL_RTX;
}
/* Expand the dst builtins.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                            bool *expandedp)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        arg1 = CALL_EXPR_ARG (exp, 1);
        arg2 = CALL_EXPR_ARG (exp, 2);
        op0 = expand_normal (arg0);
        op1 = expand_normal (arg1);
        op2 = expand_normal (arg2);
        mode0 = insn_data[d->icode].operand[0].mode;
        mode1 = insn_data[d->icode].operand[1].mode;
        mode2 = insn_data[d->icode].operand[2].mode;

        /* Invalid arguments, bail out before generating bad rtl.  */
        if (arg0 == error_mark_node
            || arg1 == error_mark_node
            || arg2 == error_mark_node)
          return const0_rtx;

        *expandedp = true;
        STRIP_NOPS (arg2);
        if (TREE_CODE (arg2) != INTEGER_CST
            || TREE_INT_CST_LOW (arg2) & ~0x3)
          {
            error ("argument to %qs must be a 2-bit unsigned literal", d->name);
            return const0_rtx;
          }

        if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
          op0 = copy_to_mode_reg (Pmode, op0);
        if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
          op1 = copy_to_mode_reg (mode1, op1);

        pat = GEN_FCN (d->icode) (op0, op1, op2);
        if (pat != 0)
          emit_insn (pat);

        return NULL_RTX;
      }

  return NULL_RTX;
}
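/* Illustrative note (a usage sketch, not part of the compiler itself):
   the DST variants take a 2-bit tag selecting one of four data streams,
   so the last argument of vec_dst and friends must be a literal 0..3:

     vec_dst (ptr, control, 0);   -- stream 0
     vec_dst (ptr, control, 5);   -- rejected by the check above

   The control word itself is an ordinary run-time value.  */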
/* Expand vec_init builtin.  */
static rtx
altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
{
  enum machine_mode tmode = TYPE_MODE (type);
  enum machine_mode inner_mode = GET_MODE_INNER (tmode);
  int i, n_elt = GET_MODE_NUNITS (tmode);
  rtvec v = rtvec_alloc (n_elt);

  gcc_assert (VECTOR_MODE_P (tmode));
  gcc_assert (n_elt == call_expr_nargs (exp));

  for (i = 0; i < n_elt; ++i)
    {
      rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
      RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
    }

  if (!target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
  return target;
}
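/* Illustrative note (a usage sketch; the builtin name is assumed from
   the VEC_INIT function codes handled in altivec_expand_builtin): a
   call such as

     vector int v = __builtin_vec_init_v4si (a, b, 2, 3);

   arrives here with one argument per element; each argument becomes
   one entry of the PARALLEL handed to rs6000_expand_vector_init.  */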
/* Return the integer constant in ARG.  Constrain it to be in the range
   of the subparts of VEC_TYPE; issue an error if not.  */

static int
get_element_number (tree vec_type, tree arg)
{
  unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;

  if (!host_integerp (arg, 1)
      || (elt = tree_low_cst (arg, 1), elt > max))
    {
      error ("selector must be an integer constant in the range 0..%wi", max);
      return 0;
    }

  return elt;
}
/* Expand vec_set builtin.  */
static rtx
altivec_expand_vec_set_builtin (tree exp)
{
  enum machine_mode tmode, mode1;
  tree arg0, arg1, arg2;
  int elt;
  rtx op0, op1;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  tmode = TYPE_MODE (TREE_TYPE (arg0));
  mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  gcc_assert (VECTOR_MODE_P (tmode));

  op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
  elt = get_element_number (TREE_TYPE (arg0), arg2);

  if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
    op1 = convert_modes (mode1, GET_MODE (op1), op1, true);

  op0 = force_reg (tmode, op0);
  op1 = force_reg (mode1, op1);

  rs6000_expand_vector_set (op0, op1, elt);

  return op0;
}
/* Expand vec_ext builtin.  */
static rtx
altivec_expand_vec_ext_builtin (tree exp, rtx target)
{
  enum machine_mode tmode, mode0;
  tree arg0, arg1;
  int elt;
  rtx op0;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  op0 = expand_normal (arg0);
  elt = get_element_number (TREE_TYPE (arg0), arg1);

  tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  mode0 = TYPE_MODE (TREE_TYPE (arg0));
  gcc_assert (VECTOR_MODE_P (mode0));

  op0 = force_reg (mode0, op0);

  if (optimize || !target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_extract (target, op0, elt);

  return target;
}
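/* Illustrative note (a usage sketch, not part of the compiler itself):

     int x = __builtin_vec_ext_v4si (v, 2);

   extracts element 2.  get_element_number has already verified that
   the selector is an integer constant within the vector's bounds
   (0..3 for a V4SI vector).  */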
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
      && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
    {
      *expandedp = true;
      error ("unresolved overload for Altivec builtin %qF", fndecl);
      return const0_rtx;
    }

  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);

    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
        return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
        emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = CALL_EXPR_ARG (exp, 0);
      STRIP_NOPS (arg0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
          || TREE_INT_CST_LOW (arg0) & ~0x3)
        {
          error ("argument to dss must be a 2-bit unsigned literal");
          return const0_rtx;
        }

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
    case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
    case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
    case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
      return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);

    case ALTIVEC_BUILTIN_VEC_SET_V4SI:
    case ALTIVEC_BUILTIN_VEC_SET_V8HI:
    case ALTIVEC_BUILTIN_VEC_SET_V16QI:
    case ALTIVEC_BUILTIN_VEC_SET_V4SF:
      return altivec_expand_vec_set_builtin (exp);

    case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
    case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
    case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
    case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
      return altivec_expand_vec_ext_builtin (exp, target);

    default:
      break;
      /* Fall through.  */
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, exp, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
                                               exp, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl, exp, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr, exp, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx, exp, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx, exp, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx, exp, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl, exp, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx, exp, target);
    default:
      break;
      /* Fall through.  */
    }

  *expandedp = false;
  return NULL_RTX;
}
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      arg1 = CALL_EXPR_ARG (exp, 2);
      if (TREE_CODE (arg1) != INTEGER_CST
          || TREE_INT_CST_LOW (arg1) & ~0x1f)
        {
          error ("argument 2 must be a 5-bit unsigned literal");
          return const0_rtx;
        }
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
                                         exp, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
                                         exp, target);
    default:
      break;
    }

  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, exp, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, exp, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, exp, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);

    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
        return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
        emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = CALL_EXPR_ARG (exp, 0);
  tree arg0 = CALL_EXPR_ARG (exp, 1);
  tree arg1 = CALL_EXPR_ARG (exp, 2);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
         could generate (ordered:SI (reg:CC xx) (const_int 0)), but
         that's ugly and will make validate_condition_mode die.
         So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
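/* Illustrative note (an assumption about the spe.h-style wrappers, not
   stated in this file): the form values decoded above give the four
   user-level flavors of each SPE predicate, roughly:

     __ev_all_gts (a, b)    ->  form 0  (OV bit: both elements true)
     __ev_any_gts (a, b)    ->  form 1  (EQ bit: at least one true)
     __ev_upper_gts (a, b)  ->  form 2  (LT bit: upper element true)
     __ev_lower_gts (a, b)  ->  form 3  (GT bit: lower element true)  */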
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat, scratch;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);
  tree arg3 = CALL_EXPR_ARG (exp, 3);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = expand_normal (arg3);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED,
                       int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
      || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
    {
      int icode = (int) CODE_FOR_altivec_lvsr;
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      enum machine_mode mode = insn_data[icode].operand[1].mode;
      tree arg;
      rtx op, addr, pat;

      gcc_assert (TARGET_ALTIVEC);

      arg = CALL_EXPR_ARG (exp, 0);
      gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
      op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr = memory_address (mode, op);
      if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
        op = addr;
      else
        {
          /* For the load case need to negate the address.  */
          op = gen_reg_rtx (GET_MODE (addr));
          emit_insn (gen_rtx_SET (VOIDmode, op,
                                  gen_rtx_NEG (GET_MODE (addr), addr)));
        }
      op = gen_rtx_MEM (mode, op);

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      /*pat = gen_altivec_lvsr (target, op);*/
      pat = GEN_FCN (icode) (target, op);
      if (!pat)
        return 0;
      emit_insn (pat);

      return target;
    }

  /* FIXME: There's got to be a nicer way to handle this case than
     constructing a new CALL_EXPR.  */
  if (fcode == ALTIVEC_BUILTIN_VCFUX
      || fcode == ALTIVEC_BUILTIN_VCFSX)
    {
      if (call_expr_nargs (exp) == 1)
        exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
                               2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
    }

  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
        return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
        return ret;
    }

  gcc_assert (TARGET_ALTIVEC || TARGET_SPE);

  /* Handle simple unary operations.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_unop_builtin (d->icode, exp, target);

  /* Handle simple binary operations.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, exp, target);

  /* Handle simple ternary operations.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_ternop_builtin (d->icode, exp, target);

  gcc_unreachable ();
}
static tree
build_opaque_vector_type (tree node, int nunits)
{
  node = copy_node (node);
  TYPE_MAIN_VARIANT (node) = node;
  return build_vector_type (node, nunits);
}
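/* Illustrative note (a usage sketch, not part of the compiler itself):
   an opaque vector type is its own main variant, so values of the SPE
   __ev64_opaque__ type built from it convert freely to and from both
   V2SI and V2SF vectors, e.g.

     __ev64_opaque__ t = __builtin_spe_evfsadd (u, v);

   where u and v may be either integer or float SPE vectors.  */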
static void
rs6000_init_builtins (void)
{
  V2SI_type_node = build_vector_type (intSI_type_node, 2);
  V2SF_type_node = build_vector_type (float_type_node, 2);
  V4HI_type_node = build_vector_type (intHI_type_node, 4);
  V4SI_type_node = build_vector_type (intSI_type_node, 4);
  V4SF_type_node = build_vector_type (float_type_node, 4);
  V8HI_type_node = build_vector_type (intHI_type_node, 8);
  V16QI_type_node = build_vector_type (intQI_type_node, 16);

  unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
  unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
  unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);

  opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
  opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
  opaque_V4SI_type_node = copy_node (V4SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
     types, especially in C++ land.  Similarly, 'vector pixel' is distinct from
     'vector unsigned short'.  */
  bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
  bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
  bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
  pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);

  long_integer_type_internal_node = long_integer_type_node;
  long_unsigned_type_internal_node = long_unsigned_type_node;
  intQI_type_internal_node = intQI_type_node;
  uintQI_type_internal_node = unsigned_intQI_type_node;
  intHI_type_internal_node = intHI_type_node;
  uintHI_type_internal_node = unsigned_intHI_type_node;
  intSI_type_internal_node = intSI_type_node;
  uintSI_type_internal_node = unsigned_intSI_type_node;
  float_type_internal_node = float_type_node;
  void_type_internal_node = void_type_node;

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__bool char"),
                                            bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__bool short"),
                                            bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__bool int"),
                                            bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__pixel"),
                                            pixel_type_node));

  bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
  bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
  bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
  pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector unsigned char"),
                                            unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector signed char"),
                                            V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector __bool char"),
                                            bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector unsigned short"),
                                            unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector signed short"),
                                            V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector __bool short"),
                                            bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector unsigned int"),
                                            unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector signed int"),
                                            V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector __bool int"),
                                            bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector float"),
                                            V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
                                            get_identifier ("__vector __pixel"),
                                            pixel_V8HI_type_node));

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();

#if TARGET_XCOFF
  /* AIX libm provides clog as __clog.  */
  if (built_in_decls [BUILT_IN_CLOG])
    set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
#endif
}
/* Search through a set of builtins and enable the mask bits.
   DESC is an array of builtins.
   SIZE is the total number of builtins.
   START is the builtin enum at which to start.
   END is the builtin enum at which to end.  */
static void
enable_mask_for_builtins (struct builtin_description *desc, int size,
                          enum rs6000_builtins start,
                          enum rs6000_builtins end)
{
  int i;

  for (i = 0; i < size; ++i)
    if (desc[i].code == start)
      break;

  if (i == size)
    return;

  for (; i < size; ++i)
    {
      /* Flip all the bits on.  */
      desc[i].mask = target_flags;
      if (desc[i].code == end)
        break;
    }
}
8500 spe_init_builtins (void)
8502 tree endlink
= void_list_node
;
8503 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
8504 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
8505 struct builtin_description
*d
;
8508 tree v2si_ftype_4_v2si
8509 = build_function_type
8510 (opaque_V2SI_type_node
,
8511 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8512 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8513 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8514 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8517 tree v2sf_ftype_4_v2sf
8518 = build_function_type
8519 (opaque_V2SF_type_node
,
8520 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8521 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8522 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8523 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8526 tree int_ftype_int_v2si_v2si
8527 = build_function_type
8529 tree_cons (NULL_TREE
, integer_type_node
,
8530 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8531 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8534 tree int_ftype_int_v2sf_v2sf
8535 = build_function_type
8537 tree_cons (NULL_TREE
, integer_type_node
,
8538 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8539 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
8542 tree void_ftype_v2si_puint_int
8543 = build_function_type (void_type_node
,
8544 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8545 tree_cons (NULL_TREE
, puint_type_node
,
8546 tree_cons (NULL_TREE
,
8550 tree void_ftype_v2si_puint_char
8551 = build_function_type (void_type_node
,
8552 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8553 tree_cons (NULL_TREE
, puint_type_node
,
8554 tree_cons (NULL_TREE
,
8558 tree void_ftype_v2si_pv2si_int
8559 = build_function_type (void_type_node
,
8560 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8561 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
8562 tree_cons (NULL_TREE
,
8566 tree void_ftype_v2si_pv2si_char
8567 = build_function_type (void_type_node
,
8568 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
8569 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
8570 tree_cons (NULL_TREE
,
8575 = build_function_type (void_type_node
,
8576 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
8579 = build_function_type (integer_type_node
, endlink
);
8581 tree v2si_ftype_pv2si_int
8582 = build_function_type (opaque_V2SI_type_node
,
8583 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
8584 tree_cons (NULL_TREE
, integer_type_node
,
8587 tree v2si_ftype_puint_int
8588 = build_function_type (opaque_V2SI_type_node
,
8589 tree_cons (NULL_TREE
, puint_type_node
,
8590 tree_cons (NULL_TREE
, integer_type_node
,
8593 tree v2si_ftype_pushort_int
8594 = build_function_type (opaque_V2SI_type_node
,
8595 tree_cons (NULL_TREE
, pushort_type_node
,
8596 tree_cons (NULL_TREE
, integer_type_node
,
8599 tree v2si_ftype_signed_char
8600 = build_function_type (opaque_V2SI_type_node
,
8601 tree_cons (NULL_TREE
, signed_char_type_node
,
8604 /* The initialization of the simple binary and unary builtins is
8605 done in rs6000_common_init_builtins, but we have to enable the
8606 mask bits here manually because we have run out of `target_flags'
8607 bits. We really need to redesign this mask business. */
8609 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
8610 ARRAY_SIZE (bdesc_2arg
),
8613 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
8614 ARRAY_SIZE (bdesc_1arg
),
8616 SPE_BUILTIN_EVSUBFUSIAAW
);
8617 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
8618 ARRAY_SIZE (bdesc_spe_predicates
),
8619 SPE_BUILTIN_EVCMPEQ
,
8620 SPE_BUILTIN_EVFSTSTLT
);
8621 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
8622 ARRAY_SIZE (bdesc_spe_evsel
),
8623 SPE_BUILTIN_EVSEL_CMPGTS
,
8624 SPE_BUILTIN_EVSEL_FSTSTEQ
);
8626 (*lang_hooks
.decls
.pushdecl
)
8627 (build_decl (TYPE_DECL
, get_identifier ("__ev64_opaque__"),
8628 opaque_V2SI_type_node
));
  /* Initialize irregular SPE builtins.  */
  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
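
  /* Usage sketch (hypothetical user code): with SPE enabled, the loads
     above are callable directly, e.g.

         __ev64_opaque__ v = __builtin_spe_evldd (p, 16);

     where p and 16 supply the pointer and integer offset required by
     the v2si_ftype_pv2si_int signature built earlier.  */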
  /* Predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = int_ftype_int_v2si_v2si;
          break;
        case V2SFmode:
          type = int_ftype_int_v2sf_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = v2si_ftype_4_v2si;
          break;
        case V2SFmode:
          type = v2sf_ftype_4_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }
}
static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree ftype;
  tree decl;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  tree pcfloat_type_node
    = build_pointer_type (build_qualified_type (float_type_node,
                                                TYPE_QUAL_CONST));
  tree pcint_type_node
    = build_pointer_type (build_qualified_type (integer_type_node,
                                                TYPE_QUAL_CONST));
  tree pcshort_type_node
    = build_pointer_type (build_qualified_type (short_integer_type_node,
                                                TYPE_QUAL_CONST));
  tree pcchar_type_node
    = build_pointer_type (build_qualified_type (char_type_node,
                                                TYPE_QUAL_CONST));

  tree pcvoid_type_node
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_CONST));
  tree int_ftype_opaque
    = build_function_type_list (integer_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree opaque_ftype_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, integer_type_node,
                                NULL_TREE);
  tree opaque_ftype_opaque_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node,
                                integer_type_node, NULL_TREE);
  tree int_ftype_int_opaque_opaque
    = build_function_type_list (integer_type_node,
                                integer_type_node, opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
                                integer_type_node, V4SI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
                                pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
                                pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
                                pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
                                pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree opaque_ftype_long_pcvoid
    = build_function_type_list (opaque_V4SI_type_node,
                                long_integer_type_node, pcvoid_type_node,
                                NULL_TREE);
  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
                                long_integer_type_node, pcvoid_type_node,
                                NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
                                long_integer_type_node, pcvoid_type_node,
                                NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
                                long_integer_type_node, pcvoid_type_node,
                                NULL_TREE);

  tree void_ftype_opaque_long_pvoid
    = build_function_type_list (void_type_node,
                                opaque_V4SI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
                                V4SI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
                                V16QI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
                                V8HI_type_node, long_integer_type_node,
                                pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
                                integer_type_node, V8HI_type_node,
                                V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
                                integer_type_node, V16QI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
                                integer_type_node, V4SF_type_node,
                                V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
                                pcvoid_type_node, integer_type_node,
                                integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
               ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
               ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
               ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
               ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
               ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
               ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
               ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
               ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;
      bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        mode1 = VOIDmode;
      else
        mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
        {
        case VOIDmode:
          type = int_ftype_int_opaque_opaque;
          break;
        case V4SImode:
          type = int_ftype_int_v4si_v4si;
          break;
        case V8HImode:
          type = int_ftype_int_v8hi_v8hi;
          break;
        case V16QImode:
          type = int_ftype_int_v16qi_v16qi;
          break;
        case V4SFmode:
          type = int_ftype_int_v4sf_v4sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (dp->mask, dp->name, type, dp->code);
    }
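
  /* Explanatory note: each AltiVec predicate builtin takes an integer
     compare-rule selector as its first argument (hence the
     int_ftype_int_* signatures above); the selector chooses which CR6
     condition of the underlying vcmp* instruction is tested.  */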
  /* Initialize the abs* operators.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
        {
        case V4SImode:
          type = v4si_ftype_v4si;
          break;
        case V8HImode:
          type = v8hi_ftype_v8hi;
          break;
        case V16QImode:
          type = v16qi_ftype_v16qi;
          break;
        case V4SFmode:
          type = v4sf_ftype_v4sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Initialize target builtin that implements
     targetm.vectorize.builtin_mask_for_load.  */
  decl = add_builtin_function ("__builtin_altivec_mask_for_load",
                               v16qi_ftype_long_pcvoid,
                               ALTIVEC_BUILTIN_MASK_FOR_LOAD,
                               BUILT_IN_MD, NULL, NULL_TREE);
  TREE_READONLY (decl) = 1;
  /* Record the decl.  Will be used by rs6000_builtin_mask_for_load.  */
  altivec_builtin_mask_for_load = decl;
  /* Access to the vec_init patterns.  */
  ftype = build_function_type_list (V4SI_type_node, integer_type_node,
                                    integer_type_node, integer_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V4SI);

  ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node,
                                    short_integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V8HI);

  ftype = build_function_type_list (V16QI_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, char_type_node,
                                    char_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V16QI);

  ftype = build_function_type_list (V4SF_type_node, float_type_node,
                                    float_type_node, float_type_node,
                                    float_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_INIT_V4SF);
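
  /* Usage sketch (hypothetical user code): the vec_init builtins build
     a vector from scalars, one argument per element, e.g.

         vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);  */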
  /* Access to the vec_set patterns.  */
  ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V4SI);

  ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V8HI);

  ftype = build_function_type_list (V8HI_type_node, V16QI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V16QI);

  ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_SET_V4SF);

  /* Access to the vec_extract patterns.  */
  ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V4SI);

  ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V8HI);

  ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V16QI);

  ftype = build_function_type_list (float_type_node, V4SF_type_node,
                                    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
               ALTIVEC_BUILTIN_VEC_EXT_V4SF);
}
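
/* Usage sketch (hypothetical user code): extraction by element index,
   e.g.

       int x = __builtin_vec_ext_v4si (v, 2);

   reads element 2 of a V4SI vector via the vec_extract pattern.  */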
static void
rs6000_common_init_builtins (void)
{
  struct builtin_description *d;
  size_t i;

  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                V16QI_type_node, NULL_TREE);
  tree v4si_ftype_int
    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_int
    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_int
    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SF_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (opaque_V2SI_type_node,
                                integer_type_node, integer_type_node,
                                NULL_TREE);

  tree opaque_ftype_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (opaque_V2SF_type_node,
                                opaque_V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (opaque_V2SI_type_node,
                                opaque_V2SI_type_node,
                                char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (opaque_V2SI_type_node,
                                integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (opaque_V2SI_type_node,
                                char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
                                integer_type_node, integer_type_node,
                                NULL_TREE);

  tree opaque_ftype_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node,
                                NULL_TREE);
  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_int
    = build_function_type_list (V4SF_type_node,
                                V4SI_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_int
    = build_function_type_list (V4SI_type_node,
                                V4SF_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_int
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_int
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_int
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_int
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_int
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_int
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_int
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
                                opaque_V4SI_type_node, opaque_V4SI_type_node,
                                opaque_V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
                                V4SF_type_node, V4SF_type_node,
                                V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
                                V4SI_type_node, V4SI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V8HI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V16QI_type_node,
                                V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
                                V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
                                V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
                                V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
                                V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
                                V8HI_type_node, V8HI_type_node, NULL_TREE);
  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          mode0 = VOIDmode;
          mode1 = VOIDmode;
          mode2 = VOIDmode;
          mode3 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
          mode2 = insn_data[d->icode].operand[2].mode;
          mode3 = insn_data[d->icode].operand[3].mode;
        }

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
        {
          switch (mode0)
            {
            case VOIDmode:
              type = opaque_ftype_opaque_opaque_opaque;
              break;
            case V4SImode:
              type = v4si_ftype_v4si_v4si_v4si;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf_v4sf;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi_v8hi;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi_v16qi;
              break;
            default:
              gcc_unreachable ();
            }
        }
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
        {
          switch (mode0)
            {
            case V4SImode:
              type = v4si_ftype_v4si_v4si_v16qi;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf_v16qi;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi_v16qi;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi_v16qi;
              break;
            default:
              gcc_unreachable ();
            }
        }
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
               && mode3 == V4SImode)
        type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
               && mode3 == V4SImode)
        type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
               && mode3 == V4SImode)
        type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4-bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v16qi_ftype_v16qi_v16qi_int;

      /* vshort, vshort, vshort, 4-bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v8hi_ftype_v8hi_v8hi_int;

      /* vint, vint, vint, 4-bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v4si_ftype_v4si_v4si_int;

      /* vfloat, vfloat, vfloat, 4-bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
               && mode3 == QImode)
        type = v4sf_ftype_v4sf_v4sf_int;

      else
        gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }
  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          mode0 = VOIDmode;
          mode1 = VOIDmode;
          mode2 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
          mode2 = insn_data[d->icode].operand[2].mode;
        }

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
        {
          switch (mode0)
            {
            case VOIDmode:
              type = opaque_ftype_opaque_opaque;
              break;
            case V4SFmode:
              type = v4sf_ftype_v4sf_v4sf;
              break;
            case V4SImode:
              type = v4si_ftype_v4si_v4si;
              break;
            case V16QImode:
              type = v16qi_ftype_v16qi_v16qi;
              break;
            case V8HImode:
              type = v8hi_ftype_v8hi_v8hi;
              break;
            case V2SImode:
              type = v2si_ftype_v2si_v2si;
              break;
            case V2SFmode:
              type = v2sf_ftype_v2sf_v2sf;
              break;
            case SImode:
              type = int_ftype_int_int;
              break;
            default:
              gcc_unreachable ();
            }
        }

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
        type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
        type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
        type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
        type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
        type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
        type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
        type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
        type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5-bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
        type = v4si_ftype_v4si_int;

      /* vshort, vshort, 5-bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
        type = v8hi_ftype_v8hi_int;

      /* vchar, vchar, 5-bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
        type = v16qi_ftype_v16qi_int;

      /* vfloat, vint, 5-bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
        type = v4sf_ftype_v4si_int;

      /* vint, vfloat, 5-bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
        type = v4si_ftype_v4sf_int;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
        type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
        type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
        type = v2si_ftype_int_char;

      else
        {
          /* These are the vector compares that produce an int result.  */
          gcc_assert (mode0 == SImode);
          switch (mode1)
            {
            case V4SImode:
              type = int_ftype_v4si_v4si;
              break;
            case V4SFmode:
              type = int_ftype_v4sf_v4sf;
              break;
            case V16QImode:
              type = int_ftype_v16qi_v16qi;
              break;
            case V8HImode:
              type = int_ftype_v8hi_v8hi;
              break;
            default:
              gcc_unreachable ();
            }
        }

      def_builtin (d->mask, d->name, type, d->code);
    }
  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
                           && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
        {
          mode0 = VOIDmode;
          mode1 = VOIDmode;
        }
      else
        {
          if (d->name == 0 || d->icode == CODE_FOR_nothing)
            continue;

          mode0 = insn_data[d->icode].operand[0].mode;
          mode1 = insn_data[d->icode].operand[1].mode;
        }

      if (mode0 == V4SImode && mode1 == QImode)
        type = v4si_ftype_int;
      else if (mode0 == V8HImode && mode1 == QImode)
        type = v8hi_ftype_int;
      else if (mode0 == V16QImode && mode1 == QImode)
        type = v16qi_ftype_int;
      else if (mode0 == VOIDmode && mode1 == VOIDmode)
        type = opaque_ftype_opaque;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
        type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
        type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
        type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
        type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
        type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
        type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
        type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
        type = v2si_ftype_char;
      else
        gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
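
/* Explanatory sketch: the mode tuples fully determine each builtin's C
   signature.  For instance, a bdesc_2arg entry whose insn operands are
   all V4SImode receives v4si_ftype_v4si_v4si and so is callable as

       vector signed int f (vector signed int, vector signed int);  */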
static void
rs6000_init_libfuncs (void)
{
  if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
      && !TARGET_POWER2 && !TARGET_POWERPC)
    {
      /* AIX library routines for float->int conversion.  */
      set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
      set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
    }

  if (!TARGET_IEEEQUAD)
    {
      /* AIX/Darwin/64-bit Linux quad floating point routines.  */
      if (!TARGET_XL_COMPAT)
        {
          set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
          set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
          set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
          set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");

          if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
            {
              set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
              set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
              set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
              set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
              set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
              set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
              set_optab_libfunc (le_optab, TFmode, "__gcc_qle");

              set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
              set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
              set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
              set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
              set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
              set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
              set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
              set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
            }

          if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
            set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
        }
      else
        {
          set_optab_libfunc (add_optab, TFmode, "_xlqadd");
          set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
          set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
          set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
        }
    }
  else
    {
      /* 32-bit SVR4 quad floating point routines.  */
      set_optab_libfunc (add_optab, TFmode, "_q_add");
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
        set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");

      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
      set_optab_libfunc (le_optab, TFmode, "_q_fle");

      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
      set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
    }
}
/* Expand a block clear operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the length
   operands[3] is the alignment */

int
expand_block_clear (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx bytes_rtx = operands[1];
  rtx align_rtx = operands[3];
  bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
  HOST_WIDE_INT align;
  HOST_WIDE_INT bytes;
  int offset;
  int clear_bytes;
  int clear_step;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment  */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to clear? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* Use the builtin memset after a point, to avoid huge code bloat.
     When optimize_size, avoid any significant code bloat; calling
     memset is about 4 instructions, so allow for one instruction to
     load zero and three to do clearing.  */
  if (TARGET_ALTIVEC && align >= 128)
    clear_step = 16;
  else if (TARGET_POWERPC64 && align >= 32)
    clear_step = 8;
  else
    clear_step = 4;

  if (optimize_size && bytes > 3 * clear_step)
    return 0;
  if (! optimize_size && bytes > 8 * clear_step)
    return 0;

  for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
    {
      enum machine_mode mode = BLKmode;
      rtx dest;

      if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
        {
          clear_bytes = 16;
          mode = V4SImode;
        }
      else if (bytes >= 8 && TARGET_POWERPC64
               /* 64-bit loads and stores require word-aligned
                  displacements.  */
               && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
        {
          clear_bytes = 8;
          mode = DImode;
        }
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
        {                       /* move 4 bytes */
          clear_bytes = 4;
          mode = SImode;
        }
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
        {                       /* move 2 bytes */
          clear_bytes = 2;
          mode = HImode;
        }
      else /* move 1 byte at a time */
        {
          clear_bytes = 1;
          mode = QImode;
        }

      dest = adjust_address (orig_dest, mode, offset);
      emit_move_insn (dest, CONST0_RTX (mode));
    }

  return 1;
}
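
/* Worked example: clearing 16 bytes at 32-bit alignment, with neither
   AltiVec nor 64-bit support and not optimizing for size, picks
   clear_step = 4 and emits four SImode stores of zero at offsets 0, 4,
   8 and 12.  */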
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

#define MAX_MOVE_REG 4

int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      union {
        rtx (*movmemsi) (rtx, rtx, rtx, rtx);
        rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* Altivec first, since it will be faster than a string move
         when it applies, and usually not significantly larger.  */
      if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
        {
          move_bytes = 16;
          mode = V4SImode;
          gen_func.mov = gen_movv4si;
        }
      else if (TARGET_STRING
               && bytes > 24    /* move up to 32 bytes at a time */
               && ! fixed_regs[5]
               && ! fixed_regs[6]
               && ! fixed_regs[7]
               && ! fixed_regs[8]
               && ! fixed_regs[9]
               && ! fixed_regs[10]
               && ! fixed_regs[11]
               && ! fixed_regs[12])
        {
          move_bytes = (bytes > 32) ? 32 : bytes;
          gen_func.movmemsi = gen_movmemsi_8reg;
        }
      else if (TARGET_STRING
               && bytes > 16    /* move up to 24 bytes at a time */
               && ! fixed_regs[5]
               && ! fixed_regs[6]
               && ! fixed_regs[7]
               && ! fixed_regs[8]
               && ! fixed_regs[9]
               && ! fixed_regs[10])
        {
          move_bytes = (bytes > 24) ? 24 : bytes;
          gen_func.movmemsi = gen_movmemsi_6reg;
        }
      else if (TARGET_STRING
               && bytes > 8     /* move up to 16 bytes at a time */
               && ! fixed_regs[5]
               && ! fixed_regs[6]
               && ! fixed_regs[7]
               && ! fixed_regs[8])
        {
          move_bytes = (bytes > 16) ? 16 : bytes;
          gen_func.movmemsi = gen_movmemsi_4reg;
        }
      else if (bytes >= 8 && TARGET_POWERPC64
               /* 64-bit loads and stores require word-aligned
                  displacements.  */
               && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
        {
          move_bytes = 8;
          mode = DImode;
          gen_func.mov = gen_movdi;
        }
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
        {                       /* move up to 8 bytes at a time */
          move_bytes = (bytes > 8) ? 8 : bytes;
          gen_func.movmemsi = gen_movmemsi_2reg;
        }
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
        {                       /* move 4 bytes */
          move_bytes = 4;
          mode = SImode;
          gen_func.mov = gen_movsi;
        }
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
        {                       /* move 2 bytes */
          move_bytes = 2;
          mode = HImode;
          gen_func.mov = gen_movhi;
        }
      else if (TARGET_STRING && bytes > 1)
        {                       /* move up to 4 bytes at a time */
          move_bytes = (bytes > 4) ? 4 : bytes;
          gen_func.movmemsi = gen_movmemsi_1reg;
        }
      else /* move 1 byte at a time */
        {
          move_bytes = 1;
          mode = QImode;
          gen_func.mov = gen_movqi;
        }

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
        {
          rtx tmp_reg = gen_reg_rtx (mode);

          emit_insn ((*gen_func.mov) (tmp_reg, src));
          stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
        }

      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
        {
          int i;

          for (i = 0; i < num_reg; i++)
            emit_insn (stores[i]);
          num_reg = 0;
        }

      if (mode == BLKmode)
        {
          /* Move the address into scratch registers.  The movmemsi
             patterns require zero offset.  */
          if (!REG_P (XEXP (src, 0)))
            {
              rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
              src = replace_equiv_address (src, src_reg);
            }
          set_mem_size (src, GEN_INT (move_bytes));

          if (!REG_P (XEXP (dest, 0)))
            {
              rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
              dest = replace_equiv_address (dest, dest_reg);
            }
          set_mem_size (dest, GEN_INT (move_bytes));

          emit_insn ((*gen_func.movmemsi) (dest, src,
                                           GEN_INT (move_bytes & 31),
                                           align_rtx));
        }
    }

  return 1;
}
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
                           REGNO (operands[2]) + i + 1, operands[1], 0))
      {
        if (i == words-1)
          {
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = operands[2];
            output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
            return "";
          }
        else if (i == 0)
          {
            xop[0] = GEN_INT (4 * (words-1));
            xop[1] = operands[1];
            xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
            output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
            return "";
          }
        else
          {
            for (j = 0; j < words; j++)
              if (j != i)
                {
                  xop[0] = GEN_INT (j * 4);
                  xop[1] = operands[1];
                  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
                  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
                }
            xop[0] = GEN_INT (i * 4);
            xop[1] = operands[1];
            output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
            return "";
          }
      }

  return "{lsi|lswi} %2,%1,%N0";
}
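
/* Note: when the address register is not among the destinations, the
   final return emits a single {lsi|lswi} string load covering all
   WORDS registers; the special cases above only exist to keep the
   address value live when it would otherwise be clobbered
   mid-sequence.  */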
/* A validation routine: say whether CODE, a condition code, and MODE
   match.  The other alternatives either don't make sense or should
   never be generated.  */

void
validate_condition_mode (enum rtx_code code, enum machine_mode mode)
{
  gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
               || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
              && GET_MODE_CLASS (mode) == MODE_CC);

  /* These don't make sense.  */
  gcc_assert ((code != GT && code != LT && code != GE && code != LE)
              || mode != CCUNSmode);

  gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
              || mode == CCUNSmode);

  gcc_assert (mode == CCFPmode
              || (code != ORDERED && code != UNORDERED
                  && code != UNEQ && code != LTGT
                  && code != UNGT && code != UNLT
                  && code != UNGE && code != UNLE));

  /* These should never be generated except for
     flag_finite_math_only.  */
  gcc_assert (mode != CCFPmode
              || flag_finite_math_only
              || (code != LE && code != GE
                  && code != UNEQ && code != LTGT
                  && code != UNGT && code != UNLT));

  /* These are invalid; the information is not there.  */
  gcc_assert (mode != CCEQmode || code == EQ || code == NE);
}
/* Return 1 if ANDOP is a mask that has no bits on that are not in the
   mask required to convert the result of a rotate insn into a shift
   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */

int
includes_lshift_p (rtx shiftop, rtx andop)
{
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;

  shift_mask <<= INTVAL (shiftop);

  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
}

/* Similar, but for right shift.  */

int
includes_rshift_p (rtx shiftop, rtx andop)
{
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;

  shift_mask >>= INTVAL (shiftop);

  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
}
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      if (c == 0 || c == ~0)
        return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
        return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
        high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
          || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
        return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          shift_mask_high = ~0;
          if (INTVAL (shiftop) > 32)
            shift_mask_high <<= INTVAL (shiftop) - 32;

          lsb = high & -high;

          if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
            return 0;

          high = ~high;
          high &= -lsb;

          lsb = high & -high;
          return high == -lsb;
        }

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
        return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
        high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
        {
          lsb = high & -high;
          return high == -lsb;
        }

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
         This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
        return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
           && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
        {
          HOST_WIDE_INT high, shift_mask_high;

          high = CONST_DOUBLE_HIGH (andop);

          if (low == 0)
            {
              shift_mask_high = ~0;
              if (INTVAL (shiftop) > 32)
                shift_mask_high <<= INTVAL (shiftop) - 32;

              lsb = high & -high;

              if ((lsb & shift_mask_high) == 0)
                return 0;

              return high == -lsb;
            }
          if (high != ~0)
            return 0;
        }

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
        return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
/* Return 1 if operands will generate a valid arguments to rlwimi
   instruction for insert with right shift in 64-bit mode.  The mask may
   not start on the first bit or stop on the last bit because wrap-around
   effects of instruction do not correspond to semantics of RTL insn.  */

int
insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
{
  if (INTVAL (startop) > 32
      && INTVAL (startop) < 64
      && INTVAL (sizeop) > 1
      && INTVAL (sizeop) + INTVAL (startop) < 64
      && INTVAL (shiftop) > 0
      && INTVAL (sizeop) + INTVAL (shiftop) < 32
      && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
    return 1;

  return 0;
}
/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
   for lfq and stfq insns iff the registers are hard registers.  */

int
registers_ok_for_quad_peep (rtx reg1, rtx reg2)
{
  /* We might have been passed a SUBREG.  */
  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
    return 0;

  /* We might have been passed non floating point registers.  */
  if (!FP_REGNO_P (REGNO (reg1))
      || !FP_REGNO_P (REGNO (reg2)))
    return 0;

  return (REGNO (reg1) == REGNO (reg2) - 1);
}
/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
   addr1 and addr2 must be in consecutive memory locations
   (addr2 == addr1 + 8).  */

int
mems_ok_for_quad_peep (rtx mem1, rtx mem2)
{
  rtx addr1, addr2;
  unsigned int reg1, reg2;
  int offset1, offset2;

  /* The mems cannot be volatile.  */
  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
    return 0;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  /* Extract an offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)
        return 0;
      else
        {
          reg1 = REGNO (XEXP (addr1, 0));
          /* The offset must be constant!  */
          if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
            return 0;
          offset1 = INTVAL (XEXP (addr1, 1));
        }
    }
  else if (GET_CODE (addr1) != REG)
    return 0;
  else
    {
      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset1 = 0;
    }

  /* And now for the second addr.  */
  if (GET_CODE (addr2) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr2, 0)) != REG)
        return 0;
      else
        {
          reg2 = REGNO (XEXP (addr2, 0));
          /* The offset must be constant.  */
          if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
            return 0;
          offset2 = INTVAL (XEXP (addr2, 1));
        }
    }
  else if (GET_CODE (addr2) != REG)
    return 0;
  else
    {
      reg2 = REGNO (addr2);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset2 = 0;
    }

  /* Both of these must have the same base register.  */
  if (reg1 != reg2)
    return 0;

  /* The offset for the second addr must be 8 more than the first addr.  */
  if (offset2 != offset1 + 8)
    return 0;

  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
     insns.  */
  return 1;
}
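
/* Example of the accepted shape: mem1 == (mem:DF (reg 9)) and
   mem2 == (mem:DF (plus (reg 9) (const_int 8))) qualify; both use base
   register 9 and the offsets are 0 and 8 respectively.  */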
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
rs6000_secondary_reload_class (enum reg_class class,
                               enum machine_mode mode ATTRIBUTE_UNUSED,
                               rtx in)
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
                     && MACHOPIC_INDIRECT
#endif
                     ))
    {
      /* We cannot copy a symbolic operand directly into anything
         other than BASE_REGS for TARGET_ELF.  So indicate that a
         register from BASE_REGS is needed as an intermediate
         register.

         On Darwin, pic addresses require a load from memory, which
         needs a base register.  */
      if (class != BASE_REGS
          && (GET_CODE (in) == SYMBOL_REF
              || GET_CODE (in) == HIGH
              || GET_CODE (in) == LABEL_REF
              || GET_CODE (in) == CONST))
        return BASE_REGS;
    }

  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          regno = true_regnum (in);
          if (regno >= FIRST_PSEUDO_REGISTER)
            regno = -1;
        }
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
        regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (rtx op, int scc_p)
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (!COMPARISON_P (op))
    return -1;

  reg = XEXP (op, 0);

  gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  gcc_assert (!scc_p
              || code == EQ || code == GT || code == LT || code == UNORDERED
              || code == GTU || code == LTU);

  switch (code)
    {
    case NE:
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
         unordered position.  So test that bit.  For integer, this is ! LT
         unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      gcc_unreachable ();
    }
}
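
/* For reference: each CR field holds four bits in the order LT, GT,
   EQ, SO/UN, so e.g. a GT test in CR field N lands on bit 4*N + 1,
   which is exactly what the base_bit arithmetic above computes.  */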
/* Return the GOT register.  */

rtx
rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
    df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);

  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
rs6000_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (machine_function));
}
/* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)							\
  ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)		\
   && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
int
extract_MB (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      gcc_assert (val & 0xffffffff);

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
int
extract_ME (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      gcc_assert (val & 0xffffffff);

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in some tls_ld pattern.  */

static const char *
rs6000_get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn),
			 rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  gcc_unreachable ();
}
/* Helper function for rs6000_get_some_local_dynamic_name.  */

static int
rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF)
    {
      const char *str = XSTR (x, 0);
      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  cfun->machine->some_ld_name = str;
	  return 1;
	}
    }

  return 0;
}
/* Write out a function code label.  */

void
rs6000_output_function_entry (FILE *file, const char *fname)
{
  if (fname[0] != '.')
    {
      switch (DEFAULT_ABI)
	{
	default:
	  gcc_unreachable ();

	case ABI_AIX:
	  if (DOT_SYMBOLS)
	    putc ('.', file);
	  else
	    ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
	  break;

	case ABI_V4:
	case ABI_DARWIN:
	  break;
	}
    }
  if (TARGET_AIX)
    RS6000_OUTPUT_BASENAME (file, fname);
  else
    assemble_name (file, fname);
}
/* Print an operand.  Recognize special options, documented below.  */

#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif

void
print_operand (FILE *file, rtx x, int code)
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'c':
      /* X is a CR register.  Print the number of the GT bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%c value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
      return;

    case 'D':
      /* Like 'J' but get to the GT bit only.  */
      gcc_assert (GET_CODE (x) == REG);

      /* Bit 1 is GT bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 1;

      /* Add one for shift count in rlinm for scc.  */
      fprintf (file, "%d", i + 1);
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x) + 1], file);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register name.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fputs (reg_names[REGNO (XEXP (x, 0))], file);
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      while (uval != 0)
	--i, uval >>= 1;
      gcc_assert (i >= 0);
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC
	      || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  gcc_assert (val);
	  if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      if (GET_CODE (x) == MEM
	  && (legitimate_indexed_address_p (XEXP (x, 0), 0)
	      || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
		  && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x) + 2], file);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      gcc_assert (GET_CODE (x) == SYMBOL_REF);

      /* Mark the decl as referenced so that cgraph will output the
	 function.  */
      if (SYMBOL_REF_DECL (x))
	mark_decl_referenced (SYMBOL_REF_DECL (x));

      /* For macho, check to see if we need a stub.  */
      if (TARGET_MACHO)
	{
	  const char *name = XSTR (x, 0);
#if TARGET_MACHO
	  if (MACHOPIC_INDIRECT
	      && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
	    name = machopic_indirection_name (x, /*stub_p=*/true);
#endif
	  assemble_name (file, name);
	}
      else if (!DOT_SYMBOLS)
	assemble_name (file, XSTR (x, 0));
      else
	rs6000_output_function_entry (file, XSTR (x, 0));
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x) + 3], file);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	gcc_assert (GET_CODE (x) == MEM);

	tmp = XEXP (x, 0);

	/* Ugly hack because %y is overloaded.  */
	if ((TARGET_SPE || TARGET_E500_DOUBLE)
	    && (GET_MODE_SIZE (GET_MODE (x)) == 8
		|| GET_MODE (x) == TFmode
		|| GET_MODE (x) == TImode))
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		int x;

		gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (TARGET_ALTIVEC
	    && GET_CODE (tmp) == AND
	    && GET_CODE (XEXP (tmp, 1)) == CONST_INT
	    && INTVAL (XEXP (tmp, 1)) == -16)
	  tmp = XEXP (tmp, 0);
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else
	  {
	    gcc_assert (GET_CODE (tmp) == PLUS
			&& REG_P (XEXP (tmp, 0))
			&& REG_P (XEXP (tmp, 1)));

	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	break;
      }

    case 0:
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
	    output_address (XEXP (XEXP (x, 0), 1));
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    case '&':
      assemble_name (file, rs6000_get_some_local_dynamic_name ());
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
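/* Illustrative sketch (ours, not part of the compiler): the branch-free
   sign-extension trick used by the 'w' case above.  ((v & 0xffff) ^
   0x8000) - 0x8000 maps the low 16 bits of v onto [-32768, 32767].  */
#if 0
#include <stdio.h>

static long
low16_signed (long v)
{
  return ((v & 0xffff) ^ 0x8000) - 0x8000;
}

int
main (void)
{
  printf ("%ld\n", low16_signed (0x12348000L));	/* prints -32768 */
  printf ("%ld\n", low16_signed (0x00007fffL));	/* prints 32767 */
  return 0;
}
#endif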
/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else
	gcc_assert (!TARGET_TOC);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      gcc_assert (REG_P (XEXP (x, 0)));
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    gcc_unreachable ();
}
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && in_section != toc_section
	  && in_section != text_section
	  && !unlikely_text_section_p (in_section)
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
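/* Illustrative note (ours): the shape of a -mrelocatable fixup entry as
   emitted by the fprintf sequence above.  For an address constant X and
   an internal label .LCP0 the assembler output is, schematically:

	.LCP0:
		.long	(X)@fixup
		.section ".fixup","aw"
		.align	2
		.long	.LCP0
		.previous

   That is, the word itself is tagged @fixup and its own address is
   recorded in the .fixup section so the loader can relocate it.  */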
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX
      && DOT_SYMBOLS
      && TREE_CODE (decl) == FUNCTION_DECL)
    {
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
enum rtx_code
rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
{
  /* Reversal of FP compares needs care -- an ordered compare
     becomes an unordered compare and vice versa.  */
  if (mode == CCFPmode
      && (!flag_finite_math_only
	  || code == UNLT || code == UNLE || code == UNGT || code == UNGE
	  || code == UNEQ || code == LTGT))
    return reverse_condition_maybe_unordered (code);
  else
    return reverse_condition (code);
}
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else if ((code == EQ || code == NE)
	   && GET_CODE (rs6000_compare_op0) == SUBREG
	   && GET_CODE (rs6000_compare_op1) == SUBREG
	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
	   && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
    /* These are unsigned values, perhaps there will be a later
       ordering compare that can be shared with this one.
       Unfortunately we cannot detect the signedness of the operands
       for non-subregs.  */
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* E500 FP compare instructions on the GPRs.  Yuck!  */
  if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or_result, compare_result2;
      enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);

      if (op_mode == VOIDmode)
	op_mode = GET_MODE (rs6000_compare_op1);

      /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
	 This explains the following mess.  */

      switch (code)
	{
	case EQ: case UNEQ: case NE: case LTGT:
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case TFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case TFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case TFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: gcc_unreachable ();
	    }

	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  switch (op_mode)
	    {
	    case SFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case DFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    case TFmode:
	      cmp = flag_unsafe_math_optimizations
		? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1)
		: gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
				   rs6000_compare_op1);
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  emit_insn (cmp);

	  /* OR them together.  */
	  or_result = gen_reg_rtx (CCFPmode);
	  cmp = gen_e500_cr_ior_compare (or_result, compare_result,
					 compare_result2);
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  if (code == NE || code == LTGT)
	    code = NE;
	  else
	    code = EQ;
	}

      emit_insn (cmp);
    }
  else
    {
      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
	 CLOBBERs to match cmptf_internal2 pattern.  */
      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
	  && GET_MODE (rs6000_compare_op0) == TFmode
	  && !TARGET_IEEEQUAD
	  && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
	emit_insn (gen_rtx_PARALLEL (VOIDmode,
	  gen_rtvec (9,
		     gen_rtx_SET (VOIDmode,
				  compare_result,
				  gen_rtx_COMPARE (comp_mode,
						   rs6000_compare_op0,
						   rs6000_compare_op1)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
      else if (GET_CODE (rs6000_compare_op1) == UNSPEC
	       && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
	{
	  rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
	  comp_mode = CCEQmode;
	  compare_result = gen_reg_rtx (CCEQmode);
	  if (TARGET_64BIT)
	    emit_insn (gen_stack_protect_testdi (compare_result,
						 rs6000_compare_op0, op1));
	  else
	    emit_insn (gen_stack_protect_testsi (compare_result,
						 rs6000_compare_op0, op1));
	}
      else
	emit_insn (gen_rtx_SET (VOIDmode, compare_result,
				gen_rtx_COMPARE (comp_mode,
						 rs6000_compare_op0,
						 rs6000_compare_op1)));
    }

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && !flag_finite_math_only
      && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: gcc_unreachable ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
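/* Illustrative sketch (ours, not part of the compiler): the identities
   behind the CCEQ "OR" synthesis above, checked on ordinary scalars.
   LE is LT || EQ, UNGT is UNORDERED || GT, and so on; C99's
   isunordered() stands in for the UNORDERED CR bit.  */
#if 0
#include <math.h>
#include <stdio.h>

static int
le_via_or (double a, double b)
{
  return (a < b) || (a == b);		/* LE = LT || EQ */
}

static int
ungt_via_or (double a, double b)
{
  return isunordered (a, b) || (a > b);	/* UNGT = UNORDERED || GT */
}

int
main (void)
{
  printf ("%d %d\n", le_via_or (1.0, 2.0), ungt_via_or (NAN, 1.0));
  return 0;
}
#endif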
/* Emit the RTL for an sCOND pattern.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      gcc_assert (cond_code == NE || cond_code == EQ);

      if (cond_code == NE)
	emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
				     SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
/* Emit a branch of kind CODE to location LOC.  */

void
rs6000_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx condition_rtx, loc_ref;

  condition_rtx = rs6000_generate_compare (code);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
						     loc_ref, pc_rtx)));
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares needs care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      switch (code)
	{
	case EQ:
	  /* Opposite of GT.  */
	  code = GT;
	  break;

	case NE:
	  code = UNLE;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      gcc_unreachable ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (rs6000_always_hint
	  || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
	      && br_prob_note_reliable_p (note)))
	{
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
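/* Illustrative sketch (ours, not part of the compiler): the hint decision
   above.  REG_BR_PROB values run from 0 to REG_BR_PROB_BASE (10000); PROB
   below is the distance from 50%.  The br_prob_note_reliable_p check is
   omitted here for brevity.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

#define BASE 10000	/* stand-in for REG_BR_PROB_BASE */

static const char *
hint (int reg_br_prob, int always_hint, int need_longbranch)
{
  int prob = reg_br_prob - BASE / 2;

  if (always_hint || abs (prob) > BASE / 100 * 48)
    {
      if (abs (prob) > BASE / 20 && ((prob > 0) ^ need_longbranch))
	return "+";
      return "-";
    }
  return "";
}

int
main (void)
{
  printf ("\"%s\"\n", hint (9900, 0, 0));	/* 99% taken -> "+" */
  return 0;
}
#endif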
/* Return the string to flip the GT bit on a CR.  */
char *
output_e500_flip_gt_bit (rtx dst, rtx src)
{
  static char string[64];
  int a, b;

  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
	      && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));

  /* GT bit.  */
  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
  b = 4 * (REGNO (src) - CR0_REGNO) + 1;

  sprintf (string, "crnot %d,%d", a, b);
  return string;
}
/* Return insn index for the vector compare instruction for given CODE,
   and DEST_MODE, OP_MODE.  Return INSN_NOT_AVAILABLE if valid insn is
   not available.  */

static int
get_vec_cmp_insn (enum rtx_code code,
		  enum machine_mode dest_mode,
		  enum machine_mode op_mode)
{
  if (!TARGET_ALTIVEC)
    return INSN_NOT_AVAILABLE;

  switch (code)
    {
    case EQ:
      if (dest_mode == V16QImode && op_mode == V16QImode)
	return UNSPEC_VCMPEQUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
	return UNSPEC_VCMPEQUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
	return UNSPEC_VCMPEQUW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
	return UNSPEC_VCMPEQFP;
      break;
    case GE:
      if (dest_mode == V4SImode && op_mode == V4SFmode)
	return UNSPEC_VCMPGEFP;
      break;
    case GT:
      if (dest_mode == V16QImode && op_mode == V16QImode)
	return UNSPEC_VCMPGTSB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
	return UNSPEC_VCMPGTSH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
	return UNSPEC_VCMPGTSW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
	return UNSPEC_VCMPGTFP;
      break;
    case GTU:
      if (dest_mode == V16QImode && op_mode == V16QImode)
	return UNSPEC_VCMPGTUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
	return UNSPEC_VCMPGTUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
	return UNSPEC_VCMPGTUW;
      break;
    default:
      break;
    }
  return INSN_NOT_AVAILABLE;
}
/* Emit vector compare for operands OP0 and OP1 using code RCODE.
   DMODE is expected destination mode.  This is a recursive function.  */

static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
			    rtx op0, rtx op1,
			    enum machine_mode dmode)
{
  int vec_cmp_insn;
  rtx mask;
  enum machine_mode dest_mode;
  enum machine_mode op_mode = GET_MODE (op1);

  gcc_assert (TARGET_ALTIVEC);
  gcc_assert (GET_MODE (op0) == GET_MODE (op1));

  /* Floating point vector compare instructions use V4SImode as the
     destination.  Move the destination to the appropriate mode later.  */
  if (dmode == V4SFmode)
    dest_mode = V4SImode;
  else
    dest_mode = dmode;

  mask = gen_reg_rtx (dest_mode);
  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);

  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
    {
      bool swap_operands = false;
      bool try_again = false;
      switch (rcode)
	{
	case LT:
	  rcode = GT;
	  swap_operands = true;
	  try_again = true;
	  break;
	case LTU:
	  rcode = GTU;
	  swap_operands = true;
	  try_again = true;
	  break;
	case NE:
	case UNLE:
	case UNLT:
	case UNGE:
	case UNGT:
	  /* Invert condition and try again.
	     e.g., A != B becomes ~(A==B).  */
	  {
	    enum rtx_code rev_code;
	    enum insn_code nor_code;
	    rtx eq_rtx;

	    rev_code = reverse_condition_maybe_unordered (rcode);
	    eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
						 dest_mode);

	    nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
	    gcc_assert (nor_code != CODE_FOR_nothing);
	    emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));

	    if (dmode != dest_mode)
	      {
		rtx temp = gen_reg_rtx (dest_mode);
		convert_move (temp, mask, 0);
		return temp;
	      }
	    return mask;
	  }
	  break;
	case GE:
	case GEU:
	case LE:
	case LEU:
	  /* Try GT/GTU/LT/LTU OR EQ.  */
	  {
	    rtx c_rtx, eq_rtx;
	    enum insn_code ior_code;
	    enum rtx_code new_code;

	    switch (rcode)
	      {
	      case GE:
		new_code = GT;
		break;
	      case GEU:
		new_code = GTU;
		break;
	      case LE:
		new_code = LT;
		break;
	      case LEU:
		new_code = LTU;
		break;
	      default:
		gcc_unreachable ();
	      }

	    c_rtx = rs6000_emit_vector_compare (new_code,
						op0, op1, dest_mode);
	    eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
						 dest_mode);

	    ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
	    gcc_assert (ior_code != CODE_FOR_nothing);
	    emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
	    if (dmode != dest_mode)
	      {
		rtx temp = gen_reg_rtx (dest_mode);
		convert_move (temp, mask, 0);
		return temp;
	      }
	    return mask;
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      if (try_again)
	{
	  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
	  /* You only get two chances.  */
	  gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
	}

      if (swap_operands)
	{
	  rtx tmp;
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}
    }

  emit_insn (gen_rtx_SET (VOIDmode, mask,
			  gen_rtx_UNSPEC (dest_mode,
					  gen_rtvec (2, op0, op1),
					  vec_cmp_insn)));
  if (dmode != dest_mode)
    {
      rtx temp = gen_reg_rtx (dest_mode);
      convert_move (temp, mask, 0);
      return temp;
    }
  return mask;
}
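/* Illustrative sketch (ours, not part of the compiler): the recursions
   above, restated per element.  With only EQ/GT/GTU compares available,
   NE is the complement of EQ, GE is GT-or-EQ, and LT is GT with the
   operands swapped.  All-ones and zero stand in for the AltiVec element
   masks.  */
#if 0
#include <stdio.h>

static unsigned eqm (int a, int b) { return a == b ? ~0u : 0u; }
static unsigned gtm (int a, int b) { return a > b ? ~0u : 0u; }

int
main (void)
{
  int a = 3, b = 3;
  unsigned ne = ~eqm (a, b);		 /* NE = ~(A == B) */
  unsigned ge = gtm (a, b) | eqm (a, b); /* GE = GT || EQ */
  unsigned lt = gtm (b, a);		 /* LT = GT, operands swapped */
  printf ("%x %x %x\n", ne, ge, lt);
  return 0;
}
#endif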
/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE if a
   valid insn doesn't exist for given mode.  */

static int
get_vsel_insn (enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      return UNSPEC_VSEL4SI;
    case V4SFmode:
      return UNSPEC_VSEL4SF;
    case V8HImode:
      return UNSPEC_VSEL8HI;
    case V16QImode:
      return UNSPEC_VSEL16QI;
    default:
      return INSN_NOT_AVAILABLE;
    }
}
/* Emit vector select insn where DEST is destination using
   operands OP1, OP2 and MASK.  */

static void
rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
{
  rtx t, temp;
  enum machine_mode dest_mode = GET_MODE (dest);
  int vsel_insn_index = get_vsel_insn (GET_MODE (dest));

  temp = gen_reg_rtx (dest_mode);

  /* For each vector element, select op1 when mask is 1 otherwise
     select op2.  */
  t = gen_rtx_SET (VOIDmode, temp,
		   gen_rtx_UNSPEC (dest_mode,
				   gen_rtvec (3, op2, op1, mask),
				   vsel_insn_index));
  emit_insn (t);
  emit_move_insn (dest, temp);
}
/* Emit vector conditional expression.
   DEST is destination.  OP1 and OP2 are two VEC_COND_EXPR operands.
   CC_OP0 and CC_OP1 are the two operands for the relation operation COND.  */

int
rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
			      rtx cond, rtx cc_op0, rtx cc_op1)
{
  enum machine_mode dest_mode = GET_MODE (dest);
  enum rtx_code rcode = GET_CODE (cond);
  rtx mask;

  if (!TARGET_ALTIVEC)
    return 0;

  /* Get the vector mask for the given relational operations.  */
  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);

  rs6000_emit_vector_select (dest, op1, op2, mask);

  return 1;
}
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.  */

int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;
  bool is_against_zero;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_HARD_FLOAT && !TARGET_FPRS
	   && SCALAR_FLOAT_MODE_P (compare_mode))
    return 0;

  is_against_zero = op1 == CONST0_RTX (compare_mode);

  /* A floating-point subtract might overflow, underflow, or produce
     an inexact result, thus changing the floating-point flags, so it
     can't be generated if we care about that.  It's safe if one side
     of the construct is zero, since then no subtract will be
     generated.  */
  if (SCALAR_FLOAT_MODE_P (compare_mode)
      && flag_trapping_math && ! is_against_zero)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;

  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  if (! is_against_zero)
    {
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_MINUS (compare_mode, op0, op1)));
      op0 = temp;
      op1 = CONST0_RTX (compare_mode);
    }

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNLT:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      case UNGE:
	code = GE;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      gcc_unreachable ();
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
/* Same as above, but for ints (isel).  */

static int
rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  rtx condition_rtx, cr;

  /* All isel implementations thus far are 32-bits.  */
  if (GET_MODE (rs6000_compare_op0) != SImode)
    return 0;

  /* We still have to do the compare, because isel doesn't do a
     compare, it just looks at the CRx bits set by a previous compare
     instruction.  */
  condition_rtx = rs6000_generate_compare (GET_CODE (op));
  cr = XEXP (condition_rtx, 0);

  if (GET_MODE (cr) == CCmode)
    emit_insn (gen_isel_signed (dest, condition_rtx,
				true_cond, false_cond, cr));
  else
    emit_insn (gen_isel_unsigned (dest, condition_rtx,
				  true_cond, false_cond, cr));

  return 1;
}
const char *
output_isel (rtx *operands)
{
  enum rtx_code code;

  code = GET_CODE (operands[1]);
  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
    {
      PUT_CODE (operands[1], reverse_condition (code));
      return "isel %0,%3,%2,%j1";
    }
  else
    return "isel %0,%2,%3,%j1";
}
void
rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = GET_MODE (op0);
  enum rtx_code c;
  rtx target;

  if (code == SMAX || code == SMIN)
    c = GE;
  else
    c = GEU;

  if (code == SMAX || code == UMAX)
    target = emit_conditional_move (dest, c, op0, op1, mode,
				    op0, op1, mode, 0);
  else
    target = emit_conditional_move (dest, c, op0, op1, mode,
				    op1, op0, mode, 0);
  gcc_assert (target);
  if (target != dest)
    emit_move_insn (dest, target);
}
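/* Illustrative sketch (ours, not part of the compiler): the reduction
   above.  SMAX picks op0 on a GE condition and SMIN picks op1 on the
   same GE condition, so one conditional-move shape covers both.  */
#if 0
static int
smax_via_cmove (int a, int b)
{
  return a >= b ? a : b;	/* conditional move on GE, values (a, b) */
}

static int
smin_via_cmove (int a, int b)
{
  return a >= b ? b : a;	/* same condition, value operands swapped */
}
#endif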
/* Emit instructions to perform a load-reserved/store-conditional operation.
   The operation performed is an atomic
   (set M (CODE:MODE M OP))
   If not NULL, BEFORE is atomically set to M before the operation, and
   AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
   If SYNC_P then a memory barrier is emitted before the operation.
   Either OP or M may be wrapped in a NOT operation.  */

void
rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
		  rtx m, rtx op, rtx before_param, rtx after_param,
		  bool sync_p)
{
  enum machine_mode used_mode;
  rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
  rtx used_m;
  rtvec vec;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
  rtx shift = NULL_RTX;

  if (sync_p)
    emit_insn (gen_memory_barrier ());

  if (GET_CODE (m) == NOT)
    used_m = XEXP (m, 0);
  else
    used_m = m;

  /* If this is smaller than SImode, we'll have to use SImode with
     adjustments.  */
  if (mode == QImode || mode == HImode)
    {
      rtx newop, oldop;

      if (MEM_ALIGN (used_m) >= 32)
	{
	  int ishift = 0;
	  if (BYTES_BIG_ENDIAN)
	    ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);

	  shift = GEN_INT (ishift);
	  used_m = change_address (used_m, SImode, 0);
	}
      else
	{
	  rtx addrSI, aligned_addr;
	  int shift_mask = mode == QImode ? 0x18 : 0x10;

	  addrSI = force_reg (SImode, gen_lowpart_common (SImode,
							  XEXP (used_m, 0)));
	  shift = gen_reg_rtx (SImode);

	  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
				 GEN_INT (shift_mask)));
	  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

	  aligned_addr = expand_binop (Pmode, and_optab,
				       XEXP (used_m, 0),
				       GEN_INT (-4), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	  used_m = change_address (used_m, SImode, aligned_addr);
	  set_mem_align (used_m, 32);
	}
      /* It's safe to keep the old alias set of USED_M, because
	 the operation is atomic and only affects the original
	 USED_M.  */
      if (GET_CODE (m) == NOT)
	m = gen_rtx_NOT (SImode, used_m);
      else
	m = used_m;

      if (GET_CODE (op) == NOT)
	{
	  oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
	  oldop = gen_rtx_NOT (SImode, oldop);
	}
      else
	oldop = lowpart_subreg (SImode, op, mode);

      switch (code)
	{
	case IOR:
	case XOR:
	  newop = expand_binop (SImode, and_optab,
				oldop, GEN_INT (imask), NULL_RTX,
				1, OPTAB_LIB_WIDEN);
	  emit_insn (gen_ashlsi3 (newop, newop, shift));
	  break;

	case AND:
	  newop = expand_binop (SImode, ior_optab,
				oldop, GEN_INT (~imask), NULL_RTX,
				1, OPTAB_LIB_WIDEN);
	  emit_insn (gen_rotlsi3 (newop, newop, shift));
	  break;

	case PLUS:
	case MINUS:
	  {
	    rtx mask;

	    newop = expand_binop (SImode, and_optab,
				  oldop, GEN_INT (imask), NULL_RTX,
				  1, OPTAB_LIB_WIDEN);
	    emit_insn (gen_ashlsi3 (newop, newop, shift));

	    mask = gen_reg_rtx (SImode);
	    emit_move_insn (mask, GEN_INT (imask));
	    emit_insn (gen_ashlsi3 (mask, mask, shift));

	    if (code == PLUS)
	      newop = gen_rtx_PLUS (SImode, m, newop);
	    else
	      newop = gen_rtx_MINUS (SImode, m, newop);
	    newop = gen_rtx_AND (SImode, newop, mask);
	    newop = gen_rtx_IOR (SImode, newop,
				 gen_rtx_AND (SImode,
					      gen_rtx_NOT (SImode, mask),
					      m));
	    break;
	  }

	default:
	  gcc_unreachable ();
	}

      if (GET_CODE (m) == NOT)
	{
	  rtx mask, xorm;

	  mask = gen_reg_rtx (SImode);
	  emit_move_insn (mask, GEN_INT (imask));
	  emit_insn (gen_ashlsi3 (mask, mask, shift));

	  xorm = gen_rtx_XOR (SImode, used_m, mask);
	  /* Depending on the value of 'op', the XOR or the operation might
	     be able to be simplified away.  */
	  newop = simplify_gen_binary (code, SImode, xorm, newop);
	}
      op = newop;
      used_mode = SImode;
      before = gen_reg_rtx (used_mode);
      after = gen_reg_rtx (used_mode);
    }
  else
    {
      used_mode = mode;
      before = before_param;
      after = after_param;

      if (before == NULL_RTX)
	before = gen_reg_rtx (used_mode);
      if (after == NULL_RTX)
	after = gen_reg_rtx (used_mode);
    }

  if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
      && used_mode != mode)
    the_op = op;  /* Computed above.  */
  else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
    the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
  else
    the_op = gen_rtx_fmt_ee (code, used_mode, m, op);

  set_after = gen_rtx_SET (VOIDmode, after, the_op);
  set_before = gen_rtx_SET (VOIDmode, before, used_m);
  set_atomic = gen_rtx_SET (VOIDmode, used_m,
			    gen_rtx_UNSPEC (used_mode,
					    gen_rtvec (1, the_op),
					    UNSPEC_SYNC_OP));
  cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));

  if ((code == PLUS || code == MINUS) && used_mode != mode)
    vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
		     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
  else
    vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
  emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));

  /* Shift and mask the return values properly.  */
  if (used_mode != mode && before_param)
    {
      emit_insn (gen_lshrsi3 (before, before, shift));
      convert_move (before_param, before, 1);
    }

  if (used_mode != mode && after_param)
    {
      emit_insn (gen_lshrsi3 (after, after, shift));
      convert_move (after_param, after, 1);
    }

  /* The previous sequence will end with a branch that's dependent on
     the conditional store, so placing an isync will ensure that no
     other instructions (especially, no load or store instructions)
     can start before the atomic operation completes.  */
  if (sync_p)
    emit_insn (gen_isync ());
}
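/* Illustrative sketch (ours, not part of the compiler): the lane
   computation above for a QImode operand inside an aligned SImode word.
   The rlwinm produces (addr << 3) & 0x18 and the xor flips it for
   big-endian byte order, so byte 0 (the most significant byte) gets
   shift 24 and byte 3 gets shift 0.  */
#if 0
#include <stdio.h>

static int
qi_lane_shift_be (unsigned long addr)
{
  return (int) (((addr << 3) & 0x18) ^ 0x18);
}

int
main (void)
{
  printf ("%d %d\n", qi_lane_shift_be (0x1000), qi_lane_shift_be (0x1003));
  /* prints "24 0" */
  return 0;
}
#endif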
/* A subroutine of the atomic operation splitters.  Jump to LABEL if
   COND is true.  Mark the jump as unlikely to be taken.  */

static void
emit_unlikely_jump (rtx cond, rtx label)
{
  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
  rtx x;

  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
}
/* A subroutine of the atomic operation splitters.  Emit a load-locked
   instruction in MODE.  */

static void
emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
{
  rtx (*fn) (rtx, rtx) = NULL;
  if (mode == SImode)
    fn = gen_load_locked_si;
  else if (mode == DImode)
    fn = gen_load_locked_di;
  emit_insn (fn (reg, mem));
}
/* A subroutine of the atomic operation splitters.  Emit a store-conditional
   instruction in MODE.  */

static void
emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
{
  rtx (*fn) (rtx, rtx, rtx) = NULL;

  if (mode == SImode)
    fn = gen_store_conditional_si;
  else if (mode == DImode)
    fn = gen_store_conditional_di;

  /* Emit sync before stwcx. to address PPC405 Erratum.  */
  if (PPC405_ERRATUM77)
    emit_insn (gen_memory_barrier ());

  emit_insn (fn (res, mem, val));
}
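
/* Taken together, the two helpers above bracket a reservation retry loop.
   On a 32-bit target the generated machine code is roughly (illustrative
   only; exact registers and scheduling vary):

	1:	lwarx  rT,0,rMEM	# load word and set reservation
		...			# compute the new value in rS
		stwcx. rS,0,rMEM	# store iff reservation still held
		bne-   1b		# reservation lost: retry  */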
/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
   to perform.  MEM is the memory on which to operate.  VAL is the second
   operand of the binary operator.  BEFORE and AFTER are optional locations to
   return the value of MEM either before or after the operation.  SCRATCH is
   a scratch register.  */

void
rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
			rtx before, rtx after, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label = gen_label_rtx ();
  emit_label (label);
  label = gen_rtx_LABEL_REF (VOIDmode, label);

  if (before == NULL_RTX)
    before = scratch;
  emit_load_locked (mode, before, mem);

  if (code == NOT)
    x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
  else if (code == AND)
    x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
  else
    x = gen_rtx_fmt_ee (code, mode, before, val);

  if (after != NULL_RTX)
    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));

  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
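
/* As a concrete illustration (not emitted verbatim by the splitter), an
   SImode fetch-and-add ends up as approximately:

	sync
   1:	lwarx   rS,0,rMEM
	add     rS,rS,rVAL
	stwcx.  rS,0,rMEM
	bne-    1b
	isync

   with the pre-/post-operation value copied out when BEFORE or AFTER
   was requested.  */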
/* Expand an atomic compare and swap operation.  MEM is the memory on which
   to operate.  OLDVAL is the old value to be compared.  NEWVAL is the new
   value to be stored.  SCRATCH is a scratch GPR.  */

void
rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
			       rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (mode, retval, mem);

  x = gen_rtx_COMPARE (CCmode, retval, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  emit_move_insn (scratch, newval);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
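
/* The resulting compare-and-swap loop looks roughly like this
   (illustrative only):

	sync
   1:	lwarx   rRET,0,rMEM
	cmpw    rRET,rOLD
	bne-    2f			# current value differs: fail
	stwcx.  rNEW,0,rMEM
	bne-    1b			# reservation lost: retry
	isync
   2:  */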
/* Expand an atomic test and set operation.  MEM is the memory on which
   to operate.  VAL is the value set.  SCRATCH is a scratch GPR.  */

void
rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label, 0));

  emit_load_locked (mode, retval, mem);
  emit_move_insn (scratch, val);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
/* Expand an atomic compare and swap of a subword (QImode or HImode)
   value, built on top of the word-sized primitive.  */

void
rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx addrSI, align, wdst, shift, mask;
  HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);

  /* Shift amount for subword relative to aligned word.  */
  addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
  shift = gen_reg_rtx (SImode);
  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
			 GEN_INT (shift_mask)));
  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

  /* Shift and mask old value into position within word.  */
  oldval = convert_modes (SImode, mode, oldval, 1);
  oldval = expand_binop (SImode, and_optab,
			 oldval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (oldval, oldval, shift));

  /* Shift and mask new value into position within word.  */
  newval = convert_modes (SImode, mode, newval, 1);
  newval = expand_binop (SImode, and_optab,
			 newval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (newval, newval, shift));

  /* Mask for insertion.  */
  mask = gen_reg_rtx (SImode);
  emit_move_insn (mask, GEN_INT (imask));
  emit_insn (gen_ashlsi3 (mask, mask, shift));

  /* Address of aligned word containing subword.  */
  align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
			NULL_RTX, 1, OPTAB_LIB_WIDEN);
  mem = change_address (mem, SImode, align);
  set_mem_align (mem, 32);
  MEM_VOLATILE_P (mem) = 1;

  wdst = gen_reg_rtx (SImode);
  emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
						    oldval, newval, mem));

  emit_move_insn (dst, gen_lowpart (mode, wdst));
}
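
/* Worked example of the shift computation (illustrative): for a QImode
   operand at byte offset 1 within its aligned word, the rlwinm produces
   (1 << 3) & 0x18 = 8, and the xor with 0x18 gives shift = 16, so on a
   big-endian target the byte is positioned in bits 16..23 of the SImode
   word that lwarx/stwcx. operate on.  */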
void
rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
				  rtx oldval, rtx newval, rtx mem,
				  rtx scratch)
{
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (SImode, scratch, mem);

  /* Mask subword within loaded value for comparison with oldval.
     Use UNSPEC_AND to avoid clobber.  */
  emit_insn (gen_rtx_SET (SImode, dest,
			  gen_rtx_UNSPEC (SImode,
					  gen_rtvec (2, scratch, mask),
					  UNSPEC_AND)));

  x = gen_rtx_COMPARE (CCmode, dest, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  /* Clear subword within loaded value for insertion of new value.  */
  emit_insn (gen_rtx_SET (SImode, scratch,
			  gen_rtx_AND (SImode,
				       gen_rtx_NOT (SImode, mask), scratch)));
  emit_insn (gen_iorsi3 (scratch, scratch, newval));
  emit_store_conditional (SImode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
/* Emit instructions to move SRC to DST.  Called by splitters for
   multi-register moves.  It will emit at most one instruction for
   each register that is accessed; that is, it won't emit li/lis pairs
   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
   register.  */

void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = hard_regno_nregs[reg][mode];
  if (FP_REGNO_P (reg))
    reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else if (TARGET_E500_DOUBLE && mode == TFmode)
    reg_mode = DFmode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
	 overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
	emit_insn (gen_rtx_SET (VOIDmode,
				simplify_gen_subreg (reg_mode, dst, mode,
						     i * reg_mode_size),
				simplify_gen_subreg (reg_mode, src, mode,
						     i * reg_mode_size)));
    }
  else
    {
      int i;
      int j = -1;
      bool used_update = false;

      if (MEM_P (src) && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (src, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
	      emit_insn (TARGET_32BIT
			 ? gen_addsi3 (breg, breg, delta_rtx)
			 : gen_adddi3 (breg, breg, delta_rtx));
	      src = replace_equiv_address (src, breg);
	    }
	  else if (! rs6000_offsettable_memref_p (src))
	    {
	      rtx basereg;
	      basereg = gen_rtx_REG (Pmode, reg);
	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
	      src = replace_equiv_address (src, basereg);
	    }

	  breg = XEXP (src, 0);
	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
	    breg = XEXP (breg, 0);

	  /* If the base register we are using to address memory is
	     also a destination reg, then change that register last.  */
	  if (REG_P (breg)
	      && REGNO (breg) >= REGNO (dst)
	      && REGNO (breg) < REGNO (dst) + nregs)
	    j = REGNO (breg) - REGNO (dst);
	}

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (dst, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));

	      /* We have to update the breg before doing the store.
		 Use store with update, if available.  */

	      if (TARGET_UPDATE)
		{
		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
		  emit_insn (TARGET_32BIT
			     ? (TARGET_POWERPC64
				? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
				: gen_movsi_update (breg, breg, delta_rtx, nsrc))
			     : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
		  used_update = true;
		}
	      else
		emit_insn (TARGET_32BIT
			   ? gen_addsi3 (breg, breg, delta_rtx)
			   : gen_adddi3 (breg, breg, delta_rtx));
	      dst = replace_equiv_address (dst, breg);
	    }
	  else
	    gcc_assert (rs6000_offsettable_memref_p (dst));
	}

      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  ++j;
	  if (j == nregs)
	    j = 0;

	  /* If compiler already emitted move of first word by
	     store with update, no need to do anything.  */
	  if (j == 0 && used_update)
	    continue;

	  emit_insn (gen_rtx_SET (VOIDmode,
				  simplify_gen_subreg (reg_mode, dst, mode,
						       j * reg_mode_size),
				  simplify_gen_subreg (reg_mode, src, mode,
						       j * reg_mode_size)));
	}
    }
}
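
/* Example of the overlap handling (illustrative): moving a DImode value
   from r3:r4 into r4:r5 on a 32-bit target copies r4 into r5 first and
   only then r3 into r4, so the overlapping word is read before it is
   overwritten.  */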
/* This page contains routines that are used to determine what the
   function prologue and epilogue code will do and write them out.  */

/* Return the first fixed-point register that is required to be
   saved.  32 if none.  */

int
first_reg_to_save (void)
{
  int first_reg;

  /* Find lowest numbered live register.  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (df_regs_ever_live_p (first_reg)
	&& (! call_used_regs[first_reg]
	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
      break;

#if TARGET_MACHO
  if (flag_pic
      && current_function_uses_pic_offset_table
      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
    return RS6000_PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
/* Similar, for FP regs.  */

int
first_fp_reg_to_save (void)
{
  int first_reg;

  /* Find lowest numbered live register.  */
  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
    if (df_regs_ever_live_p (first_reg))
      break;

  return first_reg;
}
/* Similar, for AltiVec regs.  */

static int
first_altivec_reg_to_save (void)
{
  int i;

  /* Stack frame remains as is unless we are in AltiVec ABI.  */
  if (! TARGET_ALTIVEC_ABI)
    return LAST_ALTIVEC_REGNO + 1;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    return FIRST_ALTIVEC_REGNO + 20;

  /* Find lowest numbered live register.  */
  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
    if (df_regs_ever_live_p (i))
      break;

  return i;
}
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     call-saved altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    mask |= 0xFFF;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (df_regs_ever_live_p (i))
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
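
/* For instance (illustrative): a function whose only live AltiVec
   register is V20 ends up with mask == ALTIVEC_REG_BIT (V20 regno)
   == 0x00000800, i.e. bit 20 counting from the most significant bit,
   matching the "MSB is V0" numbering described above.  */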
/* For a very restricted set of circumstances, we can cut down the
   size of prologues/epilogues by calling our own save/restore-the-world
   routines.  */

static void
compute_save_world_info (rs6000_stack_t *info_ptr)
{
  info_ptr->world_save_p = 1;
  info_ptr->world_save_p
    = (WORLD_SAVE_P (info_ptr)
       && DEFAULT_ABI == ABI_DARWIN
       && ! (current_function_calls_setjmp && flag_exceptions)
       && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
       && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
       && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
       && info_ptr->cr_save_p);

  /* This will not work in conjunction with sibcalls.  Make sure there
     are none.  (This check is expensive, but seldom executed.)  */
  if (WORLD_SAVE_P (info_ptr))
    {
      rtx insn;
      for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN
	    && SIBLING_CALL_P (insn))
	  {
	    info_ptr->world_save_p = 0;
	    break;
	  }
    }

  if (WORLD_SAVE_P (info_ptr))
    {
      /* Even if we're not touching VRsave, make sure there's room on the
	 stack for it, if it looks like we're calling SAVE_WORLD, which
	 will attempt to save it.  */
      info_ptr->vrsave_size = 4;

      /* "Save" the VRsave register too if we're saving the world.  */
      if (info_ptr->vrsave_mask == 0)
	info_ptr->vrsave_mask = compute_vrsave_mask ();

      /* Because the Darwin register save/restore routines only handle
	 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
	 check.  */
      gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
		  && (info_ptr->first_altivec_reg_save
		      >= FIRST_SAVED_ALTIVEC_REGNO));
    }
  return;
}
static void
is_altivec_return_reg (rtx reg, void *xyes)
{
  bool *yes = (bool *) xyes;
  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
    *yes = true;
}
/* Calculate the stack information for the current function.  This is
   complicated by having two separate calling sequences, the AIX calling
   sequence and the V.4 calling sequence.

   AIX (and Darwin/Mac OS X) stack frames look like:
							  32-bit  64-bit
	SP---->	+---------------------------------------+
		| back chain to caller			| 0	  0
		+---------------------------------------+
		| saved CR				| 4       8 (8-11)
		+---------------------------------------+
		| saved LR				| 8       16
		+---------------------------------------+
		| reserved for compilers		| 12      24
		+---------------------------------------+
		| reserved for binders			| 16      32
		+---------------------------------------+
		| saved TOC pointer			| 20      40
		+---------------------------------------+
		| Parameter save area (P)		| 24      48
		+---------------------------------------+
		| Alloca space (A)			| 24+P    etc.
		+---------------------------------------+
		| Local variable space (L)		| 24+P+A
		+---------------------------------------+
		| Float/int conversion temporary (X)	| 24+P+A+L
		+---------------------------------------+
		| Save area for AltiVec registers (W)	| 24+P+A+L+X
		+---------------------------------------+
		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
		+---------------------------------------+
		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
		+---------------------------------------+
	old SP->| back chain to caller's caller		|
		+---------------------------------------+

   The required alignment for AIX configurations is two words (i.e., 8
   or 16 bytes).

   V.4 stack frames look like:

	SP---->	+---------------------------------------+
		| back chain to caller			| 0
		+---------------------------------------+
		| caller's saved LR			| 4
		+---------------------------------------+
		| Parameter save area (P)		| 8
		+---------------------------------------+
		| Alloca space (A)			| 8+P
		+---------------------------------------+
		| Varargs save area (V)			| 8+P+A
		+---------------------------------------+
		| Local variable space (L)		| 8+P+A+V
		+---------------------------------------+
		| Float/int conversion temporary (X)	| 8+P+A+V+L
		+---------------------------------------+
		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
		+---------------------------------------+
		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
		+---------------------------------------+
		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
		+---------------------------------------+
		| SPE: area for 64-bit GP registers	|
		+---------------------------------------+
		| SPE alignment padding			|
		+---------------------------------------+
		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
		+---------------------------------------+
		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
		+---------------------------------------+
	old SP->| back chain to caller's caller		|
		+---------------------------------------+

   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
   given.  (But note below and in sysv4.h that we require only 8 and
   may round up the size of our stack frame anyways.  The historical
   reason is early versions of powerpc-linux which didn't properly
   align the stack at program startup.  A happy side-effect is that
   -mno-eabi libraries can be used with -meabi programs.)

   The EABI configuration defaults to the V.4 layout.  However,
   the stack alignment requirements may differ.  If -mno-eabi is not
   given, the required stack alignment is 8 bytes; if -mno-eabi is
   given, the required alignment is 16 bytes.  (But see V.4 comment
   above.)  */
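
/* Worked example (illustrative, not from the diagrams above): a 32-bit
   AIX function with P = 16 bytes of outgoing parameters, L = 32 bytes of
   locals, no alloca or vector state, and two saved GPRs (G = 8) needs
   24 + 16 + 32 + 8 = 80 bytes, which is already a multiple of the 16-byte
   stack boundary, so the frame is 80 bytes.  */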
#ifndef ABI_STACK_BOUNDARY
#define ABI_STACK_BOUNDARY STACK_BOUNDARY
#endif

static rs6000_stack_t *
rs6000_stack_info (void)
{
  static rs6000_stack_t info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int ehrd_size;
  int save_align;
  HOST_WIDE_INT non_fixed_size;

  memset (&info, 0, sizeof (info));

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      if (cfun->machine->insn_chain_scanned_p == 0)
	cfun->machine->insn_chain_scanned_p
	  = spe_func_has_64bit_regs_p () + 1;
      info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
    }

  /* Select which calling sequence.  */
  info_ptr->abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64 bits always if even one register
     gets used in 64 bits.  Otherwise, all the registers in the frame
     get saved in 32 bits.

     So... since when we save all GPRs (except the SP) in 64 bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if ((DEFAULT_ABI == ABI_AIX
       && current_function_profile
       && !TARGET_PROFILE_KERNEL)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
      || info_ptr->calls_p
      || rs6000_ra_ever_killed ())
    {
      info_ptr->lr_save_p = 1;
      df_set_regs_ever_live (LINK_REGISTER_REGNUM, true);
    }

  /* Determine if we need to save the condition code registers.  */
  if (df_regs_ever_live_p (CR2_REGNO)
      || df_regs_ever_live_p (CR3_REGNO)
      || df_regs_ever_live_p (CR4_REGNO))
    {
      info_ptr->cr_save_p = 1;
      if (DEFAULT_ABI == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
		       && info_ptr->spe_64bit_regs_used != 0
		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size     = reg_size;
  info_ptr->fixed_size   = RS6000_SAVE_AREA;
  info_ptr->vars_size    = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size    = RS6000_ALIGN (current_function_outgoing_args_size,
					 TARGET_ALTIVEC ? 16 : 8);
  if (FRAME_GROWS_DOWNWARD)
    info_ptr->vars_size
      += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
		       + info_ptr->parm_size,
		       ABI_STACK_BOUNDARY / BITS_PER_UNIT)
	 - (info_ptr->fixed_size + info_ptr->vars_size
	    + info_ptr->parm_size);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size = 4;
  else
    info_ptr->vrsave_size = 0;

  compute_save_world_info (info_ptr);

  /* Calculate the offsets.  */
  switch (DEFAULT_ABI)
    {
    case ABI_NONE:
    default:
      gcc_unreachable ();

    case ABI_AIX:
    case ABI_DARWIN:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.
	     The padding goes above the vectors.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = info_ptr->vrsave_save_offset & 0xF;
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;
	  gcc_assert (info_ptr->altivec_size == 0
		      || info_ptr->altivec_save_offset % 16 == 0);

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset      = info_ptr->gp_save_offset - ehrd_size;
      info_ptr->cr_save_offset   = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset   = 2 * reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset   = - info_ptr->fp_size;
      info_ptr->gp_save_offset   = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset   = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
	}
      else
	info_ptr->ehrd_offset    = info_ptr->cr_save_offset;
      info_ptr->ehrd_offset      -= ehrd_size;
      info_ptr->lr_save_offset   = reg_size;
      break;
    }

  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
  info_ptr->save_size    = RS6000_ALIGN (info_ptr->fp_size
					 + info_ptr->gp_size
					 + info_ptr->altivec_size
					 + info_ptr->altivec_padding_size
					 + info_ptr->spe_gp_size
					 + info_ptr->spe_padding_size
					 + ehrd_size
					 + info_ptr->cr_size
					 + info_ptr->vrsave_size,
					 save_align);

  non_fixed_size	 = (info_ptr->vars_size
			    + info_ptr->parm_size
			    + info_ptr->save_size);

  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (DEFAULT_ABI == ABI_V4)
    info_ptr->push_p = non_fixed_size != 0;

  else if (frame_pointer_needed)
    info_ptr->push_p = 1;

  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
    info_ptr->push_p = 1;

  else
    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  return info_ptr;
}
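
/* Example of the push_p decision (illustrative): a 32-bit AIX leaf
   function with no frame pointer and non_fixed_size of 200 bytes keeps
   push_p == 0, because 200 <= 220 and the save area fits in the
   protection area below the caller's stack pointer.  */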
/* Return true if the current function uses any GPRs in 64-bit SIMD
   mode.  */

static bool
spe_func_has_64bit_regs_p (void)
{
  rtx insns, insn;

  /* Functions that save and restore all the call-saved registers will
     need to save/restore the registers in 64-bits.  */
  if (current_function_calls_eh_return
      || current_function_calls_setjmp
      || current_function_has_nonlocal_goto)
    return true;

  insns = get_insns ();

  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx i;

	  /* FIXME: This should be implemented with attributes...

		 (set_attr "spe64" "true")....then,
		 if (get_spe64 (insn)) return true;

	     It's the only reliable way to do the stuff below.  */

	  i = PATTERN (insn);
	  if (GET_CODE (i) == SET)
	    {
	      enum machine_mode mode = GET_MODE (SET_SRC (i));

	      if (SPE_VECTOR_MODE (mode))
		return true;
	      if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
		return true;
	    }
	}
    }

  return false;
}
static void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
rtx
rs6000_return_addr (int count, rtx frame)
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
    {
      cfun->machine->ra_needs_full_frame = 1;

      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	     (Pmode,
	      plus_constant (copy_to_reg
			     (gen_rtx_MEM (Pmode,
					   memory_address (Pmode, frame))),
			     RETURN_ADDRESS_OFFSET)));
    }

  cfun->machine->ra_need_lr = 1;
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so it works; note that functions with
   vector parameters are required to have a prototype, so the argument
   type info must be available here.  (The tail recursion case can work
   with vector parameters, but there's no way to distinguish here.)  */

static bool
rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  tree type;

  if (decl)
    {
      if (TARGET_ALTIVEC_VRSAVE)
	{
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
	}
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
/* NULL if INSN is valid within a low-overhead loop.
   Otherwise return why doloop cannot be applied.
   PowerPC uses the COUNT register for branch on table instructions.  */

static const char *
rs6000_invalid_within_doloop (rtx insn)
{
  if (CALL_P (insn))
    return "Function call in the loop.";

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
    return "Computed branch in the loop.";

  return NULL;
}
static int
rs6000_ra_ever_killed (void)
{
  rtx top;
  rtx reg;
  rtx insn;

  if (current_function_is_thunk)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

	move LR->R0
	bcl to set PIC register
	move LR->R31
	move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    {
	      if (!SIBLING_CALL_P (insn))
		return 1;
	    }
	  else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
    {
      char buf[30];
      rtx lab, tmp1, tmp2, got;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      if (flag_pic == 2)
	got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
      else
	got = rs6000_got_sym ();
      tmp1 = tmp2 = dest;
      if (!fromprolog)
	{
	  tmp1 = gen_reg_rtx (Pmode);
	  tmp2 = gen_reg_rtx (Pmode);
	}
      emit_insn (gen_load_toc_v4_PIC_1 (lab));
      emit_move_insn (tmp1,
		      gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
      emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
      emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
    }
  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      emit_insn (gen_load_toc_v4_pic_si ());
      emit_move_insn (dest, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      if (fromprolog)
	{
	  rtx symF, symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1 (symF));
	  emit_move_insn (dest,
			  gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
	  emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
	}
      else
	{
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
	  emit_move_insn (dest,
			  gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      emit_insn (gen_addsi3 (dest, temp0, dest));
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      emit_insn (gen_elf_high (dest, realsym));
      emit_insn (gen_elf_low (dest, dest, realsym));
    }
  else
    {
      gcc_assert (DEFAULT_ABI == ABI_AIX);

      if (TARGET_32BIT)
	emit_insn (gen_load_toc_aix_si (dest));
      else
	emit_insn (gen_load_toc_aix_di (dest));
    }
}
/* Emit instructions to restore the link register after determining where
   its value has been stored.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      if (frame_pointer_needed
	  || current_function_calls_alloca
	  || info->total_size > 32767)
	{
	  tmp = gen_frame_mem (Pmode, frame_rtx);
	  emit_move_insn (operands[1], tmp);
	  frame_rtx = operands[1];
	}
      else if (info->push_p)
	sp_offset = info->total_size;

      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_frame_mem (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
}
static GTY(()) int set = -1;

int
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx pat = PATTERN (insn);
	int i;

	if (GET_CODE (pat) == PARALLEL)
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx sub = XVECEXP (pat, 0, i);
	      if (GET_CODE (sub) == USE)
		{
		  sub = XEXP (sub, 0);
		  if (GET_CODE (sub) == UNSPEC
		      && XINT (sub, 1) == UNSPEC_TOC)
		    return 1;
		}
	    }
      }
  return 0;
}
#endif
rtx
create_TOC_reference (rtx symbol)
{
  if (no_new_pseudos)
    df_set_regs_ever_live (TOC_REGISTER, true);
  return gen_rtx_PLUS (Pmode,
	   gen_rtx_REG (Pmode, TOC_REGISTER),
	     gen_rtx_CONST (Pmode,
	       gen_rtx_MINUS (Pmode, symbol,
		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
}
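
/* The returned RTL has the shape (illustrative):

     (plus:P (reg:P 2)
	     (const:P (minus:P (symbol_ref SYMBOL)
			       (symbol_ref <toc label>))))

   i.e. an address expressed as an offset from the TOC base register.  */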
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
rs6000_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = gen_int_mode (-size, Pmode);

  if (INTVAL (todec) != -size)
    {
      warning (0, "stack frame too large");
      emit_insn (gen_trap ());
      return;
    }

  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning (0, "stack limit expression is not supported");
    }

  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn () == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_di_update (stack_reg, stack_reg,
					       todec, stack_reg));
    }
  else
    {
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
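
/* For a small frame with TARGET_UPDATE, this reduces to a single
   store-with-update that both allocates the frame and writes the back
   chain, e.g. "stwu r1,-64(r1)" on 32-bit or "stdu r1,-112(r1)" on
   64-bit targets (illustrative); frames larger than 32767 bytes load
   the negated size into r0 first.  */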
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.  */

static void
rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
		      rtx reg2, rtx rreg)
{
  rtx real, temp;

  /* copy_rtx will not make unique copies of registers, so we need to
     ensure we don't have unwanted sharing here.  */
  if (reg == reg2)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  if (reg == rreg)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  real = copy_rtx (PATTERN (insn));

  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else
    {
      int i;

      gcc_assert (GET_CODE (real) == PARALLEL);
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }

  if (TARGET_SPE)
    real = spe_synthesize_frame_save (real);

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
14434 original note, plus a synthetic register save. */
14437 spe_synthesize_frame_save (rtx real
)
14439 rtx synth
, offset
, reg
, real2
;
14441 if (GET_CODE (real
) != SET
14442 || GET_MODE (SET_SRC (real
)) != V2SImode
)
14445 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
14446 frame related note. The parallel contains a set of the register
14447 being saved, and another set to a synthetic register (n+1200).
14448 This is so we can differentiate between 64-bit and 32-bit saves.
14449 Words cannot describe this nastiness. */
14451 gcc_assert (GET_CODE (SET_DEST (real
)) == MEM
14452 && GET_CODE (XEXP (SET_DEST (real
), 0)) == PLUS
14453 && GET_CODE (SET_SRC (real
)) == REG
);
14456 (set (mem (plus (reg x) (const y)))
14459 (set (mem (plus (reg x) (const y+4)))
14463 real2
= copy_rtx (real
);
14464 PUT_MODE (SET_DEST (real2
), SImode
);
14465 reg
= SET_SRC (real2
);
14466 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
14467 synth
= copy_rtx (real2
);
14469 if (BYTES_BIG_ENDIAN
)
14471 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
14472 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
14475 reg
= SET_SRC (synth
);
14477 synth
= replace_rtx (synth
, reg
,
14478 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
14480 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
14481 synth
= replace_rtx (synth
, offset
,
14482 GEN_INT (INTVAL (offset
)
14483 + (BYTES_BIG_ENDIAN
? 0 : 4)));
14485 RTX_FRAME_RELATED_P (synth
) = 1;
14486 RTX_FRAME_RELATED_P (real2
) = 1;
14487 if (BYTES_BIG_ENDIAN
)
14488 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
14490 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    UNSPECV_SET_VRSAVE));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs[i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */

static void
emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && mode == DFmode)
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whoever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_frame_mem (mode, addr);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
/* Emit an offset memory reference suitable for a frame store, while
   converting to a valid addressing mode.  */

static rtx
gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
{
  rtx int_rtx, offset_rtx;

  int_rtx = GEN_INT (offset);

  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && mode == DFmode))
    {
      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
      emit_move_insn (offset_rtx, int_rtx);
    }
  else
    offset_rtx = int_rtx;

  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
}
/* Look for user-defined global regs.  We should not save and restore these,
   and cannot use stmw/lmw if there are any in its range.  */

static bool
no_global_regs_above (int first_greg)
{
  int i;
  for (i = 0; i < 32 - first_greg; i++)
    if (global_regs[first_greg + i])
      return false;
  return true;
}

#ifndef TARGET_FIX_AND_CONTINUE
#define TARGET_FIX_AND_CONTINUE 0
#endif

/* Determine whether the gp REG is really used.  */

static bool
rs6000_reg_live_or_pic_offset_p (int reg)
{
  return ((df_regs_ever_live_p (reg)
	   && (!call_used_regs[reg]
	       || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
		   && TARGET_TOC && TARGET_MINIMAL_TOC)))
	  || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
}
14650 /* Emit function prologue as insns. */
14653 rs6000_emit_prologue (void)
14655 rs6000_stack_t
*info
= rs6000_stack_info ();
14656 enum machine_mode reg_mode
= Pmode
;
14657 int reg_size
= TARGET_32BIT
? 4 : 8;
14658 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
14659 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
14660 rtx frame_reg_rtx
= sp_reg_rtx
;
14661 rtx cr_save_rtx
= NULL_RTX
;
14663 int saving_FPRs_inline
;
14664 int using_store_multiple
;
14665 HOST_WIDE_INT sp_offset
= 0;
14667 if (TARGET_FIX_AND_CONTINUE
)
14669 /* gdb on darwin arranges to forward a function from the old
14670 address by modifying the first 5 instructions of the function
14671 to branch to the overriding function. This is necessary to
14672 permit function pointers that point to the old function to
14673 actually forward to the new function. */
14674 emit_insn (gen_nop ());
14675 emit_insn (gen_nop ());
14676 emit_insn (gen_nop ());
14677 emit_insn (gen_nop ());
14678 emit_insn (gen_nop ());
14681 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
14683 reg_mode
= V2SImode
;
14687 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
14688 && (!TARGET_SPE_ABI
14689 || info
->spe_64bit_regs_used
== 0)
14690 && info
->first_gp_reg_save
< 31
14691 && no_global_regs_above (info
->first_gp_reg_save
));
14692 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
14693 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
14694 || current_function_calls_eh_return
14695 || cfun
->machine
->ra_need_lr
);
14697 /* For V.4, update stack before we do any saving and set back pointer. */
14698 if (! WORLD_SAVE_P (info
)
14700 && (DEFAULT_ABI
== ABI_V4
14701 || current_function_calls_eh_return
))
14703 if (info
->total_size
< 32767)
14704 sp_offset
= info
->total_size
;
14706 frame_reg_rtx
= frame_ptr_rtx
;
14707 rs6000_emit_allocate_stack (info
->total_size
,
14708 (frame_reg_rtx
!= sp_reg_rtx
14709 && (info
->cr_save_p
14711 || info
->first_fp_reg_save
< 64
14712 || info
->first_gp_reg_save
< 32
14714 if (frame_reg_rtx
!= sp_reg_rtx
)
14715 rs6000_emit_stack_tie ();
  /* Handle world saves specially here.  */
  if (WORLD_SAVE_P (info))
    {
      int i, j, sz;
      rtx treg;
      rtvec p;
      rtx reg0;

      /* save_world expects lr in r0. */
      reg0 = gen_rtx_REG (Pmode, 0);
      if (info->lr_save_p)
        {
          insn = emit_move_insn (reg0,
                                 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
         assumptions about the offsets of various bits of the stack
         frame.  */
      gcc_assert (info->gp_save_offset == -220
                  && info->fp_save_offset == -144
                  && info->lr_save_offset == 8
                  && info->cr_save_offset == 4
                  && info->push_p
                  && info->lr_save_p
                  && (!current_function_calls_eh_return
                      || info->ehrd_offset == -432)
                  && info->vrsave_save_offset == -224
                  && info->altivec_save_offset == -416);

      treg = gen_rtx_REG (SImode, 11);
      emit_move_insn (treg, GEN_INT (-info->total_size));

      /* SAVE_WORLD takes the caller's LR in R0 and the frame size
         in R11.  It also clobbers R12, so beware!  */

      /* Preserve CR2 for save_world prologues */
      sz = 5;
      sz += 32 - info->first_gp_reg_save;
      sz += 64 - info->first_fp_reg_save;
      sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
      p = rtvec_alloc (sz);
      j = 0;
      RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
                                            gen_rtx_REG (SImode,
                                                         LINK_REGISTER_REGNUM));
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
                                        gen_rtx_SYMBOL_REF (Pmode,
                                                            "*save_world"));
      /* We do floats first so that the instruction pattern matches
         properly.  */
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->fp_save_offset
                                            + sp_offset + 8 * i));
          rtx mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
        {
          rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->altivec_save_offset
                                            + sp_offset + 16 * i));
          rtx mem = gen_frame_mem (V4SImode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
        }

      {
        /* CR register traditionally saved as CR2.  */
        rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
        rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                 GEN_INT (info->cr_save_offset
                                          + sp_offset));
        rtx mem = gen_frame_mem (reg_mode, addr);

        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
      }
      /* Explain about use of R0.  */
      if (info->lr_save_p)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->lr_save_offset
                                            + sp_offset));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
        }
      /* Explain what happens to the stack pointer.  */
      {
        rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
      }

      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            treg, GEN_INT (-info->total_size));
      sp_offset = info->total_size;
    }
  /* If we use the link register, get it into r0.  */
  if (!WORLD_SAVE_P (info) && info->lr_save_p)
    {
      rtx addr, reg, mem;

      insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
                             gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
      RTX_FRAME_RELATED_P (insn) = 1;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                           GEN_INT (info->lr_save_offset + sp_offset));
      reg = gen_rtx_REG (Pmode, 0);
      mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
         __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* If we need to save CR, put it into r12.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      rtx set;

      cr_save_rtx = gen_rtx_REG (SImode, 12);
      insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Now, there's no way that dwarf2out_frame_debug_expr is going
         to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
         But that's OK.  All we have to do is specify that _one_ condition
         code register is saved in this stack slot.  The thrower's epilogue
         will then restore all the call-saved registers.
         We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      set = gen_rtx_SET (VOIDmode, cr_save_rtx,
                         gen_rtx_REG (SImode, CR2_REGNO));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                            set,
                                            REG_NOTES (insn));
    }
  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
             && ! call_used_regs[info->first_fp_reg_save+i]))
          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
                           info->first_fp_reg_save + i,
                           info->fp_save_offset + sp_offset + 8 * i,
                           info->total_size);
    }
  else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
    {
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
                                          gen_rtx_REG (Pmode,
                                                       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
               info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
                                      gen_rtx_SYMBOL_REF (Pmode,
                                                          alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->fp_save_offset
                                        + sp_offset + 8*i));
          mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }
  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (!WORLD_SAVE_P (info) && using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->gp_save_offset
                                        + sp_offset
                                        + reg_size * i));
          mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }
  else if (!WORLD_SAVE_P (info)
           && TARGET_SPE_ABI
           && info->spe_64bit_regs_used != 0
           && info->first_gp_reg_save != 32)
    {
      int i;
      rtx spe_save_area_ptr;
      int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
                                  && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
                                  && !call_used_regs[STATIC_CHAIN_REGNUM]);

      /* Determine whether we can address all of the registers that need
         to be saved with an offset from the stack pointer that fits in
         the small const field for SPE memory instructions.  */
      int spe_regs_addressable_via_sp
        = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
                              + (32 - info->first_gp_reg_save - 1) * reg_size);
      int spe_offset;

      if (spe_regs_addressable_via_sp)
        {
          spe_save_area_ptr = sp_reg_rtx;
          spe_offset = info->spe_gp_save_offset + sp_offset;
        }
      else
        {
          /* Make r11 point to the start of the SPE save area.  We need
             to be careful here if r11 is holding the static chain.  If
             it is, then temporarily save it in r0.  We would use r0 as
             our base register here, but using r0 as a base register in
             loads and stores means something different from what we
             would like.  */
          if (using_static_chain_p)
            {
              rtx r0 = gen_rtx_REG (Pmode, 0);

              gcc_assert (info->first_gp_reg_save > 11);

              emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
            }

          spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
          emit_insn (gen_addsi3 (spe_save_area_ptr, sp_reg_rtx,
                                 GEN_INT (info->spe_gp_save_offset + sp_offset)));

          spe_offset = 0;
        }

      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
          {
            rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
            rtx offset, addr, mem;

            /* We're doing all this to ensure that the offset fits into
               the immediate offset of 'evstdd'.  */
            gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));

            offset = GEN_INT (reg_size * i + spe_offset);
            addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
            mem = gen_rtx_MEM (V2SImode, addr);

            insn = emit_move_insn (mem, reg);

            rs6000_frame_related (insn, spe_save_area_ptr,
                                  info->spe_gp_save_offset
                                  + sp_offset + reg_size * i,
                                  offset, const0_rtx);
          }

      /* Move the static chain pointer back.  */
      if (using_static_chain_p && !spe_regs_addressable_via_sp)
        emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
    }
  else if (!WORLD_SAVE_P (info))
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
          {
            rtx addr, reg, mem;
            reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

            addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                 GEN_INT (info->gp_save_offset
                                          + sp_offset
                                          + reg_size * i));
            mem = gen_frame_mem (reg_mode, addr);

            insn = emit_move_insn (mem, reg);
            rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                  NULL_RTX, NULL_RTX);
          }
    }
  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
        {
          rtx addr, reg, mem;

          reg = gen_rtx_REG (reg_mode, 2);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (sp_offset + 5 * reg_size));
          mem = gen_frame_mem (reg_mode, addr);

          insn = emit_move_insn (mem, reg);
          rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                NULL_RTX, NULL_RTX);
          PATTERN (insn) = gen_blockage ();
        }

      for (i = 0; ; ++i)
        {
          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;

          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
                           info->ehrd_offset + sp_offset
                           + reg_size * (int) i,
                           info->total_size);
        }
    }
  /* Save CR if we use any that must be preserved.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);
      /* See the large comment above about why CR2_REGNO is used.  */
      rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);

      /* If r12 was used to hold the original sp, copy cr into r0 now
         that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
        {
          rtx set;

          cr_save_rtx = gen_rtx_REG (SImode, 0);
          insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
          RTX_FRAME_RELATED_P (insn) = 1;
          set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
          REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                                set,
                                                REG_NOTES (insn));
        }
      insn = emit_move_insn (mem, cr_save_rtx);

      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }
  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (!WORLD_SAVE_P (info) && info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    {
      if (info->total_size < 32767)
        sp_offset = info->total_size;
      else
        frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
                                  (frame_reg_rtx != sp_reg_rtx
                                   && ((info->altivec_size != 0)
                                       || (info->vrsave_mask != 0)
                                       )));
      if (frame_reg_rtx != sp_reg_rtx)
        rs6000_emit_stack_tie ();
    }

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                             sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  /* Save AltiVec registers if needed.  Save here because the red zone does
     not include AltiVec registers.  */
  if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
         are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
          {
            rtx areg, savereg, mem;
            int offset;

            offset = info->altivec_save_offset + sp_offset
              + 16 * (i - info->first_altivec_reg_save);

            savereg = gen_rtx_REG (V4SImode, i);

            areg = gen_rtx_REG (Pmode, 0);
            emit_move_insn (areg, GEN_INT (offset));

            /* AltiVec addressing mode is [reg+reg].  */
            mem = gen_frame_mem (V4SImode,
                                 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

            insn = emit_move_insn (mem, savereg);

            rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                  areg, GEN_INT (offset));
          }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
         as frame_reg_rtx and r11 as the static chain pointer for
         nested functions.  */
      reg = gen_rtx_REG (SImode, 0);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
        emit_insn (gen_get_vrsave_internal (reg));
      else
        emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      if (!WORLD_SAVE_P (info))
        {
          /* Save VRSAVE.  */
          offset = info->vrsave_save_offset + sp_offset;
          mem = gen_frame_mem (SImode,
                               gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                             GEN_INT (offset)));
          insn = emit_move_insn (mem, reg);
        }

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }
  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4
          && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
          && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
    {
      /* If emit_load_toc_table will use the link register, we need to save
         it.  We use R12 for this purpose because emit_load_toc_table
         can use register 0.  This allows us to use a plain 'blr' to return
         from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
                                      && DEFAULT_ABI != ABI_AIX
                                      && flag_pic
                                      && ! info->lr_save_p
                                      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
      if (save_LR_around_toc_setup)
        {
          rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

          insn = emit_move_insn (frame_ptr_rtx, lr);
          RTX_FRAME_RELATED_P (insn) = 1;

          rs6000_emit_load_toc_table (TRUE);

          insn = emit_move_insn (lr, frame_ptr_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        rs6000_emit_load_toc_table (TRUE);
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      rtx src = machopic_function_base_sym ();

      /* Save and restore LR locally around this call (in R0).  */
      if (!info->lr_save_p)
        emit_move_insn (gen_rtx_REG (Pmode, 0), lr);

      emit_insn (gen_load_macho_picbase (src));

      emit_move_insn (gen_rtx_REG (Pmode,
                                   RS6000_PIC_OFFSET_TABLE_REGNUM),
                      lr);

      if (!info->lr_save_p)
        emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
    }
#endif
}
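/* Illustrative sketch (added commentary, not from the original
   source): for a small 32-bit SVR4 frame that saves one GPR and the
   link register, the insns emitted above amount to roughly

        stwu r1,-16(r1)         # allocate frame, store back chain
        mflr r0                 # move LR into r0
        stw r31,12(r1)          # save the callee-saved GPR
        stw r0,20(r1)           # save LR in the caller's frame

   with the exact offsets coming from rs6000_stack_info.  */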
/* Write function prologue.  */

static void
rs6000_output_function_prologue (FILE *file,
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
             SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
             RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
             RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
         the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
        rtx insn;
        unsigned addr = 0;
        for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
          {
            INSN_ADDRESSES_NEW (insn, addr);
            addr += 4;
          }
      }

      if (TARGET_DEBUG_STACK)
        debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
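/* Example of the .extern output above (illustrative; the actual
   prefix and suffix macros vary per ABI and are assumptions here):
   with first_fp_reg_save == 46 and out-of-line FPR saves, assuming
   SAVE_FP_PREFIX "._savef" and an empty SAVE_FP_SUFFIX, the file
   would receive

        .extern ._savef14
        .extern ._restf14

   so the linker can resolve the out-of-line save/restore routines.  */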
/* Emit function epilogue as insns.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (int sibcall)
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mtcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int i;

  info = rs6000_stack_info ();

  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
                         && (!TARGET_SPE_ABI
                             || info->spe_64bit_regs_used == 0)
                         && info->first_gp_reg_save < 31
                         && no_global_regs_above (info->first_gp_reg_save));
  restoring_FPRs_inline = (sibcall
                           || current_function_calls_eh_return
                           || info->first_fp_reg_save == 64
                           || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
                                 || current_function_calls_alloca
                                 || info->total_size > 32767);
  using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
                         || rs6000_cpu == PROCESSOR_PPC603
                         || rs6000_cpu == PROCESSOR_PPC750
                         || optimize_size);
  if (WORLD_SAVE_P (info))
    {
      int i, j;
      char rname[30];
      const char *alloc_rname;
      rtvec p;

      /* eh_rest_world_r10 will return to the location saved in the LR
         stack slot (which is not likely to be our caller.)
         Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
         rest_world is similar, except any R10 parameter is ignored.
         The exception-handling stuff that was here in 2.95 is no
         longer necessary.  */

      p = rtvec_alloc (9
                       + 1
                       + 32 - info->first_gp_reg_save
                       + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
                       + 63 + 1 - info->first_fp_reg_save);

      strcpy (rname, ((current_function_calls_eh_return) ?
                      "*eh_rest_world_r10" : "*rest_world"));
      alloc_rname = ggc_strdup (rname);

      j = 0;
      RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
                                        gen_rtx_REG (Pmode,
                                                     LINK_REGISTER_REGNUM));
      RTVEC_ELT (p, j++)
        = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
      /* The instruction pattern requires a clobber here;
         it is shared with the restVEC helper. */
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));

      {
        /* CR register traditionally saved as CR2.  */
        rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
        rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                 GEN_INT (info->cr_save_offset));
        rtx mem = gen_frame_mem (reg_mode, addr);

        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
      }

      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
        {
          rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->altivec_save_offset
                                            + 16 * i));
          rtx mem = gen_frame_mem (V4SImode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      for (i = 0; info->first_fp_reg_save + i <= 63; i++)
        {
          rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->fp_save_offset
                                            + 8 * i));
          rtx mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
      RTVEC_ELT (p, j++)
        = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return;
    }
  /* Set sp_offset based on the stack push from the prologue.  */
  if (info->total_size < 32767)
    sp_offset = info->total_size;

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
          {
            rtx addr, areg, mem;

            areg = gen_rtx_REG (Pmode, 0);
            emit_move_insn
              (areg, GEN_INT (info->altivec_save_offset
                              + sp_offset
                              + 16 * (i - info->first_altivec_reg_save)));

            /* AltiVec addressing mode is [reg+reg].  */
            addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
            mem = gen_frame_mem (V4SImode, addr);

            emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
          }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                           GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_frame_mem (SImode, addr);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }
  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
         loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
        frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
                      gen_rtx_MEM (Pmode, sp_reg_rtx));
    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4
          || current_function_calls_eh_return)
        sp_offset = info->total_size;
      else
        {
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
                                   GEN_INT (info->total_size))
                     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
                                   GEN_INT (info->total_size)));
        }
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
                                      info->lr_save_offset + sp_offset);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
                    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      if (TARGET_AIX)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (sp_offset + 5 * reg_size));
          rtx mem = gen_frame_mem (reg_mode, addr);

          emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
        }

      for (i = 0; ; ++i)
        {
          rtx mem;

          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;

          mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
                                      info->ehrd_offset + sp_offset
                                      + reg_size * (int) i);

          emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
        }
    }
  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, i) =
            gen_rtx_SET (VOIDmode,
                         gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
                         mem);
        }
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else if (TARGET_SPE_ABI
           && info->spe_64bit_regs_used != 0
           && info->first_gp_reg_save != 32)
    {
      rtx spe_save_area_ptr;
      /* Determine whether we can address all of the registers that need
         to be saved with an offset from the stack pointer that fits in
         the small const field for SPE memory instructions.  */
      int spe_regs_addressable_via_sp
        = SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
                              + (32 - info->first_gp_reg_save - 1) * reg_size);
      int spe_offset;

      if (spe_regs_addressable_via_sp)
        {
          spe_save_area_ptr = frame_reg_rtx;
          spe_offset = info->spe_gp_save_offset + sp_offset;
        }
      else
        {
          /* Make r11 point to the start of the SPE save area.  We worried about
             not clobbering it when we were saving registers in the prologue.
             There's no need to worry here because the static chain is passed
             anew to every function.  */
          spe_save_area_ptr = gen_rtx_REG (Pmode, 11);

          emit_insn (gen_addsi3 (spe_save_area_ptr, frame_reg_rtx,
                                 GEN_INT (info->spe_gp_save_offset + sp_offset)));

          spe_offset = 0;
        }

      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
          {
            rtx offset, addr, mem;

            /* We're doing all this to ensure that the immediate offset
               fits into the immediate field of 'evldd'.  */
            gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));

            offset = GEN_INT (spe_offset + reg_size * i);
            addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
            mem = gen_rtx_MEM (V2SImode, addr);

            emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
                            mem);
          }
    }
  else
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          emit_move_insn (gen_rtx_REG (reg_mode,
                                       info->first_gp_reg_save + i), mem);
        }
  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
           && ! call_used_regs[info->first_fp_reg_save+i]))
        {
          rtx addr, mem;
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->fp_save_offset
                                        + sp_offset
                                        + 8 * i));
          mem = gen_frame_mem (DFmode, addr);

          emit_move_insn (gen_rtx_REG (DFmode,
                                       info->first_fp_reg_save + i),
                          mem);
        }

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mtcr_multiple)
        {
          for (i = 0; i < 8; i++)
            if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
              count++;
          gcc_assert (count);
        }

      if (using_mtcr_multiple && count > 1)
        {
          rtvec p;
          int ndx;

          p = rtvec_alloc (count);

          ndx = 0;
          for (i = 0; i < 8; i++)
            if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
              {
                rtvec r = rtvec_alloc (2);
                RTVEC_ELT (r, 0) = r12_rtx;
                RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
                RTVEC_ELT (p, ndx) =
                  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
                               gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
                ndx++;
              }
          emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
          gcc_assert (ndx == count);
        }
      else
        for (i = 0; i < 8; i++)
          if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
            {
              emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
                                                           CR0_REGNO+i),
                                              r12_rtx));
            }
    }
  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  */
  if (frame_reg_rtx != sp_reg_rtx)
    {
      /* This blockage is needed so that sched doesn't decide to move
         the sp change before the register restores.  */
      rs6000_emit_stack_tie ();
      if (TARGET_SPE_ABI
          && info->spe_64bit_regs_used != 0
          && info->first_gp_reg_save != 32)
        emit_insn (gen_addsi3 (sp_reg_rtx, gen_rtx_REG (Pmode, 11),
                               GEN_INT (-(info->spe_gp_save_offset + sp_offset))));
      else
        emit_move_insn (sp_reg_rtx, frame_reg_rtx);
    }
  else if (sp_offset != 0)
    emit_insn (TARGET_32BIT
               ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
                             GEN_INT (sp_offset))
               : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
                             GEN_INT (sp_offset)));

  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
                 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
        p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
        p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
                                      gen_rtx_REG (Pmode,
                                                   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
         restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
        {
          int i;
          char rname[30];
          const char *alloc_rname;

          sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
                   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
          alloc_rname = ggc_strdup (rname);
          RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
                                          gen_rtx_SYMBOL_REF (Pmode,
                                                              alloc_rname));

          for (i = 0; i < 64 - info->first_fp_reg_save; i++)
            {
              rtx addr, mem;
              addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
                                   GEN_INT (info->fp_save_offset + 8*i));
              mem = gen_frame_mem (DFmode, addr);

              RTVEC_ELT (p, i+3) =
                gen_rtx_SET (VOIDmode,
                             gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
                             mem);
            }
        }

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
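/* Illustrative counterpart to the prologue sketch earlier (added
   commentary, not from the original source): the same small 32-bit
   SVR4 frame is torn down roughly as

        lwz r0,20(r1)           # reload the saved LR
        lwz r31,12(r1)          # restore the callee-saved GPR
        mtlr r0
        addi r1,r1,16           # pop the frame
        blr
*/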
/* Write function epilogue.  */

static void
rs6000_output_function_epilogue (FILE *file,
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
         the trace table.  */
      if (GET_CODE (insn) == NOTE)
        insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
        {
          /* This is slightly ugly, but at least we don't have two
             copies of the epilogue-emitting code.  */
          start_sequence ();

          /* A NOTE_INSN_DELETED is supposed to be at the start
             and end of the "toplevel" insn chain.  */
          emit_note (NOTE_INSN_DELETED);
          rs6000_emit_epilogue (FALSE);
          emit_note (NOTE_INSN_DELETED);

          /* Expand INSN_ADDRESSES so final() doesn't crash.  */
          {
            rtx insn;
            unsigned addr = 0;
            for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
              {
                INSN_ADDRESSES_NEW (insn, addr);
                addr += 4;
              }
          }

          if (TARGET_DEBUG_STACK)
            debug_rtx_list (get_insns (), 100);
          final (get_insns (), file, FALSE);
          end_sequence ();
        }
    }

#if TARGET_MACHO
  macho_branch_islands ();
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
           && NOTE_P (insn)
           && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
        && (LABEL_P (insn)
            || (NOTE_P (insn)
                && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif
  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none && !current_function_is_thunk)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;
      rs6000_stack_t *info = rs6000_stack_info ();

      if (rs6000_traceback == traceback_full)
        optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
        optional_tbtab = 0;
      else
        optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
        {
          fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
          while (*fname == '.')	/* V.4 encodes . in the name */
            fname++;

          /* Need label immediately before tbtab, so we can compute
             its offset from the function start.  */
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
          ASM_OUTPUT_LABEL (file, fname);
        }

      /* The .tbtab pseudo-op can only be used for the first eight
         expressions, since it can't handle the possibly variable
         length fields that follow.  However, if you omit the optional
         fields, the assembler outputs zeros for all optional fields
         anyways, giving each variable length field is minimum length
         (as defined in sys/debug.h).  Thus we can not use the .tbtab
         pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
         that have to find it by searching forward from the entry
         point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there does not seem to be any
         official way to discover the language being compiled, so we
         use language_string.
         C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
         Java is 13.  Objective-C is 14.  Objective-C++ isn't assigned
         a number, so for now use 9.  */
      if (! strcmp (language_string, "GNU C"))
        i = 0;
      else if (! strcmp (language_string, "GNU F77")
               || ! strcmp (language_string, "GNU F95"))
        i = 1;
      else if (! strcmp (language_string, "GNU Pascal"))
        i = 2;
      else if (! strcmp (language_string, "GNU Ada"))
        i = 3;
      else if (! strcmp (language_string, "GNU C++")
               || ! strcmp (language_string, "GNU Objective-C++"))
        i = 9;
      else if (! strcmp (language_string, "GNU Java"))
        i = 13;
      else if (! strcmp (language_string, "GNU Objective-C"))
        i = 14;
      else
        gcc_unreachable ();
      fprintf (file, "%d,", i);
      /* 8 single bit fields: global linkage (not set for C extern linkage,
         apparently a PL/I convention?), out-of-line epilogue/prologue, offset
         from start of procedure stored in tbtab, internal function, function
         has controlled storage, function has no toc, function uses fp,
         function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
               (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
         proc table, function calls alloca, on condition directives
         (controls stack walks, 3 bits), saves condition reg, saves
         link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
         set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
               ((optional_tbtab << 6)
                | ((optional_tbtab & frame_pointer_needed) << 5)
                | (info->cr_save_p << 1)
                | (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
         (6 bits).  */
      fprintf (file, "%d,",
               (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));
)
15984 /* Compute the parameter info from the function decl argument
15987 int next_parm_info_bit
= 31;
15989 for (decl
= DECL_ARGUMENTS (current_function_decl
);
15990 decl
; decl
= TREE_CHAIN (decl
))
15992 rtx parameter
= DECL_INCOMING_RTL (decl
);
15993 enum machine_mode mode
= GET_MODE (parameter
);
15995 if (GET_CODE (parameter
) == REG
)
15997 if (SCALAR_FLOAT_MODE_P (mode
))
16017 gcc_unreachable ();
16020 /* If only one bit will fit, don't or in this entry. */
16021 if (next_parm_info_bit
> 0)
16022 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
16023 next_parm_info_bit
-= 2;
16027 fixed_parms
+= ((GET_MODE_SIZE (mode
)
16028 + (UNITS_PER_WORD
- 1))
16030 next_parm_info_bit
-= 1;
16036 /* Number of fixed point parameters. */
16037 /* This is actually the number of words of fixed point parameters; thus
16038 an 8 byte struct counts as 2; and thus the maximum value is 8. */
16039 fprintf (file
, "%d,", fixed_parms
);
16041 /* 2 bitfields: number of floating point parameters (7 bits), parameters
16043 /* This is actually the number of fp registers that hold parameters;
16044 and thus the maximum value is 13. */
16045 /* Set parameters on stack bit if parameters are not in their original
16046 registers, regardless of whether they are on the stack? Xlc
16047 seems to set the bit when not optimizing. */
16048 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
      if (! optional_tbtab)
        return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
         11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
         they occur in the parameter list.  Any intervening arguments on the
         stack are ignored.  If the list overflows a long (max possible length
         34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
        fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      if (TARGET_AIX)
        RS6000_OUTPUT_BASENAME (file, fname);
      else
        assemble_name (file, fname);
      putc ('-', file);
      rs6000_output_function_entry (file, fname);
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
         above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
        ++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
         Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
        fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
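/* Illustrative traceback-table output (added commentary; byte values
   hypothetical): the fixed part produced by the fprintf calls above
   looks like

        .long 0                 # all-zero word marking the tbtab
        .byte 0,0,32,65,2,0,1   # format, language, bit fields, counts

   followed, when optional_tbtab is set, by the parameter-info word,
   the offset back to the function start, and the function name.  */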
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */
static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                        HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                        tree function)
{
  rtx this, insn, funexp;

  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, delta_rtx)
                 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
        {
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
                     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
          emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
        }
      else
        {
          rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

          emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
        }
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, tmp)
                 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly.  */
  insn = emit_call_insn (
           gen_rtx_PARALLEL (VOIDmode,
             gen_rtvec (4,
                        gen_rtx_CALL (VOIDmode,
                                      funexp, const0_rtx),
                        gen_rtx_USE (VOIDmode, const0_rtx),
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_REG (SImode,
                                                  LINK_REGISTER_REGNUM)),
                        gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_alloc ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
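/* Illustrative result (added commentary, not from the original
   source): with a small DELTA of 8, no vcall offset, and a 32-bit
   target, the thunk emitted by the code above reduces to

        addi r3,r3,8            # adjust the incoming 'this' pointer
        b target_function       # direct tail jump; LR still holds caller

   which is the "direct pure jump" the comment above calls for
   ('target_function' stands in for the real mangled name).  */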
/* A quick summary of the various types of 'constant-pool tables'
   under PowerPC:

   Target       Flags           Name            One table per
   AIX          (none)          AIX TOC         object file
   AIX          -mfull-toc      AIX TOC         object file
   AIX          -mminimal-toc   AIX minimal TOC translation unit
   SVR4/EABI    (none)          SVR4 SDATA      object file
   SVR4/EABI    -fpic           SVR4 pic        object file
   SVR4/EABI    -fPIC           SVR4 PIC        translation unit
   SVR4/EABI    -mrelocatable   EABI TOC        function
   SVR4/EABI    -maix           AIX TOC         object file
   SVR4/EABI    -maix -mminimal-toc
                                AIX minimal TOC translation unit

   Name                 Reg.    Set by  entries       contains:
                                        made by  addrs? fp?     sum?

   AIX TOC              2       crt0    as       Y      option  option
   AIX minimal TOC      30      prolog  gcc      Y      Y       option
   SVR4 SDATA           13      crt0    gcc      N      Y       N
   SVR4 pic             30      prolog  ld       Y      not yet N
   SVR4 PIC             30      prolog  gcc      Y      option  option
   EABI TOC             30      prolog  gcc      Y      option  option

*/
/* Hash functions for the hash table.  */

static unsigned
rs6000_hash_constant (rtx k)
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
        return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      flen = 2;
      break;

    case CODE_LABEL:
      fidx = 3;
      break;

    default:
      break;
    }

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
        {
          unsigned i, len;
          const char *str = XSTR (k, fidx);
          len = strlen (str);
          result = result * 613 + len;
          for (i = 0; i < len; i++)
            result = result * 613 + (unsigned) str[i];
          break;
        }
      case 'u':
      case 'e':
        result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
        break;
      case 'i':
      case 'n':
        result = result * 613 + (unsigned) XINT (k, fidx);
        break;
      case 'w':
        if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
          result = result * 613 + (unsigned) XWINT (k, fidx);
        else
          {
            size_t i;
            for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
              result = result * 613 + (unsigned) (XWINT (k, fidx)
                                                  >> CHAR_BIT * i);
          }
        break;
      case '0':
        break;
      default:
        gcc_unreachable ();
      }

  return result;
}
static hashval_t
toc_hash_function (const void *hash_entry)
{
  const struct toc_hash_struct *thc =
    (const struct toc_hash_struct *) hash_entry;
  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
}

/* Compare H1 and H2 for equivalence.  */

static int
toc_hash_eq (const void *h1, const void *h2)
{
  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
  rtx r2 = ((const struct toc_hash_struct *) h2)->key;

  if (((const struct toc_hash_struct *) h1)->key_mode
      != ((const struct toc_hash_struct *) h2)->key_mode)
    return 0;

  return rtx_equal_p (r1, r2);
}
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

#define VTABLE_NAME_P(NAME)                            \
  (strncmp ("_vt.", name, strlen ("_vt.")) == 0        \
  || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0      \
  || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0      \
  || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0      \
  || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
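/* Examples (added commentary): under the Itanium C++ ABI mangling,
   'class Foo' yields _ZTV4Foo for the vtable, _ZTT4Foo for the VTT,
   _ZTI4Foo for the typeinfo object, and _ZTC-prefixed names for
   construction vtables; "_vt." is the older g++ 2.x scheme.  */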
void
rs6000_output_symbol_ref (FILE *file, rtx x)
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.  */

void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  HOST_WIDE_INT offset = 0;

  gcc_assert (!TARGET_NO_TOC);

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
        toc_hash_table = htab_create_ggc (1021, toc_hash_function,
                                          toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
        *found = h;
      else  /* This is indeed a duplicate.
               Set this label equal to that label.  */
        {
          fputs ("\t.set ", file);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d,", labelno);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
                                   found)->labelno));
          return;
        }
    }
  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE &&
      (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
      else
        REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
           (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);

      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
      else
        REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
           (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
      else
        REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx\n", l & 0xffffffff);
          return;
        }
    }
  else if (GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
        {
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
        }
      else
#if HOST_BITS_PER_WIDE_INT == 32
        {
          low = INTVAL (x);
          high = (low & 0x80000000) ? ~0 : 0;
        }
#else
        {
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
        }
#endif

      /* TOC entries are always Pmode-sized, but since this
         is a bigendian machine then if we're putting smaller
         integer constants in the TOC we have to pad them.
         (This is still a win over putting the constants in
         a separate constant pool, because then we'd have
         to have both a TOC entry _and_ the actual constant.)

         For a 32-bit target, CONST_INT values are loaded and shifted
         entirely within `low' and can be stored in one TOC entry.  */

      /* It would be easy to make this work, but it doesn't now.  */
      gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
        {
#if HOST_BITS_PER_WIDE_INT == 32
          lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
                         POINTER_SIZE, &low, &high, 0);
#else
          low |= high << 32;
          low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
          high = (HOST_WIDE_INT) low >> 32;
          low &= 0xffffffff;
#endif
        }

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                     (long) high & 0xffffffff, (long) low & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   (long) high & 0xffffffff, (long) low & 0xffffffff);
          return;
        }
      else
        {
          if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
            {
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                         (long) high & 0xffffffff, (long) low & 0xffffffff);
              fprintf (file, "0x%lx,0x%lx\n",
                       (long) high & 0xffffffff, (long) low & 0xffffffff);
            }
          else
            {
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
              fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
            }
          return;
        }
    }
16646 if (GET_CODE (x
) == CONST
)
16648 gcc_assert (GET_CODE (XEXP (x
, 0)) == PLUS
);
16650 base
= XEXP (XEXP (x
, 0), 0);
16651 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
16654 switch (GET_CODE (base
))
16657 name
= XSTR (base
, 0);
16661 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
16662 CODE_LABEL_NUMBER (XEXP (base
, 0)));
16666 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
16670 gcc_unreachable ();
16673 real_name
= (*targetm
.strip_name_encoding
) (name
);
16674 if (TARGET_MINIMAL_TOC
)
16675 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
16678 fprintf (file
, "\t.tc %s", real_name
);
16681 fprintf (file
, ".N" HOST_WIDE_INT_PRINT_UNSIGNED
, - offset
);
16683 fprintf (file
, ".P" HOST_WIDE_INT_PRINT_UNSIGNED
, offset
);
16685 fputs ("[TC],", file
);
16688 /* Currently C++ toc references to vtables can be emitted before it
16689 is decided whether the vtable is public or private. If this is
16690 the case, then the linker will eventually complain that there is
16691 a TOC reference to an unknown section. Thus, for vtables only,
16692 we emit the TOC reference to reference the symbol and not the
16694 if (VTABLE_NAME_P (name
))
16696 RS6000_OUTPUT_BASENAME (file
, name
);
16698 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, offset
);
16699 else if (offset
> 0)
16700 fprintf (file
, "+" HOST_WIDE_INT_PRINT_DEC
, offset
);
16703 output_addr_const (file
, x
);
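/* Illustrative sketch (not part of the original file): the padding
   logic above left-justifies a narrow constant within a Pmode-sized
   TOC word.  The hypothetical helper below reproduces the 64-bit-host
   arithmetic for a 16-bit constant in a 32-bit pointer word, e.g.
   toc_pad_example (0x1234, 32, 16) == 0x12340000.  */
#if 0
static unsigned long
toc_pad_example (unsigned long low, int pointer_size, int mode_bits)
{
  low <<= pointer_size - mode_bits;	/* left-justify the value */
  return low & 0xffffffff;		/* keep one 32-bit TOC word */
}
#endif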
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
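/* Example (sketch, for illustration only): for the 3-byte input
   "A\nB", output_ascii emits roughly

	.byte "A"
	.byte 10
	.byte "B"

   printable runs are collected inside one quoted .byte directive,
   non-printable bytes fall back to decimal, and runs are broken after
   512 characters to stay clear of assembler string-length limits.  */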
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   FILENAME.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	after_last_slash = q + 1;
      else if (*q == '.')
	last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
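/* Usage example (sketch): after
     rs6000_gen_section_name (&buf, "src/foo.c", "_bss_");
   buf points to "_foo_bss_" -- the directory prefix is dropped and the
   final period of "foo.c" is replaced by the descriptor string.  */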
/* Emit profile function.  */

void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Non-standard profiling for kernels, which just saves LR then calls
     _mcount without worrying about arg saves.  The idea is to change
     the function prologue as little as possible as it isn't easy to
     account for arg save/restore code added just for _mcount.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT
	  && current_function_uses_pic_offset_table)
	caller_addr_regno = 0;
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  */

void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_V4:
      if (!TARGET_32BIT)
	{
	  warning (0, "no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (NO_PROFILE_COUNTERS)
	{
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	}
      else if (TARGET_SECURE_PLT && flag_pic)
	{
	  asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{cau|addis} %s,%s,",
		       reg_names[12], reg_names[12]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
	}
      else if (flag_pic == 1)
	{
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbcl 20,31,1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  gcc_assert (!TARGET_32BIT);

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (cfun->static_chain_decl != NULL)
	    {
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
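/* For reference (sketch; exact label spellings vary by target): the
   final non-PIC ABI_V4 branch above emits a sequence of the shape

	mflr 0
	lis 12,.LP0@ha
	stw 0,4(1)
	la 0,.LP0@l(12)
	bl _mcount

   building the address of the per-function counter label in r12/r0
   before calling the profiler.  */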
/* The following variable value is the last issued insn.  */

static rtx last_scheduled_insn;

/* The following variable helps to balance issuing of load and
   store instructions */

static int load_store_pendulum;

/* Power4 load update and store update instructions are cracked into a
   load or store and an integer insn which are executed in the same cycle.
   Branches have their own dispatch slot which does not count against the
   GCC issue rate, but it changes the program flow so there are no other
   instructions to issue in this cycle.  */

static int
rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
		       int verbose ATTRIBUTE_UNUSED,
		       rtx insn, int more)
{
  last_scheduled_insn = insn;
  if (GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    {
      cached_can_issue_more = more;
      return cached_can_issue_more;
    }

  if (insn_terminates_group_p (insn, current_group))
    {
      cached_can_issue_more = 0;
      return cached_can_issue_more;
    }

  /* If no reservation, but reach here */
  if (recog_memoized (insn) < 0)
    return more;

  if (rs6000_sched_groups)
    {
      if (is_microcoded_insn (insn))
	cached_can_issue_more = 0;
      else if (is_cracked_insn (insn))
	cached_can_issue_more = more > 2 ? more - 2 : 0;
      else
	cached_can_issue_more = more - 1;

      return cached_can_issue_more;
    }

  if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
    return 0;

  cached_can_issue_more = more - 1;
  return cached_can_issue_more;
}
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */

static int
rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type attr_type;

  if (! recog_memoized (insn))
    return 0;

  switch (REG_NOTE_KIND (link))
    {
    case REG_DEP_TRUE:
      {
	/* Data dependency; DEP_INSN writes a register that INSN reads
	   some cycles later.  */

	/* Separate a load from a narrower, dependent store.  */
	if (rs6000_sched_groups
	    && GET_CODE (PATTERN (insn)) == SET
	    && GET_CODE (PATTERN (dep_insn)) == SET
	    && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
	    && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
	    && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
		> GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
	  return cost + 14;

	attr_type = get_attr_type (insn);

	switch (attr_type)
	  {
	  case TYPE_JMPREG:
	    /* Tell the first scheduling pass about the latency between
	       a mtctr and bctr (and mtlr and br/blr).  The first
	       scheduling pass will not know about this latency since
	       the mtctr instruction, which has the latency associated
	       to it, will be generated by reload.  */
	    return TARGET_POWER ? 5 : 4;
	  case TYPE_BRANCH:
	    /* Leave some extra cycles between a compare and its
	       dependent branch, to inhibit expensive mispredicts.  */
	    if ((rs6000_cpu_attr == CPU_PPC603
		 || rs6000_cpu_attr == CPU_PPC604
		 || rs6000_cpu_attr == CPU_PPC604E
		 || rs6000_cpu_attr == CPU_PPC620
		 || rs6000_cpu_attr == CPU_PPC630
		 || rs6000_cpu_attr == CPU_PPC750
		 || rs6000_cpu_attr == CPU_PPC7400
		 || rs6000_cpu_attr == CPU_PPC7450
		 || rs6000_cpu_attr == CPU_POWER4
		 || rs6000_cpu_attr == CPU_POWER5
		 || rs6000_cpu_attr == CPU_CELL)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))

	      switch (get_attr_type (dep_insn))
		{
		case TYPE_CMP:
		case TYPE_COMPARE:
		case TYPE_DELAYED_COMPARE:
		case TYPE_IMUL_COMPARE:
		case TYPE_LMUL_COMPARE:
		case TYPE_FPCOMPARE:
		case TYPE_CR_LOGICAL:
		case TYPE_DELAYED_CR:
		  return cost + 2;
		default:
		  break;
		}
	    break;

	  case TYPE_STORE:
	  case TYPE_STORE_U:
	  case TYPE_STORE_UX:
	  case TYPE_FPSTORE:
	  case TYPE_FPSTORE_U:
	  case TYPE_FPSTORE_UX:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))
	      {
		if (GET_CODE (PATTERN (insn)) != SET)
		  /* If this happens, we have to extend this to schedule
		     optimally.  Return default for now.  */
		  return cost;

		/* Adjust the cost for the case where the value written
		   by a fixed point operation is used as the address
		   gen value on a store.  */
		switch (get_attr_type (dep_insn))
		  {
		  case TYPE_LOAD:
		  case TYPE_LOAD_U:
		  case TYPE_LOAD_UX:
		  case TYPE_CNTLZ:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 4;
		    break;
		  case TYPE_LOAD_EXT:
		  case TYPE_LOAD_EXT_U:
		  case TYPE_LOAD_EXT_UX:
		  case TYPE_VAR_SHIFT_ROTATE:
		  case TYPE_VAR_DELAYED_COMPARE:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 6;
		    break;
		  case TYPE_INTEGER:
		  case TYPE_COMPARE:
		  case TYPE_FAST_COMPARE:
		  case TYPE_EXTS:
		  case TYPE_SHIFT:
		  case TYPE_INSERT_WORD:
		  case TYPE_INSERT_DWORD:
		  case TYPE_FPLOAD_U:
		  case TYPE_FPLOAD_UX:
		  case TYPE_STORE_U:
		  case TYPE_STORE_UX:
		  case TYPE_FPSTORE_U:
		  case TYPE_FPSTORE_UX:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 3;
		    break;
		  case TYPE_IMUL:
		  case TYPE_IMUL2:
		  case TYPE_IMUL3:
		  case TYPE_LMUL:
		  case TYPE_IMUL_COMPARE:
		  case TYPE_LMUL_COMPARE:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 17;
		    break;
		  case TYPE_IDIV:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 45;
		    break;
		  case TYPE_LDIV:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 57;
		    break;
		  default:
		    break;
		  }
	      }
	    break;

	  case TYPE_LOAD:
	  case TYPE_LOAD_U:
	  case TYPE_LOAD_UX:
	  case TYPE_LOAD_EXT:
	  case TYPE_LOAD_EXT_U:
	  case TYPE_LOAD_EXT_UX:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))
	      {
		/* Adjust the cost for the case where the value written
		   by a fixed point instruction is used within the address
		   gen portion of a subsequent load(u)(x) */
		switch (get_attr_type (dep_insn))
		  {
		  case TYPE_LOAD:
		  case TYPE_LOAD_U:
		  case TYPE_LOAD_UX:
		  case TYPE_CNTLZ:
		    if (set_to_load_agen (dep_insn, insn))
		      return 4;
		    break;
		  case TYPE_LOAD_EXT:
		  case TYPE_LOAD_EXT_U:
		  case TYPE_LOAD_EXT_UX:
		  case TYPE_VAR_SHIFT_ROTATE:
		  case TYPE_VAR_DELAYED_COMPARE:
		    if (set_to_load_agen (dep_insn, insn))
		      return 6;
		    break;
		  case TYPE_INTEGER:
		  case TYPE_COMPARE:
		  case TYPE_FAST_COMPARE:
		  case TYPE_EXTS:
		  case TYPE_SHIFT:
		  case TYPE_INSERT_WORD:
		  case TYPE_INSERT_DWORD:
		  case TYPE_FPLOAD_U:
		  case TYPE_FPLOAD_UX:
		  case TYPE_STORE_U:
		  case TYPE_STORE_UX:
		  case TYPE_FPSTORE_U:
		  case TYPE_FPSTORE_UX:
		    if (set_to_load_agen (dep_insn, insn))
		      return 3;
		    break;
		  case TYPE_IMUL:
		  case TYPE_IMUL2:
		  case TYPE_IMUL3:
		  case TYPE_LMUL:
		  case TYPE_IMUL_COMPARE:
		  case TYPE_LMUL_COMPARE:
		    if (set_to_load_agen (dep_insn, insn))
		      return 17;
		    break;
		  case TYPE_IDIV:
		    if (set_to_load_agen (dep_insn, insn))
		      return 45;
		    break;
		  case TYPE_LDIV:
		    if (set_to_load_agen (dep_insn, insn))
		      return 57;
		    break;
		  default:
		    break;
		  }
	      }
	    break;

	  case TYPE_FPLOAD:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0)
		&& (get_attr_type (dep_insn) == TYPE_MFFGPR))
	      return 2;

	  default:
	    break;
	  }

	/* Fall out to return default cost.  */
      }
      break;

    case REG_DEP_OUTPUT:
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */
      if ((rs6000_cpu == PROCESSOR_POWER6)
	  && recog_memoized (dep_insn)
	  && (INSN_CODE (dep_insn) >= 0))
	{
	  attr_type = get_attr_type (insn);

	  switch (attr_type)
	    {
	    case TYPE_FP:
	      if (get_attr_type (dep_insn) == TYPE_FP)
		return 1;
	      break;
	    default:
	      break;
	    }
	  if (attr_type == TYPE_FPLOAD
	      && get_attr_type (dep_insn) == TYPE_MFFGPR)
	    return 2;
	}
    case REG_DEP_ANTI:
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */
      return 0;

    default:
      gcc_unreachable ();
    }

  return cost;
}
/* The function returns true if INSN is microcoded.
   Return false otherwise.  */

static bool
is_microcoded_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_cpu_attr == CPU_CELL)
    return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_LOAD_EXT_U
	  || type == TYPE_LOAD_EXT_UX
	  || type == TYPE_LOAD_UX
	  || type == TYPE_STORE_UX
	  || type == TYPE_MFCR)
	return true;
    }

  return false;
}
/* The function returns true if INSN is cracked into 2 instructions
   by the processor (and therefore occupies 2 issue slots).  */

static bool
is_cracked_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
	  || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
	  || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
	  || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
	  || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
	  || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
	  || type == TYPE_IDIV || type == TYPE_LDIV
	  || type == TYPE_INSERT_WORD)
	return true;
    }

  return false;
}
/* The function returns true if INSN can be issued only from
   the branch slot.  */

static bool
is_branch_slot_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
	return true;
      return false;
    }

  return false;
}
/* The function returns true if out_insn sets a value that is
   used in the address generation computation of in_insn.  */

static bool
set_to_load_agen (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  /* For performance reasons, only handle the simple case where
     both loads are a single_set.  */
  out_set = single_set (out_insn);
  if (out_set)
    {
      in_set = single_set (in_insn);
      if (in_set)
	return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
    }

  return false;
}
/* The function returns true if the target storage location of
   out_insn is adjacent to the target storage location of in_insn */
/* Return 1 if memory locations are adjacent.  */

static bool
adjacent_mem_locations (rtx insn1, rtx insn2)
{

  rtx a = get_store_dest (PATTERN (insn1));
  rtx b = get_store_dest (PATTERN (insn2));

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
      rtx reg0, reg1;

      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = XEXP (XEXP (a, 0), 0);
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = XEXP (a, 0);

      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = XEXP (XEXP (b, 0), 0);
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = XEXP (b, 0);

      val_diff = val1 - val0;

      return ((REGNO (reg0) == REGNO (reg1))
	      && (val_diff == INTVAL (MEM_SIZE (a))
		  || val_diff == -INTVAL (MEM_SIZE (b))));
    }

  return false;
}
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Increase the priority to execute the
   INSN earlier, reduce the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  if (insn_must_be_first_in_group (insn)
      && reload_completed
      && current_sched_info->sched_max_insns_priority
      && rs6000_sched_restricted_insns_priority)
    {

      /* Prioritize insns that can be dispatched only in the first
	 dispatch slot.  */
      if (rs6000_sched_restricted_insns_priority == 1)
	/* Attach highest priority to insn. This means that in
	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
	   precede 'priority' (critical path) considerations.  */
	return current_sched_info->sched_max_insns_priority;
      else if (rs6000_sched_restricted_insns_priority == 2)
	/* Increase priority of insn by a minimal amount. This means that in
	   haifa-sched.c:ready_sort(), only 'priority' (critical path)
	   considerations precede dispatch-slot restriction considerations.  */
	return (priority + 1);
    }

  if (rs6000_cpu == PROCESSOR_POWER6
      && ((load_store_pendulum == -2 && is_load_insn (insn))
	  || (load_store_pendulum == 2 && is_store_insn (insn))))
    /* Attach highest priority to insn if the scheduler has just issued two
       stores and this instruction is a load, or two loads and this instruction
       is a store. Power6 wants loads and stores scheduled alternately
       when possible.  */
    return current_sched_info->sched_max_insns_priority;

  return priority;
}
/* Return true if the instruction is nonpipelined on the Cell. */
static bool
is_nonpipeline_insn (rtx insn)
{
  enum attr_type type;
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  type = get_attr_type (insn);
  if (type == TYPE_IMUL
      || type == TYPE_IMUL2
      || type == TYPE_IMUL3
      || type == TYPE_LMUL
      || type == TYPE_IDIV
      || type == TYPE_LDIV
      || type == TYPE_SDIV
      || type == TYPE_DDIV
      || type == TYPE_SSQRT
      || type == TYPE_DSQRT
      || type == TYPE_MFCR
      || type == TYPE_MFCRF
      || type == TYPE_MFJMPR)
    return true;
  return false;
}
/* Return how many instructions the machine can issue per cycle.  */

static int
rs6000_issue_rate (void)
{
  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
  if (!reload_completed)
    return 1;

  switch (rs6000_cpu_attr) {
  case CPU_RIOS1:  /* ? */
  case CPU_RS64A:
  case CPU_PPC601: /* ? */
  case CPU_PPC7450:
    return 3;
  case CPU_PPC440:
  case CPU_PPC603:
  case CPU_PPC750:
  case CPU_PPC7400:
  case CPU_PPC8540:
  case CPU_CELL:
    return 2;
  case CPU_RIOS2:
  case CPU_PPC604:
  case CPU_PPC604E:
  case CPU_PPC620:
  case CPU_PPC630:
    return 4;
  case CPU_POWER4:
  case CPU_POWER5:
  case CPU_POWER6:
    return 5;
  default:
    return 1;
  }
}
/* Return how many instructions to look ahead for better insn
   scheduling.  */

static int
rs6000_use_sched_lookahead (void)
{
  if (rs6000_cpu_attr == CPU_PPC8540)
    return 4;
  if (rs6000_cpu_attr == CPU_CELL)
    return (reload_completed ? 8 : 0);
  return 0;
}
/* We are choosing insn from the ready queue.  Return nonzero if INSN
   can be chosen.  */

static int
rs6000_use_sched_lookahead_guard (rtx insn)
{
  if (rs6000_cpu_attr != CPU_CELL)
    return 1;

  if (insn == NULL_RTX || !INSN_P (insn))
    abort ();

  if (!reload_completed
      || is_nonpipeline_insn (insn)
      || is_microcoded_insn (insn))
    return 0;

  return 1;
}
/* Determine if PAT refers to memory.  */

static bool
is_mem_ref (rtx pat)
{
  const char * fmt;
  int i, j;
  bool ret = false;

  if (GET_CODE (pat) == MEM)
    return true;

  /* Recursively process the pattern.  */
  fmt = GET_RTX_FORMAT (GET_CODE (pat));

  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= is_mem_ref (XEXP (pat, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
	  ret |= is_mem_ref (XVECEXP (pat, i, j));
    }

  return ret;
}
/* Determine if PAT is a PATTERN of a load insn.  */

static bool
is_load_insn1 (rtx pat)
{
  if (!pat || pat == NULL_RTX)
    return false;

  if (GET_CODE (pat) == SET)
    return is_mem_ref (SET_SRC (pat));

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (is_load_insn1 (XVECEXP (pat, 0, i)))
	  return true;
    }

  return false;
}

/* Determine if INSN loads from memory.  */

static bool
is_load_insn (rtx insn)
{
  if (!insn || !INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    return false;

  return is_load_insn1 (PATTERN (insn));
}
/* Determine if PAT is a PATTERN of a store insn.  */

static bool
is_store_insn1 (rtx pat)
{
  if (!pat || pat == NULL_RTX)
    return false;

  if (GET_CODE (pat) == SET)
    return is_mem_ref (SET_DEST (pat));

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (is_store_insn1 (XVECEXP (pat, 0, i)))
	  return true;
    }

  return false;
}

/* Determine if INSN stores to memory.  */

static bool
is_store_insn (rtx insn)
{
  if (!insn || !INSN_P (insn))
    return false;

  return is_store_insn1 (PATTERN (insn));
}
/* Return the dest of a store insn.  */

static rtx
get_store_dest (rtx pat)
{
  gcc_assert (is_store_insn1 (pat));

  if (GET_CODE (pat) == SET)
    return SET_DEST (pat);
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx inner_pat = XVECEXP (pat, 0, i);
	  if (GET_CODE (inner_pat) == SET
	      && is_mem_ref (SET_DEST (inner_pat)))
	    return inner_pat;
	}
    }
  /* We shouldn't get here, because we should have either a simple
     store insn or a store with update which are covered above.  */
  gcc_unreachable ();
}
/* Returns whether the dependence between INSN and NEXT is considered
   costly by the given target.  */

static bool
rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
{
  rtx insn;
  rtx next;

  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  insn = DEP_PRO (dep);
  next = DEP_CON (dep);

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && DEP_KIND (dep) == REG_DEP_TRUE)
    /* Prevent load after store in the same group if it is a true
       dependence.  */
    return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
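/* Example (sketch): if rs6000_sched_costly_dep holds the latency
   threshold 3, a dependence with cost 5 at distance 1 has remaining
   latency 4 >= 3 and is judged costly, while one with cost 3 at
   distance 1 (remaining latency 2) is not.  */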
/* Return the next insn after INSN that is found before TAIL is reached,
   skipping any "non-active" insns - insns that will not actually occupy
   an issue slot.  Return NULL_RTX if such an insn is not found.  */

static rtx
get_next_active_insn (rtx insn, rtx tail)
{
  if (insn == NULL_RTX || insn == tail)
    return NULL_RTX;

  while (1)
    {
      insn = NEXT_INSN (insn);
      if (insn == NULL_RTX || insn == tail)
	return NULL_RTX;

      if (CALL_P (insn)
	  || JUMP_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && INSN_CODE (insn) != CODE_FOR_stack_tie))
	break;
    }
  return insn;
}
/* We are about to begin issuing insns for this clock cycle. */

static int
rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
                        rtx *ready ATTRIBUTE_UNUSED,
                        int *pn_ready ATTRIBUTE_UNUSED,
		        int clock_var ATTRIBUTE_UNUSED)
{
  int n_ready = *pn_ready;

  if (sched_verbose)
    fprintf (dump, "// rs6000_sched_reorder :\n");

  /* Reorder the ready list, if the second to last ready insn
     is a nonpipelined insn.  */
  if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
    {
      if (is_nonpipeline_insn (ready[n_ready - 1])
	  && (recog_memoized (ready[n_ready - 2]) > 0))
	/* Simply swap first two insns.  */
	{
	  rtx tmp = ready[n_ready - 1];
	  ready[n_ready - 1] = ready[n_ready - 2];
	  ready[n_ready - 2] = tmp;
	}
    }

  if (rs6000_cpu == PROCESSOR_POWER6)
    load_store_pendulum = 0;

  return rs6000_issue_rate ();
}
/* Like rs6000_sched_reorder, but called after issuing each insn.  */

static int
rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
		         int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
{
  if (sched_verbose)
    fprintf (dump, "// rs6000_sched_reorder2 :\n");

  /* For Power6, we need to handle some special cases to try and keep the
     store queue from overflowing and triggering expensive flushes.

     This code monitors how load and store instructions are being issued
     and skews the ready list one way or the other to increase the likelihood
     that a desired instruction is issued at the proper time.

     A couple of things are done.  First, we maintain a "load_store_pendulum"
     to track the current state of load/store issue.

       - If the pendulum is at zero, then no loads or stores have been
         issued in the current cycle so we do nothing.

       - If the pendulum is 1, then a single load has been issued in this
         cycle and we attempt to locate another load in the ready list to
         issue with it.

       - If the pendulum is -2, then two stores have already been
         issued in this cycle, so we increase the priority of the first load
         in the ready list to increase its likelihood of being chosen first
         in the next cycle.

       - If the pendulum is -1, then a single store has been issued in this
         cycle and we attempt to locate another store in the ready list to
         issue with it, preferring a store to an adjacent memory location to
         facilitate store pairing in the store queue.

       - If the pendulum is 2, then two loads have already been
         issued in this cycle, so we increase the priority of the first store
         in the ready list to increase its likelihood of being chosen first
         in the next cycle.

       - If the pendulum < -2 or > 2, then do nothing.

     Note: This code covers the most common scenarios.  There exist non
           load/store instructions which make use of the LSU and which
           would need to be accounted for to strictly model the behavior
           of the machine.  Those instructions are currently unaccounted
           for to help minimize compile time overhead of this code.  */
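  /* Worked example (sketch, not from the original sources): with the
     ready list {load A, store B, load C} and a load just issued
     (pendulum == 1), the scan below moves load C to the head of the
     list and bumps its priority, so two loads go out back to back;
     the pendulum then reaches 2 and the next pass favors store B.  */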
  if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
    {
      int pos;
      int i;
      rtx tmp;

      if (is_store_insn (last_scheduled_insn))
        /* Issuing a store, swing the load_store_pendulum to the left */
        load_store_pendulum--;
      else if (is_load_insn (last_scheduled_insn))
        /* Issuing a load, swing the load_store_pendulum to the right */
        load_store_pendulum++;
      else
        return cached_can_issue_more;

      /* If the pendulum is balanced, or there is only one instruction on
         the ready list, then all is well, so return. */
      if ((load_store_pendulum == 0) || (*pn_ready <= 1))
        return cached_can_issue_more;

      if (load_store_pendulum == 1)
        {
          /* A load has been issued in this cycle.  Scan the ready list
             for another load to issue with it */

          pos = *pn_ready-1;

          while (pos >= 0)
            {
              if (is_load_insn (ready[pos]))
                {
                  /* Found a load.  Move it to the head of the ready list,
                     and adjust its priority so that it is more likely to
                     stay there */
                  tmp = ready[pos];
                  for (i=pos; i<*pn_ready-1; i++)
                    ready[i] = ready[i + 1];
                  ready[*pn_ready-1] = tmp;
                  if INSN_PRIORITY_KNOWN (tmp)
                    INSN_PRIORITY (tmp)++;
                  break;
                }
              pos--;
            }
        }
      else if (load_store_pendulum == -2)
        {
          /* Two stores have been issued in this cycle.  Increase the
             priority of the first load in the ready list to favor it for
             issuing in the next cycle. */

          pos = *pn_ready-1;

          while (pos >= 0)
            {
              if (is_load_insn (ready[pos])
                  && INSN_PRIORITY_KNOWN (ready[pos]))
                {
                  INSN_PRIORITY (ready[pos])++;

                  /* Adjust the pendulum to account for the fact that a load
                     was found and increased in priority.  This is to prevent
                     increasing the priority of multiple loads */
                  load_store_pendulum--;

                  break;
                }
              pos--;
            }
        }
      else if (load_store_pendulum == -1)
        {
          /* A store has been issued in this cycle.  Scan the ready list for
             another store to issue with it, preferring a store to an adjacent
             memory location */
          int first_store_pos = -1;

          pos = *pn_ready-1;

          while (pos >= 0)
            {
              if (is_store_insn (ready[pos]))
                {
                  /* Maintain the index of the first store found on the
                     list */
                  if (first_store_pos == -1)
                    first_store_pos = pos;

                  if (is_store_insn (last_scheduled_insn)
                      && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
                    {
                      /* Found an adjacent store.  Move it to the head of the
                         ready list, and adjust its priority so that it is
                         more likely to stay there */
                      tmp = ready[pos];
                      for (i=pos; i<*pn_ready-1; i++)
                        ready[i] = ready[i + 1];
                      ready[*pn_ready-1] = tmp;
                      if INSN_PRIORITY_KNOWN (tmp)
                        INSN_PRIORITY (tmp)++;
                      first_store_pos = -1;

                      break;
                    }
                }
              pos--;
            }

          if (first_store_pos >= 0)
            {
              /* An adjacent store wasn't found, but a non-adjacent store was,
                 so move the non-adjacent store to the front of the ready
                 list, and adjust its priority so that it is more likely to
                 stay there. */
              tmp = ready[first_store_pos];
              for (i=first_store_pos; i<*pn_ready-1; i++)
                ready[i] = ready[i + 1];
              ready[*pn_ready-1] = tmp;
              if INSN_PRIORITY_KNOWN (tmp)
                INSN_PRIORITY (tmp)++;
            }
        }
      else if (load_store_pendulum == 2)
        {
          /* Two loads have been issued in this cycle.  Increase the priority
             of the first store in the ready list to favor it for issuing in
             the next cycle. */

          pos = *pn_ready-1;

          while (pos >= 0)
            {
              if (is_store_insn (ready[pos])
                  && INSN_PRIORITY_KNOWN (ready[pos]))
                {
                  INSN_PRIORITY (ready[pos])++;

                  /* Adjust the pendulum to account for the fact that a store
                     was found and increased in priority.  This is to prevent
                     increasing the priority of multiple stores */
                  load_store_pendulum++;

                  break;
                }
              pos--;
            }
        }
    }

  return cached_can_issue_more;
}
/* Return whether the presence of INSN causes a dispatch group termination
   of group WHICH_GROUP.

   If WHICH_GROUP == current_group, this function will return true if INSN
   causes the termination of the current group (i.e, the dispatch group to
   which INSN belongs).  This means that INSN will be the last insn in the
   group it belongs to.

   If WHICH_GROUP == previous_group, this function will return true if INSN
   causes the termination of the previous group (i.e, the dispatch group that
   precedes the group to which INSN belongs).  This means that INSN will be
   the first insn in the group it belongs to.  */

static bool
insn_terminates_group_p (rtx insn, enum group_termination which_group)
{
  bool first, last;

  if (! insn)
    return false;

  first = insn_must_be_first_in_group (insn);
  last = insn_must_be_last_in_group (insn);

  if (first && last)
    return true;

  if (which_group == current_group)
    return last;
  else if (which_group == previous_group)
    return first;

  return false;
}
static bool
insn_must_be_first_in_group (rtx insn)
{
  enum attr_type type;

  if (!insn
      || insn == NULL_RTX
      || GET_CODE (insn) == NOTE
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  switch (rs6000_cpu)
    {
    case PROCESSOR_POWER5:
      if (is_cracked_insn (insn))
        return true;
    case PROCESSOR_POWER4:
      if (is_microcoded_insn (insn))
        return true;

      if (!rs6000_sched_groups)
        return false;

      type = get_attr_type (insn);

      switch (type)
        {
        case TYPE_MFCR:
        case TYPE_MFCRF:
        case TYPE_MTCR:
        case TYPE_DELAYED_CR:
        case TYPE_CR_LOGICAL:
        case TYPE_MTJMPR:
        case TYPE_MFJMPR:
        case TYPE_IDIV:
        case TYPE_LDIV:
        case TYPE_LOAD_L:
        case TYPE_STORE_C:
        case TYPE_ISYNC:
        case TYPE_SYNC:
          return true;
        default:
          break;
        }
      break;
    case PROCESSOR_POWER6:
      type = get_attr_type (insn);

      switch (type)
        {
        case TYPE_INSERT_DWORD:
        case TYPE_EXTS:
        case TYPE_CNTLZ:
        case TYPE_SHIFT:
        case TYPE_VAR_SHIFT_ROTATE:
        case TYPE_TRAP:
        case TYPE_IMUL:
        case TYPE_IMUL2:
        case TYPE_IMUL3:
        case TYPE_LMUL:
        case TYPE_IDIV:
        case TYPE_INSERT_WORD:
        case TYPE_DELAYED_COMPARE:
        case TYPE_IMUL_COMPARE:
        case TYPE_LMUL_COMPARE:
        case TYPE_FPCOMPARE:
        case TYPE_MFCR:
        case TYPE_MTCR:
        case TYPE_MFJMPR:
        case TYPE_MTJMPR:
        case TYPE_ISYNC:
        case TYPE_SYNC:
        case TYPE_LOAD_L:
        case TYPE_STORE_C:
        case TYPE_LOAD_U:
        case TYPE_LOAD_UX:
        case TYPE_LOAD_EXT_UX:
        case TYPE_STORE_U:
        case TYPE_STORE_UX:
        case TYPE_FPLOAD_U:
        case TYPE_FPLOAD_UX:
        case TYPE_FPSTORE_U:
        case TYPE_FPSTORE_UX:
          return true;
        default:
          break;
        }
      break;
    default:
      break;
    }

  return false;
}
static bool
insn_must_be_last_in_group (rtx insn)
{
  enum attr_type type;

  if (!insn
      || insn == NULL_RTX
      || GET_CODE (insn) == NOTE
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  switch (rs6000_cpu) {
  case PROCESSOR_POWER4:
  case PROCESSOR_POWER5:
    if (is_microcoded_insn (insn))
      return true;

    if (is_branch_slot_insn (insn))
      return true;

    break;
  case PROCESSOR_POWER6:
    type = get_attr_type (insn);

    switch (type)
      {
      case TYPE_EXTS:
      case TYPE_CNTLZ:
      case TYPE_SHIFT:
      case TYPE_VAR_SHIFT_ROTATE:
      case TYPE_TRAP:
      case TYPE_IMUL:
      case TYPE_IMUL2:
      case TYPE_IMUL3:
      case TYPE_LMUL:
      case TYPE_IDIV:
      case TYPE_DELAYED_COMPARE:
      case TYPE_IMUL_COMPARE:
      case TYPE_LMUL_COMPARE:
      case TYPE_FPCOMPARE:
      case TYPE_MFCR:
      case TYPE_MTCR:
      case TYPE_MFJMPR:
      case TYPE_MTJMPR:
      case TYPE_ISYNC:
      case TYPE_SYNC:
      case TYPE_LOAD_L:
      case TYPE_STORE_C:
        return true;
      default:
        break;
      }
    break;
  default:
    break;
  }

  return false;
}
/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */

static bool
is_costly_group (rtx *group_insns, rtx next_insn)
{
  int i;
  int issue_rate = rs6000_issue_rate ();

  for (i = 0; i < issue_rate; i++)
    {
      dep_link_t link;
      rtx insn = group_insns[i];

      if (!insn)
	continue;

      FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
	{
	  dep_t dep = DEP_LINK_DEP (link);
	  rtx next = DEP_CON (dep);

	  if (next == next_insn
	      && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
	    return true;
	}
    }

  return false;
}
/* Utility of the function redefine_groups.
   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
   to keep it "far" (in a separate group) from GROUP_INSNS, following
   one of the following schemes, depending on the value of the flag
   -minsert_sched_nops = X:
   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
       in order to force NEXT_INSN into a separate group.
   (2) X < sched_finish_regroup_exact: insert exactly X nops.
   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
   insertion (has a group just ended, how many vacant issue slots remain in the
   last group, and how many dispatch groups were encountered so far).  */
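/* Example (sketch): with issue_rate == 5 and can_issue_more == 3, the
   "exact" scheme inserts 2 nops when NEXT_INSN is not a branch (the
   branch slot need not be filled) but 3 nops when it is, so that the
   last nop opens a fresh group that receives the branch.  */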
static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
		 rtx next_insn, bool *group_end, int can_issue_more,
		 int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
	     *group_count ,can_issue_more);

  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
	can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch.  If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch
	 will be forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
	can_issue_more--;

      while (can_issue_more > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  can_issue_more--;
	}

      *group_end = true;
      return 0;
    }

  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
	 issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
	{
	  can_issue_more = issue_rate - 1;
	  (*group_count)++;
	  end = true;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      while (n_nops > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  if (can_issue_more == issue_rate - 1) /* new group begins */
	    end = false;
	  can_issue_more--;
	  if (can_issue_more == 0)
	    {
	      can_issue_more = issue_rate - 1;
	      (*group_count)++;
	      end = true;
	      for (i = 0; i < issue_rate; i++)
		{
		  group_insns[i] = 0;
		}
	    }
	  n_nops--;
	}

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      /* Is next_insn going to start a new group?  */
      *group_end
	= (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
	(*group_count)--;

      if (sched_verbose > 6)
	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
		 *group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
/* This function tries to synch the dispatch groups that the compiler "sees"
   with the dispatch groups that the processor dispatcher is expected to
   form in practice.  It tries to achieve this synchronization by forcing the
   estimated processor grouping on the compiler (as opposed to the function
   'pad_goups' which tries to force the scheduler's grouping on the processor).

   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
   examines the (estimated) dispatch groups that will be formed by the processor
   dispatcher.  It marks these group boundaries to reflect the estimated
   processor grouping, overriding the grouping that the scheduler had marked.
   Depending on the value of the flag '-minsert-sched-nops' this function can
   force certain insns into separate groups or force a certain distance between
   them by inserting nops, for example, if there exists a "costly dependence"
   between the insns.

   The function estimates the group boundaries that the processor will form as
   follows:  It keeps track of how many vacant issue slots are available after
   each insn.  A subsequent insn will start a new group if one of the following
   4 cases applies:
   - no more vacant issue slots remain in the current dispatch group.
   - only the last issue slot, which is the branch slot, is vacant, but the next
     insn is not a branch.
   - only the last 2 or less issue slots, including the branch slot, are vacant,
     which means that a cracked insn (which occupies two issue slots) can't be
     issued in this group.
   - less than 'issue_rate' slots are vacant, and the next insn always needs to
     start a new group.  */
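/* Example (sketch): with issue_rate == 5, after three insns have been
   placed in the current group can_issue_more is 2; if the next insn is
   a cracked insn (third case above), the group is considered closed,
   since its two internal ops would not both fit ahead of the branch
   slot.  */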
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      /* Is next_insn going to start a new group?  */
      group_end
	= (can_issue_more == 0
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));

      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE (next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
   dispatch group boundaries that the scheduler had marked.  Pad with nops
   any dispatch groups which have vacant issue slots, in order to force the
   scheduler's grouping on the processor dispatcher.  The function
   returns the number of dispatch groups found.  */

static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
      	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      if (!is_branch_slot_insn (next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
/* We're beginning a new block.  Initialize data structures as necessary.  */

static void
rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
		     int sched_verbose ATTRIBUTE_UNUSED,
		     int max_ready ATTRIBUTE_UNUSED)
{
  last_scheduled_insn = NULL_RTX;
  load_store_pendulum = 0;
}
/* The following function is called at the end of scheduling BB.
   After reload, it inserts nops at insn group bundling.  */

static void
rs6000_sched_finish (FILE *dump, int sched_verbose)
{
  int n_groups;

  if (sched_verbose)
    fprintf (dump, "=== Finishing schedule.\n");

  if (reload_completed && rs6000_sched_groups)
    {
      if (rs6000_sched_insert_nops == sched_finish_none)
	return;

      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
	n_groups = pad_groups (dump, sched_verbose,
			       current_sched_info->prev_head,
			       current_sched_info->next_tail);
      else
	n_groups = redefine_groups (dump, sched_verbose,
				    current_sched_info->prev_head,
				    current_sched_info->next_tail);

      if (sched_verbose >= 6)
	{
	  fprintf (dump, "ngroups = %d\n", n_groups);
	  print_rtl (dump, current_sched_info->prev_head);
	  fprintf (dump, "Done finish_sched\n");
	}
    }
}
/* Length in units of the trampoline for entering a nested function.  */

int
rs6000_trampoline_size (void)
{
  int ret = 0;

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_AIX:
      ret = (TARGET_32BIT) ? 12 : 24;
      break;

    case ABI_DARWIN:
    case ABI_V4:
      ret = (TARGET_32BIT) ? 40 : 48;
      break;
    }

  return ret;
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (Pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (Pmode);
	rtx toc_reg = gen_reg_rtx (Pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, Pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, Pmode,
			 ctx_reg, Pmode);
      break;
    }

  return;
}
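/* Layout sketch for the ABI_AIX case above (32-bit, regsize == 4):

	addr+0:  function entry address   (copied from fnaddr+0)
	addr+4:  TOC pointer              (copied from fnaddr+4)
	addr+8:  static chain value       (ctx_reg)

   twelve bytes in total, matching what rs6000_trampoline_size ()
   reports for 32-bit AIX.  */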
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
  SUBTARGET_ATTRIBUTE_TABLE,
#endif
  { NULL,        0, 0, false, false, false, NULL }
};

/* Handle the "altivec" attribute.  The attribute may have
   arguments as follows:

	__attribute__((altivec(vector__)))
	__attribute__((altivec(pixel__)))	(always followed by 'unsigned short')
	__attribute__((altivec(bool__)))	(always followed by 'unsigned')

  and may appear more than once (e.g., 'vector bool char') in a
  given declaration.  */
static tree
rs6000_handle_altivec_attribute (tree *node,
				 tree name ATTRIBUTE_UNUSED,
				 tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  /* Check for invalid AltiVec type qualifiers.  */
  if (type == long_unsigned_type_node || type == long_integer_type_node)
    {
      if (TARGET_64BIT)
	error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
      else if (rs6000_warn_altivec_long)
	warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
    }
  else if (type == long_long_unsigned_type_node
           || type == long_long_integer_type_node)
    error ("use of %<long long%> in AltiVec types is invalid");
  else if (type == double_type_node)
    error ("use of %<double%> in AltiVec types is invalid");
  else if (type == long_double_type_node)
    error ("use of %<long double%> in AltiVec types is invalid");
  else if (type == boolean_type_node)
    error ("use of boolean types in AltiVec types is invalid");
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    error ("use of %<complex%> in AltiVec types is invalid");
  else if (DECIMAL_FLOAT_MODE_P (mode))
    error ("use of decimal floating point types in AltiVec types is invalid");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TYPE_UNSIGNED (type);
      switch (mode)
	{
	case SImode:
	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	  break;
	case HImode:
	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	  break;
	case QImode:
	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	  break;
	case SFmode: result = V4SF_type_node; break;
	  /* If the user says 'vector int bool', we may be handed the 'bool'
	     attribute _before_ the 'vector' attribute, and so select the
	     proper type in the 'b' case below.  */
	case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
	  result = type;
	default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	case QImode: case V16QImode: result = bool_V16QI_type_node;
	default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	case V8HImode: result = pixel_V8HI_type_node;
	default: break;
	}
    default: break;
    }

  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
/* AltiVec defines four built-in scalar types that serve as vector
   elements; we must teach the compiler how to mangle them.  */

static const char *
rs6000_mangle_fundamental_type (tree type)
{
  if (type == bool_char_type_node) return "U6__boolc";
  if (type == bool_short_type_node) return "U6__bools";
  if (type == pixel_type_node) return "u7__pixel";
  if (type == bool_int_type_node) return "U6__booli";

  /* Mangle IBM extended float long double as `g' (__float128) on
     powerpc*-linux where long-double-64 previously was the default.  */
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_ELF
      && TARGET_LONG_DOUBLE_128
      && !TARGET_IEEEQUAD)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
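/* Example (sketch): given the strings above, a C++ function
   void f (__pixel) mangles as _Z1fu7__pixel, and a 'vector bool int'
   element type contributes "U6__booli" to the enclosing vector's
   mangled name.  */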
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
rs6000_handle_longcall_attribute (tree *node, tree name,
				  tree args ATTRIBUTE_UNUSED,
				  int flags ATTRIBUTE_UNUSED,
				  bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Set longcall attributes on all functions declared when
   rs6000_default_long_calls is true.  */
static void
rs6000_set_default_type_attributes (tree type)
{
  if (rs6000_default_long_calls
      && (TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE))
    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
					NULL_TREE,
					TYPE_ATTRIBUTES (type));

#if TARGET_MACHO
  darwin_set_default_type_attributes (type);
#endif
}
/* Return a reference suitable for calling a function with the
   longcall attribute.  */

rtx
rs6000_longcall_ref (rtx call_ref)
{
  const char *call_name;
  tree node;

  if (GET_CODE (call_ref) != SYMBOL_REF)
    return call_ref;

  /* System V adds '.' to the internal name, so skip them.  */
  call_name = XSTR (call_ref, 0);
  if (*call_name == '.')
    {
      while (*call_name == '.')
	call_name++;

      node = get_identifier (call_name);
      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
    }

  return force_reg (Pmode, call_ref);
}
#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
#define TARGET_USE_MS_BITFIELD_LAYOUT 0
#endif

/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
   struct attribute_spec.handler.  */
static tree
rs6000_handle_struct_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree *type = NULL;
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	type = &TREE_TYPE (*node);
    }
  else
    type = node;

  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
		 || TREE_CODE (*type) == UNION_TYPE)))
    {
      warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  else if ((is_attribute_p ("ms_struct", name)
	    && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
	   || ((is_attribute_p ("gcc_struct", name)
		&& lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
    {
      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
static bool
rs6000_ms_bitfield_layout_p (tree record_type)
{
  return (TARGET_USE_MS_BITFIELD_LAYOUT &&
	  !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
    || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
}
#ifdef USING_ELFOS_H

/* A get_unnamed_section callback, used for switching to toc_section.  */

static void
rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_AIX
      && TARGET_MINIMAL_TOC
      && !TARGET_RELOCATABLE)
    {
      if (!toc_initialized)
	{
	  toc_initialized = 1;
	  fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
	  (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
	  fprintf (asm_out_file, "\t.tc ");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, "\n");

	  fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, " = .+32768\n");
	}
      else
	fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
    }
  else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
    fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
  else
    {
      fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      if (!toc_initialized)
	{
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, " = .+32768\n");
	  toc_initialized = 1;
	}
    }
}
/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
rs6000_elf_asm_init_sections (void)
{
  toc_section
    = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);

  sdata2_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   SDATA2_SECTION_ASM_OP);
}

/* Implement TARGET_SELECT_RTX_SECTION.  */

static section *
rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
			       unsigned HOST_WIDE_INT align)
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    return toc_section;
  else
    return default_elf_select_rtx_section (mode, x, align);
}
/* For a SYMBOL_REF, set generic flags and then perform some
   target-specific processing.

   When the AIX ABI is requested on a non-AIX system, replace the
   function name with the real name (with a leading .) rather than the
   function descriptor name.  This saves a lot of overriding code to
   read the prefixes.  */

static void
rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      char *str = alloca (len + 2);
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}
static bool
rs6000_elf_in_small_data_p (tree decl)
{
  if (rs6000_sdata == SDATA_NONE)
    return false;

  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (decl) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0
	  || strcmp (section, ".sbss2") == 0
	  || strcmp (section, ".PPC.EMB.sdata0") == 0
	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      if (size > 0
	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
	  /* If it's not public, and we're not going to reference it there,
	     there's no need to put it in the small data section.  */
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}

#endif /* USING_ELFOS_H */
/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  */

static bool
rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
{
  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
}
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.

   r0 is special and we must not select it as an address
   register by this routine since our caller will try to
   increment the returned register via an "la" instruction.  */

rtx
find_addr_reg (rtx addr)
{
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
          && REGNO (XEXP (addr, 0)) != 0)
        addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
               && REGNO (XEXP (addr, 1)) != 0)
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
        addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
        addr = XEXP (addr, 0);
      else
        gcc_unreachable ();
    }
  gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
  return addr;
}
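
/* Background note (added commentary): "la rD,d(rA)" is the extended
   mnemonic for "addi rD,rA,d", and in that encoding the PowerPC ISA
   treats rA = 0 as the literal value zero rather than register r0.
   An increment like "la r0,4(r0)" therefore would not add 4 to r0,
   which is why r0 is never returned by the routine above.  */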
void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
#if TARGET_MACHO

static tree branch_island_list = 0;

/* Remember to generate a branch island for far calls to the given
   function.  */

static void
add_compiler_branch_island (tree label_name, tree function_name,
                            int line_number)
{
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}

#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
  TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
/* Generate far-jump branch islands for everything on the
   branch_island_list.  Invoked immediately after the last instruction
   of the epilogue has been emitted; the branch-islands must be
   appended to, and contiguous with, the function body.  Mach-O stubs
   are generated in machopic_output_stub().  */

static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
        IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name =
        IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
        strcpy (name_buf, name+1);
      else
        {
          name_buf[0] = '_';
          strcpy (name_buf+1, name);
        }
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
        dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
        {
          strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic\n");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic:\n\tmflr r11\n");

          strcat (tmp_buf, "\taddis r11,r11,ha16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, " - ");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic)\n");

          strcat (tmp_buf, "\tmtlr r0\n");

          strcat (tmp_buf, "\taddi r12,r11,lo16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, " - ");
          strcat (tmp_buf, label);
          strcat (tmp_buf, "_pic)\n");

          strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
        }
      else
        {
          strcat (tmp_buf, ":\nlis r12,hi16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
          strcat (tmp_buf, name_buf);
          strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
        }
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
        dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  branch_island_list = 0;
}
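
/* For reference, the PIC-flavored island assembled by the strcat chain
   above has this shape (label and symbol names are illustrative only):

        L42$island:
                mflr r0
                bcl 20,31,L42$island_pic
        L42$island_pic:
                mflr r11
                addis r11,r11,ha16(_foo - L42$island_pic)
                mtlr r0
                addi r12,r11,lo16(_foo - L42$island_pic)
                mtctr r12
                bctr

   i.e. it materializes the callee's address pc-relatively, restores LR
   so the island is transparent to the original call, and makes the far
   jump through CTR.  The non-PIC form simply builds the absolute
   address with lis/ori.  */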
/* NO_PREVIOUS_DEF checks in the link list whether the function name is
   already there or not.  */

static bool
no_previous_def (tree function_name)
{
  tree branch_island;
  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
      return 0;
  return 1;
}
/* GET_PREV_LABEL gets the label name from the previous definition of
   the function.  */

static tree
get_prev_label (tree function_name)
{
  tree branch_island;
  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
      return BRANCH_ISLAND_LABEL_NAME (branch_island);
  return 0;
}
#ifndef DARWIN_LINKER_GENERATES_ISLANDS
#define DARWIN_LINKER_GENERATES_ISLANDS 0
#endif

/* KEXTs still need branch islands.  */
#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
                                 || flag_mkernel || flag_apple_kext)
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  */

char *
output_call (rtx insn, rtx *operands, int dest_operand_number,
             int cookie_operand_number)
{
  static char buf[256];
  if (DARWIN_GENERATE_ISLANDS
      && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
        {
          rtx label_rtx = gen_label_rtx ();
          char *label_buf, temp_buf[256];
          ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
                                       CODE_LABEL_NUMBER (label_rtx));
          label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
          labelname = get_identifier (label_buf);
          add_compiler_branch_island (labelname, funname, insn_line (insn));
        }
      else
        labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
         instruction will reach 'foo', otherwise link as 'bl L42'".
         "L42" should be a 'branch island', that will do a far jump to
         'foo'.  Branch islands are generated in
         macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
               dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
/* Generate PIC and indirect symbol stubs.  */

void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  length = strlen (symb);

  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  if (flag_pic == 2)
    switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
  else
    switch_to_section (darwin_sections[machopic_symbol_stub1_section]);

  if (flag_pic == 2)
    {
      fprintf (file, "\t.align 5\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      label++;
      local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
               (TARGET_64BIT ? "ldu" : "lwzu"),
               lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      fprintf (file, "\t.align 4\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
               (TARGET_64BIT ? "ldu" : "lwzu"),
               lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "%sdyld_stub_binding_helper\n",
           (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
}
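
/* For reference, the non-PIC (flag_pic != 2) stub printed above looks
   roughly like this on a 32-bit target (the _bar names are illustrative):

        _bar$stub:
                .indirect_symbol _bar
                lis r11,ha16(_bar$lazy_ptr)
                lwzu r12,lo16(_bar$lazy_ptr)(r11)
                mtctr r12
                bctr

   followed by the lazy pointer entry, which initially holds the address
   of dyld_stub_binding_helper and is rewritten by the dynamic linker to
   the real target on first use.  */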
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
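
/* SMALL_INT tests whether a constant fits the signed 16-bit displacement
   of a D-form address; the bias trick folds both bounds into a single
   unsigned compare.  Worked examples (added commentary): for X = 0x7fff,
   0x7fff + 0x8000 = 0xffff < 0x10000, accepted; for X = 0x8000 the sum
   is exactly 0x10000, rejected; for X = -0x8000 the sum wraps to 0,
   accepted.  */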
rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
                                        rtx reg)
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      rtx reg_temp;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      /* Use a different reg for the intermediate value, as
         it will be marked UNCHANGING.  */
      reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
      base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
                                                     Pmode, reg_temp);
      offset =
        rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
                                                Pmode, reg_temp);

      if (GET_CODE (offset) == CONST_INT)
        {
          if (SMALL_INT (offset))
            return plus_constant (base, INTVAL (offset));
          else if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            {
              rtx mem = force_const_mem (Pmode, orig);
              return machopic_legitimize_pic_address (mem, Pmode, reg);
            }
        }
      return gen_rtx_PLUS (Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
/* Output a .machine directive for the Darwin assembler, and call
   the generic start_file routine.  */

static void
rs6000_darwin_file_start (void)
{
  static const struct
  {
    const char *arg;
    const char *name;
    int if_set;
  } mapping[] = {
    { "ppc64", "ppc64", MASK_64BIT },
    { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
    { "power4", "ppc970", 0 },
    { "G5", "ppc970", 0 },
    { "7450", "ppc7450", 0 },
    { "7400", "ppc7400", MASK_ALTIVEC },
    { "G4", "ppc7400", 0 },
    { "750", "ppc750", 0 },
    { "740", "ppc750", 0 },
    { "G3", "ppc750", 0 },
    { "604e", "ppc604e", 0 },
    { "604", "ppc604", 0 },
    { "603e", "ppc603", 0 },
    { "603", "ppc603", 0 },
    { "601", "ppc601", 0 },
    { NULL, "ppc", 0 } };
  const char *cpu_id = "";
  size_t i;

  rs6000_file_start ();
  darwin_file_start ();

  /* Determine the argument to -mcpu=.  Default to G3 if not specified.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    if (rs6000_select[i].set_arch_p && rs6000_select[i].string
        && rs6000_select[i].string[0] != '\0')
      cpu_id = rs6000_select[i].string;

  /* Look through the mapping array.  Pick the first name that either
     matches the argument, has a bit set in IF_SET that is also set
     in the target flags, or has a NULL name.  */

  i = 0;
  while (mapping[i].arg != NULL
         && strcmp (mapping[i].arg, cpu_id) != 0
         && (mapping[i].if_set & target_flags) == 0)
    i++;

  fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
}

#endif /* TARGET_MACHO */
#if TARGET_ELF
static int
rs6000_elf_reloc_rw_mask (void)
{
  if (flag_pic)
    return 3;
  else if (DEFAULT_ABI == ABI_AIX)
    return 2;
  else
    return 0;
}
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   This differs from default_named_section_asm_out_constructor in
   that we have special handling for -mrelocatable.  */

static void
rs6000_elf_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
static void
rs6000_elf_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
static void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  if (TARGET_64BIT)
    {
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      rs6000_output_function_entry (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
      if (DOT_SYMBOLS)
        {
          fputs ("\t.size\t", file);
          assemble_name (file, name);
          fputs (",24\n\t.type\t.", file);
          assemble_name (file, name);
          fputs (",@function\n", file);
          if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
            {
              fputs ("\t.globl\t.", file);
              assemble_name (file, name);
              putc ('\n', file);
            }
        }
      else
        ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      rs6000_output_function_entry (file, name);
      fputs (":\n", file);
      return;
    }

  if (TARGET_RELOCATABLE
      && !TARGET_SECURE_PLT
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
        desc_name++;

      if (TREE_PUBLIC (decl))
        fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      if (DEFAULT_ABI == ABI_AIX)
        fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
static void
rs6000_elf_end_indicate_exec_stack (void)
{
  if (TARGET_32BIT)
    file_end_indicate_exec_stack ();
}
#endif /* TARGET_ELF */

#if TARGET_XCOFF
static void
rs6000_xcoff_asm_output_anchor (rtx symbol)
{
  char buffer[100];

  sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
           SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}
static void
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
/* A get_unnamed_decl callback, used for read-only sections.  PTR
   points to the section string variable.  */

static void
rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
{
  fprintf (asm_out_file, "\t.csect %s[RO],3\n",
           *(const char *const *) directive);
}

/* Likewise for read-write sections.  */

static void
rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
{
  fprintf (asm_out_file, "\t.csect %s[RW],3\n",
           *(const char *const *) directive);
}
/* A get_unnamed_section callback, used for switching to toc_section.  */

static void
rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (TARGET_MINIMAL_TOC)
    {
      /* toc_section is always selected at least once from
         rs6000_xcoff_file_start, so this is guaranteed to
         always be defined once and only once in each file.  */
      if (!toc_initialized)
        {
          fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
          fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
          toc_initialized = 1;
        }
      fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
               (TARGET_32BIT ? "" : ",3"));
    }
  else
    fputs ("\t.toc\n", asm_out_file);
}
/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
rs6000_xcoff_asm_init_sections (void)
{
  read_only_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
                           &xcoff_read_only_section_name);

  private_data_section
    = get_unnamed_section (SECTION_WRITE,
                           rs6000_xcoff_output_readwrite_section_asm_op,
                           &xcoff_private_data_section_name);

  read_only_private_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
                           &xcoff_private_data_section_name);

  toc_section
    = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);

  readonly_data_section = read_only_data_section;
  exception_section = data_section;
}
static int
rs6000_xcoff_reloc_rw_mask (void)
{
  return 3;
}
static void
rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
                                tree decl ATTRIBUTE_UNUSED)
{
  int smclass;
  static const char * const suffix[3] = { "PR", "RO", "RW" };

  if (flags & SECTION_CODE)
    smclass = 0;
  else if (flags & SECTION_WRITE)
    smclass = 2;
  else
    smclass = 1;

  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
           (flags & SECTION_CODE) ? "." : "",
           name, suffix[smclass], flags & SECTION_ENTSIZE);
}
static section *
rs6000_xcoff_select_section (tree decl, int reloc,
                             unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (decl_readonly_section (decl, reloc))
    {
      if (TREE_PUBLIC (decl))
        return read_only_data_section;
      else
        return read_only_private_data_section;
    }
  else
    {
      if (TREE_PUBLIC (decl))
        return data_section;
      else
        return private_data_section;
    }
}
static void
rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
          && initializer_zerop (DECL_INITIAL (decl))))
    return;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
/* Select section for constant in constant pool.

   On RS/6000, all constants are in the private read-only data area.
   However, if this is being placed in the TOC it must be output as a
   toc entry.  */

static section *
rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
                                 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    return toc_section;
  else
    return read_only_private_data_section;
}
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
/* Section attributes.  AIX is always PIC.  */

static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
                 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
                 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
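
/* A worked instance of the encoding above (added commentary): a
   16-byte-aligned csect stores exact_log2 (16) = 4 in the
   SECTION_ENTSIZE bits of the flags word, and
   rs6000_xcoff_asm_named_section prints that value back out as the
   trailing alignment operand of the .csect directive.  */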
/* Output at beginning of assembler file.

   Initialize the section names for the RS/6000 at this point.

   Specify filename, including full path, to assembler.

   We want to go into the TOC section so at least one .toc will be emitted.
   Also, in order to output proper .bs/.es pairs, we need at least one static
   [RW] section emitted.

   Finally, declare mcount when profiling to make the assembler happy.  */

static void
rs6000_xcoff_file_start (void)
{
  rs6000_gen_section_name (&xcoff_bss_section_name,
                           main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
                           main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
                           main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  if (write_symbols != NO_DEBUG)
    switch_to_section (private_data_section);
  switch_to_section (text_section);
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}

/* Output at end of assembler file.
   On the RS/6000, referencing data should automatically pull in text.  */

static void
rs6000_xcoff_file_end (void)
{
  switch_to_section (text_section);
  fputs ("_section_.text:\n", asm_out_file);
  switch_to_section (data_section);
  fputs (TARGET_32BIT
         ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
         asm_out_file);
}
#endif /* TARGET_XCOFF */
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.  */
    case CONST_INT:
      if (((outer_code == SET
            || outer_code == PLUS
            || outer_code == MINUS)
           && (satisfies_constraint_I (x)
               || satisfies_constraint_L (x)))
          || (outer_code == AND
              && (satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))
                  || mask_operand (x, mode)
                  || (mode == DImode
                      && mask64_operand (x, DImode))))
          || ((outer_code == IOR || outer_code == XOR)
              && (satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))))
          || outer_code == ASHIFT
          || outer_code == ASHIFTRT
          || outer_code == LSHIFTRT
          || outer_code == ROTATE
          || outer_code == ROTATERT
          || outer_code == ZERO_EXTRACT
          || (outer_code == MULT
              && satisfies_constraint_I (x))
          || ((outer_code == DIV || outer_code == UDIV
               || outer_code == MOD || outer_code == UMOD)
              && exact_log2 (INTVAL (x)) >= 0)
          || (outer_code == COMPARE
              && (satisfies_constraint_I (x)
                  || satisfies_constraint_K (x)))
          || (outer_code == EQ
              && (satisfies_constraint_I (x)
                  || satisfies_constraint_K (x)
                  || (mode == SImode
                      ? satisfies_constraint_L (x)
                      : satisfies_constraint_J (x))))
          || (outer_code == GTU
              && satisfies_constraint_I (x))
          || (outer_code == LTU
              && satisfies_constraint_P (x)))
        {
          *total = 0;
          return true;
        }
      else if ((outer_code == PLUS
                && reg_or_add_cint_operand (x, VOIDmode))
               || (outer_code == MINUS
                   && reg_or_sub_cint_operand (x, VOIDmode))
               || ((outer_code == SET
                    || outer_code == IOR
                    || outer_code == XOR)
                   && (INTVAL (x)
                       & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (mode == DImode && code == CONST_DOUBLE)
        {
          if ((outer_code == IOR || outer_code == XOR)
              && CONST_DOUBLE_HIGH (x) == 0
              && (CONST_DOUBLE_LOW (x)
                  & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
            {
              *total = 0;
              return true;
            }
          else if ((outer_code == AND && and64_2_operand (x, DImode))
                   || ((outer_code == SET
                        || outer_code == IOR
                        || outer_code == XOR)
                       && CONST_DOUBLE_HIGH (x) == 0))
            {
              *total = COSTS_N_INSNS (1);
              return true;
            }
        }
      /* FALLTHRU */

    case CONST:
    case HIGH:
    case SYMBOL_REF:
    case MEM:
      /* When optimizing for size, MEM should be slightly more expensive
         than generating address, e.g., (plus (reg) (const)).
         L1 cache latency is about two instructions.  */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = rs6000_cost->dmul - rs6000_cost->fp;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
        {
          if (GET_CODE (XEXP (x, 0)) == MULT
              || GET_CODE (XEXP (x, 1)) == MULT)
            {
              /* FNMA accounted in outer NEG.  */
              if (outer_code == NEG)
                *total = rs6000_cost->dmul - rs6000_cost->fp;
              else
                *total = rs6000_cost->dmul;
            }
          else
            *total = rs6000_cost->fp;
        }
      else if (mode == SFmode)
        {
          /* FNMA accounted in outer NEG.  */
          if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
            *total = 0;
          else
            *total = rs6000_cost->fp;
        }
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && satisfies_constraint_I (XEXP (x, 1)))
        {
          if (INTVAL (XEXP (x, 1)) >= -256
              && INTVAL (XEXP (x, 1)) <= 255)
            *total = rs6000_cost->mulsi_const9;
          else
            *total = rs6000_cost->mulsi_const;
        }
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
               && (outer_code == PLUS || outer_code == MINUS))
        *total = 0;
      else if (mode == DFmode)
        *total = rs6000_cost->dmul;
      else if (mode == SFmode)
        *total = rs6000_cost->fp;
      else if (mode == DImode)
        *total = rs6000_cost->muldi;
      else
        *total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
        {
          *total = mode == DFmode ? rs6000_cost->ddiv
                                  : rs6000_cost->sdiv;
          return false;
        }
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
        {
          if (code == DIV || code == MOD)
            /* Shift, addze */
            *total = COSTS_N_INSNS (2);
          else
            /* Shift */
            *total = COSTS_N_INSNS (1);
        }
      else
        {
          if (GET_MODE (XEXP (x, 1)) == DImode)
            *total = rs6000_cost->divdi;
          else
            *total = rs6000_cost->divsi;
        }
      /* Add in shift and subtract for MOD.  */
      if (code == MOD || code == UMOD)
        *total += COSTS_N_INSNS (2);
      return false;

    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case NOT:
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
        {
          *total = 0;
          return false;
        }
      /* FALLTHRU */

    case AND:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
          && GET_CODE (XEXP (x, 0)) == MULT)
        {
          if (mode == DImode)
            *total = rs6000_cost->muldi;
          else
            *total = rs6000_cost->mulsi;
          return true;
        }
      else if (outer_code == AND)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_CODE (XEXP (x, 0)) == MEM)
        *total = 0;
      else
        *total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
        {
          *total = COSTS_N_INSNS (1);
          return false;
        }
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
        *total = 0;
      else
        *total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
        {
        case UNSPEC_FRSP:
          *total = rs6000_cost->fp;
          return true;

        default:
          break;
        }
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      else if (FLOAT_MODE_P (mode)
               && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          *total = rs6000_cost->fp;
          return false;
        }
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
         NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
          && (outer_code == NEG || outer_code == PLUS))
        {
          *total = COSTS_N_INSNS (1);
          return true;
        }
      if (outer_code == SET)
        {
          if (XEXP (x, 1) == const0_rtx)
            {
              *total = COSTS_N_INSNS (2);
              return true;
            }
          else if (mode == Pmode)
            {
              *total = COSTS_N_INSNS (3);
              return false;
            }
        }
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
        {
          *total = COSTS_N_INSNS (2);
          return true;
        }
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
        {
          *total = 0;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
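
/* Note on scale (added commentary about the middle end's convention
   rather than anything defined in this file): COSTS_N_INSNS (N) is N
   expressed in units of one simple instruction, so COSTS_N_INSNS (1)
   above means "one instruction", and the rs6000_cost entries such as
   mulsi or ddiv are tabulated in the same units per processor.  */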
/* A C expression returning the cost of moving data from a register of class
   CLASS1 to one of CLASS2.  */

int
rs6000_register_move_cost (enum machine_mode mode,
                           enum reg_class from, enum reg_class to)
{
  /* Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
        from = to;

      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
        return (rs6000_memory_move_cost (mode, from, 0)
                + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
         shift.  */
      else if (from == CR_REGS)
        return 4;

      else
        /* A move will cost one instruction per GPR moved.  */
        return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return (mode == TFmode || mode == TDmode) ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
            + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}
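
/* A worked instance (added commentary): an SImode copy between two GPRs
   needs hard_regno_nregs[0][SImode] == 1 register, so the formula above
   yields 2, i.e. one instruction in this function's convention, while a
   TFmode copy between two FPRs costs 4 because two registers move.  */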
/* A C expression returning the cost of moving data of MODE from a register
   to or from memory.  */

int
rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
                         int in ATTRIBUTE_UNUSED)
{
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * hard_regno_nregs[0][mode];
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * hard_regno_nregs[32][mode];
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
  else
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}
/* Newton-Raphson approximation of single-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
                                          UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
                          gen_rtx_MINUS (SFmode, one,
                                         gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (SFmode, n,
                                         gen_rtx_MULT (SFmode, d, u0))));
  /* res = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
                          gen_rtx_PLUS (SFmode,
                                        gen_rtx_MULT (SFmode, v0, y1), u0)));
}
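
/* Sketch of the math behind the sequence above (added commentary):
   starting from the hardware reciprocal estimate x0 ~= 1/d, let
   e0 = 1 - d*x0 be the relative error.  Then

        1/d = x0 / (1 - e0) = x0 * (1 + e0 + e0^2 + ...)

   and y1 = x0 * (1 + e1) with e1 = e0 + e0^2 keeps the series through
   the quadratic term, so y1's error is of order e0^3.  The tail forms
   u0 = n*y1 and the residual v0 = n - d*u0, and res = u0 + v0*y1 is one
   further refinement applied to the quotient itself, which also lets
   the final rounding happen in the last multiply-add.  */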
/* Newton-Raphson approximation of double-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;

  x0 = gen_reg_rtx (DFmode);
  e0 = gen_reg_rtx (DFmode);
  e1 = gen_reg_rtx (DFmode);
  e2 = gen_reg_rtx (DFmode);
  y1 = gen_reg_rtx (DFmode);
  y2 = gen_reg_rtx (DFmode);
  y3 = gen_reg_rtx (DFmode);
  u0 = gen_reg_rtx (DFmode);
  v0 = gen_reg_rtx (DFmode);
  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
                          gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
                                          UNSPEC_FRE)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
                          gen_rtx_MINUS (DFmode, one,
                                         gen_rtx_MULT (DFmode, d, x0))));
  /* y1 = x0 + e0 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e0, x0), x0)));
  /* e1 = e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
                          gen_rtx_MULT (DFmode, e0, e0)));
  /* y2 = y1 + e1 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, y2,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e1, y1), y1)));
  /* e2 = e1 * e1 */
  emit_insn (gen_rtx_SET (VOIDmode, e2,
                          gen_rtx_MULT (DFmode, e1, e1)));
  /* y3 = y2 + e2 * y2 */
  emit_insn (gen_rtx_SET (VOIDmode, y3,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, e2, y2), y2)));
  /* u0 = n * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
                          gen_rtx_MULT (DFmode, n, y3)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
                          gen_rtx_MINUS (DFmode, n,
                                         gen_rtx_MULT (DFmode, d, u0))));
  /* res = u0 + v0 * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
                          gen_rtx_PLUS (DFmode,
                                        gen_rtx_MULT (DFmode, v0, y3), u0)));
}
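
/* Commentary on the iteration count (this leans on an assumption about
   the accuracy of the hardware estimate, which the ISA only guarantees
   loosely): if the reciprocal estimate is good to roughly 8 bits, each
   first-order refinement above squares the error, 8 -> 16 -> 32 -> 64
   bits, which covers DFmode's 53-bit significand; the SFmode variant
   needs only its single second-order step to pass 24 bits.  */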
/* Emit popcount intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_popcount (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp1, tmp2;

  tmp1 = gen_reg_rtx (mode);

  if (mode == SImode)
    {
      emit_insn (gen_popcntbsi2 (tmp1, src));
      tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
                          NULL_RTX, 0);
      tmp2 = force_reg (SImode, tmp2);
      emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
    }
  else
    {
      emit_insn (gen_popcntbdi2 (tmp1, src));
      tmp2 = expand_mult (DImode, tmp1,
                          GEN_INT ((HOST_WIDE_INT)
                                   0x01010101 << 32 | 0x01010101),
                          NULL_RTX, 0);
      tmp2 = force_reg (DImode, tmp2);
      emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
    }
}
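
/* How the multiply trick above works (worked example, added commentary):
   popcntb leaves the population count of each byte of SRC in the
   corresponding byte of tmp1.  Multiplying by 0x01010101 sums every byte
   count into the most significant byte, which the final shift extracts.
   E.g. for src = 0xF0F00001, tmp1 = 0x04040001 and the top byte of
   tmp1 * 0x01010101 is 4+4+0+1 = 9, so dst = 9.  The byte sums can never
   carry: at most 8 bytes of value at most 8 each sum to 64 < 256.  */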
/* Emit parity intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_parity (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp;

  tmp = gen_reg_rtx (mode);
  if (mode == SImode)
    {
      /* Is mult+shift >= shift+xor+shift+xor?  */
      if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
        {
          rtx tmp1, tmp2, tmp3, tmp4;

          tmp1 = gen_reg_rtx (SImode);
          emit_insn (gen_popcntbsi2 (tmp1, src));

          tmp2 = gen_reg_rtx (SImode);
          emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
          tmp3 = gen_reg_rtx (SImode);
          emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));

          tmp4 = gen_reg_rtx (SImode);
          emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
          emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
        }
      else
        rs6000_emit_popcount (tmp, src);
      emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
    }
  else
    {
      /* Is mult+shift >= shift+xor+shift+xor+shift+xor?  */
      if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
        {
          rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;

          tmp1 = gen_reg_rtx (DImode);
          emit_insn (gen_popcntbdi2 (tmp1, src));

          tmp2 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
          tmp3 = gen_reg_rtx (DImode);
          emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));

          tmp4 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
          tmp5 = gen_reg_rtx (DImode);
          emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));

          tmp6 = gen_reg_rtx (DImode);
          emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
          emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
        }
      else
        rs6000_emit_popcount (tmp, src);
      emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
    }
}
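
/* The xor-folding alternative above relies on parity being additive
   modulo 2 (added commentary): after popcntb each byte of tmp1 holds
   that byte's population count, and bit 0 of each count is that byte's
   parity.  Xoring the high half onto the low half (>>16 then >>8 for
   SImode; >>32, >>16, >>8 for DImode) accumulates all byte parities
   into bit 0 of the low byte, which the final AND with 1 extracts.  */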
/* Return an RTX representing where to find the function value of a
   function returning MODE.  */

static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
        return gen_rtx_REG (mode, regno);
    }

  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
                          const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
                          GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}
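
/* A concrete instance (added commentary): an SCmode return with hard
   floats uses inner = SFmode and inner_bytes = 4, so the PARALLEL built
   above places the real part in FP_ARG_RETURN at offset 0 and the
   imaginary part in FP_ARG_RETURN + 1 at offset 4.  */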
/* Define how to find the value returned by a function.
   VALTYPE is the data type of the value (as a tree).
   If the precise function being called is known, FUNC is its FUNCTION_DECL;
   otherwise, FUNC is 0.

   On the SPE, both FPs and vectors are returned in r3.

   On RS/6000 an integer value is in r3 and a floating-point value is in
   fp1, unless -msoft-float.  */

rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
         an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
        return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* Long long return values need to be split in the -mpowerpc64,
         32-bit ABI.  */
      return gen_rtx_PARALLEL
        (DImode,
         gen_rtvec (2,
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode, GP_ARG_RETURN),
                                       const0_rtx),
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode,
                                                    GP_ARG_RETURN + 1),
                                       GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      return gen_rtx_PARALLEL
        (DCmode,
         gen_rtvec (4,
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode, GP_ARG_RETURN),
                                       const0_rtx),
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode,
                                                    GP_ARG_RETURN + 1),
                                       GEN_INT (4)),
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode,
                                                    GP_ARG_RETURN + 2),
                                       GEN_INT (8)),
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode,
                                                    GP_ARG_RETURN + 3),
                                       GEN_INT (12))));
    }

  mode = TYPE_MODE (valtype);
  if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;

  if (DECIMAL_FLOAT_MODE_P (mode))
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          switch (mode)
            {
            default:
              gcc_unreachable ();
            case SDmode:
              regno = GP_ARG_RETURN;
              break;
            case DDmode:
              regno = FP_ARG_RETURN;
              break;
            case TDmode:
              /* Use f2:f3 specified by the ABI.  */
              regno = FP_ARG_RETURN + 1;
              break;
            }
        }
      else
        regno = GP_ARG_RETURN;
    }
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
           && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
           && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  */
rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* Long long return values need to be split in the -mpowerpc64,
         32-bit ABI.  */
      return gen_rtx_PARALLEL
        (DImode,
         gen_rtvec (2,
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode, GP_ARG_RETURN),
                                       const0_rtx),
                    gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_REG (SImode,
                                                    GP_ARG_RETURN + 1),
                                       GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode))
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS)
        {
          switch (mode)
            {
            default:
              gcc_unreachable ();
            case SDmode:
              regno = GP_ARG_RETURN;
              break;
            case DDmode:
              regno = FP_ARG_RETURN;
              break;
            case TDmode:
              /* Use f2:f3 specified by the ABI.  */
              regno = FP_ARG_RETURN + 1;
              break;
            }
        }
      else
        regno = GP_ARG_RETURN;
    }
  else if (SCALAR_FLOAT_MODE_P (mode)
           && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
           && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
           && (mode == DFmode || mode == DCmode
               || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}
/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */
HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
        offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
             ? info->fixed_size + info->vars_size + info->parm_size
             : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}
/* Return true if TYPE is a SPE or AltiVec opaque type.  */

static bool
rs6000_is_opaque_type (tree type)
{
  return (type == opaque_V2SI_type_node
          || type == opaque_V2SF_type_node
          || type == opaque_p_V2SI_type_node
          || type == opaque_V4SI_type_node);
}
static rtx
rs6000_dwarf_register_span (rtx reg)
{
  unsigned regno;

  if (TARGET_SPE
      && (SPE_VECTOR_MODE (GET_MODE (reg))
          || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
                      BYTES_BIG_ENDIAN
                      ? gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno + 1200),
                                   gen_rtx_REG (SImode, regno))
                      : gen_rtvec (2,
                                   gen_rtx_REG (SImode, regno),
                                   gen_rtx_REG (SImode, regno + 1200)));
}
/* Fill in sizes for SPE register high parts in table used by unwinder.  */

static void
rs6000_init_dwarf_reg_sizes_extra (tree address)
{
  if (TARGET_SPE)
    {
      int i;
      enum machine_mode mode = TYPE_MODE (char_type_node);
      rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
      rtx mem = gen_rtx_MEM (BLKmode, addr);
      rtx value = gen_int_mode (4, mode);

      for (i = 1201; i < 1232; i++)
        {
          int column = DWARF_REG_TO_UNWIND_COLUMN (i);
          HOST_WIDE_INT offset
            = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);

          emit_move_insn (adjust_address (mem, mode, offset), value);
        }
    }
}
/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}
/* target hook eh_return_filter_mode */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}
/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}
/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;

  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;

  else
    return false;
}
/* Target hook for invalid_arg_for_unprototyped_fn. */
static const char *
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
{
  return (!rs6000_darwin64_abi
          && typelist == 0
          && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
          && (funcdecl == NULL_TREE
              || (TREE_CODE (funcdecl) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
         ? N_("AltiVec argument passed to unprototyped function")
         : NULL;
}
/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
   setup by using __stack_chk_fail_local hidden function instead of
   calling __stack_chk_fail directly.  Otherwise it is better to call
   __stack_chk_fail directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
         ? default_hidden_stack_protect_fail ()
         : default_external_stack_protect_fail ();
}
#include "gt-rs6000.h"