1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Nonzero if N fits in a 5-bit signed immediate (vspltis{b,h,w} range).  */
#define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
/* Nonzero if N is an even value in [0x10, 0x1e]: such a constant can be
   materialized as a splat of N/2 followed by an add of the register to
   itself.  NOTE(review): the continuation of this macro was lost in
   extraction; reconstructed from the "splat then add-self" semantics.  */
#define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
				    && !((n) & 1))

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack
{
73 int first_gp_reg_save
; /* first callee saved GP register used */
74 int first_fp_reg_save
; /* first callee saved FP register used */
75 int first_altivec_reg_save
; /* first callee saved AltiVec register used */
76 int lr_save_p
; /* true if the link reg needs to be saved */
77 int cr_save_p
; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask
; /* mask of vec registers to save */
79 int toc_save_p
; /* true if the TOC needs to be saved */
80 int push_p
; /* true if we need to allocate stack space */
81 int calls_p
; /* true if the function makes any calls */
82 enum rs6000_abi abi
; /* which ABI to use */
83 int gp_save_offset
; /* offset to save GP regs from initial SP */
84 int fp_save_offset
; /* offset to save FP regs from initial SP */
85 int altivec_save_offset
; /* offset to save AltiVec regs from initial SP */
86 int lr_save_offset
; /* offset to save LR from initial SP */
87 int cr_save_offset
; /* offset to save CR from initial SP */
88 int vrsave_save_offset
; /* offset to save VRSAVE from initial SP */
89 int spe_gp_save_offset
; /* offset to save spe 64-bit gprs */
90 int toc_save_offset
; /* offset to save the TOC pointer */
91 int varargs_save_offset
; /* offset to save the varargs registers */
92 int ehrd_offset
; /* offset to EH return data */
93 int reg_size
; /* register size (4 or 8) */
94 int varargs_size
; /* size to hold V.4 args passed in regs */
95 HOST_WIDE_INT vars_size
; /* variable save area size */
96 int parm_size
; /* outgoing parameter size */
97 int save_size
; /* save area size */
98 int fixed_size
; /* fixed size of stack frame */
99 int gp_size
; /* size of saved GP registers */
100 int fp_size
; /* size of saved FP registers */
101 int altivec_size
; /* size of saved AltiVec registers */
102 int cr_size
; /* size to hold CR if not in save_size */
103 int lr_size
; /* size to hold LR if not in save_size */
104 int vrsave_size
; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size
; /* size of altivec alignment padding if
107 int spe_gp_size
; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size
;
109 int toc_size
; /* size to hold TOC if not in save_size */
110 HOST_WIDE_INT total_size
; /* total bytes allocated for stack */
111 int spe_64bit_regs_used
;
114 /* Target cpu type */
116 enum processor_type rs6000_cpu
;
117 struct rs6000_cpu_select rs6000_select
[3] =
119 /* switch name, tune arch */
120 { (const char *)0, "--with-cpu=", 1, 1 },
121 { (const char *)0, "-mcpu=", 1, 1 },
122 { (const char *)0, "-mtune=", 1, 0 },
125 /* Always emit branch hint bits. */
126 static GTY(()) bool rs6000_always_hint
;
128 /* Schedule instructions for group formation. */
129 static GTY(()) bool rs6000_sched_groups
;
131 /* Support adjust_priority scheduler hook
132 and -mprioritize-restricted-insns= option. */
133 const char *rs6000_sched_restricted_insns_priority_str
;
134 int rs6000_sched_restricted_insns_priority
;
136 /* Support for -msched-costly-dep option. */
137 const char *rs6000_sched_costly_dep_str
;
138 enum rs6000_dependence_cost rs6000_sched_costly_dep
;
140 /* Support for -minsert-sched-nops option. */
141 const char *rs6000_sched_insert_nops_str
;
142 enum rs6000_nop_insertion rs6000_sched_insert_nops
;
144 /* Size of long double */
145 const char *rs6000_long_double_size_string
;
146 int rs6000_long_double_type_size
;
148 /* Whether -mabi=altivec has appeared */
149 int rs6000_altivec_abi
;
151 /* Whether VRSAVE instructions should be generated. */
152 int rs6000_altivec_vrsave
;
154 /* String from -mvrsave= option. */
155 const char *rs6000_altivec_vrsave_string
;
/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Whether SPE simd instructions should be generated.  */
int rs6000_spe;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* String from -mfloat-gprs=.  */
const char *rs6000_float_gprs_string;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* String from -mspe=.  */
const char *rs6000_spe_string;
178 /* Set to nonzero once AIX common-mode calls have been defined. */
179 static GTY(()) int common_mode_defined
;
181 /* Save information from a "cmpxx" operation until the branch or scc is
183 rtx rs6000_compare_op0
, rs6000_compare_op1
;
184 int rs6000_compare_fp_p
;
186 /* Label number of label created for -mrelocatable, to call to so we can
187 get the address of the GOT section */
188 int rs6000_pic_labelno
;
191 /* Which abi to adhere to */
192 const char *rs6000_abi_name
;
194 /* Semantics of the small data area */
195 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
197 /* Which small data model to use */
198 const char *rs6000_sdata_name
= (char *)0;
200 /* Counter for labels which are to be placed in .fixup. */
201 int fixuplabelno
= 0;
204 /* Bit size of immediate TLS offsets and string from which it is decoded. */
205 int rs6000_tls_size
= 32;
206 const char *rs6000_tls_size_string
;
208 /* ABI enumeration available for subtarget to use. */
209 enum rs6000_abi rs6000_current_abi
;
211 /* ABI string from -mabi= option. */
212 const char *rs6000_abi_string
;
215 const char *rs6000_debug_name
;
216 int rs6000_debug_stack
; /* debug stack applications */
217 int rs6000_debug_arg
; /* debug argument handling */
219 /* Value is TRUE if register/mode pair is accepatable. */
220 bool rs6000_hard_regno_mode_ok_p
[NUM_MACHINE_MODES
][FIRST_PSEUDO_REGISTER
];
223 static GTY(()) tree opaque_V2SI_type_node
;
224 static GTY(()) tree opaque_V2SF_type_node
;
225 static GTY(()) tree opaque_p_V2SI_type_node
;
226 static GTY(()) tree V16QI_type_node
;
227 static GTY(()) tree V2SI_type_node
;
228 static GTY(()) tree V2SF_type_node
;
229 static GTY(()) tree V4HI_type_node
;
230 static GTY(()) tree V4SI_type_node
;
231 static GTY(()) tree V4SF_type_node
;
232 static GTY(()) tree V8HI_type_node
;
233 static GTY(()) tree unsigned_V16QI_type_node
;
234 static GTY(()) tree unsigned_V8HI_type_node
;
235 static GTY(()) tree unsigned_V4SI_type_node
;
236 static GTY(()) tree bool_char_type_node
; /* __bool char */
237 static GTY(()) tree bool_short_type_node
; /* __bool short */
238 static GTY(()) tree bool_int_type_node
; /* __bool int */
239 static GTY(()) tree pixel_type_node
; /* __pixel */
240 static GTY(()) tree bool_V16QI_type_node
; /* __vector __bool char */
241 static GTY(()) tree bool_V8HI_type_node
; /* __vector __bool short */
242 static GTY(()) tree bool_V4SI_type_node
; /* __vector __bool int */
243 static GTY(()) tree pixel_V8HI_type_node
; /* __vector __pixel */
245 int rs6000_warn_altivec_long
= 1; /* On by default. */
246 const char *rs6000_warn_altivec_long_switch
;
248 const char *rs6000_traceback_name
;
250 traceback_default
= 0,
256 /* Flag to say the TOC is initialized */
258 char toc_label_name
[10];
260 /* Alias set for saves and restores from the rs6000 stack. */
261 static GTY(()) int rs6000_sr_alias_set
;
/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
const char *rs6000_alignment_string;
int rs6000_alignment_flags;
275 struct builtin_description
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
281 const enum insn_code icode
;
282 const char *const name
;
283 const enum rs6000_builtins code
;
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};

/* Cost table in effect for the cpu we are compiling for.  */
const struct processor_costs *rs6000_cost;

/* Processor costs (relative to an add) */
305 /* Instruction size costs on 32bit processors. */
307 struct processor_costs size32_cost
= {
308 COSTS_N_INSNS (1), /* mulsi */
309 COSTS_N_INSNS (1), /* mulsi_const */
310 COSTS_N_INSNS (1), /* mulsi_const9 */
311 COSTS_N_INSNS (1), /* muldi */
312 COSTS_N_INSNS (1), /* divsi */
313 COSTS_N_INSNS (1), /* divdi */
314 COSTS_N_INSNS (1), /* fp */
315 COSTS_N_INSNS (1), /* dmul */
316 COSTS_N_INSNS (1), /* sdiv */
317 COSTS_N_INSNS (1), /* ddiv */
320 /* Instruction size costs on 64bit processors. */
322 struct processor_costs size64_cost
= {
323 COSTS_N_INSNS (1), /* mulsi */
324 COSTS_N_INSNS (1), /* mulsi_const */
325 COSTS_N_INSNS (1), /* mulsi_const9 */
326 COSTS_N_INSNS (1), /* muldi */
327 COSTS_N_INSNS (1), /* divsi */
328 COSTS_N_INSNS (1), /* divdi */
329 COSTS_N_INSNS (1), /* fp */
330 COSTS_N_INSNS (1), /* dmul */
331 COSTS_N_INSNS (1), /* sdiv */
332 COSTS_N_INSNS (1), /* ddiv */
335 /* Instruction costs on RIOS1 processors. */
337 struct processor_costs rios1_cost
= {
338 COSTS_N_INSNS (5), /* mulsi */
339 COSTS_N_INSNS (4), /* mulsi_const */
340 COSTS_N_INSNS (3), /* mulsi_const9 */
341 COSTS_N_INSNS (5), /* muldi */
342 COSTS_N_INSNS (19), /* divsi */
343 COSTS_N_INSNS (19), /* divdi */
344 COSTS_N_INSNS (2), /* fp */
345 COSTS_N_INSNS (2), /* dmul */
346 COSTS_N_INSNS (19), /* sdiv */
347 COSTS_N_INSNS (19), /* ddiv */
350 /* Instruction costs on RIOS2 processors. */
352 struct processor_costs rios2_cost
= {
353 COSTS_N_INSNS (2), /* mulsi */
354 COSTS_N_INSNS (2), /* mulsi_const */
355 COSTS_N_INSNS (2), /* mulsi_const9 */
356 COSTS_N_INSNS (2), /* muldi */
357 COSTS_N_INSNS (13), /* divsi */
358 COSTS_N_INSNS (13), /* divdi */
359 COSTS_N_INSNS (2), /* fp */
360 COSTS_N_INSNS (2), /* dmul */
361 COSTS_N_INSNS (17), /* sdiv */
362 COSTS_N_INSNS (17), /* ddiv */
365 /* Instruction costs on RS64A processors. */
367 struct processor_costs rs64a_cost
= {
368 COSTS_N_INSNS (20), /* mulsi */
369 COSTS_N_INSNS (12), /* mulsi_const */
370 COSTS_N_INSNS (8), /* mulsi_const9 */
371 COSTS_N_INSNS (34), /* muldi */
372 COSTS_N_INSNS (65), /* divsi */
373 COSTS_N_INSNS (67), /* divdi */
374 COSTS_N_INSNS (4), /* fp */
375 COSTS_N_INSNS (4), /* dmul */
376 COSTS_N_INSNS (31), /* sdiv */
377 COSTS_N_INSNS (31), /* ddiv */
380 /* Instruction costs on MPCCORE processors. */
382 struct processor_costs mpccore_cost
= {
383 COSTS_N_INSNS (2), /* mulsi */
384 COSTS_N_INSNS (2), /* mulsi_const */
385 COSTS_N_INSNS (2), /* mulsi_const9 */
386 COSTS_N_INSNS (2), /* muldi */
387 COSTS_N_INSNS (6), /* divsi */
388 COSTS_N_INSNS (6), /* divdi */
389 COSTS_N_INSNS (4), /* fp */
390 COSTS_N_INSNS (5), /* dmul */
391 COSTS_N_INSNS (10), /* sdiv */
392 COSTS_N_INSNS (17), /* ddiv */
395 /* Instruction costs on PPC403 processors. */
397 struct processor_costs ppc403_cost
= {
398 COSTS_N_INSNS (4), /* mulsi */
399 COSTS_N_INSNS (4), /* mulsi_const */
400 COSTS_N_INSNS (4), /* mulsi_const9 */
401 COSTS_N_INSNS (4), /* muldi */
402 COSTS_N_INSNS (33), /* divsi */
403 COSTS_N_INSNS (33), /* divdi */
404 COSTS_N_INSNS (11), /* fp */
405 COSTS_N_INSNS (11), /* dmul */
406 COSTS_N_INSNS (11), /* sdiv */
407 COSTS_N_INSNS (11), /* ddiv */
410 /* Instruction costs on PPC405 processors. */
412 struct processor_costs ppc405_cost
= {
413 COSTS_N_INSNS (5), /* mulsi */
414 COSTS_N_INSNS (4), /* mulsi_const */
415 COSTS_N_INSNS (3), /* mulsi_const9 */
416 COSTS_N_INSNS (5), /* muldi */
417 COSTS_N_INSNS (35), /* divsi */
418 COSTS_N_INSNS (35), /* divdi */
419 COSTS_N_INSNS (11), /* fp */
420 COSTS_N_INSNS (11), /* dmul */
421 COSTS_N_INSNS (11), /* sdiv */
422 COSTS_N_INSNS (11), /* ddiv */
425 /* Instruction costs on PPC440 processors. */
427 struct processor_costs ppc440_cost
= {
428 COSTS_N_INSNS (3), /* mulsi */
429 COSTS_N_INSNS (2), /* mulsi_const */
430 COSTS_N_INSNS (2), /* mulsi_const9 */
431 COSTS_N_INSNS (3), /* muldi */
432 COSTS_N_INSNS (34), /* divsi */
433 COSTS_N_INSNS (34), /* divdi */
434 COSTS_N_INSNS (5), /* fp */
435 COSTS_N_INSNS (5), /* dmul */
436 COSTS_N_INSNS (19), /* sdiv */
437 COSTS_N_INSNS (33), /* ddiv */
440 /* Instruction costs on PPC601 processors. */
442 struct processor_costs ppc601_cost
= {
443 COSTS_N_INSNS (5), /* mulsi */
444 COSTS_N_INSNS (5), /* mulsi_const */
445 COSTS_N_INSNS (5), /* mulsi_const9 */
446 COSTS_N_INSNS (5), /* muldi */
447 COSTS_N_INSNS (36), /* divsi */
448 COSTS_N_INSNS (36), /* divdi */
449 COSTS_N_INSNS (4), /* fp */
450 COSTS_N_INSNS (5), /* dmul */
451 COSTS_N_INSNS (17), /* sdiv */
452 COSTS_N_INSNS (31), /* ddiv */
455 /* Instruction costs on PPC603 processors. */
457 struct processor_costs ppc603_cost
= {
458 COSTS_N_INSNS (5), /* mulsi */
459 COSTS_N_INSNS (3), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (5), /* muldi */
462 COSTS_N_INSNS (37), /* divsi */
463 COSTS_N_INSNS (37), /* divdi */
464 COSTS_N_INSNS (3), /* fp */
465 COSTS_N_INSNS (4), /* dmul */
466 COSTS_N_INSNS (18), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
470 /* Instruction costs on PPC604 processors. */
472 struct processor_costs ppc604_cost
= {
473 COSTS_N_INSNS (4), /* mulsi */
474 COSTS_N_INSNS (4), /* mulsi_const */
475 COSTS_N_INSNS (4), /* mulsi_const9 */
476 COSTS_N_INSNS (4), /* muldi */
477 COSTS_N_INSNS (20), /* divsi */
478 COSTS_N_INSNS (20), /* divdi */
479 COSTS_N_INSNS (3), /* fp */
480 COSTS_N_INSNS (3), /* dmul */
481 COSTS_N_INSNS (18), /* sdiv */
482 COSTS_N_INSNS (32), /* ddiv */
485 /* Instruction costs on PPC604e processors. */
487 struct processor_costs ppc604e_cost
= {
488 COSTS_N_INSNS (2), /* mulsi */
489 COSTS_N_INSNS (2), /* mulsi_const */
490 COSTS_N_INSNS (2), /* mulsi_const9 */
491 COSTS_N_INSNS (2), /* muldi */
492 COSTS_N_INSNS (20), /* divsi */
493 COSTS_N_INSNS (20), /* divdi */
494 COSTS_N_INSNS (3), /* fp */
495 COSTS_N_INSNS (3), /* dmul */
496 COSTS_N_INSNS (18), /* sdiv */
497 COSTS_N_INSNS (32), /* ddiv */
500 /* Instruction costs on PPC620 processors. */
502 struct processor_costs ppc620_cost
= {
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (4), /* mulsi_const */
505 COSTS_N_INSNS (3), /* mulsi_const9 */
506 COSTS_N_INSNS (7), /* muldi */
507 COSTS_N_INSNS (21), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (3), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (32), /* ddiv */
515 /* Instruction costs on PPC630 processors. */
517 struct processor_costs ppc630_cost
= {
518 COSTS_N_INSNS (5), /* mulsi */
519 COSTS_N_INSNS (4), /* mulsi_const */
520 COSTS_N_INSNS (3), /* mulsi_const9 */
521 COSTS_N_INSNS (7), /* muldi */
522 COSTS_N_INSNS (21), /* divsi */
523 COSTS_N_INSNS (37), /* divdi */
524 COSTS_N_INSNS (3), /* fp */
525 COSTS_N_INSNS (3), /* dmul */
526 COSTS_N_INSNS (17), /* sdiv */
527 COSTS_N_INSNS (21), /* ddiv */
530 /* Instruction costs on PPC750 and PPC7400 processors. */
532 struct processor_costs ppc750_cost
= {
533 COSTS_N_INSNS (5), /* mulsi */
534 COSTS_N_INSNS (3), /* mulsi_const */
535 COSTS_N_INSNS (2), /* mulsi_const9 */
536 COSTS_N_INSNS (5), /* muldi */
537 COSTS_N_INSNS (17), /* divsi */
538 COSTS_N_INSNS (17), /* divdi */
539 COSTS_N_INSNS (3), /* fp */
540 COSTS_N_INSNS (3), /* dmul */
541 COSTS_N_INSNS (17), /* sdiv */
542 COSTS_N_INSNS (31), /* ddiv */
545 /* Instruction costs on PPC7450 processors. */
547 struct processor_costs ppc7450_cost
= {
548 COSTS_N_INSNS (4), /* mulsi */
549 COSTS_N_INSNS (3), /* mulsi_const */
550 COSTS_N_INSNS (3), /* mulsi_const9 */
551 COSTS_N_INSNS (4), /* muldi */
552 COSTS_N_INSNS (23), /* divsi */
553 COSTS_N_INSNS (23), /* divdi */
554 COSTS_N_INSNS (5), /* fp */
555 COSTS_N_INSNS (5), /* dmul */
556 COSTS_N_INSNS (21), /* sdiv */
557 COSTS_N_INSNS (35), /* ddiv */
560 /* Instruction costs on PPC8540 processors. */
562 struct processor_costs ppc8540_cost
= {
563 COSTS_N_INSNS (4), /* mulsi */
564 COSTS_N_INSNS (4), /* mulsi_const */
565 COSTS_N_INSNS (4), /* mulsi_const9 */
566 COSTS_N_INSNS (4), /* muldi */
567 COSTS_N_INSNS (19), /* divsi */
568 COSTS_N_INSNS (19), /* divdi */
569 COSTS_N_INSNS (4), /* fp */
570 COSTS_N_INSNS (4), /* dmul */
571 COSTS_N_INSNS (29), /* sdiv */
572 COSTS_N_INSNS (29), /* ddiv */
575 /* Instruction costs on POWER4 and POWER5 processors. */
577 struct processor_costs power4_cost
= {
578 COSTS_N_INSNS (3), /* mulsi */
579 COSTS_N_INSNS (2), /* mulsi_const */
580 COSTS_N_INSNS (2), /* mulsi_const9 */
581 COSTS_N_INSNS (4), /* muldi */
582 COSTS_N_INSNS (18), /* divsi */
583 COSTS_N_INSNS (34), /* divdi */
584 COSTS_N_INSNS (3), /* fp */
585 COSTS_N_INSNS (3), /* dmul */
586 COSTS_N_INSNS (17), /* sdiv */
587 COSTS_N_INSNS (17), /* ddiv */
591 static bool rs6000_function_ok_for_sibcall (tree
, tree
);
592 static int num_insns_constant_wide (HOST_WIDE_INT
);
593 static void validate_condition_mode (enum rtx_code
, enum machine_mode
);
594 static rtx
rs6000_generate_compare (enum rtx_code
);
595 static void rs6000_maybe_dead (rtx
);
596 static void rs6000_emit_stack_tie (void);
597 static void rs6000_frame_related (rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
);
598 static rtx
spe_synthesize_frame_save (rtx
);
599 static bool spe_func_has_64bit_regs_p (void);
600 static void emit_frame_save (rtx
, rtx
, enum machine_mode
, unsigned int,
602 static rtx
gen_frame_mem_offset (enum machine_mode
, rtx
, int);
603 static void rs6000_emit_allocate_stack (HOST_WIDE_INT
, int);
604 static unsigned rs6000_hash_constant (rtx
);
605 static unsigned toc_hash_function (const void *);
606 static int toc_hash_eq (const void *, const void *);
607 static int constant_pool_expr_1 (rtx
, int *, int *);
608 static bool constant_pool_expr_p (rtx
);
609 static bool toc_relative_expr_p (rtx
);
610 static bool legitimate_small_data_p (enum machine_mode
, rtx
);
611 static bool legitimate_indexed_address_p (rtx
, int);
612 static bool legitimate_indirect_address_p (rtx
, int);
613 static bool macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
);
614 static bool legitimate_lo_sum_address_p (enum machine_mode
, rtx
, int);
615 static struct machine_function
* rs6000_init_machine_status (void);
616 static bool rs6000_assemble_integer (rtx
, unsigned int, int);
/* Restore the #endif that was lost in extraction: the visibility hook
   only exists when gas supports .hidden.  */
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
620 static int rs6000_ra_ever_killed (void);
621 static tree
rs6000_handle_longcall_attribute (tree
*, tree
, tree
, int, bool *);
622 static tree
rs6000_handle_altivec_attribute (tree
*, tree
, tree
, int, bool *);
623 static void rs6000_eliminate_indexed_memrefs (rtx operands
[2]);
624 static const char *rs6000_mangle_fundamental_type (tree
);
625 extern const struct attribute_spec rs6000_attribute_table
[];
626 static void rs6000_set_default_type_attributes (tree
);
627 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT
);
628 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT
);
629 static void rs6000_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
, HOST_WIDE_INT
,
631 static rtx
rs6000_emit_set_long_const (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
);
632 static bool rs6000_return_in_memory (tree
, tree
);
633 static void rs6000_file_start (void);
635 static unsigned int rs6000_elf_section_type_flags (tree
, const char *, int);
636 static void rs6000_elf_asm_out_constructor (rtx
, int);
637 static void rs6000_elf_asm_out_destructor (rtx
, int);
638 static void rs6000_elf_select_section (tree
, int, unsigned HOST_WIDE_INT
);
639 static void rs6000_elf_unique_section (tree
, int);
640 static void rs6000_elf_select_rtx_section (enum machine_mode
, rtx
,
641 unsigned HOST_WIDE_INT
);
642 static void rs6000_elf_encode_section_info (tree
, rtx
, int)
644 static bool rs6000_elf_in_small_data_p (tree
);
647 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
648 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
649 static void rs6000_xcoff_select_section (tree
, int, unsigned HOST_WIDE_INT
);
650 static void rs6000_xcoff_unique_section (tree
, int);
651 static void rs6000_xcoff_select_rtx_section (enum machine_mode
, rtx
,
652 unsigned HOST_WIDE_INT
);
653 static const char * rs6000_xcoff_strip_name_encoding (const char *);
654 static unsigned int rs6000_xcoff_section_type_flags (tree
, const char *, int);
655 static void rs6000_xcoff_file_start (void);
656 static void rs6000_xcoff_file_end (void);
659 static bool rs6000_binds_local_p (tree
);
661 static int rs6000_variable_issue (FILE *, int, rtx
, int);
662 static bool rs6000_rtx_costs (rtx
, int, int, int *);
663 static int rs6000_adjust_cost (rtx
, rtx
, rtx
, int);
664 static bool is_microcoded_insn (rtx
);
665 static int is_dispatch_slot_restricted (rtx
);
666 static bool is_cracked_insn (rtx
);
667 static bool is_branch_slot_insn (rtx
);
668 static int rs6000_adjust_priority (rtx
, int);
669 static int rs6000_issue_rate (void);
670 static bool rs6000_is_costly_dependence (rtx
, rtx
, rtx
, int, int);
671 static rtx
get_next_active_insn (rtx
, rtx
);
672 static bool insn_terminates_group_p (rtx
, enum group_termination
);
673 static bool is_costly_group (rtx
*, rtx
);
674 static int force_new_group (int, FILE *, rtx
*, rtx
, bool *, int, int *);
675 static int redefine_groups (FILE *, int, rtx
, rtx
);
676 static int pad_groups (FILE *, int, rtx
, rtx
);
677 static void rs6000_sched_finish (FILE *, int);
678 static int rs6000_use_sched_lookahead (void);
680 static void rs6000_init_builtins (void);
681 static rtx
rs6000_expand_unop_builtin (enum insn_code
, tree
, rtx
);
682 static rtx
rs6000_expand_binop_builtin (enum insn_code
, tree
, rtx
);
683 static rtx
rs6000_expand_ternop_builtin (enum insn_code
, tree
, rtx
);
684 static rtx
rs6000_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
685 static void altivec_init_builtins (void);
686 static void rs6000_common_init_builtins (void);
687 static void rs6000_init_libfuncs (void);
689 static void enable_mask_for_builtins (struct builtin_description
*, int,
690 enum rs6000_builtins
,
691 enum rs6000_builtins
);
692 static tree
build_opaque_vector_type (tree
, int);
693 static void spe_init_builtins (void);
694 static rtx
spe_expand_builtin (tree
, rtx
, bool *);
695 static rtx
spe_expand_stv_builtin (enum insn_code
, tree
);
696 static rtx
spe_expand_predicate_builtin (enum insn_code
, tree
, rtx
);
697 static rtx
spe_expand_evsel_builtin (enum insn_code
, tree
, rtx
);
698 static int rs6000_emit_int_cmove (rtx
, rtx
, rtx
, rtx
);
699 static rs6000_stack_t
*rs6000_stack_info (void);
700 static void debug_stack_info (rs6000_stack_t
*);
702 static rtx
altivec_expand_builtin (tree
, rtx
, bool *);
703 static rtx
altivec_expand_ld_builtin (tree
, rtx
, bool *);
704 static rtx
altivec_expand_st_builtin (tree
, rtx
, bool *);
705 static rtx
altivec_expand_dst_builtin (tree
, rtx
, bool *);
706 static rtx
altivec_expand_abs_builtin (enum insn_code
, tree
, rtx
);
707 static rtx
altivec_expand_predicate_builtin (enum insn_code
,
708 const char *, tree
, rtx
);
709 static rtx
altivec_expand_lv_builtin (enum insn_code
, tree
, rtx
);
710 static rtx
altivec_expand_stv_builtin (enum insn_code
, tree
);
711 static void rs6000_parse_abi_options (void);
712 static void rs6000_parse_alignment_option (void);
713 static void rs6000_parse_tls_size_option (void);
714 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
715 static int first_altivec_reg_to_save (void);
716 static unsigned int compute_vrsave_mask (void);
717 static void is_altivec_return_reg (rtx
, void *);
718 static rtx
generate_set_vrsave (rtx
, rs6000_stack_t
*, int);
719 int easy_vector_constant (rtx
, enum machine_mode
);
720 static int easy_vector_same (rtx
, enum machine_mode
);
721 static int easy_vector_splat_const (int, enum machine_mode
);
722 static bool is_ev64_opaque_type (tree
);
723 static rtx
rs6000_dwarf_register_span (rtx
);
724 static rtx
rs6000_legitimize_tls_address (rtx
, enum tls_model
);
725 static rtx
rs6000_tls_get_addr (void);
726 static rtx
rs6000_got_sym (void);
727 static inline int rs6000_tls_symbol_ref_1 (rtx
*, void *);
728 static const char *rs6000_get_some_local_dynamic_name (void);
729 static int rs6000_get_some_local_dynamic_name_1 (rtx
*, void *);
730 static rtx
rs6000_complex_function_value (enum machine_mode
);
731 static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS
*,
732 enum machine_mode
, tree
);
733 static rtx
rs6000_mixed_function_arg (enum machine_mode
, tree
, int);
734 static void rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
);
735 static void setup_incoming_varargs (CUMULATIVE_ARGS
*,
736 enum machine_mode
, tree
,
738 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode
,
741 static void macho_branch_islands (void);
742 static void add_compiler_branch_island (tree
, tree
, int);
743 static int no_previous_def (tree function_name
);
744 static tree
get_prev_label (tree function_name
);
747 static tree
rs6000_build_builtin_va_list (void);
748 static tree
rs6000_gimplify_va_arg (tree
, tree
, tree
*, tree
*);
749 static bool rs6000_must_pass_in_stack (enum machine_mode
, tree
);
751 /* Hash table stuff for keeping track of TOC entries. */
753 struct toc_hash_struct
GTY(())
755 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
756 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
758 enum machine_mode key_mode
;
762 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
/* Default register names.  Order: 32 GPRs, 32 FPRs, MQ/LR/CTR/AP, 8 CRs,
   XER, 32 AltiVec registers, VRSAVE/VSCR, SPE accumulator and SPEFSCR.
   NOTE(review): the opening brace and the trailing special-register
   entries were lost in extraction; reconstructed — confirm the count
   matches FIRST_PSEUDO_REGISTER.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* %-prefixed register names used when -mregnames is in effect; must be
   kept parallel with rs6000_reg_names above.  NOTE(review): opening
   brace, trailing entries and the closing #endif were lost in
   extraction; reconstructed.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
/* Restore the two #endif directives lost in extraction.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Return 1 for a symbol ref for a thread-local storage symbol.  */
#define RS6000_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
827 /* Initialize the GCC target structure. */
828 #undef TARGET_ATTRIBUTE_TABLE
829 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
830 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
831 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
833 #undef TARGET_ASM_ALIGNED_DI_OP
834 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  NOTE(review): the #if TARGET_XCOFF / #else /
   #endif structure was lost in extraction; reconstructed from the
   XCOFF (.vbyte) vs. Darwin (.short/.long) directives that survive.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif
857 /* This hook deals with fixups for relocatable code and DI-mode objects
859 #undef TARGET_ASM_INTEGER
860 #define TARGET_ASM_INTEGER rs6000_assemble_integer
/* Restore the #endif lost in extraction: the hook is only defined when
   gas supports .hidden (matches the guarded prototype above).  */
#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif
867 #undef TARGET_HAVE_TLS
868 #define TARGET_HAVE_TLS HAVE_AS_TLS
870 #undef TARGET_CANNOT_FORCE_CONST_MEM
871 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
873 #undef TARGET_ASM_FUNCTION_PROLOGUE
874 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
875 #undef TARGET_ASM_FUNCTION_EPILOGUE
876 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
878 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
879 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
880 #undef TARGET_SCHED_VARIABLE_ISSUE
881 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
883 #undef TARGET_SCHED_ISSUE_RATE
884 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
885 #undef TARGET_SCHED_ADJUST_COST
886 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
887 #undef TARGET_SCHED_ADJUST_PRIORITY
888 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
889 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
890 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
891 #undef TARGET_SCHED_FINISH
892 #define TARGET_SCHED_FINISH rs6000_sched_finish
894 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
895 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
897 #undef TARGET_INIT_BUILTINS
898 #define TARGET_INIT_BUILTINS rs6000_init_builtins
900 #undef TARGET_EXPAND_BUILTIN
901 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
903 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
904 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
906 #undef TARGET_INIT_LIBFUNCS
907 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
910 #undef TARGET_BINDS_LOCAL_P
911 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
914 #undef TARGET_ASM_OUTPUT_MI_THUNK
915 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
917 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
918 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
920 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
921 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
923 #undef TARGET_RTX_COSTS
924 #define TARGET_RTX_COSTS rs6000_rtx_costs
925 #undef TARGET_ADDRESS_COST
926 #define TARGET_ADDRESS_COST hook_int_rtx_0
928 #undef TARGET_VECTOR_OPAQUE_P
929 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
931 #undef TARGET_DWARF_REGISTER_SPAN
932 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
934 /* On rs6000, function arguments are promoted, as are function return
936 #undef TARGET_PROMOTE_FUNCTION_ARGS
937 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
938 #undef TARGET_PROMOTE_FUNCTION_RETURN
939 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
941 #undef TARGET_RETURN_IN_MEMORY
942 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
944 #undef TARGET_SETUP_INCOMING_VARARGS
945 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
947 /* Always strict argument naming on rs6000. */
948 #undef TARGET_STRICT_ARGUMENT_NAMING
949 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
950 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
951 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
952 #undef TARGET_SPLIT_COMPLEX_ARG
953 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
954 #undef TARGET_MUST_PASS_IN_STACK
955 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
956 #undef TARGET_PASS_BY_REFERENCE
957 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
959 #undef TARGET_BUILD_BUILTIN_VA_LIST
960 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
962 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
963 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
/* The rs6000 target vector: the table of target hook implementations,
   assembled from the TARGET_* hook macros #define'd above.  */
965 struct gcc_target targetm
= TARGET_INITIALIZER
;
968 /* Value is 1 if hard register REGNO can hold a value of machine-mode
/* NOTE(review): this chunk is a lossy extraction -- the embedded source
   numbering jumps (968->971, 995->997, 1004 end), so the return type,
   opening brace, some `return` fall-throughs and the closing brace are
   missing from view.  TODO: restore from upstream rs6000.c before
   building.  Comments below describe only the surviving lines.  */
971 rs6000_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
973 /* The GPRs can hold any mode, but values bigger than one register
974 cannot go past R31. */
975 if (INT_REGNO_P (regno
))
976 return INT_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1);
978 /* The float registers can only hold floating modes and DImode. */
979 if (FP_REGNO_P (regno
))
981 (GET_MODE_CLASS (mode
) == MODE_FLOAT
982 && FP_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1))
983 || (GET_MODE_CLASS (mode
) == MODE_INT
984 && GET_MODE_SIZE (mode
) == UNITS_PER_FP_WORD
);
986 /* The CR register can only hold CC modes. */
987 if (CR_REGNO_P (regno
))
988 return GET_MODE_CLASS (mode
) == MODE_CC
;
/* The XER holds only PSImode.  */
990 if (XER_REGNO_P (regno
))
991 return mode
== PSImode
;
993 /* AltiVec only in AltiVec registers. */
994 if (ALTIVEC_REGNO_P (regno
))
995 return ALTIVEC_VECTOR_MODE (mode
);
997 /* ...but GPRs can hold SIMD data on the SPE in one register. */
998 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
1001 /* We cannot put TImode anywhere except general register and it must be
1002 able to fit within the register set. */
1004 return GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
;
1007 /* Initialize rs6000_hard_regno_mode_ok_p table. */
/* Caches rs6000_hard_regno_mode_ok (r, m) for every hard register and
   machine mode into the [mode][regno] lookup table.  NOTE(review): the
   declarations of `r' and `m' (original lines 1010-1012) are missing
   from this extraction.  */
1009 rs6000_init_hard_regno_mode_ok (void)
1013 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; ++r
)
1014 for (m
= 0; m
< NUM_MACHINE_MODES
; ++m
)
1015 if (rs6000_hard_regno_mode_ok (r
, m
))
1016 rs6000_hard_regno_mode_ok_p
[m
][r
] = true;
1019 /* Override command line options. Mostly we process the processor
1020 type and sometimes adjust other TARGET_ options. */
1023 rs6000_override_options (const char *default_cpu
)
1026 struct rs6000_cpu_select
*ptr
;
1029 /* Simplifications for entries below. */
1032 POWERPC_BASE_MASK
= MASK_POWERPC
| MASK_NEW_MNEMONICS
,
1033 POWERPC_7400_MASK
= POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_ALTIVEC
1036 /* This table occasionally claims that a processor does not support
1037 a particular feature even though it does, but the feature is slower
1038 than the alternative. Thus, it shouldn't be relied on as a
1039 complete description of the processor's support.
1041 Please keep this list in order, and don't forget to update the
1042 documentation in invoke.texi when adding a new processor or
1046 const char *const name
; /* Canonical processor name. */
1047 const enum processor_type processor
; /* Processor type enum value. */
1048 const int target_enable
; /* Target flags to enable. */
1049 } const processor_target_table
[]
1050 = {{"401", PROCESSOR_PPC403
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1051 {"403", PROCESSOR_PPC403
,
1052 POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
| MASK_STRICT_ALIGN
},
1053 {"405", PROCESSOR_PPC405
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1054 {"405fp", PROCESSOR_PPC405
, POWERPC_BASE_MASK
},
1055 {"440", PROCESSOR_PPC440
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1056 {"440fp", PROCESSOR_PPC440
, POWERPC_BASE_MASK
},
1057 {"505", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
},
1058 {"601", PROCESSOR_PPC601
,
1059 MASK_POWER
| POWERPC_BASE_MASK
| MASK_MULTIPLE
| MASK_STRING
},
1060 {"602", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1061 {"603", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1062 {"603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1063 {"604", PROCESSOR_PPC604
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1064 {"604e", PROCESSOR_PPC604e
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1065 {"620", PROCESSOR_PPC620
,
1066 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1067 {"630", PROCESSOR_PPC630
,
1068 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1069 {"740", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1070 {"7400", PROCESSOR_PPC7400
, POWERPC_7400_MASK
},
1071 {"7450", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1072 {"750", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1073 {"801", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1074 {"821", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1075 {"823", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1076 {"8540", PROCESSOR_PPC8540
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1077 {"860", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1078 {"970", PROCESSOR_POWER4
,
1079 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1080 {"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
},
1081 {"ec603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1082 {"G3", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1083 {"G4", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1084 {"G5", PROCESSOR_POWER4
,
1085 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1086 {"power", PROCESSOR_POWER
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1087 {"power2", PROCESSOR_POWER
,
1088 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1089 {"power3", PROCESSOR_PPC630
,
1090 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1091 {"power4", PROCESSOR_POWER4
,
1092 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1093 {"power5", PROCESSOR_POWER5
,
1094 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1095 {"powerpc", PROCESSOR_POWERPC
, POWERPC_BASE_MASK
},
1096 {"powerpc64", PROCESSOR_POWERPC64
,
1097 POWERPC_BASE_MASK
| MASK_POWERPC64
},
1098 {"rios", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1099 {"rios1", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1100 {"rios2", PROCESSOR_RIOS2
,
1101 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1102 {"rsc", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1103 {"rsc1", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1104 {"rs64a", PROCESSOR_RS64A
, POWERPC_BASE_MASK
| MASK_POWERPC64
},
1107 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
1109 /* Some OSs don't support saving the high part of 64-bit registers on
1110 context switch. Other OSs don't support saving Altivec registers.
1111 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1112 settings; if the user wants either, the user must explicitly specify
1113 them and we won't interfere with the user's specification. */
1116 POWER_MASKS
= MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
1117 POWERPC_MASKS
= (POWERPC_BASE_MASK
| MASK_PPC_GPOPT
1118 | MASK_PPC_GFXOPT
| MASK_POWERPC64
| MASK_ALTIVEC
1122 rs6000_init_hard_regno_mode_ok ();
1124 set_masks
= POWER_MASKS
| POWERPC_MASKS
| MASK_SOFT_FLOAT
;
1125 #ifdef OS_MISSING_POWERPC64
1126 if (OS_MISSING_POWERPC64
)
1127 set_masks
&= ~MASK_POWERPC64
;
1129 #ifdef OS_MISSING_ALTIVEC
1130 if (OS_MISSING_ALTIVEC
)
1131 set_masks
&= ~MASK_ALTIVEC
;
1134 /* Don't override these by the processor default if given explicitly. */
1135 set_masks
&= ~(target_flags_explicit
1136 & (MASK_MULTIPLE
| MASK_STRING
| MASK_SOFT_FLOAT
));
1138 /* Identify the processor type. */
1139 rs6000_select
[0].string
= default_cpu
;
1140 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
1142 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1144 ptr
= &rs6000_select
[i
];
1145 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1147 for (j
= 0; j
< ptt_size
; j
++)
1148 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
1150 if (ptr
->set_tune_p
)
1151 rs6000_cpu
= processor_target_table
[j
].processor
;
1153 if (ptr
->set_arch_p
)
1155 target_flags
&= ~set_masks
;
1156 target_flags
|= (processor_target_table
[j
].target_enable
1163 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
1170 /* If we are optimizing big endian systems for space, use the load/store
1171 multiple and string instructions. */
1172 if (BYTES_BIG_ENDIAN
&& optimize_size
)
1173 target_flags
|= ~target_flags_explicit
& (MASK_MULTIPLE
| MASK_STRING
);
1175 /* Don't allow -mmultiple or -mstring on little endian systems
1176 unless the cpu is a 750, because the hardware doesn't support the
1177 instructions used in little endian mode, and causes an alignment
1178 trap. The 750 does not cause an alignment trap (except when the
1179 target is unaligned). */
1181 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
1183 if (TARGET_MULTIPLE
)
1185 target_flags
&= ~MASK_MULTIPLE
;
1186 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
1187 warning ("-mmultiple is not supported on little endian systems");
1192 target_flags
&= ~MASK_STRING
;
1193 if ((target_flags_explicit
& MASK_STRING
) != 0)
1194 warning ("-mstring is not supported on little endian systems");
1198 /* Set debug flags */
1199 if (rs6000_debug_name
)
1201 if (! strcmp (rs6000_debug_name
, "all"))
1202 rs6000_debug_stack
= rs6000_debug_arg
= 1;
1203 else if (! strcmp (rs6000_debug_name
, "stack"))
1204 rs6000_debug_stack
= 1;
1205 else if (! strcmp (rs6000_debug_name
, "arg"))
1206 rs6000_debug_arg
= 1;
1208 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
1211 if (rs6000_traceback_name
)
1213 if (! strncmp (rs6000_traceback_name
, "full", 4))
1214 rs6000_traceback
= traceback_full
;
1215 else if (! strncmp (rs6000_traceback_name
, "part", 4))
1216 rs6000_traceback
= traceback_part
;
1217 else if (! strncmp (rs6000_traceback_name
, "no", 2))
1218 rs6000_traceback
= traceback_none
;
1220 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1221 rs6000_traceback_name
);
1224 /* Set size of long double */
1225 rs6000_long_double_type_size
= 64;
1226 if (rs6000_long_double_size_string
)
1229 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
1230 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
1231 error ("Unknown switch -mlong-double-%s",
1232 rs6000_long_double_size_string
);
1234 rs6000_long_double_type_size
= size
;
1237 /* Set Altivec ABI as default for powerpc64 linux. */
1238 if (TARGET_ELF
&& TARGET_64BIT
)
1240 rs6000_altivec_abi
= 1;
1241 rs6000_altivec_vrsave
= 1;
1244 /* Handle -mabi= options. */
1245 rs6000_parse_abi_options ();
1247 /* Handle -malign-XXXXX option. */
1248 rs6000_parse_alignment_option ();
1250 /* Handle generic -mFOO=YES/NO options. */
1251 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
1252 &rs6000_altivec_vrsave
);
1253 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
1255 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
1256 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
1257 &rs6000_float_gprs
);
1259 /* Handle -mtls-size option. */
1260 rs6000_parse_tls_size_option ();
1262 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1263 SUBTARGET_OVERRIDE_OPTIONS
;
1265 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1266 SUBSUBTARGET_OVERRIDE_OPTIONS
;
1272 error ("AltiVec and E500 instructions cannot coexist");
1274 /* The e500 does not have string instructions, and we set
1275 MASK_STRING above when optimizing for size. */
1276 if ((target_flags
& MASK_STRING
) != 0)
1277 target_flags
= target_flags
& ~MASK_STRING
;
1279 /* No SPE means 64-bit long doubles, even if an E500. */
1280 if (rs6000_spe_string
!= 0
1281 && !strcmp (rs6000_spe_string
, "no"))
1282 rs6000_long_double_type_size
= 64;
1284 else if (rs6000_select
[1].string
!= NULL
)
1286 /* For the powerpc-eabispe configuration, we set all these by
1287 default, so let's unset them if we manually set another
1288 CPU that is not the E500. */
1289 if (rs6000_abi_string
== 0)
1291 if (rs6000_spe_string
== 0)
1293 if (rs6000_float_gprs_string
== 0)
1294 rs6000_float_gprs
= 0;
1295 if (rs6000_isel_string
== 0)
1297 if (rs6000_long_double_size_string
== 0)
1298 rs6000_long_double_type_size
= 64;
1301 rs6000_always_hint
= (rs6000_cpu
!= PROCESSOR_POWER4
1302 && rs6000_cpu
!= PROCESSOR_POWER5
);
1303 rs6000_sched_groups
= (rs6000_cpu
== PROCESSOR_POWER4
1304 || rs6000_cpu
== PROCESSOR_POWER5
);
1306 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1307 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1308 bits in target_flags so TARGET_SWITCHES cannot be used.
1309 Assumption here is that rs6000_longcall_switch points into the
1310 text of the complete option, rather than being a copy, so we can
1311 scan back for the presence or absence of the no- modifier. */
1312 if (rs6000_longcall_switch
)
1314 const char *base
= rs6000_longcall_switch
;
1315 while (base
[-1] != 'm') base
--;
1317 if (*rs6000_longcall_switch
!= '\0')
1318 error ("invalid option `%s'", base
);
1319 rs6000_default_long_calls
= (base
[0] != 'n');
1322 /* Handle -m(no-)warn-altivec-long similarly. */
1323 if (rs6000_warn_altivec_long_switch
)
1325 const char *base
= rs6000_warn_altivec_long_switch
;
1326 while (base
[-1] != 'm') base
--;
1328 if (*rs6000_warn_altivec_long_switch
!= '\0')
1329 error ("invalid option `%s'", base
);
1330 rs6000_warn_altivec_long
= (base
[0] != 'n');
1333 /* Handle -mprioritize-restricted-insns option. */
1334 rs6000_sched_restricted_insns_priority
1335 = (rs6000_sched_groups
? 1 : 0);
1336 if (rs6000_sched_restricted_insns_priority_str
)
1337 rs6000_sched_restricted_insns_priority
=
1338 atoi (rs6000_sched_restricted_insns_priority_str
);
1340 /* Handle -msched-costly-dep option. */
1341 rs6000_sched_costly_dep
1342 = (rs6000_sched_groups
? store_to_load_dep_costly
: no_dep_costly
);
1343 if (rs6000_sched_costly_dep_str
)
1345 if (! strcmp (rs6000_sched_costly_dep_str
, "no"))
1346 rs6000_sched_costly_dep
= no_dep_costly
;
1347 else if (! strcmp (rs6000_sched_costly_dep_str
, "all"))
1348 rs6000_sched_costly_dep
= all_deps_costly
;
1349 else if (! strcmp (rs6000_sched_costly_dep_str
, "true_store_to_load"))
1350 rs6000_sched_costly_dep
= true_store_to_load_dep_costly
;
1351 else if (! strcmp (rs6000_sched_costly_dep_str
, "store_to_load"))
1352 rs6000_sched_costly_dep
= store_to_load_dep_costly
;
1354 rs6000_sched_costly_dep
= atoi (rs6000_sched_costly_dep_str
);
1357 /* Handle -minsert-sched-nops option. */
1358 rs6000_sched_insert_nops
1359 = (rs6000_sched_groups
? sched_finish_regroup_exact
: sched_finish_none
);
1360 if (rs6000_sched_insert_nops_str
)
1362 if (! strcmp (rs6000_sched_insert_nops_str
, "no"))
1363 rs6000_sched_insert_nops
= sched_finish_none
;
1364 else if (! strcmp (rs6000_sched_insert_nops_str
, "pad"))
1365 rs6000_sched_insert_nops
= sched_finish_pad_groups
;
1366 else if (! strcmp (rs6000_sched_insert_nops_str
, "regroup_exact"))
1367 rs6000_sched_insert_nops
= sched_finish_regroup_exact
;
1369 rs6000_sched_insert_nops
= atoi (rs6000_sched_insert_nops_str
);
1372 #ifdef TARGET_REGNAMES
1373 /* If the user desires alternate register names, copy in the
1374 alternate names now. */
1375 if (TARGET_REGNAMES
)
1376 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
1379 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1380 If -maix-struct-return or -msvr4-struct-return was explicitly
1381 used, don't override with the ABI default. */
1382 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
1384 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
1385 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
1387 target_flags
|= MASK_AIX_STRUCT_RET
;
1390 if (TARGET_LONG_DOUBLE_128
1391 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
1392 REAL_MODE_FORMAT (TFmode
) = &ibm_extended_format
;
1394 /* Allocate an alias set for register saves & restores from stack. */
1395 rs6000_sr_alias_set
= new_alias_set ();
1398 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
1400 /* We can only guarantee the availability of DI pseudo-ops when
1401 assembling for 64-bit targets. */
1404 targetm
.asm_out
.aligned_op
.di
= NULL
;
1405 targetm
.asm_out
.unaligned_op
.di
= NULL
;
1408 /* Set maximum branch target alignment at two instructions, eight bytes. */
1409 align_jumps_max_skip
= 8;
1410 align_loops_max_skip
= 8;
1412 /* Arrange to save and restore machine status around nested functions. */
1413 init_machine_status
= rs6000_init_machine_status
;
1415 /* We should always be splitting complex arguments, but we can't break
1416 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1417 if (DEFAULT_ABI
!= ABI_AIX
)
1418 targetm
.calls
.split_complex_arg
= NULL
;
1420 /* Initialize rs6000_cost with the appropriate target costs. */
1422 rs6000_cost
= TARGET_POWERPC64
? &size64_cost
: &size32_cost
;
1426 case PROCESSOR_RIOS1
:
1427 rs6000_cost
= &rios1_cost
;
1430 case PROCESSOR_RIOS2
:
1431 rs6000_cost
= &rios2_cost
;
1434 case PROCESSOR_RS64A
:
1435 rs6000_cost
= &rs64a_cost
;
1438 case PROCESSOR_MPCCORE
:
1439 rs6000_cost
= &mpccore_cost
;
1442 case PROCESSOR_PPC403
:
1443 rs6000_cost
= &ppc403_cost
;
1446 case PROCESSOR_PPC405
:
1447 rs6000_cost
= &ppc405_cost
;
1450 case PROCESSOR_PPC440
:
1451 rs6000_cost
= &ppc440_cost
;
1454 case PROCESSOR_PPC601
:
1455 rs6000_cost
= &ppc601_cost
;
1458 case PROCESSOR_PPC603
:
1459 rs6000_cost
= &ppc603_cost
;
1462 case PROCESSOR_PPC604
:
1463 rs6000_cost
= &ppc604_cost
;
1466 case PROCESSOR_PPC604e
:
1467 rs6000_cost
= &ppc604e_cost
;
1470 case PROCESSOR_PPC620
:
1471 rs6000_cost
= &ppc620_cost
;
1474 case PROCESSOR_PPC630
:
1475 rs6000_cost
= &ppc630_cost
;
1478 case PROCESSOR_PPC750
:
1479 case PROCESSOR_PPC7400
:
1480 rs6000_cost
= &ppc750_cost
;
1483 case PROCESSOR_PPC7450
:
1484 rs6000_cost
= &ppc7450_cost
;
1487 case PROCESSOR_PPC8540
:
1488 rs6000_cost
= &ppc8540_cost
;
1491 case PROCESSOR_POWER4
:
1492 case PROCESSOR_POWER5
:
1493 rs6000_cost
= &power4_cost
;
1501 /* Handle generic options of the form -mfoo=yes/no.
1502 NAME is the option name.
1503 VALUE is the option value.
1504 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1505 whether the option value is 'yes' or 'no' respectively. */
/* NOTE(review): the assignments to *FLAG and the NULL-VALUE early
   return (original lines 1509-1514) are missing from this extraction;
   only the strcmp dispatch and the error branch survive.  */
1507 rs6000_parse_yes_no_option (const char *name
, const char *value
, int *flag
)
1511 else if (!strcmp (value
, "yes"))
1513 else if (!strcmp (value
, "no"))
/* Anything other than "yes"/"no" is rejected.  */
1516 error ("unknown -m%s= option specified: '%s'", name
, value
);
1519 /* Handle -mabi= options. */
/* Dispatches on rs6000_abi_string: "altivec"/"no-altivec" toggle the
   AltiVec ABI, "spe"/"no-spe" the SPE ABI (with a configuration check),
   anything else is an error.  NOTE(review): the SPE-branch brace lines
   and the rs6000_spe_abi assignments are missing from this extraction.  */
1521 rs6000_parse_abi_options (void)
1523 if (rs6000_abi_string
== 0)
1525 else if (! strcmp (rs6000_abi_string
, "altivec"))
1527 rs6000_altivec_abi
= 1;
1530 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
1531 rs6000_altivec_abi
= 0;
1532 else if (! strcmp (rs6000_abi_string
, "spe"))
1535 rs6000_altivec_abi
= 0;
/* SPE ABI requires an SPE-capable configuration.  */
1536 if (!TARGET_SPE_ABI
)
1537 error ("not configured for ABI: '%s'", rs6000_abi_string
);
1540 else if (! strcmp (rs6000_abi_string
, "no-spe"))
1543 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
1546 /* Handle -malign-XXXXXX options. */
/* Maps -malign-power / -malign-natural onto rs6000_alignment_flags;
   any other non-empty string is an error.  */
1548 rs6000_parse_alignment_option (void)
1550 if (rs6000_alignment_string
== 0)
1552 else if (! strcmp (rs6000_alignment_string
, "power"))
1553 rs6000_alignment_flags
= MASK_ALIGN_POWER
;
1554 else if (! strcmp (rs6000_alignment_string
, "natural"))
1555 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
1557 error ("unknown -malign-XXXXX option specified: '%s'",
1558 rs6000_alignment_string
);
1561 /* Validate and record the size specified with the -mtls-size option. */
/* Accepts exactly "16", "32" or "64" and stores the numeric value in
   rs6000_tls_size; anything else is rejected with an error.  */
1564 rs6000_parse_tls_size_option (void)
1566 if (rs6000_tls_size_string
== 0)
1568 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
1569 rs6000_tls_size
= 16;
1570 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
1571 rs6000_tls_size
= 32;
1572 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
1573 rs6000_tls_size
= 64;
1575 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string
);
1579 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1583 /* Do anything needed at the start of the asm file. */
1586 rs6000_file_start (void)
1590 const char *start
= buffer
;
1591 struct rs6000_cpu_select
*ptr
;
1592 const char *default_cpu
= TARGET_CPU_DEFAULT
;
1593 FILE *file
= asm_out_file
;
1595 default_file_start ();
1597 #ifdef TARGET_BI_ARCH
1598 if ((TARGET_DEFAULT
^ target_flags
) & MASK_64BIT
)
1602 if (flag_verbose_asm
)
1604 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
1605 rs6000_select
[0].string
= default_cpu
;
1607 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1609 ptr
= &rs6000_select
[i
];
1610 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1612 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
1617 #ifdef USING_ELFOS_H
1618 switch (rs6000_sdata
)
1620 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
1621 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
1622 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
1623 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
1626 if (rs6000_sdata
&& g_switch_value
)
1628 fprintf (file
, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED
, start
,
1639 /* Return nonzero if this function is known to have a null epilogue. */
/* Only meaningful after reload: queries rs6000_stack_info() and reports
   whether no GPR/FPR/AltiVec register needs saving and neither LR, CR
   nor VRSAVE is live.  NOTE(review): the `return 1;`/`return 0;` tail
   and some brace lines are missing from this extraction.  */
1642 direct_return (void)
1644 if (reload_completed
)
1646 rs6000_stack_t
*info
= rs6000_stack_info ();
/* 32 == one past the last GPR, 64 == one past the last FPR: i.e.
   "no register of that class is saved".  */
1648 if (info
->first_gp_reg_save
== 32
1649 && info
->first_fp_reg_save
== 64
1650 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
1651 && ! info
->lr_save_p
1652 && ! info
->cr_save_p
1653 && info
->vrsave_mask
== 0
1661 /* Returns 1 always. */
/* Trivial catch-all predicate.  NOTE(review): the `{ return 1; }' body
   (original lines 1666-1668) is missing from this extraction.  */
1664 any_operand (rtx op ATTRIBUTE_UNUSED
,
1665 enum machine_mode mode ATTRIBUTE_UNUSED
)
1670 /* Returns 1 if op is the count register. */
/* Accepts a REG that is either CTR itself or a pseudo (which might
   later be allocated to CTR).  NOTE(review): the `return' statements
   for each branch are missing from this extraction.  */
1672 count_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1674 if (GET_CODE (op
) != REG
)
1677 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
1680 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
1686 /* Returns 1 if op is an altivec register. */
/* True for a SUBREG/pseudo, or for a hard REG in the AltiVec class.  */
1688 altivec_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1691 return (register_operand (op
, mode
)
1692 && (GET_CODE (op
) != REG
1693 || REGNO (op
) > FIRST_PSEUDO_REGISTER
1694 || ALTIVEC_REGNO_P (REGNO (op
))));
/* Returns 1 if op is the XER (fixed-point exception) register.
   NOTE(review): the introductory comment and the `return' statements
   are missing from this extraction.  */
1698 xer_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1700 if (GET_CODE (op
) != REG
)
1703 if (XER_REGNO_P (REGNO (op
)))
1709 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1710 by such constants completes more quickly. */
1713 s8bit_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
/* CONST_INT in [-128, 127].  */
1715 return ( GET_CODE (op
) == CONST_INT
1716 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
1719 /* Return 1 if OP is a constant that can fit in a D field. */
/* I.e. a CONST_INT satisfying constraint 'I' (signed 16-bit).  */
1722 short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1724 return (GET_CODE (op
) == CONST_INT
1725 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1728 /* Similar for an unsigned D field. */
/* CONST_INT whose mode-masked value satisfies constraint 'K'
   (unsigned 16-bit).  */
1731 u_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1733 return (GET_CODE (op
) == CONST_INT
1734 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1737 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
/* The bias-and-compare rejects values in [-0x8000, 0x7fff].  */
1740 non_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1742 return (GET_CODE (op
) == CONST_INT
1743 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1746 /* Returns 1 if OP is a CONST_INT that is a positive value
1747 and an exact power of 2. */
/* NOTE(review): original line 1753 (likely an `INTVAL (op) > 0' style
   conjunct) is missing from this extraction.  */
1750 exact_log2_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1752 return (GET_CODE (op
) == CONST_INT
1754 && exact_log2 (INTVAL (op
)) >= 0);
1757 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
/* ... CTR, LR or the XER): accepts pseudos, GPRs/FPRs at or above
   ARG_POINTER_REGNUM excluding XER, or hard regs below MQ_REGNO.
   NOTE(review): the rest of this comment (original lines 1758-1760)
   is missing from this extraction.  */
1761 gpc_reg_operand (rtx op
, enum machine_mode mode
)
1763 return (register_operand (op
, mode
)
1764 && (GET_CODE (op
) != REG
1765 || (REGNO (op
) >= ARG_POINTER_REGNUM
1766 && !XER_REGNO_P (REGNO (op
)))
1767 || REGNO (op
) < MQ_REGNO
));
1770 /* Returns 1 if OP is either a pseudo-register or a register denoting a
/* ... CR field (the original comment's continuation, lines 1771-1773,
   is missing from this extraction).  */
1774 cc_reg_operand (rtx op
, enum machine_mode mode
)
1776 return (register_operand (op
, mode
)
1777 && (GET_CODE (op
) != REG
1778 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1779 || CR_REGNO_P (REGNO (op
))));
1782 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1783 CR field that isn't CR0. */
1786 cc_reg_not_cr0_operand (rtx op
, enum machine_mode mode
)
1788 return (register_operand (op
, mode
)
1789 && (GET_CODE (op
) != REG
1790 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1791 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1794 /* Returns 1 if OP is either a constant integer valid for a D-field or
1795 a non-special register. If a register, it must be in the proper
1796 mode unless MODE is VOIDmode. */
1799 reg_or_short_operand (rtx op
, enum machine_mode mode
)
1801 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1804 /* Similar, except check if the negation of the constant would be
1805 valid for a D-field. Don't allow a constant zero, since all the
1806 patterns that call this predicate use "addic r1,r2,-constant" on
1807 a constant value to set a carry when r2 is greater or equal to
1808 "constant". That doesn't work for zero. */
1811 reg_or_neg_short_operand (rtx op
, enum machine_mode mode
)
/* Constraint 'P' checks the negated value fits a D field.  */
1813 if (GET_CODE (op
) == CONST_INT
)
1814 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P') && INTVAL (op
) != 0;
1816 return gpc_reg_operand (op
, mode
);
1819 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1820 a non-special register. If a register, it must be in the proper
1821 mode unless MODE is VOIDmode. */
/* A DS-field constant must have its low two bits clear.  NOTE(review):
   the `return 1;`/`return 0;` statements are missing from this
   extraction.  */
1824 reg_or_aligned_short_operand (rtx op
, enum machine_mode mode
)
1826 if (gpc_reg_operand (op
, mode
))
1828 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1835 /* Return 1 if the operand is either a register or an integer whose
1836 high-order 16 bits are zero. */
1839 reg_or_u_short_operand (rtx op
, enum machine_mode mode
)
1841 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1844 /* Return 1 if the operand is either a non-special register or ANY
1845 constant integer. */
1848 reg_or_cint_operand (rtx op
, enum machine_mode mode
)
1850 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1853 /* Return 1 if the operand is either a non-special register or ANY
1854 32-bit signed constant integer. */
/* NOTE(review): the #endif and closing parens for the HOST_WIDE_INT
   conditional (original lines 1864-1866) are missing from this
   extraction.  */
1857 reg_or_arith_cint_operand (rtx op
, enum machine_mode mode
)
1859 return (gpc_reg_operand (op
, mode
)
1860 || (GET_CODE (op
) == CONST_INT
/* On hosts with wide HOST_WIDE_INT, bias by 2^31 and range-check
   against 2^32 to accept exactly the 32-bit signed range.  */
1861 #if HOST_BITS_PER_WIDE_INT != 32
1862 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1863 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1868 /* Return 1 if the operand is either a non-special register or a 32-bit
1869 signed constant integer valid for 64-bit addition. */
/* NOTE(review): the #else arm's comparison bound, #endif and closing
   parens (original lines 1878, 1880-1882) are missing from this
   extraction.  */
1872 reg_or_add_cint64_operand (rtx op
, enum machine_mode mode
)
1874 return (gpc_reg_operand (op
, mode
)
1875 || (GET_CODE (op
) == CONST_INT
1876 #if HOST_BITS_PER_WIDE_INT == 32
/* 0x7fff8000 is the largest value addis+addi can reach.  */
1877 && INTVAL (op
) < 0x7fff8000
1879 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1885 /* Return 1 if the operand is either a non-special register or a 32-bit
1886 signed constant integer valid for 64-bit subtraction. */
/* Same shape as reg_or_add_cint64_operand but tested on the negated
   constant.  NOTE(review): the #else arm's bound, #endif and closing
   parens are missing from this extraction.  */
1889 reg_or_sub_cint64_operand (rtx op
, enum machine_mode mode
)
1891 return (gpc_reg_operand (op
, mode
)
1892 || (GET_CODE (op
) == CONST_INT
1893 #if HOST_BITS_PER_WIDE_INT == 32
1894 && (- INTVAL (op
)) < 0x7fff8000
1896 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1902 /* Return 1 if the operand is either a non-special register or ANY
1903 32-bit unsigned constant integer. */
/* NOTE(review): several interior lines (returns for the narrow-mode
   cases, braces, and part of the CONST_DOUBLE condition) are missing
   from this extraction.  */
1906 reg_or_logical_cint_operand (rtx op
, enum machine_mode mode
)
1908 if (GET_CODE (op
) == CONST_INT
)
1910 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1912 if (GET_MODE_BITSIZE (mode
) <= 32)
1915 if (INTVAL (op
) < 0)
/* Wide mode: accept only if no bits above the low 32 are set after
   masking to the mode.  */
1919 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1920 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1922 else if (GET_CODE (op
) == CONST_DOUBLE
)
1924 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1928 return CONST_DOUBLE_HIGH (op
) == 0;
1931 return gpc_reg_operand (op
, mode
);
1934 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1937 got_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1939 return (GET_CODE (op
) == SYMBOL_REF
1940 || GET_CODE (op
) == CONST
1941 || GET_CODE (op
) == LABEL_REF
);
1944 /* Return 1 if the operand is a simple reference that can be loaded via
1945 the GOT (labels involving addition aren't allowed). */
/* Like got_operand but excluding CONST wrappers.  */
1948 got_no_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1950 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1953 /* Return the number of instructions it takes to form a constant in an
1954 integer register. */
/* NOTE(review): the `return 1;` bodies of the first two branches, the
   brace lines, the shift-based middle cases and the final #else/#endif
   tail (original lines 1961-1962, 1965-1966, 1972-1978, 1983-1988) are
   missing from this extraction.  Only the branch structure survives.  */
1957 num_insns_constant_wide (HOST_WIDE_INT value
)
1959 /* signed constant loadable with {cal|addi} */
1960 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1963 /* constant loadable with {cau|addis} */
1964 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1967 #if HOST_BITS_PER_WIDE_INT == 64
1968 else if (TARGET_POWERPC64
)
/* Split into a sign-extended low 32 bits and the remaining high part,
   then recurse: each half costs its own insns plus one combining insn.  */
1970 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1971 HOST_WIDE_INT high
= value
>> 31;
1973 if (high
== 0 || high
== -1)
1979 return num_insns_constant_wide (high
) + 1;
1981 return (num_insns_constant_wide (high
)
1982 + num_insns_constant_wide (low
) + 1);
1991 num_insns_constant (rtx op
, enum machine_mode mode
)
1993 if (GET_CODE (op
) == CONST_INT
)
1995 #if HOST_BITS_PER_WIDE_INT == 64
1996 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1997 && mask64_operand (op
, mode
))
2001 return num_insns_constant_wide (INTVAL (op
));
2004 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
2009 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2010 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
2011 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
2014 else if (GET_CODE (op
) == CONST_DOUBLE
)
2020 int endian
= (WORDS_BIG_ENDIAN
== 0);
2022 if (mode
== VOIDmode
|| mode
== DImode
)
2024 high
= CONST_DOUBLE_HIGH (op
);
2025 low
= CONST_DOUBLE_LOW (op
);
2029 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2030 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
2032 low
= l
[1 - endian
];
2036 return (num_insns_constant_wide (low
)
2037 + num_insns_constant_wide (high
));
2041 if (high
== 0 && low
>= 0)
2042 return num_insns_constant_wide (low
);
2044 else if (high
== -1 && low
< 0)
2045 return num_insns_constant_wide (low
);
2047 else if (mask64_operand (op
, mode
))
2051 return num_insns_constant_wide (high
) + 1;
2054 return (num_insns_constant_wide (high
)
2055 + num_insns_constant_wide (low
) + 1);
2063 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2064 register with one instruction per word. We only do this if we can
2065 safely read CONST_DOUBLE_{LOW,HIGH}. */
2068 easy_fp_constant (rtx op
, enum machine_mode mode
)
2070 if (GET_CODE (op
) != CONST_DOUBLE
2071 || GET_MODE (op
) != mode
2072 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
2075 /* Consider all constants with -msoft-float to be easy. */
2076 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
2080 /* If we are using V.4 style PIC, consider all constants to be hard. */
2081 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
2084 #ifdef TARGET_RELOCATABLE
2085 /* Similarly if we are using -mrelocatable, consider all constants
2087 if (TARGET_RELOCATABLE
)
2096 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2097 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
2099 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
2100 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
2101 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
2102 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
2105 else if (mode
== DFmode
)
2110 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2111 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
2113 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
2114 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
2117 else if (mode
== SFmode
)
2122 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2123 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
2125 return num_insns_constant_wide (l
) == 1;
2128 else if (mode
== DImode
)
2129 return ((TARGET_POWERPC64
2130 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
2131 || (num_insns_constant (op
, DImode
) <= 2));
2133 else if (mode
== SImode
)
2139 /* Returns the constant for the splat instruction, if exists. */
2142 easy_vector_splat_const (int cst
, enum machine_mode mode
)
2147 if (EASY_VECTOR_15 (cst
)
2148 || EASY_VECTOR_15_ADD_SELF (cst
))
2150 if ((cst
& 0xffff) != ((cst
>> 16) & 0xffff))
2154 if (EASY_VECTOR_15 (cst
)
2155 || EASY_VECTOR_15_ADD_SELF (cst
))
2157 if ((cst
& 0xff) != ((cst
>> 8) & 0xff))
2161 if (EASY_VECTOR_15 (cst
)
2162 || EASY_VECTOR_15_ADD_SELF (cst
))
2171 /* Return nonzero if all elements of a vector have the same value. */
2174 easy_vector_same (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2178 units
= CONST_VECTOR_NUNITS (op
);
2180 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
2181 for (i
= 1; i
< units
; ++i
)
2182 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
2184 if (i
== units
&& easy_vector_splat_const (cst
, mode
))
2189 /* Return 1 if the operand is a CONST_INT and can be put into a
2190 register without using memory. */
2193 easy_vector_constant (rtx op
, enum machine_mode mode
)
2197 if (GET_CODE (op
) != CONST_VECTOR
2202 if (zero_constant (op
, mode
)
2203 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
2204 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
2207 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
2210 if (TARGET_SPE
&& mode
== V1DImode
)
2213 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
2214 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
2216 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2218 evmergelo r0, r0, r0
2221 I don't know how efficient it would be to allow bigger constants,
2222 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2223 instructions is better than a 64-bit memory load, but I don't
2224 have the e500 timing specs. */
2225 if (TARGET_SPE
&& mode
== V2SImode
2226 && cst
>= -0x7fff && cst
<= 0x7fff
2227 && cst2
>= -0x7fff && cst2
<= 0x7fff)
2231 && easy_vector_same (op
, mode
))
2233 cst
= easy_vector_splat_const (cst
, mode
);
2234 if (EASY_VECTOR_15_ADD_SELF (cst
)
2235 || EASY_VECTOR_15 (cst
))
2241 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2244 easy_vector_constant_add_self (rtx op
, enum machine_mode mode
)
2248 && GET_CODE (op
) == CONST_VECTOR
2249 && easy_vector_same (op
, mode
))
2251 cst
= easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op
, 0)), mode
);
2252 if (EASY_VECTOR_15_ADD_SELF (cst
))
2258 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2261 gen_easy_vector_constant_add_self (rtx op
)
2265 units
= GET_MODE_NUNITS (GET_MODE (op
));
2266 v
= rtvec_alloc (units
);
2268 for (i
= 0; i
< units
; i
++)
2270 GEN_INT (INTVAL (CONST_VECTOR_ELT (op
, i
)) >> 1);
2271 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op
), v
);
2275 output_vec_const_move (rtx
*operands
)
2278 enum machine_mode mode
;
2284 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
2285 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
2286 mode
= GET_MODE (dest
);
2290 if (zero_constant (vec
, mode
))
2291 return "vxor %0,%0,%0";
2292 else if (easy_vector_constant (vec
, mode
))
2294 operands
[1] = GEN_INT (cst
);
2298 if (EASY_VECTOR_15 (cst
))
2300 operands
[1] = GEN_INT (cst
);
2301 return "vspltisw %0,%1";
2303 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2307 if (EASY_VECTOR_15 (cst
))
2309 operands
[1] = GEN_INT (cst
);
2310 return "vspltish %0,%1";
2312 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2316 if (EASY_VECTOR_15 (cst
))
2318 operands
[1] = GEN_INT (cst
);
2319 return "vspltisb %0,%1";
2321 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2333 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2334 pattern of V1DI, V4HI, and V2SF.
2336 FIXME: We should probably return # and add post reload
2337 splitters for these, but this way is so easy ;-).
2339 operands
[1] = GEN_INT (cst
);
2340 operands
[2] = GEN_INT (cst2
);
2342 return "li %0,%1\n\tevmergelo %0,%0,%0";
2344 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2350 /* Return 1 if the operand is the constant 0. This works for scalars
2351 as well as vectors. */
2353 zero_constant (rtx op
, enum machine_mode mode
)
2355 return op
== CONST0_RTX (mode
);
2358 /* Return 1 if the operand is 0.0. */
2360 zero_fp_constant (rtx op
, enum machine_mode mode
)
2362 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
2365 /* Return 1 if the operand is in volatile memory. Note that during
2366 the RTL generation phase, memory_operand does not return TRUE for
2367 volatile memory references. So this function allows us to
2368 recognize volatile references where its safe. */
2371 volatile_mem_operand (rtx op
, enum machine_mode mode
)
2373 if (GET_CODE (op
) != MEM
)
2376 if (!MEM_VOLATILE_P (op
))
2379 if (mode
!= GET_MODE (op
))
2382 if (reload_completed
)
2383 return memory_operand (op
, mode
);
2385 if (reload_in_progress
)
2386 return strict_memory_address_p (mode
, XEXP (op
, 0));
2388 return memory_address_p (mode
, XEXP (op
, 0));
2391 /* Return 1 if the operand is an offsettable memory operand. */
2394 offsettable_mem_operand (rtx op
, enum machine_mode mode
)
2396 return ((GET_CODE (op
) == MEM
)
2397 && offsettable_address_p (reload_completed
|| reload_in_progress
,
2398 mode
, XEXP (op
, 0)));
2401 /* Return 1 if the operand is either an easy FP constant (see above) or
2405 mem_or_easy_const_operand (rtx op
, enum machine_mode mode
)
2407 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
2410 /* Return 1 if the operand is either a non-special register or an item
2411 that can be used as the operand of a `mode' add insn. */
2414 add_operand (rtx op
, enum machine_mode mode
)
2416 if (GET_CODE (op
) == CONST_INT
)
2417 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
2418 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
2420 return gpc_reg_operand (op
, mode
);
2423 /* Return 1 if OP is a constant but not a valid add_operand. */
2426 non_add_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2428 return (GET_CODE (op
) == CONST_INT
2429 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
2430 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
2433 /* Return 1 if the operand is a non-special register or a constant that
2434 can be used as the operand of an OR or XOR insn on the RS/6000. */
2437 logical_operand (rtx op
, enum machine_mode mode
)
2439 HOST_WIDE_INT opl
, oph
;
2441 if (gpc_reg_operand (op
, mode
))
2444 if (GET_CODE (op
) == CONST_INT
)
2446 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
2448 #if HOST_BITS_PER_WIDE_INT <= 32
2449 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
2453 else if (GET_CODE (op
) == CONST_DOUBLE
)
2455 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2458 opl
= CONST_DOUBLE_LOW (op
);
2459 oph
= CONST_DOUBLE_HIGH (op
);
2466 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
2467 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
2470 /* Return 1 if C is a constant that is not a logical operand (as
2471 above), but could be split into one. */
2474 non_logical_cint_operand (rtx op
, enum machine_mode mode
)
2476 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
2477 && ! logical_operand (op
, mode
)
2478 && reg_or_logical_cint_operand (op
, mode
));
2481 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2482 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2483 Reject all ones and all zeros, since these should have been optimized
2484 away and confuse the making of MB and ME. */
2487 mask_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2489 HOST_WIDE_INT c
, lsb
;
2491 if (GET_CODE (op
) != CONST_INT
)
2496 /* Fail in 64-bit mode if the mask wraps around because the upper
2497 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2498 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
2501 /* We don't change the number of transitions by inverting,
2502 so make sure we start with the LS bit zero. */
2506 /* Reject all zeros or all ones. */
2510 /* Find the first transition. */
2513 /* Invert to look for a second transition. */
2516 /* Erase first transition. */
2519 /* Find the second transition (if any). */
2522 /* Match if all the bits above are 1's (or c is zero). */
2526 /* Return 1 for the PowerPC64 rlwinm corner case. */
2529 mask_operand_wrap (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2531 HOST_WIDE_INT c
, lsb
;
2533 if (GET_CODE (op
) != CONST_INT
)
2538 if ((c
& 0x80000001) != 0x80000001)
2552 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2553 It is if there are no more than one 1->0 or 0->1 transitions.
2554 Reject all zeros, since zero should have been optimized away and
2555 confuses the making of MB and ME. */
2558 mask64_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2560 if (GET_CODE (op
) == CONST_INT
)
2562 HOST_WIDE_INT c
, lsb
;
2566 /* Reject all zeros. */
2570 /* We don't change the number of transitions by inverting,
2571 so make sure we start with the LS bit zero. */
2575 /* Find the transition, and check that all bits above are 1's. */
2578 /* Match if all the bits above are 1's (or c is zero). */
2584 /* Like mask64_operand, but allow up to three transitions. This
2585 predicate is used by insn patterns that generate two rldicl or
2586 rldicr machine insns. */
2589 mask64_2_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2591 if (GET_CODE (op
) == CONST_INT
)
2593 HOST_WIDE_INT c
, lsb
;
2597 /* Disallow all zeros. */
2601 /* We don't change the number of transitions by inverting,
2602 so make sure we start with the LS bit zero. */
2606 /* Find the first transition. */
2609 /* Invert to look for a second transition. */
2612 /* Erase first transition. */
2615 /* Find the second transition. */
2618 /* Invert to look for a third transition. */
2621 /* Erase second transition. */
2624 /* Find the third transition (if any). */
2627 /* Match if all the bits above are 1's (or c is zero). */
2633 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2634 implement ANDing by the mask IN. */
2636 build_mask64_2_operands (rtx in
, rtx
*out
)
2638 #if HOST_BITS_PER_WIDE_INT >= 64
2639 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
2642 if (GET_CODE (in
) != CONST_INT
)
2648 /* Assume c initially something like 0x00fff000000fffff. The idea
2649 is to rotate the word so that the middle ^^^^^^ group of zeros
2650 is at the MS end and can be cleared with an rldicl mask. We then
2651 rotate back and clear off the MS ^^ group of zeros with a
2653 c
= ~c
; /* c == 0xff000ffffff00000 */
2654 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
2655 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
2656 c
= ~c
; /* c == 0x00fff000000fffff */
2657 c
&= -lsb
; /* c == 0x00fff00000000000 */
2658 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2659 c
= ~c
; /* c == 0xff000fffffffffff */
2660 c
&= -lsb
; /* c == 0xff00000000000000 */
2662 while ((lsb
>>= 1) != 0)
2663 shift
++; /* shift == 44 on exit from loop */
2664 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2665 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2666 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2670 /* Assume c initially something like 0xff000f0000000000. The idea
2671 is to rotate the word so that the ^^^ middle group of zeros
2672 is at the LS end and can be cleared with an rldicr mask. We then
2673 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2675 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2676 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2677 c
= ~c
; /* c == 0x00fff0ffffffffff */
2678 c
&= -lsb
; /* c == 0x00fff00000000000 */
2679 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2680 c
= ~c
; /* c == 0xff000fffffffffff */
2681 c
&= -lsb
; /* c == 0xff00000000000000 */
2683 while ((lsb
>>= 1) != 0)
2684 shift
++; /* shift == 44 on exit from loop */
2685 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2686 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2687 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2690 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2691 masks will be all 1's. We are guaranteed more than one transition. */
2692 out
[0] = GEN_INT (64 - shift
);
2693 out
[1] = GEN_INT (m1
);
2694 out
[2] = GEN_INT (shift
);
2695 out
[3] = GEN_INT (m2
);
2703 /* Return 1 if the operand is either a non-special register or a constant
2704 that can be used as the operand of a PowerPC64 logical AND insn. */
2707 and64_operand (rtx op
, enum machine_mode mode
)
2709 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2710 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
2712 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
2715 /* Like the above, but also match constants that can be implemented
2716 with two rldicl or rldicr insns. */
2719 and64_2_operand (rtx op
, enum machine_mode mode
)
2721 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2722 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2724 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2727 /* Return 1 if the operand is either a non-special register or a
2728 constant that can be used as the operand of an RS/6000 logical AND insn. */
2731 and_operand (rtx op
, enum machine_mode mode
)
2733 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2734 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2736 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2739 /* Return 1 if the operand is a general register or memory operand. */
2742 reg_or_mem_operand (rtx op
, enum machine_mode mode
)
2744 return (gpc_reg_operand (op
, mode
)
2745 || memory_operand (op
, mode
)
2746 || macho_lo_sum_memory_operand (op
, mode
)
2747 || volatile_mem_operand (op
, mode
));
2750 /* Return 1 if the operand is a general register or memory operand without
2751 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2755 lwa_operand (rtx op
, enum machine_mode mode
)
2759 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2760 inner
= SUBREG_REG (inner
);
2762 return gpc_reg_operand (inner
, mode
)
2763 || (memory_operand (inner
, mode
)
2764 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2765 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2766 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2767 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2768 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2771 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2774 symbol_ref_operand (rtx op
, enum machine_mode mode
)
2776 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2779 return (GET_CODE (op
) == SYMBOL_REF
2780 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
)));
2783 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2784 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2787 call_operand (rtx op
, enum machine_mode mode
)
2789 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2792 return (GET_CODE (op
) == SYMBOL_REF
2793 || (GET_CODE (op
) == REG
2794 && (REGNO (op
) == LINK_REGISTER_REGNUM
2795 || REGNO (op
) == COUNT_REGISTER_REGNUM
2796 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2799 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2803 current_file_function_operand (rtx op
,
2804 enum machine_mode mode ATTRIBUTE_UNUSED
)
2806 return (GET_CODE (op
) == SYMBOL_REF
2807 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
))
2808 && (SYMBOL_REF_LOCAL_P (op
)
2809 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))));
2812 /* Return 1 if this operand is a valid input for a move insn. */
2815 input_operand (rtx op
, enum machine_mode mode
)
2817 /* Memory is always valid. */
2818 if (memory_operand (op
, mode
))
2821 /* For floating-point, easy constants are valid. */
2822 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2824 && easy_fp_constant (op
, mode
))
2827 /* Allow any integer constant. */
2828 if (GET_MODE_CLASS (mode
) == MODE_INT
2829 && (GET_CODE (op
) == CONST_INT
2830 || GET_CODE (op
) == CONST_DOUBLE
))
2833 /* Allow easy vector constants. */
2834 if (GET_CODE (op
) == CONST_VECTOR
2835 && easy_vector_constant (op
, mode
))
2838 /* For floating-point or multi-word mode, the only remaining valid type
2840 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2841 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2842 return register_operand (op
, mode
);
2844 /* The only cases left are integral modes one word or smaller (we
2845 do not get called for MODE_CC values). These can be in any
2847 if (register_operand (op
, mode
))
2850 /* A SYMBOL_REF referring to the TOC is valid. */
2851 if (legitimate_constant_pool_address_p (op
))
2854 /* A constant pool expression (relative to the TOC) is valid */
2855 if (toc_relative_expr_p (op
))
2858 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2860 if (DEFAULT_ABI
== ABI_V4
2861 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2862 && small_data_operand (op
, Pmode
))
2869 /* Darwin, AIX increases natural record alignment to doubleword if the first
2870 field is an FP double while the FP fields remain word aligned. */
2873 rs6000_special_round_type_align (tree type
, int computed
, int specified
)
2875 tree field
= TYPE_FIELDS (type
);
2877 /* Skip all the static variables only if ABI is greater than
2879 while (field
!= NULL
&& TREE_CODE (field
) == VAR_DECL
)
2880 field
= TREE_CHAIN (field
);
2882 if (field
== NULL
|| field
== type
|| DECL_MODE (field
) != DFmode
)
2883 return MAX (computed
, specified
);
2885 return MAX (MAX (computed
, specified
), 64);
2888 /* Return 1 for an operand in small memory on V.4/eabi. */
2891 small_data_operand (rtx op ATTRIBUTE_UNUSED
,
2892 enum machine_mode mode ATTRIBUTE_UNUSED
)
2897 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2900 if (DEFAULT_ABI
!= ABI_V4
)
2903 if (GET_CODE (op
) == SYMBOL_REF
)
2906 else if (GET_CODE (op
) != CONST
2907 || GET_CODE (XEXP (op
, 0)) != PLUS
2908 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2909 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2914 rtx sum
= XEXP (op
, 0);
2915 HOST_WIDE_INT summand
;
2917 /* We have to be careful here, because it is the referenced address
2918 that must be 32k from _SDA_BASE_, not just the symbol. */
2919 summand
= INTVAL (XEXP (sum
, 1));
2920 if (summand
< 0 || (unsigned HOST_WIDE_INT
) summand
> g_switch_value
)
2923 sym_ref
= XEXP (sum
, 0);
2926 return SYMBOL_REF_SMALL_P (sym_ref
);
2932 /* Return true, if operand is a memory operand and has a
2933 displacement divisible by 4. */
2936 word_offset_memref_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2941 if (!memory_operand (op
, mode
))
2944 addr
= XEXP (op
, 0);
2945 if (GET_CODE (addr
) == PLUS
2946 && GET_CODE (XEXP (addr
, 0)) == REG
2947 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
2948 off
= INTVAL (XEXP (addr
, 1));
2950 return (off
% 4) == 0;
2953 /* Return true if either operand is a general purpose register. */
2956 gpr_or_gpr_p (rtx op0
, rtx op1
)
2958 return ((REG_P (op0
) && INT_REGNO_P (REGNO (op0
)))
2959 || (REG_P (op1
) && INT_REGNO_P (REGNO (op1
))));
2963 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2966 constant_pool_expr_1 (rtx op
, int *have_sym
, int *have_toc
)
2968 switch (GET_CODE(op
))
2971 if (RS6000_SYMBOL_REF_TLS_P (op
))
2973 else if (CONSTANT_POOL_ADDRESS_P (op
))
2975 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2983 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2992 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2993 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2995 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
3004 constant_pool_expr_p (rtx op
)
3008 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
3012 toc_relative_expr_p (rtx op
)
3016 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
3020 legitimate_constant_pool_address_p (rtx x
)
3023 && GET_CODE (x
) == PLUS
3024 && GET_CODE (XEXP (x
, 0)) == REG
3025 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
3026 && constant_pool_expr_p (XEXP (x
, 1)));
3030 legitimate_small_data_p (enum machine_mode mode
, rtx x
)
3032 return (DEFAULT_ABI
== ABI_V4
3033 && !flag_pic
&& !TARGET_TOC
3034 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
3035 && small_data_operand (x
, mode
));
3038 /* SPE offset addressing is limited to 5-bits worth of double words. */
3039 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3042 rs6000_legitimate_offset_address_p (enum machine_mode mode
, rtx x
, int strict
)
3044 unsigned HOST_WIDE_INT offset
, extra
;
3046 if (GET_CODE (x
) != PLUS
)
3048 if (GET_CODE (XEXP (x
, 0)) != REG
)
3050 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
3052 if (legitimate_constant_pool_address_p (x
))
3054 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
3057 offset
= INTVAL (XEXP (x
, 1));
3065 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3066 which leaves the only valid constant offset of zero, which by
3067 canonicalization rules is also invalid. */
3074 /* SPE vector modes. */
3075 return SPE_CONST_OFFSET_OK (offset
);
3079 if (mode
== DFmode
|| !TARGET_POWERPC64
)
3081 else if (offset
& 3)
3087 if (mode
== TFmode
|| !TARGET_POWERPC64
)
3089 else if (offset
& 3)
3100 return (offset
< 0x10000) && (offset
+ extra
< 0x10000);
3104 legitimate_indexed_address_p (rtx x
, int strict
)
3108 if (GET_CODE (x
) != PLUS
)
3113 if (!REG_P (op0
) || !REG_P (op1
))
3116 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
3117 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
3118 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
3119 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
3123 legitimate_indirect_address_p (rtx x
, int strict
)
3125 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
3129 macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
)
3131 if (!TARGET_MACHO
|| !flag_pic
3132 || mode
!= SImode
|| GET_CODE(x
) != MEM
)
3136 if (GET_CODE (x
) != LO_SUM
)
3138 if (GET_CODE (XEXP (x
, 0)) != REG
)
3140 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), 0))
3144 return CONSTANT_P (x
);
3148 legitimate_lo_sum_address_p (enum machine_mode mode
, rtx x
, int strict
)
3150 if (GET_CODE (x
) != LO_SUM
)
3152 if (GET_CODE (XEXP (x
, 0)) != REG
)
3154 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
3158 if (TARGET_ELF
|| TARGET_MACHO
)
3160 if (DEFAULT_ABI
!= ABI_AIX
&& DEFAULT_ABI
!= ABI_DARWIN
&& flag_pic
)
3164 if (GET_MODE_NUNITS (mode
) != 1)
3166 if (GET_MODE_BITSIZE (mode
) > 32
3167 && !(TARGET_HARD_FLOAT
&& TARGET_FPRS
&& mode
== DFmode
))
3170 return CONSTANT_P (x
);
3177 /* Try machine-dependent ways of modifying an illegitimate address
3178 to be legitimate. If we find one, return the new, valid address.
3179 This is used from only one place: `memory_address' in explow.c.
3181 OLDX is the address as it was before break_out_memory_refs was
3182 called. In some cases it is useful to look at this to decide what
3185 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3187 It is always safe for this function to do nothing. It exists to
3188 recognize opportunities to optimize the output.
3190 On RS/6000, first check for the sum of a register with a constant
3191 integer that is out of range. If so, generate code to add the
3192 constant with the low-order 16 bits masked to the register and force
3193 this result into another register (this can be done with `cau').
3194 Then generate an address of REG+(CONST&0xffff), allowing for the
3195 possibility of bit 16 being a one.
3197 Then check for the sum of a register and something not constant, try to
3198 load the other things into a register and return the sum. */
3201 rs6000_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
3202 enum machine_mode mode
)
3204 if (GET_CODE (x
) == SYMBOL_REF
)
3206 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
3208 return rs6000_legitimize_tls_address (x
, model
);
3211 if (GET_CODE (x
) == PLUS
3212 && GET_CODE (XEXP (x
, 0)) == REG
3213 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3214 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
3216 HOST_WIDE_INT high_int
, low_int
;
3218 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3219 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
3220 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
3221 GEN_INT (high_int
)), 0);
3222 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
3224 else if (GET_CODE (x
) == PLUS
3225 && GET_CODE (XEXP (x
, 0)) == REG
3226 && GET_CODE (XEXP (x
, 1)) != CONST_INT
3227 && GET_MODE_NUNITS (mode
) == 1
3228 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3230 || (mode
!= DFmode
&& mode
!= TFmode
))
3231 && (TARGET_POWERPC64
|| mode
!= DImode
)
3234 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
3235 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
3237 else if (ALTIVEC_VECTOR_MODE (mode
))
3241 /* Make sure both operands are registers. */
3242 if (GET_CODE (x
) == PLUS
)
3243 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
3244 force_reg (Pmode
, XEXP (x
, 1)));
3246 reg
= force_reg (Pmode
, x
);
3249 else if (SPE_VECTOR_MODE (mode
))
3251 /* We accept [reg + reg] and [reg + OFFSET]. */
3253 if (GET_CODE (x
) == PLUS
)
3255 rtx op1
= XEXP (x
, 0);
3256 rtx op2
= XEXP (x
, 1);
3258 op1
= force_reg (Pmode
, op1
);
3260 if (GET_CODE (op2
) != REG
3261 && (GET_CODE (op2
) != CONST_INT
3262 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
3263 op2
= force_reg (Pmode
, op2
);
3265 return gen_rtx_PLUS (Pmode
, op1
, op2
);
3268 return force_reg (Pmode
, x
);
3274 && GET_CODE (x
) != CONST_INT
3275 && GET_CODE (x
) != CONST_DOUBLE
3277 && GET_MODE_NUNITS (mode
) == 1
3278 && (GET_MODE_BITSIZE (mode
) <= 32
3279 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
3281 rtx reg
= gen_reg_rtx (Pmode
);
3282 emit_insn (gen_elf_high (reg
, x
));
3283 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
3285 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
3288 && ! MACHO_DYNAMIC_NO_PIC_P
3290 && GET_CODE (x
) != CONST_INT
3291 && GET_CODE (x
) != CONST_DOUBLE
3293 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
3297 rtx reg
= gen_reg_rtx (Pmode
);
3298 emit_insn (gen_macho_high (reg
, x
));
3299 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
3302 && constant_pool_expr_p (x
)
3303 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
3305 return create_TOC_reference (x
);
3311 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3312 We need to emit DTP-relative relocations. */
3315 rs6000_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
3320 fputs ("\t.long\t", file
);
3323 fputs (DOUBLE_INT_ASM_OP
, file
);
3328 output_addr_const (file
, x
);
3329 fputs ("@dtprel+0x8000", file
);
3332 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3334 static GTY(()) rtx rs6000_tls_symbol
;
3336 rs6000_tls_get_addr (void)
3338 if (!rs6000_tls_symbol
)
3339 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
3341 return rs6000_tls_symbol
;
3344 /* Construct the SYMBOL_REF for TLS GOT references. */
3346 static GTY(()) rtx rs6000_got_symbol
;
3348 rs6000_got_sym (void)
3350 if (!rs6000_got_symbol
)
3352 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
3353 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
3354 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
3357 return rs6000_got_symbol
;
3360 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3361 this (thread-local) address. */
3364 rs6000_legitimize_tls_address (rtx addr
, enum tls_model model
)
3368 dest
= gen_reg_rtx (Pmode
);
3369 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
3375 tlsreg
= gen_rtx_REG (Pmode
, 13);
3376 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
3380 tlsreg
= gen_rtx_REG (Pmode
, 2);
3381 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
3385 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
3389 tmp
= gen_reg_rtx (Pmode
);
3392 tlsreg
= gen_rtx_REG (Pmode
, 13);
3393 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
3397 tlsreg
= gen_rtx_REG (Pmode
, 2);
3398 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
3402 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
3404 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
3409 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
3412 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
3416 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
3419 rtx gsym
= rs6000_got_sym ();
3420 got
= gen_reg_rtx (Pmode
);
3422 rs6000_emit_move (got
, gsym
, Pmode
);
3426 static int tls_got_labelno
= 0;
3427 rtx tempLR
, lab
, tmp3
, mem
;
3430 ASM_GENERATE_INTERNAL_LABEL (buf
, "LTLS", tls_got_labelno
++);
3431 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
3432 tempLR
= gen_reg_rtx (Pmode
);
3433 tmp1
= gen_reg_rtx (Pmode
);
3434 tmp2
= gen_reg_rtx (Pmode
);
3435 tmp3
= gen_reg_rtx (Pmode
);
3436 mem
= gen_rtx_MEM (Pmode
, tmp1
);
3437 RTX_UNCHANGING_P (mem
) = 1;
3439 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, lab
,
3441 emit_move_insn (tmp1
, tempLR
);
3442 emit_move_insn (tmp2
, mem
);
3443 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
3444 last
= emit_move_insn (got
, tmp3
);
3445 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
3447 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
3449 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
3455 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
3457 r3
= gen_rtx_REG (Pmode
, 3);
3459 insn
= gen_tls_gd_64 (r3
, got
, addr
);
3461 insn
= gen_tls_gd_32 (r3
, got
, addr
);
3464 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
3465 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
3466 insn
= emit_call_insn (insn
);
3467 CONST_OR_PURE_CALL_P (insn
) = 1;
3468 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
3469 insn
= get_insns ();
3471 emit_libcall_block (insn
, dest
, r3
, addr
);
3473 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
3475 r3
= gen_rtx_REG (Pmode
, 3);
3477 insn
= gen_tls_ld_64 (r3
, got
);
3479 insn
= gen_tls_ld_32 (r3
, got
);
3482 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
3483 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
3484 insn
= emit_call_insn (insn
);
3485 CONST_OR_PURE_CALL_P (insn
) = 1;
3486 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
3487 insn
= get_insns ();
3489 tmp1
= gen_reg_rtx (Pmode
);
3490 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
3492 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
3493 if (rs6000_tls_size
== 16)
3496 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
3498 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
3500 else if (rs6000_tls_size
== 32)
3502 tmp2
= gen_reg_rtx (Pmode
);
3504 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
3506 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
3509 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
3511 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
3515 tmp2
= gen_reg_rtx (Pmode
);
3517 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
3519 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
3521 insn
= gen_rtx_SET (Pmode
, dest
,
3522 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
3528 /* IE, or 64 bit offset LE. */
3529 tmp2
= gen_reg_rtx (Pmode
);
3531 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
3533 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
3536 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
3538 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
3546 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3547 instruction definitions. */
3550 rs6000_tls_symbol_ref (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3552 return RS6000_SYMBOL_REF_TLS_P (x
);
3555 /* Return 1 if X contains a thread-local symbol. */
3558 rs6000_tls_referenced_p (rtx x
)
3560 if (! TARGET_HAVE_TLS
)
3563 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
3566 /* Return 1 if *X is a thread-local symbol. This is the same as
3567 rs6000_tls_symbol_ref except for the type of the unused argument. */
3570 rs6000_tls_symbol_ref_1 (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
3572 return RS6000_SYMBOL_REF_TLS_P (*x
);
3575 /* The convention appears to be to define this wherever it is used.
3576 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3577 is now used here. */
3578 #ifndef REG_MODE_OK_FOR_BASE_P
3579 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3582 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3583 replace the input X, or the original X if no replacement is called for.
3584 The output parameter *WIN is 1 if the calling macro should goto WIN,
3587 For RS/6000, we wish to handle large displacements off a base
3588 register by splitting the addend across an addiu/addis and the mem insn.
3589 This cuts number of extra insns needed from 3 to 1.
3591 On Darwin, we use this to generate code for floating point constants.
3592 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3593 The Darwin code is inside #if TARGET_MACHO because only then is
3594 machopic_function_base_name() defined. */
3596 rs6000_legitimize_reload_address (rtx x
, enum machine_mode mode
,
3597 int opnum
, int type
, int ind_levels ATTRIBUTE_UNUSED
, int *win
)
3599 /* We must recognize output that we have already generated ourselves. */
3600 if (GET_CODE (x
) == PLUS
3601 && GET_CODE (XEXP (x
, 0)) == PLUS
3602 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
3603 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3604 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3606 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3607 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3608 opnum
, (enum reload_type
)type
);
3614 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
3615 && GET_CODE (x
) == LO_SUM
3616 && GET_CODE (XEXP (x
, 0)) == PLUS
3617 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
3618 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
3619 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
3620 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
3621 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
3622 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
3623 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
3625 /* Result of previous invocation of this function on Darwin
3626 floating point constant. */
3627 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3628 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3629 opnum
, (enum reload_type
)type
);
3634 if (GET_CODE (x
) == PLUS
3635 && GET_CODE (XEXP (x
, 0)) == REG
3636 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
3637 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
3638 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3639 && !SPE_VECTOR_MODE (mode
)
3640 && !ALTIVEC_VECTOR_MODE (mode
))
3642 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
3643 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
3645 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3647 /* Check for 32-bit overflow. */
3648 if (high
+ low
!= val
)
3654 /* Reload the high part into a base reg; leave the low part
3655 in the mem directly. */
3657 x
= gen_rtx_PLUS (GET_MODE (x
),
3658 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
3662 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3663 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3664 opnum
, (enum reload_type
)type
);
3669 if (GET_CODE (x
) == SYMBOL_REF
3670 && DEFAULT_ABI
== ABI_DARWIN
3671 && !ALTIVEC_VECTOR_MODE (mode
)
3672 && (flag_pic
|| MACHO_DYNAMIC_NO_PIC_P
)
3673 /* Don't do this for TFmode, since the result isn't offsettable. */
3678 rtx offset
= gen_rtx_CONST (Pmode
,
3679 gen_rtx_MINUS (Pmode
, x
,
3680 gen_rtx_SYMBOL_REF (Pmode
,
3681 machopic_function_base_name ())));
3682 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3683 gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
3684 gen_rtx_HIGH (Pmode
, offset
)), offset
);
3687 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3688 gen_rtx_HIGH (Pmode
, x
), x
);
3690 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3691 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3692 opnum
, (enum reload_type
)type
);
3698 && constant_pool_expr_p (x
)
3699 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
3701 (x
) = create_TOC_reference (x
);
3709 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3710 that is a valid memory address for an instruction.
3711 The MODE argument is the machine mode for the MEM expression
3712 that wants to use this address.
3714 On the RS/6000, there are four valid address: a SYMBOL_REF that
3715 refers to a constant pool entry of an address (or the sum of it
3716 plus a constant), a short (16-bit signed) constant plus a register,
3717 the sum of two registers, or a register indirect, possibly with an
3718 auto-increment. For DFmode and DImode with a constant plus register,
3719 we must ensure that both words are addressable or PowerPC64 with offset
3722 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3723 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3724 adjacent memory cells are accessed by adding word-sized offsets
3725 during assembly output. */
3727 rs6000_legitimate_address (enum machine_mode mode
, rtx x
, int reg_ok_strict
)
3729 if (RS6000_SYMBOL_REF_TLS_P (x
))
3731 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3733 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3734 && !ALTIVEC_VECTOR_MODE (mode
)
3735 && !SPE_VECTOR_MODE (mode
)
3737 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3739 if (legitimate_small_data_p (mode
, x
))
3741 if (legitimate_constant_pool_address_p (x
))
3743 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3745 && GET_CODE (x
) == PLUS
3746 && GET_CODE (XEXP (x
, 0)) == REG
3747 && (XEXP (x
, 0) == virtual_stack_vars_rtx
3748 || XEXP (x
, 0) == arg_pointer_rtx
)
3749 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3751 if (rs6000_legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3755 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3757 || (mode
!= DFmode
&& mode
!= TFmode
))
3758 && (TARGET_POWERPC64
|| mode
!= DImode
)
3759 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3761 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3766 /* Go to LABEL if ADDR (a legitimate address expression)
3767 has an effect that depends on the machine mode it is used for.
3769 On the RS/6000 this is true of all integral offsets (since AltiVec
3770 modes don't allow them) or is a pre-increment or decrement.
3772 ??? Except that due to conceptual problems in offsettable_address_p
3773 we can't really report the problems of integral offsets. So leave
3774 this assuming that the adjustable offset must be valid for the
3775 sub-words of a TFmode operand, which is what we had before. */
3778 rs6000_mode_dependent_address (rtx addr
)
3780 switch (GET_CODE (addr
))
3783 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3785 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3786 return val
+ 12 + 0x8000 >= 0x10000;
3795 return TARGET_UPDATE
;
3804 /* Return number of consecutive hard regs needed starting at reg REGNO
3805 to hold something of mode MODE.
3806 This is ordinarily the length in words of a value of mode MODE
3807 but can be less for certain modes in special long registers.
3809 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3810 scalar instructions. The upper 32 bits are only available to the
3813 POWER and PowerPC GPRs hold 32 bits worth;
3814 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3817 rs6000_hard_regno_nregs (int regno
, enum machine_mode mode
)
3819 if (FP_REGNO_P (regno
))
3820 return (GET_MODE_SIZE (mode
) + UNITS_PER_FP_WORD
- 1) / UNITS_PER_FP_WORD
;
3822 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
3823 return (GET_MODE_SIZE (mode
) + UNITS_PER_SPE_WORD
- 1) / UNITS_PER_SPE_WORD
;
3825 if (ALTIVEC_REGNO_P (regno
))
3827 (GET_MODE_SIZE (mode
) + UNITS_PER_ALTIVEC_WORD
- 1) / UNITS_PER_ALTIVEC_WORD
;
3829 return (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3832 /* Change register usage conditional on target flags. */
3834 rs6000_conditional_register_usage (void)
3838 /* Set MQ register fixed (already call_used) if not POWER
3839 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3844 /* 64-bit AIX reserves GPR13 for thread-private data. */
3846 fixed_regs
[13] = call_used_regs
[13]
3847 = call_really_used_regs
[13] = 1;
3849 /* Conditionally disable FPRs. */
3850 if (TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
3851 for (i
= 32; i
< 64; i
++)
3852 fixed_regs
[i
] = call_used_regs
[i
]
3853 = call_really_used_regs
[i
] = 1;
3855 if (DEFAULT_ABI
== ABI_V4
3856 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3858 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3860 if (DEFAULT_ABI
== ABI_V4
3861 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3863 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3864 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3865 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3867 if (DEFAULT_ABI
== ABI_DARWIN
3868 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
3869 global_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3870 = fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3871 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3872 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3874 if (TARGET_TOC
&& TARGET_MINIMAL_TOC
)
3875 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3876 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3879 global_regs
[VSCR_REGNO
] = 1;
3883 global_regs
[SPEFSCR_REGNO
] = 1;
3884 fixed_regs
[FIXED_SCRATCH
]
3885 = call_used_regs
[FIXED_SCRATCH
]
3886 = call_really_used_regs
[FIXED_SCRATCH
] = 1;
3889 if (! TARGET_ALTIVEC
)
3891 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
3892 fixed_regs
[i
] = call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3893 call_really_used_regs
[VRSAVE_REGNO
] = 1;
3896 if (TARGET_ALTIVEC_ABI
)
3897 for (i
= FIRST_ALTIVEC_REGNO
; i
< FIRST_ALTIVEC_REGNO
+ 20; ++i
)
3898 call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3901 /* Try to output insns to set TARGET equal to the constant C if it can
3902 be done in less than N insns. Do all computations in MODE.
3903 Returns the place where the output has been placed if it can be
3904 done and the insns have been emitted. If it would take more than N
3905 insns, zero is returned and no insns and emitted. */
3908 rs6000_emit_set_const (rtx dest
, enum machine_mode mode
,
3909 rtx source
, int n ATTRIBUTE_UNUSED
)
3911 rtx result
, insn
, set
;
3912 HOST_WIDE_INT c0
, c1
;
3914 if (mode
== QImode
|| mode
== HImode
)
3917 dest
= gen_reg_rtx (mode
);
3918 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
3921 else if (mode
== SImode
)
3923 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
3925 emit_insn (gen_rtx_SET (VOIDmode
, result
,
3926 GEN_INT (INTVAL (source
)
3927 & (~ (HOST_WIDE_INT
) 0xffff))));
3928 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
3929 gen_rtx_IOR (SImode
, result
,
3930 GEN_INT (INTVAL (source
) & 0xffff))));
3933 else if (mode
== DImode
)
3935 if (GET_CODE (source
) == CONST_INT
)
3937 c0
= INTVAL (source
);
3940 else if (GET_CODE (source
) == CONST_DOUBLE
)
3942 #if HOST_BITS_PER_WIDE_INT >= 64
3943 c0
= CONST_DOUBLE_LOW (source
);
3946 c0
= CONST_DOUBLE_LOW (source
);
3947 c1
= CONST_DOUBLE_HIGH (source
);
3953 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
3958 insn
= get_last_insn ();
3959 set
= single_set (insn
);
3960 if (! CONSTANT_P (SET_SRC (set
)))
3961 set_unique_reg_note (insn
, REG_EQUAL
, source
);
3966 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3967 fall back to a straight forward decomposition. We do this to avoid
3968 exponential run times encountered when looking for longer sequences
3969 with rs6000_emit_set_const. */
3971 rs6000_emit_set_long_const (rtx dest
, HOST_WIDE_INT c1
, HOST_WIDE_INT c2
)
3973 if (!TARGET_POWERPC64
)
3975 rtx operand1
, operand2
;
3977 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
3979 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
3981 emit_move_insn (operand1
, GEN_INT (c1
));
3982 emit_move_insn (operand2
, GEN_INT (c2
));
3986 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
3989 ud2
= (c1
& 0xffff0000) >> 16;
3990 #if HOST_BITS_PER_WIDE_INT >= 64
3994 ud4
= (c2
& 0xffff0000) >> 16;
3996 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
3997 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
4000 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
4002 emit_move_insn (dest
, GEN_INT (ud1
));
4005 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
4006 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
4009 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
4012 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
4014 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4016 else if ((ud4
== 0xffff && (ud3
& 0x8000))
4017 || (ud4
== 0 && ! (ud3
& 0x8000)))
4020 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
4023 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
4026 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
4027 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
4029 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4034 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
4037 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
4040 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
4042 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
4044 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
4045 GEN_INT (ud2
<< 16)));
4047 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4053 /* Helper for the following. Get rid of [r+r] memory refs
4054 in cases where it won't work (TImode, TFmode). */
4057 rs6000_eliminate_indexed_memrefs (rtx operands
[2])
4059 if (GET_CODE (operands
[0]) == MEM
4060 && GET_CODE (XEXP (operands
[0], 0)) != REG
4061 && ! reload_in_progress
)
4063 = replace_equiv_address (operands
[0],
4064 copy_addr_to_reg (XEXP (operands
[0], 0)));
4066 if (GET_CODE (operands
[1]) == MEM
4067 && GET_CODE (XEXP (operands
[1], 0)) != REG
4068 && ! reload_in_progress
)
4070 = replace_equiv_address (operands
[1],
4071 copy_addr_to_reg (XEXP (operands
[1], 0)));
4074 /* Emit a move from SOURCE to DEST in mode MODE. */
4076 rs6000_emit_move (rtx dest
, rtx source
, enum machine_mode mode
)
4080 operands
[1] = source
;
4082 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4083 if (GET_CODE (operands
[1]) == CONST_DOUBLE
4084 && ! FLOAT_MODE_P (mode
)
4085 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
4087 /* FIXME. This should never happen. */
4088 /* Since it seems that it does, do the safe thing and convert
4090 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
4092 if (GET_CODE (operands
[1]) == CONST_DOUBLE
4093 && ! FLOAT_MODE_P (mode
)
4094 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
4095 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
4096 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
4097 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
4100 /* Check if GCC is setting up a block move that will end up using FP
4101 registers as temporaries. We must make sure this is acceptable. */
4102 if (GET_CODE (operands
[0]) == MEM
4103 && GET_CODE (operands
[1]) == MEM
4105 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
4106 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
4107 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
4108 ? 32 : MEM_ALIGN (operands
[0])))
4109 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
4111 : MEM_ALIGN (operands
[1]))))
4112 && ! MEM_VOLATILE_P (operands
[0])
4113 && ! MEM_VOLATILE_P (operands
[1]))
4115 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
4116 adjust_address (operands
[1], SImode
, 0));
4117 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
4118 adjust_address (operands
[1], SImode
, 4));
4122 if (!no_new_pseudos
)
4124 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
4125 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
4126 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
4128 rtx reg
= gen_reg_rtx (word_mode
);
4130 emit_insn (gen_rtx_SET (word_mode
, reg
,
4131 gen_rtx_ZERO_EXTEND (word_mode
,
4133 operands
[1] = gen_lowpart (mode
, reg
);
4135 if (GET_CODE (operands
[0]) != REG
)
4136 operands
[1] = force_reg (mode
, operands
[1]);
4139 if (mode
== SFmode
&& ! TARGET_POWERPC
4140 && TARGET_HARD_FLOAT
&& TARGET_FPRS
4141 && GET_CODE (operands
[0]) == MEM
)
4145 if (reload_in_progress
|| reload_completed
)
4146 regnum
= true_regnum (operands
[1]);
4147 else if (GET_CODE (operands
[1]) == REG
)
4148 regnum
= REGNO (operands
[1]);
4152 /* If operands[1] is a register, on POWER it may have
4153 double-precision data in it, so truncate it to single
4155 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
4158 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
4159 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
4160 operands
[1] = newreg
;
4164 /* Recognize the case where operand[1] is a reference to thread-local
4165 data and load its address to a register. */
4166 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
4168 enum tls_model model
= SYMBOL_REF_TLS_MODEL (operands
[1]);
4170 operands
[1] = rs6000_legitimize_tls_address (operands
[1], model
);
4173 /* Handle the case where reload calls us with an invalid address. */
4174 if (reload_in_progress
&& mode
== Pmode
4175 && (! general_operand (operands
[1], mode
)
4176 || ! nonimmediate_operand (operands
[0], mode
)))
4179 /* 128-bit constant floating-point values on Darwin should really be
4180 loaded as two parts. */
4181 if ((DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
4182 && TARGET_HARD_FLOAT
&& TARGET_FPRS
&& TARGET_LONG_DOUBLE_128
4183 && mode
== TFmode
&& GET_CODE (operands
[1]) == CONST_DOUBLE
)
4185 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4186 know how to get a DFmode SUBREG of a TFmode. */
4187 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
, 0),
4188 simplify_gen_subreg (DImode
, operands
[1], mode
, 0),
4190 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
,
4191 GET_MODE_SIZE (DImode
)),
4192 simplify_gen_subreg (DImode
, operands
[1], mode
,
4193 GET_MODE_SIZE (DImode
)),
4198 /* FIXME: In the long term, this switch statement should go away
4199 and be replaced by a sequence of tests based on things like
4205 if (CONSTANT_P (operands
[1])
4206 && GET_CODE (operands
[1]) != CONST_INT
)
4207 operands
[1] = force_const_mem (mode
, operands
[1]);
4211 rs6000_eliminate_indexed_memrefs (operands
);
4216 if (CONSTANT_P (operands
[1])
4217 && ! easy_fp_constant (operands
[1], mode
))
4218 operands
[1] = force_const_mem (mode
, operands
[1]);
4229 if (CONSTANT_P (operands
[1])
4230 && !easy_vector_constant (operands
[1], mode
))
4231 operands
[1] = force_const_mem (mode
, operands
[1]);
4236 /* Use default pattern for address of ELF small data */
4239 && DEFAULT_ABI
== ABI_V4
4240 && (GET_CODE (operands
[1]) == SYMBOL_REF
4241 || GET_CODE (operands
[1]) == CONST
)
4242 && small_data_operand (operands
[1], mode
))
4244 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
4248 if (DEFAULT_ABI
== ABI_V4
4249 && mode
== Pmode
&& mode
== SImode
4250 && flag_pic
== 1 && got_operand (operands
[1], mode
))
4252 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
4256 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
4260 && CONSTANT_P (operands
[1])
4261 && GET_CODE (operands
[1]) != HIGH
4262 && GET_CODE (operands
[1]) != CONST_INT
)
4264 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
4266 /* If this is a function address on -mcall-aixdesc,
4267 convert it to the address of the descriptor. */
4268 if (DEFAULT_ABI
== ABI_AIX
4269 && GET_CODE (operands
[1]) == SYMBOL_REF
4270 && XSTR (operands
[1], 0)[0] == '.')
4272 const char *name
= XSTR (operands
[1], 0);
4274 while (*name
== '.')
4276 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
4277 CONSTANT_POOL_ADDRESS_P (new_ref
)
4278 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
4279 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
4280 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
4281 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
4282 operands
[1] = new_ref
;
4285 if (DEFAULT_ABI
== ABI_DARWIN
)
4288 if (MACHO_DYNAMIC_NO_PIC_P
)
4290 /* Take care of any required data indirection. */
4291 operands
[1] = rs6000_machopic_legitimize_pic_address (
4292 operands
[1], mode
, operands
[0]);
4293 if (operands
[0] != operands
[1])
4294 emit_insn (gen_rtx_SET (VOIDmode
,
4295 operands
[0], operands
[1]));
4299 emit_insn (gen_macho_high (target
, operands
[1]));
4300 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
4304 emit_insn (gen_elf_high (target
, operands
[1]));
4305 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
4309 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4310 and we have put it in the TOC, we just need to make a TOC-relative
4313 && GET_CODE (operands
[1]) == SYMBOL_REF
4314 && constant_pool_expr_p (operands
[1])
4315 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
4316 get_pool_mode (operands
[1])))
4318 operands
[1] = create_TOC_reference (operands
[1]);
4320 else if (mode
== Pmode
4321 && CONSTANT_P (operands
[1])
4322 && ((GET_CODE (operands
[1]) != CONST_INT
4323 && ! easy_fp_constant (operands
[1], mode
))
4324 || (GET_CODE (operands
[1]) == CONST_INT
4325 && num_insns_constant (operands
[1], mode
) > 2)
4326 || (GET_CODE (operands
[0]) == REG
4327 && FP_REGNO_P (REGNO (operands
[0]))))
4328 && GET_CODE (operands
[1]) != HIGH
4329 && ! legitimate_constant_pool_address_p (operands
[1])
4330 && ! toc_relative_expr_p (operands
[1]))
4332 /* Emit a USE operation so that the constant isn't deleted if
4333 expensive optimizations are turned on because nobody
4334 references it. This should only be done for operands that
4335 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4336 This should not be done for operands that contain LABEL_REFs.
4337 For now, we just handle the obvious case. */
4338 if (GET_CODE (operands
[1]) != LABEL_REF
)
4339 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
4342 /* Darwin uses a special PIC legitimizer. */
4343 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
4346 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
4348 if (operands
[0] != operands
[1])
4349 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
4354 /* If we are to limit the number of things we put in the TOC and
4355 this is a symbol plus a constant we can add in one insn,
4356 just put the symbol in the TOC and add the constant. Don't do
4357 this if reload is in progress. */
4358 if (GET_CODE (operands
[1]) == CONST
4359 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
4360 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
4361 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
4362 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
4363 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
4364 && ! side_effects_p (operands
[0]))
4367 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
4368 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
4370 sym
= force_reg (mode
, sym
);
4372 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
4374 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
4378 operands
[1] = force_const_mem (mode
, operands
[1]);
4381 && constant_pool_expr_p (XEXP (operands
[1], 0))
4382 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4383 get_pool_constant (XEXP (operands
[1], 0)),
4384 get_pool_mode (XEXP (operands
[1], 0))))
4387 = gen_rtx_MEM (mode
,
4388 create_TOC_reference (XEXP (operands
[1], 0)));
4389 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
4390 RTX_UNCHANGING_P (operands
[1]) = 1;
4396 rs6000_eliminate_indexed_memrefs (operands
);
4400 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
4402 gen_rtx_SET (VOIDmode
,
4403 operands
[0], operands
[1]),
4404 gen_rtx_CLOBBER (VOIDmode
,
4405 gen_rtx_SCRATCH (SImode
)))));
4414 /* Above, we may have called force_const_mem which may have returned
4415 an invalid address. If we can, fix this up; otherwise, reload will
4416 have to deal with it. */
4417 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
4418 operands
[1] = validize_mem (operands
[1]);
4421 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
4437 /* Return a nonzero value to say to return the function value in
4438 memory, just as large structures are always returned. TYPE will be
4439 the data type of the value, and FNTYPE will be the type of the
4440 function doing the returning, or @code{NULL} for libcalls.
4442 The AIX ABI for the RS/6000 specifies that all structures are
4443 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4444 specifies that structures <= 8 bytes are returned in r3/r4, but a
4445 draft put them in memory, and GCC used to implement the draft
4446 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4447 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4448 compatibility can change DRAFT_V4_STRUCT_RET to override the
4449 default, and -m switches get the final word. See
4450 rs6000_override_options for more details.
4452 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4453 long double support is enabled. These values are returned in memory.
4455 int_size_in_bytes returns -1 for variable size objects, which go in
4456 memory always. The cast to unsigned makes -1 > 8. */
4459 rs6000_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
4461 if (AGGREGATE_TYPE_P (type
)
4462 && (TARGET_AIX_STRUCT_RET
4463 || (unsigned HOST_WIDE_INT
) int_size_in_bytes (type
) > 8))
4465 if (DEFAULT_ABI
== ABI_V4
&& TYPE_MODE (type
) == TFmode
)
4470 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4471 for a call to a function whose data type is FNTYPE.
4472 For a library call, FNTYPE is 0.
4474 For incoming args we set the number of arguments in the prototype large
4475 so we never return a PARALLEL. */
4478 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
4479 rtx libname ATTRIBUTE_UNUSED
, int incoming
,
4480 int libcall
, int n_named_args
)
4482 static CUMULATIVE_ARGS zero_cumulative
;
4484 *cum
= zero_cumulative
;
4486 cum
->fregno
= FP_ARG_MIN_REG
;
4487 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
4488 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
4489 cum
->call_cookie
= ((DEFAULT_ABI
== ABI_V4
&& libcall
)
4490 ? CALL_LIBCALL
: CALL_NORMAL
);
4491 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
4492 cum
->stdarg
= fntype
4493 && (TYPE_ARG_TYPES (fntype
) != 0
4494 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4495 != void_type_node
));
4497 cum
->nargs_prototype
= 0;
4498 if (incoming
|| cum
->prototype
)
4499 cum
->nargs_prototype
= n_named_args
;
4501 /* Check for a longcall attribute. */
4503 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
4504 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
4505 cum
->call_cookie
= CALL_LONG
;
4507 if (TARGET_DEBUG_ARG
)
4509 fprintf (stderr
, "\ninit_cumulative_args:");
4512 tree ret_type
= TREE_TYPE (fntype
);
4513 fprintf (stderr
, " ret code = %s,",
4514 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
4517 if (cum
->call_cookie
& CALL_LONG
)
4518 fprintf (stderr
, " longcall,");
4520 fprintf (stderr
, " proto = %d, nargs = %d\n",
4521 cum
->prototype
, cum
->nargs_prototype
);
4526 && TARGET_ALTIVEC_ABI
4527 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype
))))
4529 error ("Cannot return value in vector register because"
4530 " altivec instructions are disabled, use -maltivec"
4531 " to enable them.");
4535 /* Return true if TYPE must be passed on the stack and not in registers. */
4538 rs6000_must_pass_in_stack (enum machine_mode mode
, tree type
)
4540 if (DEFAULT_ABI
== ABI_AIX
|| TARGET_64BIT
)
4541 return must_pass_in_stack_var_size (mode
, type
);
4543 return must_pass_in_stack_var_size_or_pad (mode
, type
);
4546 /* If defined, a C expression which determines whether, and in which
4547 direction, to pad out an argument with extra space. The value
4548 should be of type `enum direction': either `upward' to pad above
4549 the argument, `downward' to pad below, or `none' to inhibit
4552 For the AIX ABI structs are always stored left shifted in their
/* NOTE(review): garbled extraction — the `enum direction` return-type
   line, braces, `else` arms, and the actual `return upward/downward`
   statements (old lines ~4590, 4597, 4602) are missing; only the
   size-classification logic is visible.  */
4556 function_arg_padding (enum machine_mode mode
, tree type
)
4558 #ifndef AGGREGATE_PADDING_FIXED
4559 #define AGGREGATE_PADDING_FIXED 0
4561 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4562 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4565 if (!AGGREGATE_PADDING_FIXED
)
4567 /* GCC used to pass structures of the same size as integer types as
4568 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4569 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4570 passed padded downward, except that -mstrict-align further
4571 muddied the water in that multi-component structures of 2 and 4
4572 bytes in size were passed padded upward.
4574 The following arranges for best compatibility with previous
4575 versions of gcc, but removes the -mstrict-align dependency. */
4576 if (BYTES_BIG_ENDIAN
)
4578 HOST_WIDE_INT size
= 0;
/* Compute the argument's byte size: from the tree for BLKmode
   (only when constant), otherwise from the machine mode.  */
4580 if (mode
== BLKmode
)
4582 if (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
)
4583 size
= int_size_in_bytes (type
);
4586 size
= GET_MODE_SIZE (mode
);
4588 if (size
== 1 || size
== 2 || size
== 4)
4594 if (AGGREGATES_PAD_UPWARD_ALWAYS
)
4596 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
4600 /* SFmode parameters are not padded. */
4601 if (TARGET_64BIT
&& mode
== SFmode
)
4604 /* Fall back to the default. */
4605 return DEFAULT_FUNCTION_ARG_PADDING (mode
, type
);
4608 /* If defined, a C expression that gives the alignment boundary, in bits,
4609 of an argument with the specified mode and type. If it is not defined,
4610 PARM_BOUNDARY is used for all arguments.
4612 V.4 wants long longs to be double word aligned. */
/* NOTE(review): garbled extraction — the return-type line, braces, and
   the `return 64;` / SPE / `return 128;` bodies (old lines 4618, 4620,
   4622) are missing; only the dispatch conditions survive.  */
4615 function_arg_boundary (enum machine_mode mode
, tree type ATTRIBUTE_UNUSED
)
4617 if (DEFAULT_ABI
== ABI_V4
&& GET_MODE_SIZE (mode
) == 8)
4619 else if (SPE_VECTOR_MODE (mode
))
4621 else if (ALTIVEC_VECTOR_MODE (mode
))
4624 return PARM_BOUNDARY
;
4627 /* Compute the size (in words) of a function argument. */
/* Rounds the byte size up to 4-byte words (the >>2 path) or 8-byte
   words (the >>3 path).  NOTE(review): the `unsigned long size;`
   declaration, braces, and the TARGET_32BIT guard selecting between
   the two returns (old line ~4639) are missing from this extraction.  */
4629 static unsigned long
4630 rs6000_arg_size (enum machine_mode mode
, tree type
)
4634 if (mode
!= BLKmode
)
4635 size
= GET_MODE_SIZE (mode
);
4637 size
= int_size_in_bytes (type
);
4640 return (size
+ 3) >> 2;
4642 return (size
+ 7) >> 3;
4645 /* Update the data in CUM to advance over an argument
4646 of mode MODE and data type TYPE.
4647 (TYPE is null for libcalls where that information may not be available.)
4649 Note that for args passed by reference, function_arg will be called
4650 with MODE and TYPE set to that of the pointer to the arg, not the arg
/* NOTE(review): garbled extraction — return type, braces, several
   `else` arms and register-bump statements (e.g. vregno++/fregno++ and
   the SPE gregno adjustment around old lines 4660-4661, 4680, 4711,
   4718-4720) are missing.  Comments below cover only visible code.  */
4654 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4655 tree type
, int named
)
4657 cum
->nargs_prototype
--;
/* --- AltiVec vector argument --- */
4659 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
4663 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4666 if (!TARGET_ALTIVEC
)
4667 error ("Cannot pass argument in vector register because"
4668 " altivec instructions are disabled, use -maltivec"
4669 " to enable them.");
4671 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4672 even if it is going to be passed in a vector register.
4673 Darwin does the same for variable-argument functions. */
4674 if ((DEFAULT_ABI
== ABI_AIX
&& TARGET_64BIT
)
4675 || (cum
->stdarg
&& DEFAULT_ABI
!= ABI_V4
))
4685 /* Vector parameters must be 16-byte aligned. This places
4686 them at 2 mod 4 in terms of words in 32-bit mode, since
4687 the parameter save area starts at offset 24 from the
4688 stack. In 64-bit mode, they just have to start on an
4689 even word, since the parameter save area is 16-byte
4690 aligned. Space for GPRs is reserved even if the argument
4691 will be passed in memory. */
4693 align
= (2 - cum
->words
) & 3;
4695 align
= cum
->words
& 1;
4696 cum
->words
+= align
+ rs6000_arg_size (mode
, type
);
4698 if (TARGET_DEBUG_ARG
)
4700 fprintf (stderr
, "function_adv: words = %2d, align=%d, ",
4702 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s\n",
4703 cum
->nargs_prototype
, cum
->prototype
,
4704 GET_MODE_NAME (mode
));
/* --- SPE vector argument still fitting in GPRs --- */
4708 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
4710 && cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
/* --- System V.4 ABI --- */
4712 else if (DEFAULT_ABI
== ABI_V4
)
4714 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
4715 && (mode
== SFmode
|| mode
== DFmode
))
4717 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
/* FP regs exhausted: the value overflows to the stack; doubles
   get 8-byte alignment via the odd-word bump.  */
4722 cum
->words
+= cum
->words
& 1;
4723 cum
->words
+= rs6000_arg_size (mode
, type
);
4728 int n_words
= rs6000_arg_size (mode
, type
);
4729 int gregno
= cum
->sysv_gregno
;
4731 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4732 (r7,r8) or (r9,r10). As does any other 2 word item such
4733 as complex int due to a historical mistake. */
4735 gregno
+= (1 - gregno
) & 1;
4737 /* Multi-reg args are not split between registers and stack. */
4738 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
4740 /* Long long and SPE vectors are aligned on the stack.
4741 So are other 2 word items such as complex int due to
4742 a historical mistake. */
4744 cum
->words
+= cum
->words
& 1;
4745 cum
->words
+= n_words
;
4748 /* Note: continuing to accumulate gregno past when we've started
4749 spilling to the stack indicates the fact that we've started
4750 spilling to the stack to expand_builtin_saveregs. */
4751 cum
->sysv_gregno
= gregno
+ n_words
;
4754 if (TARGET_DEBUG_ARG
)
4756 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4757 cum
->words
, cum
->fregno
);
4758 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
4759 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
4760 fprintf (stderr
, "mode = %4s, named = %d\n",
4761 GET_MODE_NAME (mode
), named
);
/* --- AIX/Darwin-style ABIs: word-aligned GPR/stack accounting --- */
4766 int n_words
= rs6000_arg_size (mode
, type
);
4767 int align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
4769 /* The simple alignment calculation here works because
4770 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4771 If we ever want to handle alignments larger than 8 bytes for
4772 32-bit or 16 bytes for 64-bit, then we'll need to take into
4773 account the offset to the start of the parm save area. */
4774 align
&= cum
->words
;
4775 cum
->words
+= align
+ n_words
;
4777 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
4778 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4779 cum
->fregno
+= (GET_MODE_SIZE (mode
) + 7) >> 3;
4781 if (TARGET_DEBUG_ARG
)
4783 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4784 cum
->words
, cum
->fregno
);
4785 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
4786 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
4787 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
4792 /* Determine where to put a SIMD argument on the SPE. */
/* Returns an rtx describing the registers (a PARALLEL of two SImode
   halves for double-word vectors, or a single REG) holding the arg.
   NOTE(review): garbled extraction — the `static rtx` line, the `tree
   type` parameter, braces, the r1/r2 declarations, and the final
   NULL_RTX fall-through (old lines ~4793-4799, 4809-4811, 4818-4826)
   are missing from view.  */
4795 rs6000_spe_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4800 int gregno
= cum
->sysv_gregno
;
4801 int n_words
= rs6000_arg_size (mode
, type
);
4803 /* SPE vectors are put in odd registers. */
4804 if (n_words
== 2 && (gregno
& 1) == 0)
4807 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
/* Describe the two 4-byte halves at offsets 0 and 4.  */
4810 enum machine_mode m
= SImode
;
4812 r1
= gen_rtx_REG (m
, gregno
);
4813 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
4814 r2
= gen_rtx_REG (m
, gregno
+ 1);
4815 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
4816 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
4823 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
4824 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
4830 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
/* Splits an arg into SImode pieces across GPRs (and a NULL_RTX memory
   marker when it overflows), returning a PARALLEL or a single REG.
   NOTE(review): garbled extraction — the `static rtx` line, braces, the
   `int n_units; int i, k;` declarations, the `return NULL_RTX;` for the
   all-in-memory case, the k=0/i=0 inits, and the `do` keyword of the
   do/while loop (old lines ~4832-4843, 4866-4868) are missing.  */
4833 rs6000_mixed_function_arg (enum machine_mode mode
, tree type
, int align_words
)
4837 rtx rvec
[GP_ARG_NUM_REG
+ 1];
4839 if (align_words
>= GP_ARG_NUM_REG
)
4842 n_units
= rs6000_arg_size (mode
, type
);
4844 /* Optimize the simple case where the arg fits in one gpr, except in
4845 the case of BLKmode due to assign_parms assuming that registers are
4846 BITS_PER_WORD wide. */
4848 || (n_units
== 1 && mode
!= BLKmode
))
4849 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4852 if (align_words
+ n_units
> GP_ARG_NUM_REG
)
4853 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4854 using a magic NULL_RTX component.
4855 FIXME: This is not strictly correct. Only some of the arg
4856 belongs in memory, not all of it. However, there isn't any way
4857 to do this currently, apart from building rtx descriptions for
4858 the pieces of memory we want stored. Due to bugs in the generic
4859 code we can't use the normal function_arg_partial_nregs scheme
4860 with the PARALLEL arg description we emit here.
4861 In any case, the code to store the whole arg to memory is often
4862 more efficient than code to store pieces, and we know that space
4863 is available in the right place for the whole arg. */
4864 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
/* Emit one SImode register piece per word, 4 bytes apart.  */
4869 rtx r
= gen_rtx_REG (SImode
, GP_ARG_MIN_REG
+ align_words
);
4870 rtx off
= GEN_INT (i
++ * 4);
4871 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
4873 while (++align_words
< GP_ARG_NUM_REG
&& --n_units
!= 0);
4875 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
4878 /* Determine where to put an argument to a function.
4879 Value is zero to push the argument on the stack,
4880 or a hard register in which to store the argument.
4882 MODE is the argument's machine mode.
4883 TYPE is the data type of the argument (as a tree).
4884 This is null for libcalls where that information may
4886 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4887 the preceding args and about the function being called.
4888 NAMED is nonzero if this argument is a named parameter
4889 (otherwise it is an extra parameter matching an ellipsis).
4891 On RS/6000 the first eight words of non-FP are normally in registers
4892 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4893 Under V.4, the first 8 FP args are in registers.
4895 If this is floating-point and no prototype is specified, we use
4896 both an FP and integer register (or possibly FP reg and stack). Library
4897 functions (when CALL_LIBCALL is set) always have the proper types for args,
4898 so we can pass the FP value just in one register. emit_library_function
4899 doesn't support PARALLEL anyway.
4901 Note that for args passed by reference, function_arg will be called
4902 with MODE and TYPE set to that of the pointer to the arg, not the arg
/* NOTE(review): garbled extraction — the `rtx` return-type line, braces,
   and many statements (e.g. the DEFAULT_ABI==ABI_V4 guard at 4917, the
   TARGET_SPE crxor return at 4923, parts of the unprototyped-vector
   PARALLEL at 4943-4960, the part_mode narrowing at 4997-4999, the
   fregno bump and needs_psave head at 5055-5063, and the final
   `return NULL_RTX;` around 5123) are missing from view.  */
4906 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4907 tree type
, int named
)
4909 enum rs6000_abi abi
= DEFAULT_ABI
;
4911 /* Return a marker to indicate whether CR1 needs to set or clear the
4912 bit that V.4 uses to say fp args were passed in registers.
4913 Assume that we don't need the marker for software floating point,
4914 or compiler generated library calls. */
4915 if (mode
== VOIDmode
)
4918 && cum
->nargs_prototype
< 0
4919 && (cum
->call_cookie
& CALL_LIBCALL
) == 0
4920 && (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
4922 /* For the SPE, we need to crxor CR6 always. */
4924 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
4925 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4926 return GEN_INT (cum
->call_cookie
4927 | ((cum
->fregno
== FP_ARG_MIN_REG
)
4928 ? CALL_V4_SET_FP_ARGS
4929 : CALL_V4_CLEAR_FP_ARGS
));
4932 return GEN_INT (cum
->call_cookie
);
/* --- AltiVec vector argument in a vector register --- */
4935 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4936 if (TARGET_64BIT
&& ! cum
->prototype
)
4938 /* Vector parameters get passed in vector register
4939 and also in GPRs or memory, in absence of prototype. */
4942 align_words
= (cum
->words
+ 1) & ~1;
4944 if (align_words
>= GP_ARG_NUM_REG
)
4950 slot
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4952 return gen_rtx_PARALLEL (mode
,
4954 gen_rtx_EXPR_LIST (VOIDmode
,
4956 gen_rtx_EXPR_LIST (VOIDmode
,
4957 gen_rtx_REG (mode
, cum
->vregno
),
4961 return gen_rtx_REG (mode
, cum
->vregno
);
4962 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
4964 if (named
|| abi
== ABI_V4
)
4968 /* Vector parameters to varargs functions under AIX or Darwin
4969 get passed in memory and possibly also in GPRs. */
4970 int align
, align_words
, n_words
;
4971 enum machine_mode part_mode
;
4973 /* Vector parameters must be 16-byte aligned. This places them at
4974 2 mod 4 in terms of words in 32-bit mode, since the parameter
4975 save area starts at offset 24 from the stack. In 64-bit mode,
4976 they just have to start on an even word, since the parameter
4977 save area is 16-byte aligned. */
4979 align
= (2 - cum
->words
) & 3;
4981 align
= cum
->words
& 1;
4982 align_words
= cum
->words
+ align
;
4984 /* Out of registers? Memory, then. */
4985 if (align_words
>= GP_ARG_NUM_REG
)
4988 if (TARGET_32BIT
&& TARGET_POWERPC64
)
4989 return rs6000_mixed_function_arg (mode
, type
, align_words
);
4991 /* The vector value goes in GPRs. Only the part of the
4992 value in GPRs is reported here. */
4994 n_words
= rs6000_arg_size (mode
, type
);
4995 if (align_words
+ n_words
> GP_ARG_NUM_REG
)
4996 /* Fortunately, there are only two possibilities, the value
4997 is either wholly in GPRs or half in GPRs and half not. */
5000 return gen_rtx_REG (part_mode
, GP_ARG_MIN_REG
+ align_words
);
/* --- SPE vector argument --- */
5003 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
5004 return rs6000_spe_function_arg (cum
, mode
, type
);
/* --- System V.4 ABI --- */
5005 else if (abi
== ABI_V4
)
5007 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5008 && (mode
== SFmode
|| mode
== DFmode
))
5010 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
5011 return gen_rtx_REG (mode
, cum
->fregno
);
5017 int n_words
= rs6000_arg_size (mode
, type
);
5018 int gregno
= cum
->sysv_gregno
;
5020 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5021 (r7,r8) or (r9,r10). As does any other 2 word item such
5022 as complex int due to a historical mistake. */
5024 gregno
+= (1 - gregno
) & 1;
5026 /* Multi-reg args are not split between registers and stack. */
5027 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
5030 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5031 return rs6000_mixed_function_arg (mode
, type
,
5032 gregno
- GP_ARG_MIN_REG
);
5033 return gen_rtx_REG (mode
, gregno
);
/* --- AIX/Darwin-style ABIs --- */
5038 int align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
5039 int align_words
= cum
->words
+ (cum
->words
& align
);
5041 if (USE_FP_FOR_ARG_P (cum
, mode
, type
))
5043 rtx rvec
[GP_ARG_NUM_REG
+ 1];
5047 enum machine_mode fmode
= mode
;
5048 unsigned long n_fpreg
= (GET_MODE_SIZE (mode
) + 7) >> 3;
5050 if (cum
->fregno
+ n_fpreg
> FP_ARG_MAX_REG
+ 1)
5052 /* Currently, we only ever need one reg here because complex
5053 doubles are split. */
5054 if (cum
->fregno
!= FP_ARG_MAX_REG
|| fmode
!= TFmode
)
5057 /* Long double split over regs and memory. */
5061 /* Do we also need to pass this arg in the parameter save
5064 && (cum
->nargs_prototype
<= 0
5065 || (DEFAULT_ABI
== ABI_AIX
5067 && align_words
>= GP_ARG_NUM_REG
)));
5069 if (!needs_psave
&& mode
== fmode
)
5070 return gen_rtx_REG (fmode
, cum
->fregno
);
5075 /* Describe the part that goes in gprs or the stack.
5076 This piece must come first, before the fprs. */
5077 if (align_words
< GP_ARG_NUM_REG
)
5079 unsigned long n_words
= rs6000_arg_size (mode
, type
);
5081 if (align_words
+ n_words
> GP_ARG_NUM_REG
5082 || (TARGET_32BIT
&& TARGET_POWERPC64
))
5084 /* If this is partially on the stack, then we only
5085 include the portion actually in registers here. */
5086 enum machine_mode rmode
= TARGET_32BIT
? SImode
: DImode
;
5090 r
= gen_rtx_REG (rmode
,
5091 GP_ARG_MIN_REG
+ align_words
);
5092 off
= GEN_INT (k
* GET_MODE_SIZE (rmode
));
5093 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
5095 while (++align_words
< GP_ARG_NUM_REG
&& --n_words
!= 0);
5099 /* The whole arg fits in gprs. */
5100 r
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5101 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5105 /* It's entirely in memory. */
5106 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
5109 /* Describe where this piece goes in the fprs. */
5110 r
= gen_rtx_REG (fmode
, cum
->fregno
);
5111 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5113 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
5115 else if (align_words
< GP_ARG_NUM_REG
)
5117 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5118 return rs6000_mixed_function_arg (mode
, type
, align_words
);
5120 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5127 /* For an arg passed partly in registers and partly in memory, this is
5128 the number of registers used. For args passed entirely in registers
5129 or entirely in memory, zero. When an arg is described by a PARALLEL,
5130 perhaps using more than one register type, this function returns the
5131 number of registers used by the first element of the PARALLEL. */
/* NOTE(review): garbled extraction — return type, braces, the `int ret =
   0;` and `int align, parm_offset, align_words;` declarations, the early
   `return 0;` arms for V.4 and AltiVec, and the trailing `return ret;`
   are all missing from view.  */
5134 function_arg_partial_nregs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5135 tree type
, int named
)
5142 if (DEFAULT_ABI
== ABI_V4
)
5145 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
)
5146 && cum
->nargs_prototype
>= 0)
5149 align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
5150 parm_offset
= TARGET_32BIT
? 2 : 0;
5151 align_words
= cum
->words
+ ((parm_offset
- cum
->words
) & align
);
5153 if (USE_FP_FOR_ARG_P (cum
, mode
, type
)
5154 /* If we are passing this arg in gprs as well, then this function
5155 should return the number of gprs (or memory) partially passed,
5156 *not* the number of fprs. */
5158 && (cum
->nargs_prototype
<= 0
5159 || (DEFAULT_ABI
== ABI_AIX
5161 && align_words
>= GP_ARG_NUM_REG
))))
5163 if (cum
->fregno
+ ((GET_MODE_SIZE (mode
) + 7) >> 3) > FP_ARG_MAX_REG
+ 1)
5164 ret
= FP_ARG_MAX_REG
+ 1 - cum
->fregno
;
5165 else if (cum
->nargs_prototype
>= 0)
/* Arg straddles the last GPR: count the GPRs it does occupy.  */
5169 if (align_words
< GP_ARG_NUM_REG
5170 && GP_ARG_NUM_REG
< align_words
+ rs6000_arg_size (mode
, type
))
5171 ret
= GP_ARG_NUM_REG
- align_words
;
5173 if (ret
!= 0 && TARGET_DEBUG_ARG
)
5174 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
5179 /* A C expression that indicates when an argument must be passed by
5180 reference. If nonzero for an argument, a copy of that argument is
5181 made in memory and a pointer to the argument is passed instead of
5182 the argument itself. The pointer is passed in whatever way is
5183 appropriate for passing a pointer to that type.
5185 Under V.4, aggregates and long double are passed by reference.
5187 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5188 reference unless the AltiVec vector extension ABI is in force.
5190 As an extension to all ABIs, variable sized types are passed by
/* NOTE(review): garbled extraction — the `static bool` line, braces, the
   TFmode (long double) clause around old line 5200, and the `return 1;`
   / `return 0;` statements are missing from view.  */
5194 rs6000_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
5195 enum machine_mode mode ATTRIBUTE_UNUSED
,
5196 tree type
, bool named ATTRIBUTE_UNUSED
)
5198 if ((DEFAULT_ABI
== ABI_V4
5199 && ((type
&& AGGREGATE_TYPE_P (type
))
5201 || (TARGET_32BIT
&& !TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
5202 || (type
&& int_size_in_bytes (type
) < 0))
5204 if (TARGET_DEBUG_ARG
)
5205 fprintf (stderr
, "function_arg_pass_by_reference\n");
/* Copy NREGS word-sized registers starting at REGNO into consecutive
   word slots of memory X, fixing up addresses that are not strictly
   valid after reload.  NOTE(review): garbled extraction — the `static
   void` line, braces, the loop-variable declaration, and the abort/
   fix-up branch bodies (old lines ~5225-5237) are missing from view.  */
5213 rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
)
5216 enum machine_mode reg_mode
= TARGET_32BIT
? SImode
: DImode
;
5221 for (i
= 0; i
< nregs
; i
++)
5223 rtx tem
= adjust_address_nv (x
, reg_mode
, i
*GET_MODE_SIZE(reg_mode
));
5224 if (reload_completed
)
/* After reload, addresses must already be strictly valid; fall back
   to a subreg of X when the adjusted address is not.  */
5226 if (! strict_memory_address_p (reg_mode
, XEXP (tem
, 0)))
5229 tem
= simplify_gen_subreg (reg_mode
, x
, BLKmode
,
5230 i
* GET_MODE_SIZE(reg_mode
));
5233 tem
= replace_equiv_address (tem
, XEXP (tem
, 0));
5235 if (tem
== NULL_RTX
)
5238 emit_move_insn (tem
, gen_rtx_REG (reg_mode
, regno
+ i
));
5243 /* Perform any needed actions needed for a function that is receiving a
5244 variable number of arguments.
5248 MODE and TYPE are the mode and type of the current parameter.
5250 PRETEND_SIZE is a variable that should be set to the amount of stack
5251 that must be pushed by the prolog to pretend that our caller pushed
5254 Normally, this macro will push all remaining incoming registers on the
5255 stack and set PRETEND_SIZE to the length of the registers pushed. */
/* NOTE(review): garbled extraction — the `static void` line, braces,
   the `next_cum = *cum;` copy before the advance (old line ~5267), the
   non-V.4 `else` introducing the first_reg_offset computation, and parts
   of the CR1-conditional jump construction (old lines 5315-5321) are
   missing from view.  */
5258 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5259 tree type
, int *pretend_size ATTRIBUTE_UNUSED
, int no_rtl
)
5261 CUMULATIVE_ARGS next_cum
;
5262 int reg_size
= TARGET_32BIT
? 4 : 8;
5263 rtx save_area
= NULL_RTX
, mem
;
5264 int first_reg_offset
, set
;
5266 /* Skip the last named argument. */
5268 function_arg_advance (&next_cum
, mode
, type
, 1);
5270 if (DEFAULT_ABI
== ABI_V4
)
5272 /* Indicate to allocate space on the stack for varargs save area. */
5273 cfun
->machine
->sysv_varargs_p
= 1;
5275 save_area
= plus_constant (virtual_stack_vars_rtx
,
5276 - RS6000_VARARGS_SIZE
);
5278 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
5282 first_reg_offset
= next_cum
.words
;
5283 save_area
= virtual_incoming_args_rtx
;
5284 cfun
->machine
->sysv_varargs_p
= 0;
5286 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
5287 first_reg_offset
+= rs6000_arg_size (TYPE_MODE (type
), type
);
5290 set
= get_varargs_alias_set ();
5291 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
5293 mem
= gen_rtx_MEM (BLKmode
,
5294 plus_constant (save_area
,
5295 first_reg_offset
* reg_size
)),
5296 set_mem_alias_set (mem
, set
);
5297 set_mem_align (mem
, BITS_PER_WORD
);
5299 rs6000_move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
5300 GP_ARG_NUM_REG
- first_reg_offset
);
5303 /* Save FP registers if needed. */
5304 if (DEFAULT_ABI
== ABI_V4
5305 && TARGET_HARD_FLOAT
&& TARGET_FPRS
5307 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
5309 int fregno
= next_cum
.fregno
;
5310 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
5311 rtx lab
= gen_label_rtx ();
5312 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
/* Skip the FP saves when CR1 says no FP args were passed in regs.  */
5314 emit_jump_insn (gen_rtx_SET (VOIDmode
,
5316 gen_rtx_IF_THEN_ELSE (VOIDmode
,
5317 gen_rtx_NE (VOIDmode
, cr1
,
5319 gen_rtx_LABEL_REF (VOIDmode
, lab
),
5322 while (fregno
<= FP_ARG_V4_MAX_REG
)
5324 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
5325 set_mem_alias_set (mem
, set
);
5326 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
5335 /* Create the va_list data type. */
/* For V.4 builds a one-element array of a record { gpr, fpr, reserved,
   overflow_arg_area, reg_save_area }; other ABIs use plain `char *`.
   NOTE(review): garbled extraction — the `static tree` line, braces,
   and the `ptr_type_node` field-type arguments of the f_ovf/f_sav
   build_decl calls (old lines 5359, 5361) are missing from view.  */
5338 rs6000_build_builtin_va_list (void)
5340 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
, record
, type_decl
;
5342 /* For AIX, prefer 'char *' because that's what the system
5343 header files like. */
5344 if (DEFAULT_ABI
!= ABI_V4
)
5345 return build_pointer_type (char_type_node
);
5347 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
5348 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
5350 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
5351 unsigned_char_type_node
);
5352 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
5353 unsigned_char_type_node
);
5354 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5356 f_res
= build_decl (FIELD_DECL
, get_identifier ("reserved"),
5357 short_unsigned_type_node
);
5358 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
5360 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
5363 DECL_FIELD_CONTEXT (f_gpr
) = record
;
5364 DECL_FIELD_CONTEXT (f_fpr
) = record
;
5365 DECL_FIELD_CONTEXT (f_res
) = record
;
5366 DECL_FIELD_CONTEXT (f_ovf
) = record
;
5367 DECL_FIELD_CONTEXT (f_sav
) = record
;
5369 TREE_CHAIN (record
) = type_decl
;
5370 TYPE_NAME (record
) = type_decl
;
5371 TYPE_FIELDS (record
) = f_gpr
;
5372 TREE_CHAIN (f_gpr
) = f_fpr
;
5373 TREE_CHAIN (f_fpr
) = f_res
;
5374 TREE_CHAIN (f_res
) = f_ovf
;
5375 TREE_CHAIN (f_ovf
) = f_sav
;
5377 layout_type (record
);
5379 /* The correct type is an array type of one element. */
5380 return build_array_type (record
, build_index_type (size_zero_node
));
5383 /* Implement va_start. */
/* For V.4: record the gp/fp registers already consumed, and point the
   overflow area past the named stack args and the reg-save area into
   the varargs save block.  Other ABIs defer to the generic expander.
   NOTE(review): garbled extraction — the `void` return-type line,
   braces, and the early `return;` after the std expansion (old line
   ~5396) are missing from view.  */
5386 rs6000_va_start (tree valist
, rtx nextarg
)
5388 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
5389 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5390 tree gpr
, fpr
, ovf
, sav
, t
;
5392 /* Only SVR4 needs something special. */
5393 if (DEFAULT_ABI
!= ABI_V4
)
5395 std_expand_builtin_va_start (valist
, nextarg
);
5399 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5400 f_fpr
= TREE_CHAIN (f_gpr
);
5401 f_res
= TREE_CHAIN (f_fpr
);
5402 f_ovf
= TREE_CHAIN (f_res
);
5403 f_sav
= TREE_CHAIN (f_ovf
);
5405 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
5406 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5407 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5408 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5409 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5411 /* Count number of gp and fp argument registers used. */
5412 words
= current_function_args_info
.words
;
5413 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
5414 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
5416 if (TARGET_DEBUG_ARG
)
5417 fprintf (stderr
, "va_start: words = "HOST_WIDE_INT_PRINT_DEC
", n_gpr = "
5418 HOST_WIDE_INT_PRINT_DEC
", n_fpr = "HOST_WIDE_INT_PRINT_DEC
"\n",
5419 words
, n_gpr
, n_fpr
);
/* va_list.gpr = n_gpr; va_list.fpr = n_fpr;  */
5421 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
5422 TREE_SIDE_EFFECTS (t
) = 1;
5423 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5425 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
5426 TREE_SIDE_EFFECTS (t
) = 1;
5427 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5429 /* Find the overflow area. */
5430 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
5432 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
5433 build_int_2 (words
* UNITS_PER_WORD
, 0));
5434 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5435 TREE_SIDE_EFFECTS (t
) = 1;
5436 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5438 /* Find the register save area. */
5439 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
5440 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
5441 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
5442 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
5443 TREE_SIDE_EFFECTS (t
) = 1;
5444 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5447 /* Implement va_arg. */
/* Gimplified V.4 va_arg: try the gp/fp register save area first, fall
   back to the overflow area; recurses for by-reference args and for
   narrow complex parts; other ABIs use std_gimplify_va_arg_expr.
   NOTE(review): garbled extraction — braces, the `static tree` line,
   and several statements are missing (e.g. the reg/n_reg/sav_ofs/
   sav_scale selection bodies at old lines 5513-5531, the AltiVec
   16-byte `align` setup near 5541, and the `t = ovf;` seed before the
   overflow-area alignment at ~5594).  Comments below only describe
   the visible fragments.  */
5450 rs6000_gimplify_va_arg (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
5452 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5453 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
5454 int size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
5455 tree lab_false
, lab_over
, addr
;
5457 tree ptrtype
= build_pointer_type (type
);
/* By-reference args: fetch the pointer, then dereference it.  */
5459 if (pass_by_reference (NULL
, TYPE_MODE (type
), type
, false))
5461 t
= rs6000_gimplify_va_arg (valist
, ptrtype
, pre_p
, post_p
);
5462 return build_fold_indirect_ref (t
);
5465 if (DEFAULT_ABI
!= ABI_V4
)
5467 if (targetm
.calls
.split_complex_arg
&& TREE_CODE (type
) == COMPLEX_TYPE
)
5469 tree elem_type
= TREE_TYPE (type
);
5470 enum machine_mode elem_mode
= TYPE_MODE (elem_type
);
5471 int elem_size
= GET_MODE_SIZE (elem_mode
);
5473 if (elem_size
< UNITS_PER_WORD
)
5475 tree real_part
, imag_part
;
5476 tree post
= NULL_TREE
;
5478 real_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5480 /* Copy the value into a temporary, lest the formal temporary
5481 be reused out from under us. */
5482 real_part
= get_initialized_tmp_var (real_part
, pre_p
, &post
);
5483 append_to_statement_list (post
, pre_p
);
5485 imag_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5488 return build (COMPLEX_EXPR
, type
, real_part
, imag_part
);
5492 return std_gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
/* --- V.4 path: locate the va_list fields --- */
5495 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5496 f_fpr
= TREE_CHAIN (f_gpr
);
5497 f_res
= TREE_CHAIN (f_fpr
);
5498 f_ovf
= TREE_CHAIN (f_res
);
5499 f_sav
= TREE_CHAIN (f_ovf
);
5501 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
5502 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5503 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5504 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5505 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5507 size
= int_size_in_bytes (type
);
5508 rsize
= (size
+ 3) / 4;
5511 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5512 && (TYPE_MODE (type
) == SFmode
|| TYPE_MODE (type
) == DFmode
))
5514 /* FP args go in FP registers, if present. */
5519 if (TYPE_MODE (type
) == DFmode
)
5524 /* Otherwise into GP registers. */
5533 /* Pull the value out of the saved registers.... */
5536 addr
= create_tmp_var (ptr_type_node
, "addr");
5537 DECL_POINTER_ALIAS_SET (addr
) = get_varargs_alias_set ();
5539 /* AltiVec vectors never go in registers when -mabi=altivec. */
5540 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (TYPE_MODE (type
)))
5544 lab_false
= create_artificial_label ();
5545 lab_over
= create_artificial_label ();
5547 /* Long long and SPE vectors are aligned in the registers.
5548 As are any other 2 gpr item such as complex int due to a
5549 historical mistake. */
5553 u
= build2 (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
5554 size_int (n_reg
- 1));
5555 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, u
);
/* Branch to lab_false (overflow path) when regs are exhausted.  */
5558 t
= fold_convert (TREE_TYPE (reg
), size_int (8 - n_reg
+ 1));
5559 t
= build2 (GE_EXPR
, boolean_type_node
, u
, t
);
5560 u
= build1 (GOTO_EXPR
, void_type_node
, lab_false
);
5561 t
= build3 (COND_EXPR
, void_type_node
, t
, u
, NULL_TREE
);
5562 gimplify_and_add (t
, pre_p
);
/* addr = sav + sav_ofs + reg++ * sav_scale  */
5566 t
= build2 (PLUS_EXPR
, ptr_type_node
, sav
, size_int (sav_ofs
));
5568 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, size_int (n_reg
));
5569 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
5570 u
= build2 (MULT_EXPR
, integer_type_node
, u
, size_int (sav_scale
));
5571 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
, u
);
5573 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5574 gimplify_and_add (t
, pre_p
);
5576 t
= build1 (GOTO_EXPR
, void_type_node
, lab_over
);
5577 gimplify_and_add (t
, pre_p
);
5579 t
= build1 (LABEL_EXPR
, void_type_node
, lab_false
);
5580 append_to_statement_list (t
, pre_p
);
5584 /* Ensure that we don't find any more args in regs.
5585 Alignment has taken care of the n_reg == 2 case. */
5586 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, size_int (8));
5587 gimplify_and_add (t
, pre_p
);
5591 /* ... otherwise out of the overflow area. */
5593 /* Care for on-stack alignment if needed. */
5597 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (align
- 1));
5598 t
= build2 (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
, -1));
5600 gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
5602 u
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5603 gimplify_and_add (u
, pre_p
);
/* Advance ovf past the consumed bytes.  */
5605 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (size
));
5606 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5607 gimplify_and_add (t
, pre_p
);
5611 t
= build1 (LABEL_EXPR
, void_type_node
, lab_over
);
5612 append_to_statement_list (t
, pre_p
);
5615 addr
= fold_convert (ptrtype
, addr
);
5616 return build_fold_indirect_ref (addr
);
/* Registers a target builtin with the frontend when its MASK bit is set
   in target_flags.  NOTE(review): macro body truncated by the garbled
   extraction — the `do {` / `} while (0)` wrapper and the trailing
   arguments of builtin_function (old lines 5622, 5625) are missing.  */
5621 #define def_builtin(MASK, NAME, TYPE, CODE) \
5623 if ((MASK) & target_flags) \
5624 lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5628 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5630 static const struct builtin_description bdesc_3arg
[] =
5632 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
5633 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
5634 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
5635 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
5636 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
5637 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
5638 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
5639 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
5640 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
5641 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
5642 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
5643 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
5644 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
5645 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
5646 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
5647 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
5648 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
5649 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
5650 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
5651 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
5652 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
5653 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
5654 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
5657 /* DST operations: void foo (void *, const int, const char). */
5659 static const struct builtin_description bdesc_dst
[] =
5661 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
5662 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
5663 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
5664 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
5667 /* Simple binary operations: VECc = foo (VECa, VECb). */
5669 static struct builtin_description bdesc_2arg
[] =
5671 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
5672 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
5673 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
5674 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
5675 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
5676 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
5677 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
5678 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
5679 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
5680 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
5681 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
5682 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
5683 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
5684 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
5685 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
5686 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
5687 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
5688 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
5689 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
5690 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
5691 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
5692 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
5693 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
5694 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
5695 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
5696 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
5697 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
5698 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
5699 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
5700 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
5701 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
5702 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
5703 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
5704 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
5705 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
5706 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
5707 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
5708 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
5709 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
5710 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
5711 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
5712 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
5713 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
5714 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
5715 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
5716 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
5717 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
5718 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
5719 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
5720 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
5721 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
5722 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
5723 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
5724 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
5725 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
5726 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
5727 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
5728 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
5729 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
5730 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
5731 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
5732 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
5733 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
5734 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
5735 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
5736 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
5737 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
5738 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
5739 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
5740 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
5741 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
5742 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
5743 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
5744 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
5745 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
5746 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
5747 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
5748 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
5749 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
5750 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
5751 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
5752 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
5753 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
5754 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
5755 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
5756 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
5757 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
5758 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
5759 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
5760 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
5761 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
5762 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
5763 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
5764 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
5765 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
5766 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
5767 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
5768 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
5769 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
5770 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
5771 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
5772 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
5773 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
5774 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
5775 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
5776 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
5777 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
5778 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
5779 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
5780 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
5781 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
5782 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
5783 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
5785 /* Place holder, leave as first spe builtin. */
5786 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
5787 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
5788 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
5789 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
5790 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
5791 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
5792 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
5793 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
5794 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
5795 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
5796 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
5797 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
5798 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
5799 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
5800 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
5801 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
5802 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
5803 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
5804 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
5805 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
5806 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
5807 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
5808 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
5809 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
5810 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
5811 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
5812 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
5813 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
5814 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
5815 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
5816 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
5817 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
5818 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
5819 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
5820 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
5821 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
5822 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
5823 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
5824 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
5825 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
5826 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
5827 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
5828 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
5829 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
5830 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
5831 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
5832 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
5833 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
5834 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
5835 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
5836 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
5837 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
5838 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
5839 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
5840 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
5841 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
5842 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
5843 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
5844 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
5845 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
5846 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
5847 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
5848 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
5849 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
5850 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
5851 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
5852 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
5853 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
5854 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
5855 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
5856 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
5857 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
5858 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
5859 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
5860 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
5861 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
5862 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
5863 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
5864 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
5865 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
5866 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
5867 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
5868 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
5869 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
5870 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
5871 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
5872 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
5873 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
5874 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
5875 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
5876 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
5877 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
5878 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
5879 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
5880 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
5881 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
5882 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
5883 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
5884 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
5885 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
5886 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
5887 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
5888 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
5889 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
5890 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
5891 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
5892 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
5893 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
5894 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
5896 /* SPE binary operations expecting a 5-bit unsigned literal. */
5897 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
5899 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
5900 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
5901 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
5902 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
5903 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
5904 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
5905 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
5906 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
5907 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
5908 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
5909 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
5910 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
5911 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
5912 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
5913 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
5914 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
5915 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
5916 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
5917 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
5918 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
5919 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
5920 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
5921 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
5922 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
5923 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
5924 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
5926 /* Place-holder. Leave as last binary SPE builtin. */
5927 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
5930 /* AltiVec predicates. */
5932 struct builtin_description_predicates
5934 const unsigned int mask
;
5935 const enum insn_code icode
;
5937 const char *const name
;
5938 const enum rs6000_builtins code
;
5941 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
5943 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
5944 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
5945 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
5946 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
5947 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
5948 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
5949 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
5950 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
5951 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
5952 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
5953 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
5954 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
5955 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
5958 /* SPE predicates. */
5959 static struct builtin_description bdesc_spe_predicates
[] =
5961 /* Place-holder. Leave as first. */
5962 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
5963 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
5964 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
5965 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
5966 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
5967 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
5968 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
5969 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
5970 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
5971 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
5972 /* Place-holder. Leave as last. */
5973 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
5976 /* SPE evsel predicates. */
5977 static struct builtin_description bdesc_spe_evsel
[] =
5979 /* Place-holder. Leave as first. */
5980 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
5981 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
5982 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
5983 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
5984 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
5985 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
5986 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
5987 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
5988 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
5989 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
5990 /* Place-holder. Leave as last. */
5991 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
5994 /* ABS* operations. */
5996 static const struct builtin_description bdesc_abs
[] =
5998 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
5999 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
6000 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
6001 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
6002 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
6003 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
6004 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
6007 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6010 static struct builtin_description bdesc_1arg
[] =
6012 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
6013 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
6014 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
6015 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
6016 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
6017 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
6018 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
6019 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
6020 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
6021 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
6022 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
6023 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
6024 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
6025 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
6026 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
6027 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
6028 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
6030 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6031 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6032 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
6033 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
6034 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
6035 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
6036 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
6037 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
6038 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
6039 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
6040 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
6041 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
6042 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
6043 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
6044 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
6045 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
6046 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
6047 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
6048 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
6049 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
6050 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
6051 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
6052 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
6053 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
6054 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
6055 { 0, CODE_FOR_negv2si2
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
6056 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
6057 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
6058 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
6059 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
6061 /* Place-holder. Leave as last unary SPE builtin. */
6062 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
6066 rs6000_expand_unop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6069 tree arg0
= TREE_VALUE (arglist
);
6070 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6071 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6072 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6074 if (icode
== CODE_FOR_nothing
)
6075 /* Builtin not supported on this processor. */
6078 /* If we got invalid arguments bail out before generating bad rtl. */
6079 if (arg0
== error_mark_node
)
6082 if (icode
== CODE_FOR_altivec_vspltisb
6083 || icode
== CODE_FOR_altivec_vspltish
6084 || icode
== CODE_FOR_altivec_vspltisw
6085 || icode
== CODE_FOR_spe_evsplatfi
6086 || icode
== CODE_FOR_spe_evsplati
)
6088 /* Only allow 5-bit *signed* literals. */
6089 if (GET_CODE (op0
) != CONST_INT
6090 || INTVAL (op0
) > 0x1f
6091 || INTVAL (op0
) < -0x1f)
6093 error ("argument 1 must be a 5-bit signed literal");
6099 || GET_MODE (target
) != tmode
6100 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6101 target
= gen_reg_rtx (tmode
);
6103 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6104 op0
= copy_to_mode_reg (mode0
, op0
);
6106 pat
= GEN_FCN (icode
) (target
, op0
);
6115 altivec_expand_abs_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6117 rtx pat
, scratch1
, scratch2
;
6118 tree arg0
= TREE_VALUE (arglist
);
6119 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6120 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6121 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6123 /* If we have invalid arguments, bail out before generating bad rtl. */
6124 if (arg0
== error_mark_node
)
6128 || GET_MODE (target
) != tmode
6129 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6130 target
= gen_reg_rtx (tmode
);
6132 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6133 op0
= copy_to_mode_reg (mode0
, op0
);
6135 scratch1
= gen_reg_rtx (mode0
);
6136 scratch2
= gen_reg_rtx (mode0
);
6138 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
6147 rs6000_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6150 tree arg0
= TREE_VALUE (arglist
);
6151 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6152 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6153 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6154 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6155 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6156 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6158 if (icode
== CODE_FOR_nothing
)
6159 /* Builtin not supported on this processor. */
6162 /* If we got invalid arguments bail out before generating bad rtl. */
6163 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6166 if (icode
== CODE_FOR_altivec_vcfux
6167 || icode
== CODE_FOR_altivec_vcfsx
6168 || icode
== CODE_FOR_altivec_vctsxs
6169 || icode
== CODE_FOR_altivec_vctuxs
6170 || icode
== CODE_FOR_altivec_vspltb
6171 || icode
== CODE_FOR_altivec_vsplth
6172 || icode
== CODE_FOR_altivec_vspltw
6173 || icode
== CODE_FOR_spe_evaddiw
6174 || icode
== CODE_FOR_spe_evldd
6175 || icode
== CODE_FOR_spe_evldh
6176 || icode
== CODE_FOR_spe_evldw
6177 || icode
== CODE_FOR_spe_evlhhesplat
6178 || icode
== CODE_FOR_spe_evlhhossplat
6179 || icode
== CODE_FOR_spe_evlhhousplat
6180 || icode
== CODE_FOR_spe_evlwhe
6181 || icode
== CODE_FOR_spe_evlwhos
6182 || icode
== CODE_FOR_spe_evlwhou
6183 || icode
== CODE_FOR_spe_evlwhsplat
6184 || icode
== CODE_FOR_spe_evlwwsplat
6185 || icode
== CODE_FOR_spe_evrlwi
6186 || icode
== CODE_FOR_spe_evslwi
6187 || icode
== CODE_FOR_spe_evsrwis
6188 || icode
== CODE_FOR_spe_evsubifw
6189 || icode
== CODE_FOR_spe_evsrwiu
)
6191 /* Only allow 5-bit unsigned literals. */
6193 if (TREE_CODE (arg1
) != INTEGER_CST
6194 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6196 error ("argument 2 must be a 5-bit unsigned literal");
6202 || GET_MODE (target
) != tmode
6203 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6204 target
= gen_reg_rtx (tmode
);
6206 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6207 op0
= copy_to_mode_reg (mode0
, op0
);
6208 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6209 op1
= copy_to_mode_reg (mode1
, op1
);
6211 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
6220 altivec_expand_predicate_builtin (enum insn_code icode
, const char *opcode
,
6221 tree arglist
, rtx target
)
6224 tree cr6_form
= TREE_VALUE (arglist
);
6225 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
6226 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6227 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6228 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6229 enum machine_mode tmode
= SImode
;
6230 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6231 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6234 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
6236 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6240 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
6245 /* If we have invalid arguments, bail out before generating bad rtl. */
6246 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6250 || GET_MODE (target
) != tmode
6251 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6252 target
= gen_reg_rtx (tmode
);
6254 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6255 op0
= copy_to_mode_reg (mode0
, op0
);
6256 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6257 op1
= copy_to_mode_reg (mode1
, op1
);
6259 scratch
= gen_reg_rtx (mode0
);
6261 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
6262 gen_rtx_SYMBOL_REF (Pmode
, opcode
));
6267 /* The vec_any* and vec_all* predicates use the same opcodes for two
6268 different operations, but the bits in CR6 will be different
6269 depending on what information we want. So we have to play tricks
6270 with CR6 to get the right bits out.
6272 If you think this is disgusting, look at the specs for the
6273 AltiVec predicates. */
6275 switch (cr6_form_int
)
6278 emit_insn (gen_cr6_test_for_zero (target
));
6281 emit_insn (gen_cr6_test_for_zero_reverse (target
));
6284 emit_insn (gen_cr6_test_for_lt (target
));
6287 emit_insn (gen_cr6_test_for_lt_reverse (target
));
6290 error ("argument 1 of __builtin_altivec_predicate is out of range");
6298 altivec_expand_lv_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6301 tree arg0
= TREE_VALUE (arglist
);
6302 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6303 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6304 enum machine_mode mode0
= Pmode
;
6305 enum machine_mode mode1
= Pmode
;
6306 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6307 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6309 if (icode
== CODE_FOR_nothing
)
6310 /* Builtin not supported on this processor. */
6313 /* If we got invalid arguments bail out before generating bad rtl. */
6314 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6318 || GET_MODE (target
) != tmode
6319 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6320 target
= gen_reg_rtx (tmode
);
6322 op1
= copy_to_mode_reg (mode1
, op1
);
6324 if (op0
== const0_rtx
)
6326 addr
= gen_rtx_MEM (tmode
, op1
);
6330 op0
= copy_to_mode_reg (mode0
, op0
);
6331 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op0
, op1
));
6334 pat
= GEN_FCN (icode
) (target
, addr
);
6344 spe_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6346 tree arg0
= TREE_VALUE (arglist
);
6347 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6348 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6349 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6350 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6351 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6353 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
6354 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
6355 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
6357 /* Invalid arguments. Bail before doing anything stoopid! */
6358 if (arg0
== error_mark_node
6359 || arg1
== error_mark_node
6360 || arg2
== error_mark_node
)
6363 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
6364 op0
= copy_to_mode_reg (mode2
, op0
);
6365 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
6366 op1
= copy_to_mode_reg (mode0
, op1
);
6367 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
6368 op2
= copy_to_mode_reg (mode1
, op2
);
6370 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
6377 altivec_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6379 tree arg0
= TREE_VALUE (arglist
);
6380 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6381 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6382 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6383 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6384 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6386 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6387 enum machine_mode mode1
= Pmode
;
6388 enum machine_mode mode2
= Pmode
;
6390 /* Invalid arguments. Bail before doing anything stoopid! */
6391 if (arg0
== error_mark_node
6392 || arg1
== error_mark_node
6393 || arg2
== error_mark_node
)
6396 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, tmode
))
6397 op0
= copy_to_mode_reg (tmode
, op0
);
6399 op2
= copy_to_mode_reg (mode2
, op2
);
6401 if (op1
== const0_rtx
)
6403 addr
= gen_rtx_MEM (tmode
, op2
);
6407 op1
= copy_to_mode_reg (mode1
, op1
);
6408 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op1
, op2
));
6411 pat
= GEN_FCN (icode
) (addr
, op0
);
6418 rs6000_expand_ternop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6421 tree arg0
= TREE_VALUE (arglist
);
6422 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6423 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6424 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6425 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6426 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6427 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6428 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6429 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6430 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
6432 if (icode
== CODE_FOR_nothing
)
6433 /* Builtin not supported on this processor. */
6436 /* If we got invalid arguments bail out before generating bad rtl. */
6437 if (arg0
== error_mark_node
6438 || arg1
== error_mark_node
6439 || arg2
== error_mark_node
)
6442 if (icode
== CODE_FOR_altivec_vsldoi_4sf
6443 || icode
== CODE_FOR_altivec_vsldoi_4si
6444 || icode
== CODE_FOR_altivec_vsldoi_8hi
6445 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
6447 /* Only allow 4-bit unsigned literals. */
6449 if (TREE_CODE (arg2
) != INTEGER_CST
6450 || TREE_INT_CST_LOW (arg2
) & ~0xf)
6452 error ("argument 3 must be a 4-bit unsigned literal");
6458 || GET_MODE (target
) != tmode
6459 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6460 target
= gen_reg_rtx (tmode
);
6462 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6463 op0
= copy_to_mode_reg (mode0
, op0
);
6464 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6465 op1
= copy_to_mode_reg (mode1
, op1
);
6466 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
6467 op2
= copy_to_mode_reg (mode2
, op2
);
6469 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
6477 /* Expand the lvx builtins. */
6479 altivec_expand_ld_builtin (tree exp
, rtx target
, bool *expandedp
)
6481 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6482 tree arglist
= TREE_OPERAND (exp
, 1);
6483 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6485 enum machine_mode tmode
, mode0
;
6487 enum insn_code icode
;
6491 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
6492 icode
= CODE_FOR_altivec_lvx_16qi
;
6494 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
6495 icode
= CODE_FOR_altivec_lvx_8hi
;
6497 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
6498 icode
= CODE_FOR_altivec_lvx_4si
;
6500 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
6501 icode
= CODE_FOR_altivec_lvx_4sf
;
6510 arg0
= TREE_VALUE (arglist
);
6511 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6512 tmode
= insn_data
[icode
].operand
[0].mode
;
6513 mode0
= insn_data
[icode
].operand
[1].mode
;
6516 || GET_MODE (target
) != tmode
6517 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6518 target
= gen_reg_rtx (tmode
);
6520 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6521 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6523 pat
= GEN_FCN (icode
) (target
, op0
);
6530 /* Expand the stvx builtins. */
6532 altivec_expand_st_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6535 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6536 tree arglist
= TREE_OPERAND (exp
, 1);
6537 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6539 enum machine_mode mode0
, mode1
;
6541 enum insn_code icode
;
6545 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
6546 icode
= CODE_FOR_altivec_stvx_16qi
;
6548 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
6549 icode
= CODE_FOR_altivec_stvx_8hi
;
6551 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
6552 icode
= CODE_FOR_altivec_stvx_4si
;
6554 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
6555 icode
= CODE_FOR_altivec_stvx_4sf
;
6562 arg0
= TREE_VALUE (arglist
);
6563 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6564 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6565 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6566 mode0
= insn_data
[icode
].operand
[0].mode
;
6567 mode1
= insn_data
[icode
].operand
[1].mode
;
6569 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6570 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6571 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
6572 op1
= copy_to_mode_reg (mode1
, op1
);
6574 pat
= GEN_FCN (icode
) (op0
, op1
);
6582 /* Expand the dst builtins. */
6584 altivec_expand_dst_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6587 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6588 tree arglist
= TREE_OPERAND (exp
, 1);
6589 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6590 tree arg0
, arg1
, arg2
;
6591 enum machine_mode mode0
, mode1
, mode2
;
6592 rtx pat
, op0
, op1
, op2
;
6593 struct builtin_description
*d
;
6598 /* Handle DST variants. */
6599 d
= (struct builtin_description
*) bdesc_dst
;
6600 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6601 if (d
->code
== fcode
)
6603 arg0
= TREE_VALUE (arglist
);
6604 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6605 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6606 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6607 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6608 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6609 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6610 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6611 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6613 /* Invalid arguments, bail out before generating bad rtl. */
6614 if (arg0
== error_mark_node
6615 || arg1
== error_mark_node
6616 || arg2
== error_mark_node
)
6621 if (TREE_CODE (arg2
) != INTEGER_CST
6622 || TREE_INT_CST_LOW (arg2
) & ~0x3)
6624 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
6628 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
6629 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6630 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
6631 op1
= copy_to_mode_reg (mode1
, op1
);
6633 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
6643 /* Expand the builtin in EXP and store the result in TARGET. Store
6644 true in *EXPANDEDP if we found a builtin to expand. */
6646 altivec_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6648 struct builtin_description
*d
;
6649 struct builtin_description_predicates
*dp
;
6651 enum insn_code icode
;
6652 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6653 tree arglist
= TREE_OPERAND (exp
, 1);
6656 enum machine_mode tmode
, mode0
;
6657 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6659 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
6663 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
6667 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
6675 case ALTIVEC_BUILTIN_STVX
:
6676 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
6677 case ALTIVEC_BUILTIN_STVEBX
:
6678 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
6679 case ALTIVEC_BUILTIN_STVEHX
:
6680 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
6681 case ALTIVEC_BUILTIN_STVEWX
:
6682 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
6683 case ALTIVEC_BUILTIN_STVXL
:
6684 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
6686 case ALTIVEC_BUILTIN_MFVSCR
:
6687 icode
= CODE_FOR_altivec_mfvscr
;
6688 tmode
= insn_data
[icode
].operand
[0].mode
;
6691 || GET_MODE (target
) != tmode
6692 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6693 target
= gen_reg_rtx (tmode
);
6695 pat
= GEN_FCN (icode
) (target
);
6701 case ALTIVEC_BUILTIN_MTVSCR
:
6702 icode
= CODE_FOR_altivec_mtvscr
;
6703 arg0
= TREE_VALUE (arglist
);
6704 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6705 mode0
= insn_data
[icode
].operand
[0].mode
;
6707 /* If we got invalid arguments bail out before generating bad rtl. */
6708 if (arg0
== error_mark_node
)
6711 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6712 op0
= copy_to_mode_reg (mode0
, op0
);
6714 pat
= GEN_FCN (icode
) (op0
);
6719 case ALTIVEC_BUILTIN_DSSALL
:
6720 emit_insn (gen_altivec_dssall ());
6723 case ALTIVEC_BUILTIN_DSS
:
6724 icode
= CODE_FOR_altivec_dss
;
6725 arg0
= TREE_VALUE (arglist
);
6727 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6728 mode0
= insn_data
[icode
].operand
[0].mode
;
6730 /* If we got invalid arguments bail out before generating bad rtl. */
6731 if (arg0
== error_mark_node
)
6734 if (TREE_CODE (arg0
) != INTEGER_CST
6735 || TREE_INT_CST_LOW (arg0
) & ~0x3)
6737 error ("argument to dss must be a 2-bit unsigned literal");
6741 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6742 op0
= copy_to_mode_reg (mode0
, op0
);
6744 emit_insn (gen_altivec_dss (op0
));
6747 case ALTIVEC_BUILTIN_COMPILETIME_ERROR
:
6748 arg0
= TREE_VALUE (arglist
);
6749 while (TREE_CODE (arg0
) == NOP_EXPR
|| TREE_CODE (arg0
) == ADDR_EXPR
)
6750 arg0
= TREE_OPERAND (arg0
, 0);
6751 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6752 TREE_STRING_POINTER (arg0
));
6757 /* Expand abs* operations. */
6758 d
= (struct builtin_description
*) bdesc_abs
;
6759 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
6760 if (d
->code
== fcode
)
6761 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
6763 /* Expand the AltiVec predicates. */
6764 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
6765 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
6766 if (dp
->code
== fcode
)
6767 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
6769 /* LV* are funky. We initialized them differently. */
6772 case ALTIVEC_BUILTIN_LVSL
:
6773 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl
,
6775 case ALTIVEC_BUILTIN_LVSR
:
6776 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr
,
6778 case ALTIVEC_BUILTIN_LVEBX
:
6779 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx
,
6781 case ALTIVEC_BUILTIN_LVEHX
:
6782 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx
,
6784 case ALTIVEC_BUILTIN_LVEWX
:
6785 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx
,
6787 case ALTIVEC_BUILTIN_LVXL
:
6788 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl
,
6790 case ALTIVEC_BUILTIN_LVX
:
6791 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx
,
6802 /* Binops that need to be initialized manually, but can be expanded
6803 automagically by rs6000_expand_binop_builtin. */
6804 static struct builtin_description bdesc_2arg_spe
[] =
6806 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
6807 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
6808 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
6809 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
6810 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
6811 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
6812 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
6813 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
6814 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
6815 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
6816 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
6817 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
6818 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
6819 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
6820 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
6821 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
6822 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
6823 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
6824 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
6825 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
6826 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
6827 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
6830 /* Expand the builtin in EXP and store the result in TARGET. Store
6831 true in *EXPANDEDP if we found a builtin to expand.
6833 This expands the SPE builtins that are not simple unary and binary
6836 spe_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6838 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6839 tree arglist
= TREE_OPERAND (exp
, 1);
6841 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6842 enum insn_code icode
;
6843 enum machine_mode tmode
, mode0
;
6845 struct builtin_description
*d
;
6850 /* Syntax check for a 5-bit unsigned immediate. */
6853 case SPE_BUILTIN_EVSTDD
:
6854 case SPE_BUILTIN_EVSTDH
:
6855 case SPE_BUILTIN_EVSTDW
:
6856 case SPE_BUILTIN_EVSTWHE
:
6857 case SPE_BUILTIN_EVSTWHO
:
6858 case SPE_BUILTIN_EVSTWWE
:
6859 case SPE_BUILTIN_EVSTWWO
:
6860 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6861 if (TREE_CODE (arg1
) != INTEGER_CST
6862 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6864 error ("argument 2 must be a 5-bit unsigned literal");
6872 /* The evsplat*i instructions are not quite generic. */
6875 case SPE_BUILTIN_EVSPLATFI
:
6876 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi
,
6878 case SPE_BUILTIN_EVSPLATI
:
6879 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati
,
6885 d
= (struct builtin_description
*) bdesc_2arg_spe
;
6886 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
6887 if (d
->code
== fcode
)
6888 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
6890 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6891 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
6892 if (d
->code
== fcode
)
6893 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
6895 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6896 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
6897 if (d
->code
== fcode
)
6898 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
6902 case SPE_BUILTIN_EVSTDDX
:
6903 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
6904 case SPE_BUILTIN_EVSTDHX
:
6905 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
6906 case SPE_BUILTIN_EVSTDWX
:
6907 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
6908 case SPE_BUILTIN_EVSTWHEX
:
6909 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
6910 case SPE_BUILTIN_EVSTWHOX
:
6911 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
6912 case SPE_BUILTIN_EVSTWWEX
:
6913 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
6914 case SPE_BUILTIN_EVSTWWOX
:
6915 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
6916 case SPE_BUILTIN_EVSTDD
:
6917 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
6918 case SPE_BUILTIN_EVSTDH
:
6919 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
6920 case SPE_BUILTIN_EVSTDW
:
6921 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
6922 case SPE_BUILTIN_EVSTWHE
:
6923 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
6924 case SPE_BUILTIN_EVSTWHO
:
6925 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
6926 case SPE_BUILTIN_EVSTWWE
:
6927 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
6928 case SPE_BUILTIN_EVSTWWO
:
6929 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
6930 case SPE_BUILTIN_MFSPEFSCR
:
6931 icode
= CODE_FOR_spe_mfspefscr
;
6932 tmode
= insn_data
[icode
].operand
[0].mode
;
6935 || GET_MODE (target
) != tmode
6936 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6937 target
= gen_reg_rtx (tmode
);
6939 pat
= GEN_FCN (icode
) (target
);
6944 case SPE_BUILTIN_MTSPEFSCR
:
6945 icode
= CODE_FOR_spe_mtspefscr
;
6946 arg0
= TREE_VALUE (arglist
);
6947 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6948 mode0
= insn_data
[icode
].operand
[0].mode
;
6950 if (arg0
== error_mark_node
)
6953 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6954 op0
= copy_to_mode_reg (mode0
, op0
);
6956 pat
= GEN_FCN (icode
) (op0
);
6969 spe_expand_predicate_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6971 rtx pat
, scratch
, tmp
;
6972 tree form
= TREE_VALUE (arglist
);
6973 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
6974 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6975 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6976 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6977 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6978 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6982 if (TREE_CODE (form
) != INTEGER_CST
)
6984 error ("argument 1 of __builtin_spe_predicate must be a constant");
6988 form_int
= TREE_INT_CST_LOW (form
);
6993 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6997 || GET_MODE (target
) != SImode
6998 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
6999 target
= gen_reg_rtx (SImode
);
7001 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7002 op0
= copy_to_mode_reg (mode0
, op0
);
7003 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
7004 op1
= copy_to_mode_reg (mode1
, op1
);
7006 scratch
= gen_reg_rtx (CCmode
);
7008 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7013 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7014 _lower_. We use one compare, but look in different bits of the
7015 CR for each variant.
7017 There are 2 elements in each SPE simd type (upper/lower). The CR
7018 bits are set as follows:
7020 BIT0 | BIT 1 | BIT 2 | BIT 3
7021 U | L | (U | L) | (U & L)
7023 So, for an "all" relationship, BIT 3 would be set.
7024 For an "any" relationship, BIT 2 would be set. Etc.
7026 Following traditional nomenclature, these bits map to:
7028 BIT0 | BIT 1 | BIT 2 | BIT 3
7031 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7036 /* All variant. OV bit. */
7038 /* We need to get to the OV bit, which is the ORDERED bit. We
7039 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7040 that's ugly and will trigger a validate_condition_mode abort.
7041 So let's just use another pattern. */
7042 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
7044 /* Any variant. EQ bit. */
7048 /* Upper variant. LT bit. */
7052 /* Lower variant. GT bit. */
7057 error ("argument 1 of __builtin_spe_predicate is out of range");
7061 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
7062 emit_move_insn (target
, tmp
);
7067 /* The evsel builtins look like this:
7069 e = __builtin_spe_evsel_OP (a, b, c, d);
7073 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7074 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7078 spe_expand_evsel_builtin (enum insn_code icode
, tree arglist
, rtx target
)
7081 tree arg0
= TREE_VALUE (arglist
);
7082 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
7083 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7084 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
7085 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7086 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
7087 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
7088 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
7089 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
7090 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
7095 if (arg0
== error_mark_node
|| arg1
== error_mark_node
7096 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
7100 || GET_MODE (target
) != mode0
7101 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
7102 target
= gen_reg_rtx (mode0
);
7104 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7105 op0
= copy_to_mode_reg (mode0
, op0
);
7106 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
7107 op1
= copy_to_mode_reg (mode0
, op1
);
7108 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
7109 op2
= copy_to_mode_reg (mode0
, op2
);
7110 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
7111 op3
= copy_to_mode_reg (mode0
, op3
);
7113 /* Generate the compare. */
7114 scratch
= gen_reg_rtx (CCmode
);
7115 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7120 if (mode0
== V2SImode
)
7121 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
7123 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
7128 /* Expand an expression EXP that calls a built-in function,
7129 with result going to TARGET if that's convenient
7130 (and in mode MODE if that's convenient).
7131 SUBTARGET may be used as the target for computing one of EXP's operands.
7132 IGNORE is nonzero if the value is to be ignored. */
7135 rs6000_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
7136 enum machine_mode mode ATTRIBUTE_UNUSED
,
7137 int ignore ATTRIBUTE_UNUSED
)
7139 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7140 tree arglist
= TREE_OPERAND (exp
, 1);
7141 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
7142 struct builtin_description
*d
;
7149 ret
= altivec_expand_builtin (exp
, target
, &success
);
7156 ret
= spe_expand_builtin (exp
, target
, &success
);
7162 if (TARGET_ALTIVEC
|| TARGET_SPE
)
7164 /* Handle simple unary operations. */
7165 d
= (struct builtin_description
*) bdesc_1arg
;
7166 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
7167 if (d
->code
== fcode
)
7168 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
7170 /* Handle simple binary operations. */
7171 d
= (struct builtin_description
*) bdesc_2arg
;
7172 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
7173 if (d
->code
== fcode
)
7174 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
7176 /* Handle simple ternary operations. */
7177 d
= (struct builtin_description
*) bdesc_3arg
;
7178 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
7179 if (d
->code
== fcode
)
7180 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
7188 build_opaque_vector_type (tree node
, int nunits
)
7190 node
= copy_node (node
);
7191 TYPE_MAIN_VARIANT (node
) = node
;
7192 return build_vector_type (node
, nunits
);
7196 rs6000_init_builtins (void)
7198 V2SI_type_node
= build_vector_type (intSI_type_node
, 2);
7199 V2SF_type_node
= build_vector_type (float_type_node
, 2);
7200 V4HI_type_node
= build_vector_type (intHI_type_node
, 4);
7201 V4SI_type_node
= build_vector_type (intSI_type_node
, 4);
7202 V4SF_type_node
= build_vector_type (float_type_node
, 4);
7203 V8HI_type_node
= build_vector_type (intHI_type_node
, 8);
7204 V16QI_type_node
= build_vector_type (intQI_type_node
, 16);
7206 unsigned_V16QI_type_node
= build_vector_type (unsigned_intQI_type_node
, 16);
7207 unsigned_V8HI_type_node
= build_vector_type (unsigned_intHI_type_node
, 8);
7208 unsigned_V4SI_type_node
= build_vector_type (unsigned_intSI_type_node
, 4);
7210 opaque_V2SF_type_node
= build_opaque_vector_type (float_type_node
, 2);
7211 opaque_V2SI_type_node
= build_opaque_vector_type (intSI_type_node
, 2);
7212 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
7214 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7215 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7216 'vector unsigned short'. */
7218 bool_char_type_node
= copy_node (unsigned_intQI_type_node
);
7219 TYPE_MAIN_VARIANT (bool_char_type_node
) = bool_char_type_node
;
7220 bool_short_type_node
= copy_node (unsigned_intHI_type_node
);
7221 TYPE_MAIN_VARIANT (bool_short_type_node
) = bool_short_type_node
;
7222 bool_int_type_node
= copy_node (unsigned_intSI_type_node
);
7223 TYPE_MAIN_VARIANT (bool_int_type_node
) = bool_int_type_node
;
7224 pixel_type_node
= copy_node (unsigned_intHI_type_node
);
7225 TYPE_MAIN_VARIANT (pixel_type_node
) = pixel_type_node
;
7227 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7228 get_identifier ("__bool char"),
7229 bool_char_type_node
));
7230 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7231 get_identifier ("__bool short"),
7232 bool_short_type_node
));
7233 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7234 get_identifier ("__bool int"),
7235 bool_int_type_node
));
7236 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7237 get_identifier ("__pixel"),
7240 bool_V16QI_type_node
= build_vector_type (bool_char_type_node
, 16);
7241 bool_V8HI_type_node
= build_vector_type (bool_short_type_node
, 8);
7242 bool_V4SI_type_node
= build_vector_type (bool_int_type_node
, 4);
7243 pixel_V8HI_type_node
= build_vector_type (pixel_type_node
, 8);
7245 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7246 get_identifier ("__vector unsigned char"),
7247 unsigned_V16QI_type_node
));
7248 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7249 get_identifier ("__vector signed char"),
7251 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7252 get_identifier ("__vector __bool char"),
7253 bool_V16QI_type_node
));
7255 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7256 get_identifier ("__vector unsigned short"),
7257 unsigned_V8HI_type_node
));
7258 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7259 get_identifier ("__vector signed short"),
7261 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7262 get_identifier ("__vector __bool short"),
7263 bool_V8HI_type_node
));
7265 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7266 get_identifier ("__vector unsigned int"),
7267 unsigned_V4SI_type_node
));
7268 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7269 get_identifier ("__vector signed int"),
7271 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7272 get_identifier ("__vector __bool int"),
7273 bool_V4SI_type_node
));
7275 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7276 get_identifier ("__vector float"),
7278 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7279 get_identifier ("__vector __pixel"),
7280 pixel_V8HI_type_node
));
7283 spe_init_builtins ();
7285 altivec_init_builtins ();
7286 if (TARGET_ALTIVEC
|| TARGET_SPE
)
7287 rs6000_common_init_builtins ();
7290 /* Search through a set of builtins and enable the mask bits.
7291 DESC is an array of builtins.
7292 SIZE is the total number of builtins.
7293 START is the builtin enum at which to start.
7294 END is the builtin enum at which to end. */
7296 enable_mask_for_builtins (struct builtin_description
*desc
, int size
,
7297 enum rs6000_builtins start
,
7298 enum rs6000_builtins end
)
7302 for (i
= 0; i
< size
; ++i
)
7303 if (desc
[i
].code
== start
)
7309 for (; i
< size
; ++i
)
7311 /* Flip all the bits on. */
7312 desc
[i
].mask
= target_flags
;
7313 if (desc
[i
].code
== end
)
7319 spe_init_builtins (void)
7321 tree endlink
= void_list_node
;
7322 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
7323 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
7324 struct builtin_description
*d
;
7327 tree v2si_ftype_4_v2si
7328 = build_function_type
7329 (opaque_V2SI_type_node
,
7330 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7331 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7332 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7333 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7336 tree v2sf_ftype_4_v2sf
7337 = build_function_type
7338 (opaque_V2SF_type_node
,
7339 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7340 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7341 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7342 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7345 tree int_ftype_int_v2si_v2si
7346 = build_function_type
7348 tree_cons (NULL_TREE
, integer_type_node
,
7349 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7350 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7353 tree int_ftype_int_v2sf_v2sf
7354 = build_function_type
7356 tree_cons (NULL_TREE
, integer_type_node
,
7357 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7358 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7361 tree void_ftype_v2si_puint_int
7362 = build_function_type (void_type_node
,
7363 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7364 tree_cons (NULL_TREE
, puint_type_node
,
7365 tree_cons (NULL_TREE
,
7369 tree void_ftype_v2si_puint_char
7370 = build_function_type (void_type_node
,
7371 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7372 tree_cons (NULL_TREE
, puint_type_node
,
7373 tree_cons (NULL_TREE
,
7377 tree void_ftype_v2si_pv2si_int
7378 = build_function_type (void_type_node
,
7379 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7380 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7381 tree_cons (NULL_TREE
,
7385 tree void_ftype_v2si_pv2si_char
7386 = build_function_type (void_type_node
,
7387 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7388 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7389 tree_cons (NULL_TREE
,
7394 = build_function_type (void_type_node
,
7395 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
7398 = build_function_type (integer_type_node
, endlink
);
7400 tree v2si_ftype_pv2si_int
7401 = build_function_type (opaque_V2SI_type_node
,
7402 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7403 tree_cons (NULL_TREE
, integer_type_node
,
7406 tree v2si_ftype_puint_int
7407 = build_function_type (opaque_V2SI_type_node
,
7408 tree_cons (NULL_TREE
, puint_type_node
,
7409 tree_cons (NULL_TREE
, integer_type_node
,
7412 tree v2si_ftype_pushort_int
7413 = build_function_type (opaque_V2SI_type_node
,
7414 tree_cons (NULL_TREE
, pushort_type_node
,
7415 tree_cons (NULL_TREE
, integer_type_node
,
7418 tree v2si_ftype_signed_char
7419 = build_function_type (opaque_V2SI_type_node
,
7420 tree_cons (NULL_TREE
, signed_char_type_node
,
7423 /* The initialization of the simple binary and unary builtins is
7424 done in rs6000_common_init_builtins, but we have to enable the
7425 mask bits here manually because we have run out of `target_flags'
7426 bits. We really need to redesign this mask business. */
7428 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
7429 ARRAY_SIZE (bdesc_2arg
),
7432 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
7433 ARRAY_SIZE (bdesc_1arg
),
7435 SPE_BUILTIN_EVSUBFUSIAAW
);
7436 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
7437 ARRAY_SIZE (bdesc_spe_predicates
),
7438 SPE_BUILTIN_EVCMPEQ
,
7439 SPE_BUILTIN_EVFSTSTLT
);
7440 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
7441 ARRAY_SIZE (bdesc_spe_evsel
),
7442 SPE_BUILTIN_EVSEL_CMPGTS
,
7443 SPE_BUILTIN_EVSEL_FSTSTEQ
);
7445 (*lang_hooks
.decls
.pushdecl
)
7446 (build_decl (TYPE_DECL
, get_identifier ("__ev64_opaque__"),
7447 opaque_V2SI_type_node
));
7449 /* Initialize irregular SPE builtins. */
7451 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
7452 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
7453 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
7454 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
7455 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
7456 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
7457 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
7458 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
7459 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
7460 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
7461 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
7462 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
7463 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
7464 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
7465 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
7466 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
7467 def_builtin (target_flags
, "__builtin_spe_evsplatfi", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATFI
);
7468 def_builtin (target_flags
, "__builtin_spe_evsplati", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATI
);
7471 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
7472 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
7473 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
7474 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
7475 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
7476 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
7477 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
7478 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
7479 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
7480 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
7481 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
7482 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
7483 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
7484 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
7485 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
7486 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
7487 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
7488 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
7489 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
7490 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
7491 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
7492 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
7495 d
= (struct builtin_description
*) bdesc_spe_predicates
;
7496 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
7500 switch (insn_data
[d
->icode
].operand
[1].mode
)
7503 type
= int_ftype_int_v2si_v2si
;
7506 type
= int_ftype_int_v2sf_v2sf
;
7512 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7515 /* Evsel predicates. */
7516 d
= (struct builtin_description
*) bdesc_spe_evsel
;
7517 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
7521 switch (insn_data
[d
->icode
].operand
[1].mode
)
7524 type
= v2si_ftype_4_v2si
;
7527 type
= v2sf_ftype_4_v2sf
;
7533 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7538 altivec_init_builtins (void)
7540 struct builtin_description
*d
;
7541 struct builtin_description_predicates
*dp
;
7543 tree pfloat_type_node
= build_pointer_type (float_type_node
);
7544 tree pint_type_node
= build_pointer_type (integer_type_node
);
7545 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
7546 tree pchar_type_node
= build_pointer_type (char_type_node
);
7548 tree pvoid_type_node
= build_pointer_type (void_type_node
);
7550 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
7551 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
7552 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
7553 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
7555 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
7557 tree int_ftype_int_v4si_v4si
7558 = build_function_type_list (integer_type_node
,
7559 integer_type_node
, V4SI_type_node
,
7560 V4SI_type_node
, NULL_TREE
);
7561 tree v4sf_ftype_pcfloat
7562 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
7563 tree void_ftype_pfloat_v4sf
7564 = build_function_type_list (void_type_node
,
7565 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
7566 tree v4si_ftype_pcint
7567 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
7568 tree void_ftype_pint_v4si
7569 = build_function_type_list (void_type_node
,
7570 pint_type_node
, V4SI_type_node
, NULL_TREE
);
7571 tree v8hi_ftype_pcshort
7572 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
7573 tree void_ftype_pshort_v8hi
7574 = build_function_type_list (void_type_node
,
7575 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
7576 tree v16qi_ftype_pcchar
7577 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
7578 tree void_ftype_pchar_v16qi
7579 = build_function_type_list (void_type_node
,
7580 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
7581 tree void_ftype_v4si
7582 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
7583 tree v8hi_ftype_void
7584 = build_function_type (V8HI_type_node
, void_list_node
);
7585 tree void_ftype_void
7586 = build_function_type (void_type_node
, void_list_node
);
7588 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
7590 tree v16qi_ftype_long_pcvoid
7591 = build_function_type_list (V16QI_type_node
,
7592 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7593 tree v8hi_ftype_long_pcvoid
7594 = build_function_type_list (V8HI_type_node
,
7595 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7596 tree v4si_ftype_long_pcvoid
7597 = build_function_type_list (V4SI_type_node
,
7598 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7600 tree void_ftype_v4si_long_pvoid
7601 = build_function_type_list (void_type_node
,
7602 V4SI_type_node
, long_integer_type_node
,
7603 pvoid_type_node
, NULL_TREE
);
7604 tree void_ftype_v16qi_long_pvoid
7605 = build_function_type_list (void_type_node
,
7606 V16QI_type_node
, long_integer_type_node
,
7607 pvoid_type_node
, NULL_TREE
);
7608 tree void_ftype_v8hi_long_pvoid
7609 = build_function_type_list (void_type_node
,
7610 V8HI_type_node
, long_integer_type_node
,
7611 pvoid_type_node
, NULL_TREE
);
7612 tree int_ftype_int_v8hi_v8hi
7613 = build_function_type_list (integer_type_node
,
7614 integer_type_node
, V8HI_type_node
,
7615 V8HI_type_node
, NULL_TREE
);
7616 tree int_ftype_int_v16qi_v16qi
7617 = build_function_type_list (integer_type_node
,
7618 integer_type_node
, V16QI_type_node
,
7619 V16QI_type_node
, NULL_TREE
);
7620 tree int_ftype_int_v4sf_v4sf
7621 = build_function_type_list (integer_type_node
,
7622 integer_type_node
, V4SF_type_node
,
7623 V4SF_type_node
, NULL_TREE
);
7624 tree v4si_ftype_v4si
7625 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7626 tree v8hi_ftype_v8hi
7627 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7628 tree v16qi_ftype_v16qi
7629 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7630 tree v4sf_ftype_v4sf
7631 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7632 tree void_ftype_pcvoid_int_int
7633 = build_function_type_list (void_type_node
,
7634 pcvoid_type_node
, integer_type_node
,
7635 integer_type_node
, NULL_TREE
);
7636 tree int_ftype_pcchar
7637 = build_function_type_list (integer_type_node
,
7638 pcchar_type_node
, NULL_TREE
);
7640 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
7641 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
7642 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
7643 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
7644 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
7645 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
7646 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
7647 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
7648 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
7649 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
7650 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
7651 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
7652 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
7653 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
7654 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
7655 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
7656 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
7657 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
7658 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
7659 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
7660 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
7661 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
7662 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
7663 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
7664 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
7665 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
7666 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVX
);
7667 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVX
);
7668 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
7669 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVXL
);
7670 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
7671 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
7673 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7674 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_compiletime_error", int_ftype_pcchar
,
7675 ALTIVEC_BUILTIN_COMPILETIME_ERROR
);
7677 /* Add the DST variants. */
7678 d
= (struct builtin_description
*) bdesc_dst
;
7679 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
7680 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_int
, d
->code
);
7682 /* Initialize the predicates. */
7683 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
7684 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
7686 enum machine_mode mode1
;
7689 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
7694 type
= int_ftype_int_v4si_v4si
;
7697 type
= int_ftype_int_v8hi_v8hi
;
7700 type
= int_ftype_int_v16qi_v16qi
;
7703 type
= int_ftype_int_v4sf_v4sf
;
7709 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
7712 /* Initialize the abs* operators. */
7713 d
= (struct builtin_description
*) bdesc_abs
;
7714 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
7716 enum machine_mode mode0
;
7719 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7724 type
= v4si_ftype_v4si
;
7727 type
= v8hi_ftype_v8hi
;
7730 type
= v16qi_ftype_v16qi
;
7733 type
= v4sf_ftype_v4sf
;
7739 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7744 rs6000_common_init_builtins (void)
7746 struct builtin_description
*d
;
7749 tree v4sf_ftype_v4sf_v4sf_v16qi
7750 = build_function_type_list (V4SF_type_node
,
7751 V4SF_type_node
, V4SF_type_node
,
7752 V16QI_type_node
, NULL_TREE
);
7753 tree v4si_ftype_v4si_v4si_v16qi
7754 = build_function_type_list (V4SI_type_node
,
7755 V4SI_type_node
, V4SI_type_node
,
7756 V16QI_type_node
, NULL_TREE
);
7757 tree v8hi_ftype_v8hi_v8hi_v16qi
7758 = build_function_type_list (V8HI_type_node
,
7759 V8HI_type_node
, V8HI_type_node
,
7760 V16QI_type_node
, NULL_TREE
);
7761 tree v16qi_ftype_v16qi_v16qi_v16qi
7762 = build_function_type_list (V16QI_type_node
,
7763 V16QI_type_node
, V16QI_type_node
,
7764 V16QI_type_node
, NULL_TREE
);
7766 = build_function_type_list (V4SI_type_node
, integer_type_node
, NULL_TREE
);
7768 = build_function_type_list (V8HI_type_node
, integer_type_node
, NULL_TREE
);
7769 tree v16qi_ftype_int
7770 = build_function_type_list (V16QI_type_node
, integer_type_node
, NULL_TREE
);
7771 tree v8hi_ftype_v16qi
7772 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
7773 tree v4sf_ftype_v4sf
7774 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7776 tree v2si_ftype_v2si_v2si
7777 = build_function_type_list (opaque_V2SI_type_node
,
7778 opaque_V2SI_type_node
,
7779 opaque_V2SI_type_node
, NULL_TREE
);
7781 tree v2sf_ftype_v2sf_v2sf
7782 = build_function_type_list (opaque_V2SF_type_node
,
7783 opaque_V2SF_type_node
,
7784 opaque_V2SF_type_node
, NULL_TREE
);
7786 tree v2si_ftype_int_int
7787 = build_function_type_list (opaque_V2SI_type_node
,
7788 integer_type_node
, integer_type_node
,
7791 tree v2si_ftype_v2si
7792 = build_function_type_list (opaque_V2SI_type_node
,
7793 opaque_V2SI_type_node
, NULL_TREE
);
7795 tree v2sf_ftype_v2sf
7796 = build_function_type_list (opaque_V2SF_type_node
,
7797 opaque_V2SF_type_node
, NULL_TREE
);
7799 tree v2sf_ftype_v2si
7800 = build_function_type_list (opaque_V2SF_type_node
,
7801 opaque_V2SI_type_node
, NULL_TREE
);
7803 tree v2si_ftype_v2sf
7804 = build_function_type_list (opaque_V2SI_type_node
,
7805 opaque_V2SF_type_node
, NULL_TREE
);
7807 tree v2si_ftype_v2si_char
7808 = build_function_type_list (opaque_V2SI_type_node
,
7809 opaque_V2SI_type_node
,
7810 char_type_node
, NULL_TREE
);
7812 tree v2si_ftype_int_char
7813 = build_function_type_list (opaque_V2SI_type_node
,
7814 integer_type_node
, char_type_node
, NULL_TREE
);
7816 tree v2si_ftype_char
7817 = build_function_type_list (opaque_V2SI_type_node
,
7818 char_type_node
, NULL_TREE
);
7820 tree int_ftype_int_int
7821 = build_function_type_list (integer_type_node
,
7822 integer_type_node
, integer_type_node
,
7825 tree v4si_ftype_v4si_v4si
7826 = build_function_type_list (V4SI_type_node
,
7827 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7828 tree v4sf_ftype_v4si_int
7829 = build_function_type_list (V4SF_type_node
,
7830 V4SI_type_node
, integer_type_node
, NULL_TREE
);
7831 tree v4si_ftype_v4sf_int
7832 = build_function_type_list (V4SI_type_node
,
7833 V4SF_type_node
, integer_type_node
, NULL_TREE
);
7834 tree v4si_ftype_v4si_int
7835 = build_function_type_list (V4SI_type_node
,
7836 V4SI_type_node
, integer_type_node
, NULL_TREE
);
7837 tree v8hi_ftype_v8hi_int
7838 = build_function_type_list (V8HI_type_node
,
7839 V8HI_type_node
, integer_type_node
, NULL_TREE
);
7840 tree v16qi_ftype_v16qi_int
7841 = build_function_type_list (V16QI_type_node
,
7842 V16QI_type_node
, integer_type_node
, NULL_TREE
);
7843 tree v16qi_ftype_v16qi_v16qi_int
7844 = build_function_type_list (V16QI_type_node
,
7845 V16QI_type_node
, V16QI_type_node
,
7846 integer_type_node
, NULL_TREE
);
7847 tree v8hi_ftype_v8hi_v8hi_int
7848 = build_function_type_list (V8HI_type_node
,
7849 V8HI_type_node
, V8HI_type_node
,
7850 integer_type_node
, NULL_TREE
);
7851 tree v4si_ftype_v4si_v4si_int
7852 = build_function_type_list (V4SI_type_node
,
7853 V4SI_type_node
, V4SI_type_node
,
7854 integer_type_node
, NULL_TREE
);
7855 tree v4sf_ftype_v4sf_v4sf_int
7856 = build_function_type_list (V4SF_type_node
,
7857 V4SF_type_node
, V4SF_type_node
,
7858 integer_type_node
, NULL_TREE
);
7859 tree v4sf_ftype_v4sf_v4sf
7860 = build_function_type_list (V4SF_type_node
,
7861 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7862 tree v4sf_ftype_v4sf_v4sf_v4si
7863 = build_function_type_list (V4SF_type_node
,
7864 V4SF_type_node
, V4SF_type_node
,
7865 V4SI_type_node
, NULL_TREE
);
7866 tree v4sf_ftype_v4sf_v4sf_v4sf
7867 = build_function_type_list (V4SF_type_node
,
7868 V4SF_type_node
, V4SF_type_node
,
7869 V4SF_type_node
, NULL_TREE
);
7870 tree v4si_ftype_v4si_v4si_v4si
7871 = build_function_type_list (V4SI_type_node
,
7872 V4SI_type_node
, V4SI_type_node
,
7873 V4SI_type_node
, NULL_TREE
);
7874 tree v8hi_ftype_v8hi_v8hi
7875 = build_function_type_list (V8HI_type_node
,
7876 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7877 tree v8hi_ftype_v8hi_v8hi_v8hi
7878 = build_function_type_list (V8HI_type_node
,
7879 V8HI_type_node
, V8HI_type_node
,
7880 V8HI_type_node
, NULL_TREE
);
7881 tree v4si_ftype_v8hi_v8hi_v4si
7882 = build_function_type_list (V4SI_type_node
,
7883 V8HI_type_node
, V8HI_type_node
,
7884 V4SI_type_node
, NULL_TREE
);
7885 tree v4si_ftype_v16qi_v16qi_v4si
7886 = build_function_type_list (V4SI_type_node
,
7887 V16QI_type_node
, V16QI_type_node
,
7888 V4SI_type_node
, NULL_TREE
);
7889 tree v16qi_ftype_v16qi_v16qi
7890 = build_function_type_list (V16QI_type_node
,
7891 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7892 tree v4si_ftype_v4sf_v4sf
7893 = build_function_type_list (V4SI_type_node
,
7894 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7895 tree v8hi_ftype_v16qi_v16qi
7896 = build_function_type_list (V8HI_type_node
,
7897 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7898 tree v4si_ftype_v8hi_v8hi
7899 = build_function_type_list (V4SI_type_node
,
7900 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7901 tree v8hi_ftype_v4si_v4si
7902 = build_function_type_list (V8HI_type_node
,
7903 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7904 tree v16qi_ftype_v8hi_v8hi
7905 = build_function_type_list (V16QI_type_node
,
7906 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7907 tree v4si_ftype_v16qi_v4si
7908 = build_function_type_list (V4SI_type_node
,
7909 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
7910 tree v4si_ftype_v16qi_v16qi
7911 = build_function_type_list (V4SI_type_node
,
7912 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7913 tree v4si_ftype_v8hi_v4si
7914 = build_function_type_list (V4SI_type_node
,
7915 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
7916 tree v4si_ftype_v8hi
7917 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
7918 tree int_ftype_v4si_v4si
7919 = build_function_type_list (integer_type_node
,
7920 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7921 tree int_ftype_v4sf_v4sf
7922 = build_function_type_list (integer_type_node
,
7923 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7924 tree int_ftype_v16qi_v16qi
7925 = build_function_type_list (integer_type_node
,
7926 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7927 tree int_ftype_v8hi_v8hi
7928 = build_function_type_list (integer_type_node
,
7929 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7931 /* Add the simple ternary operators. */
7932 d
= (struct builtin_description
*) bdesc_3arg
;
7933 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
7936 enum machine_mode mode0
, mode1
, mode2
, mode3
;
7939 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
7942 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7943 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
7944 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
7945 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
7947 /* When all four are of the same mode. */
7948 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
7953 type
= v4si_ftype_v4si_v4si_v4si
;
7956 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
7959 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
7962 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
7968 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
7973 type
= v4si_ftype_v4si_v4si_v16qi
;
7976 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
7979 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
7982 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
7988 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
7989 && mode3
== V4SImode
)
7990 type
= v4si_ftype_v16qi_v16qi_v4si
;
7991 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
7992 && mode3
== V4SImode
)
7993 type
= v4si_ftype_v8hi_v8hi_v4si
;
7994 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
7995 && mode3
== V4SImode
)
7996 type
= v4sf_ftype_v4sf_v4sf_v4si
;
7998 /* vchar, vchar, vchar, 4 bit literal. */
7999 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
8001 type
= v16qi_ftype_v16qi_v16qi_int
;
8003 /* vshort, vshort, vshort, 4 bit literal. */
8004 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
8006 type
= v8hi_ftype_v8hi_v8hi_int
;
8008 /* vint, vint, vint, 4 bit literal. */
8009 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
8011 type
= v4si_ftype_v4si_v4si_int
;
8013 /* vfloat, vfloat, vfloat, 4 bit literal. */
8014 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
8016 type
= v4sf_ftype_v4sf_v4sf_int
;
8021 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8024 /* Add the simple binary operators. */
8025 d
= (struct builtin_description
*) bdesc_2arg
;
8026 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
8028 enum machine_mode mode0
, mode1
, mode2
;
8031 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8034 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8035 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8036 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
8038 /* When all three operands are of the same mode. */
8039 if (mode0
== mode1
&& mode1
== mode2
)
8044 type
= v4sf_ftype_v4sf_v4sf
;
8047 type
= v4si_ftype_v4si_v4si
;
8050 type
= v16qi_ftype_v16qi_v16qi
;
8053 type
= v8hi_ftype_v8hi_v8hi
;
8056 type
= v2si_ftype_v2si_v2si
;
8059 type
= v2sf_ftype_v2sf_v2sf
;
8062 type
= int_ftype_int_int
;
8069 /* A few other combos we really don't want to do manually. */
8071 /* vint, vfloat, vfloat. */
8072 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
8073 type
= v4si_ftype_v4sf_v4sf
;
8075 /* vshort, vchar, vchar. */
8076 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8077 type
= v8hi_ftype_v16qi_v16qi
;
8079 /* vint, vshort, vshort. */
8080 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8081 type
= v4si_ftype_v8hi_v8hi
;
8083 /* vshort, vint, vint. */
8084 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
8085 type
= v8hi_ftype_v4si_v4si
;
8087 /* vchar, vshort, vshort. */
8088 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8089 type
= v16qi_ftype_v8hi_v8hi
;
8091 /* vint, vchar, vint. */
8092 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
8093 type
= v4si_ftype_v16qi_v4si
;
8095 /* vint, vchar, vchar. */
8096 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8097 type
= v4si_ftype_v16qi_v16qi
;
8099 /* vint, vshort, vint. */
8100 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
8101 type
= v4si_ftype_v8hi_v4si
;
8103 /* vint, vint, 5 bit literal. */
8104 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
8105 type
= v4si_ftype_v4si_int
;
8107 /* vshort, vshort, 5 bit literal. */
8108 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
8109 type
= v8hi_ftype_v8hi_int
;
8111 /* vchar, vchar, 5 bit literal. */
8112 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
8113 type
= v16qi_ftype_v16qi_int
;
8115 /* vfloat, vint, 5 bit literal. */
8116 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
8117 type
= v4sf_ftype_v4si_int
;
8119 /* vint, vfloat, 5 bit literal. */
8120 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
8121 type
= v4si_ftype_v4sf_int
;
8123 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
8124 type
= v2si_ftype_int_int
;
8126 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
8127 type
= v2si_ftype_v2si_char
;
8129 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
8130 type
= v2si_ftype_int_char
;
8133 else if (mode0
== SImode
)
8138 type
= int_ftype_v4si_v4si
;
8141 type
= int_ftype_v4sf_v4sf
;
8144 type
= int_ftype_v16qi_v16qi
;
8147 type
= int_ftype_v8hi_v8hi
;
8157 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8160 /* Add the simple unary operators. */
8161 d
= (struct builtin_description
*) bdesc_1arg
;
8162 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
8164 enum machine_mode mode0
, mode1
;
8167 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8170 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8171 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8173 if (mode0
== V4SImode
&& mode1
== QImode
)
8174 type
= v4si_ftype_int
;
8175 else if (mode0
== V8HImode
&& mode1
== QImode
)
8176 type
= v8hi_ftype_int
;
8177 else if (mode0
== V16QImode
&& mode1
== QImode
)
8178 type
= v16qi_ftype_int
;
8179 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
8180 type
= v4sf_ftype_v4sf
;
8181 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
8182 type
= v8hi_ftype_v16qi
;
8183 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
8184 type
= v4si_ftype_v8hi
;
8185 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
8186 type
= v2si_ftype_v2si
;
8187 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
8188 type
= v2sf_ftype_v2sf
;
8189 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
8190 type
= v2sf_ftype_v2si
;
8191 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
8192 type
= v2si_ftype_v2sf
;
8193 else if (mode0
== V2SImode
&& mode1
== QImode
)
8194 type
= v2si_ftype_char
;
8198 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8203 rs6000_init_libfuncs (void)
8205 if (!TARGET_HARD_FLOAT
)
8208 if (DEFAULT_ABI
!= ABI_V4
)
8210 if (TARGET_XCOFF
&& ! TARGET_POWER2
&& ! TARGET_POWERPC
)
8212 /* AIX library routines for float->int conversion. */
8213 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, "__itrunc");
8214 set_conv_libfunc (ufix_optab
, SImode
, DFmode
, "__uitrunc");
8215 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_qitrunc");
8216 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_quitrunc");
8219 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8220 set_optab_libfunc (add_optab
, TFmode
, "_xlqadd");
8221 set_optab_libfunc (sub_optab
, TFmode
, "_xlqsub");
8222 set_optab_libfunc (smul_optab
, TFmode
, "_xlqmul");
8223 set_optab_libfunc (sdiv_optab
, TFmode
, "_xlqdiv");
8227 /* 32-bit SVR4 quad floating point routines. */
8229 set_optab_libfunc (add_optab
, TFmode
, "_q_add");
8230 set_optab_libfunc (sub_optab
, TFmode
, "_q_sub");
8231 set_optab_libfunc (neg_optab
, TFmode
, "_q_neg");
8232 set_optab_libfunc (smul_optab
, TFmode
, "_q_mul");
8233 set_optab_libfunc (sdiv_optab
, TFmode
, "_q_div");
8234 if (TARGET_PPC_GPOPT
|| TARGET_POWER2
)
8235 set_optab_libfunc (sqrt_optab
, TFmode
, "_q_sqrt");
8237 set_optab_libfunc (eq_optab
, TFmode
, "_q_feq");
8238 set_optab_libfunc (ne_optab
, TFmode
, "_q_fne");
8239 set_optab_libfunc (gt_optab
, TFmode
, "_q_fgt");
8240 set_optab_libfunc (ge_optab
, TFmode
, "_q_fge");
8241 set_optab_libfunc (lt_optab
, TFmode
, "_q_flt");
8242 set_optab_libfunc (le_optab
, TFmode
, "_q_fle");
8244 set_conv_libfunc (sext_optab
, TFmode
, SFmode
, "_q_stoq");
8245 set_conv_libfunc (sext_optab
, TFmode
, DFmode
, "_q_dtoq");
8246 set_conv_libfunc (trunc_optab
, SFmode
, TFmode
, "_q_qtos");
8247 set_conv_libfunc (trunc_optab
, DFmode
, TFmode
, "_q_qtod");
8248 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_q_qtoi");
8249 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_q_qtou");
8250 set_conv_libfunc (sfloat_optab
, TFmode
, SImode
, "_q_itoq");
8254 /* Expand a block move operation, and return 1 if successful. Return 0
8255 if we should let the compiler generate normal code.
8257 operands[0] is the destination
8258 operands[1] is the source
8259 operands[2] is the length
8260 operands[3] is the alignment */
8262 #define MAX_MOVE_REG 4
8265 expand_block_move (rtx operands
[])
8267 rtx orig_dest
= operands
[0];
8268 rtx orig_src
= operands
[1];
8269 rtx bytes_rtx
= operands
[2];
8270 rtx align_rtx
= operands
[3];
8271 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
8276 rtx stores
[MAX_MOVE_REG
];
8279 /* If this is not a fixed size move, just call memcpy */
8283 /* If this is not a fixed size alignment, abort */
8284 if (GET_CODE (align_rtx
) != CONST_INT
)
8286 align
= INTVAL (align_rtx
);
8288 /* Anything to move? */
8289 bytes
= INTVAL (bytes_rtx
);
8293 /* store_one_arg depends on expand_block_move to handle at least the size of
8294 reg_parm_stack_space. */
8295 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
8298 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
8301 rtx (*movmemsi
) (rtx
, rtx
, rtx
, rtx
);
8302 rtx (*mov
) (rtx
, rtx
);
8304 enum machine_mode mode
= BLKmode
;
8308 && bytes
> 24 /* move up to 32 bytes at a time */
8316 && ! fixed_regs
[12])
8318 move_bytes
= (bytes
> 32) ? 32 : bytes
;
8319 gen_func
.movmemsi
= gen_movmemsi_8reg
;
8321 else if (TARGET_STRING
8322 && bytes
> 16 /* move up to 24 bytes at a time */
8328 && ! fixed_regs
[10])
8330 move_bytes
= (bytes
> 24) ? 24 : bytes
;
8331 gen_func
.movmemsi
= gen_movmemsi_6reg
;
8333 else if (TARGET_STRING
8334 && bytes
> 8 /* move up to 16 bytes at a time */
8340 move_bytes
= (bytes
> 16) ? 16 : bytes
;
8341 gen_func
.movmemsi
= gen_movmemsi_4reg
;
8343 else if (bytes
>= 8 && TARGET_POWERPC64
8344 /* 64-bit loads and stores require word-aligned
8346 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
8350 gen_func
.mov
= gen_movdi
;
8352 else if (TARGET_STRING
&& bytes
> 4 && !TARGET_POWERPC64
)
8353 { /* move up to 8 bytes at a time */
8354 move_bytes
= (bytes
> 8) ? 8 : bytes
;
8355 gen_func
.movmemsi
= gen_movmemsi_2reg
;
8357 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
8358 { /* move 4 bytes */
8361 gen_func
.mov
= gen_movsi
;
8363 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
8364 { /* move 2 bytes */
8367 gen_func
.mov
= gen_movhi
;
8369 else if (TARGET_STRING
&& bytes
> 1)
8370 { /* move up to 4 bytes at a time */
8371 move_bytes
= (bytes
> 4) ? 4 : bytes
;
8372 gen_func
.movmemsi
= gen_movmemsi_1reg
;
8374 else /* move 1 byte at a time */
8378 gen_func
.mov
= gen_movqi
;
8381 src
= adjust_address (orig_src
, mode
, offset
);
8382 dest
= adjust_address (orig_dest
, mode
, offset
);
8384 if (mode
!= BLKmode
)
8386 rtx tmp_reg
= gen_reg_rtx (mode
);
8388 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
8389 stores
[num_reg
++] = (*gen_func
.mov
) (dest
, tmp_reg
);
8392 if (mode
== BLKmode
|| num_reg
>= MAX_MOVE_REG
|| bytes
== move_bytes
)
8395 for (i
= 0; i
< num_reg
; i
++)
8396 emit_insn (stores
[i
]);
8400 if (mode
== BLKmode
)
8402 /* Move the address into scratch registers. The movmemsi
8403 patterns require zero offset. */
8404 if (!REG_P (XEXP (src
, 0)))
8406 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
8407 src
= replace_equiv_address (src
, src_reg
);
8409 set_mem_size (src
, GEN_INT (move_bytes
));
8411 if (!REG_P (XEXP (dest
, 0)))
8413 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
8414 dest
= replace_equiv_address (dest
, dest_reg
);
8416 set_mem_size (dest
, GEN_INT (move_bytes
));
8418 emit_insn ((*gen_func
.movmemsi
) (dest
, src
,
8419 GEN_INT (move_bytes
& 31),
8428 /* Return 1 if OP is a load multiple operation. It is known to be a
8429 PARALLEL and the first section will be tested. */
8432 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8434 int count
= XVECLEN (op
, 0);
8435 unsigned int dest_regno
;
8439 /* Perform a quick check so we don't blow up below. */
8441 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8442 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
8443 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
8446 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
8447 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
8449 for (i
= 1; i
< count
; i
++)
8451 rtx elt
= XVECEXP (op
, 0, i
);
8453 if (GET_CODE (elt
) != SET
8454 || GET_CODE (SET_DEST (elt
)) != REG
8455 || GET_MODE (SET_DEST (elt
)) != SImode
8456 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
8457 || GET_CODE (SET_SRC (elt
)) != MEM
8458 || GET_MODE (SET_SRC (elt
)) != SImode
8459 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
8460 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
8461 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
8462 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
8469 /* Similar, but tests for store multiple. Here, the second vector element
8470 is a CLOBBER. It will be tested later. */
8473 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8475 int count
= XVECLEN (op
, 0) - 1;
8476 unsigned int src_regno
;
8480 /* Perform a quick check so we don't blow up below. */
8482 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8483 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
8484 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
8487 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
8488 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
8490 for (i
= 1; i
< count
; i
++)
8492 rtx elt
= XVECEXP (op
, 0, i
+ 1);
8494 if (GET_CODE (elt
) != SET
8495 || GET_CODE (SET_SRC (elt
)) != REG
8496 || GET_MODE (SET_SRC (elt
)) != SImode
8497 || REGNO (SET_SRC (elt
)) != src_regno
+ i
8498 || GET_CODE (SET_DEST (elt
)) != MEM
8499 || GET_MODE (SET_DEST (elt
)) != SImode
8500 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
8501 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
8502 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
8503 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
8510 /* Return a string to perform a load_multiple operation.
8511 operands[0] is the vector.
8512 operands[1] is the source address.
8513 operands[2] is the first destination register. */
8516 rs6000_output_load_multiple (rtx operands
[3])
8518 /* We have to handle the case where the pseudo used to contain the address
8519 is assigned to one of the output registers. */
8521 int words
= XVECLEN (operands
[0], 0);
8524 if (XVECLEN (operands
[0], 0) == 1)
8525 return "{l|lwz} %2,0(%1)";
8527 for (i
= 0; i
< words
; i
++)
8528 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
8529 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
8533 xop
[0] = GEN_INT (4 * (words
-1));
8534 xop
[1] = operands
[1];
8535 xop
[2] = operands
[2];
8536 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
8541 xop
[0] = GEN_INT (4 * (words
-1));
8542 xop
[1] = operands
[1];
8543 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
8544 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
8549 for (j
= 0; j
< words
; j
++)
8552 xop
[0] = GEN_INT (j
* 4);
8553 xop
[1] = operands
[1];
8554 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
8555 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
8557 xop
[0] = GEN_INT (i
* 4);
8558 xop
[1] = operands
[1];
8559 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
8564 return "{lsi|lswi} %2,%1,%N0";
8567 /* Return 1 for a parallel vrsave operation. */
8570 vrsave_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8572 int count
= XVECLEN (op
, 0);
8573 unsigned int dest_regno
, src_regno
;
8577 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8578 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
8579 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
8582 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
8583 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
8585 if (dest_regno
!= VRSAVE_REGNO
8586 && src_regno
!= VRSAVE_REGNO
)
8589 for (i
= 1; i
< count
; i
++)
8591 rtx elt
= XVECEXP (op
, 0, i
);
8593 if (GET_CODE (elt
) != CLOBBER
8594 && GET_CODE (elt
) != SET
)
8601 /* Return 1 for an PARALLEL suitable for mfcr. */
8604 mfcr_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8606 int count
= XVECLEN (op
, 0);
8609 /* Perform a quick check so we don't blow up below. */
8611 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8612 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
8613 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
8616 for (i
= 0; i
< count
; i
++)
8618 rtx exp
= XVECEXP (op
, 0, i
);
8623 src_reg
= XVECEXP (SET_SRC (exp
), 0, 0);
8625 if (GET_CODE (src_reg
) != REG
8626 || GET_MODE (src_reg
) != CCmode
8627 || ! CR_REGNO_P (REGNO (src_reg
)))
8630 if (GET_CODE (exp
) != SET
8631 || GET_CODE (SET_DEST (exp
)) != REG
8632 || GET_MODE (SET_DEST (exp
)) != SImode
8633 || ! INT_REGNO_P (REGNO (SET_DEST (exp
))))
8635 unspec
= SET_SRC (exp
);
8636 maskval
= 1 << (MAX_CR_REGNO
- REGNO (src_reg
));
8638 if (GET_CODE (unspec
) != UNSPEC
8639 || XINT (unspec
, 1) != UNSPEC_MOVESI_FROM_CR
8640 || XVECLEN (unspec
, 0) != 2
8641 || XVECEXP (unspec
, 0, 0) != src_reg
8642 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
8643 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
8649 /* Return 1 for an PARALLEL suitable for mtcrf. */
8652 mtcrf_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8654 int count
= XVECLEN (op
, 0);
8658 /* Perform a quick check so we don't blow up below. */
8660 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8661 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
8662 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
8664 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
8666 if (GET_CODE (src_reg
) != REG
8667 || GET_MODE (src_reg
) != SImode
8668 || ! INT_REGNO_P (REGNO (src_reg
)))
8671 for (i
= 0; i
< count
; i
++)
8673 rtx exp
= XVECEXP (op
, 0, i
);
8677 if (GET_CODE (exp
) != SET
8678 || GET_CODE (SET_DEST (exp
)) != REG
8679 || GET_MODE (SET_DEST (exp
)) != CCmode
8680 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
8682 unspec
= SET_SRC (exp
);
8683 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
8685 if (GET_CODE (unspec
) != UNSPEC
8686 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
8687 || XVECLEN (unspec
, 0) != 2
8688 || XVECEXP (unspec
, 0, 0) != src_reg
8689 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
8690 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
8696 /* Return 1 for an PARALLEL suitable for lmw. */
8699 lmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8701 int count
= XVECLEN (op
, 0);
8702 unsigned int dest_regno
;
8704 unsigned int base_regno
;
8705 HOST_WIDE_INT offset
;
8708 /* Perform a quick check so we don't blow up below. */
8710 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8711 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
8712 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
8715 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
8716 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
8719 || count
!= 32 - (int) dest_regno
)
8722 if (legitimate_indirect_address_p (src_addr
, 0))
8725 base_regno
= REGNO (src_addr
);
8726 if (base_regno
== 0)
8729 else if (rs6000_legitimate_offset_address_p (SImode
, src_addr
, 0))
8731 offset
= INTVAL (XEXP (src_addr
, 1));
8732 base_regno
= REGNO (XEXP (src_addr
, 0));
8737 for (i
= 0; i
< count
; i
++)
8739 rtx elt
= XVECEXP (op
, 0, i
);
8742 HOST_WIDE_INT newoffset
;
8744 if (GET_CODE (elt
) != SET
8745 || GET_CODE (SET_DEST (elt
)) != REG
8746 || GET_MODE (SET_DEST (elt
)) != SImode
8747 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
8748 || GET_CODE (SET_SRC (elt
)) != MEM
8749 || GET_MODE (SET_SRC (elt
)) != SImode
)
8751 newaddr
= XEXP (SET_SRC (elt
), 0);
8752 if (legitimate_indirect_address_p (newaddr
, 0))
8757 else if (rs6000_legitimate_offset_address_p (SImode
, newaddr
, 0))
8759 addr_reg
= XEXP (newaddr
, 0);
8760 newoffset
= INTVAL (XEXP (newaddr
, 1));
8764 if (REGNO (addr_reg
) != base_regno
8765 || newoffset
!= offset
+ 4 * i
)
8772 /* Return 1 for an PARALLEL suitable for stmw. */
8775 stmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8777 int count
= XVECLEN (op
, 0);
8778 unsigned int src_regno
;
8780 unsigned int base_regno
;
8781 HOST_WIDE_INT offset
;
8784 /* Perform a quick check so we don't blow up below. */
8786 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8787 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
8788 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
8791 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
8792 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
8795 || count
!= 32 - (int) src_regno
)
8798 if (legitimate_indirect_address_p (dest_addr
, 0))
8801 base_regno
= REGNO (dest_addr
);
8802 if (base_regno
== 0)
8805 else if (rs6000_legitimate_offset_address_p (SImode
, dest_addr
, 0))
8807 offset
= INTVAL (XEXP (dest_addr
, 1));
8808 base_regno
= REGNO (XEXP (dest_addr
, 0));
8813 for (i
= 0; i
< count
; i
++)
8815 rtx elt
= XVECEXP (op
, 0, i
);
8818 HOST_WIDE_INT newoffset
;
8820 if (GET_CODE (elt
) != SET
8821 || GET_CODE (SET_SRC (elt
)) != REG
8822 || GET_MODE (SET_SRC (elt
)) != SImode
8823 || REGNO (SET_SRC (elt
)) != src_regno
+ i
8824 || GET_CODE (SET_DEST (elt
)) != MEM
8825 || GET_MODE (SET_DEST (elt
)) != SImode
)
8827 newaddr
= XEXP (SET_DEST (elt
), 0);
8828 if (legitimate_indirect_address_p (newaddr
, 0))
8833 else if (rs6000_legitimate_offset_address_p (SImode
, newaddr
, 0))
8835 addr_reg
= XEXP (newaddr
, 0);
8836 newoffset
= INTVAL (XEXP (newaddr
, 1));
8840 if (REGNO (addr_reg
) != base_regno
8841 || newoffset
!= offset
+ 4 * i
)
8848 /* A validation routine: say whether CODE, a condition code, and MODE
8849 match. The other alternatives either don't make sense or should
8850 never be generated. */
8853 validate_condition_mode (enum rtx_code code
, enum machine_mode mode
)
8855 if ((GET_RTX_CLASS (code
) != RTX_COMPARE
8856 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
8857 || GET_MODE_CLASS (mode
) != MODE_CC
)
8860 /* These don't make sense. */
8861 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
8862 && mode
== CCUNSmode
)
8865 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
8866 && mode
!= CCUNSmode
)
8869 if (mode
!= CCFPmode
8870 && (code
== ORDERED
|| code
== UNORDERED
8871 || code
== UNEQ
|| code
== LTGT
8872 || code
== UNGT
|| code
== UNLT
8873 || code
== UNGE
|| code
== UNLE
))
8876 /* These should never be generated except for
8877 flag_finite_math_only. */
8878 if (mode
== CCFPmode
8879 && ! flag_finite_math_only
8880 && (code
== LE
|| code
== GE
8881 || code
== UNEQ
|| code
== LTGT
8882 || code
== UNGT
|| code
== UNLT
))
8885 /* These are invalid; the information is not there. */
8886 if (mode
== CCEQmode
8887 && code
!= EQ
&& code
!= NE
)
8891 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8892 We only check the opcode against the mode of the CC value here. */
8895 branch_comparison_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8897 enum rtx_code code
= GET_CODE (op
);
8898 enum machine_mode cc_mode
;
8900 if (!COMPARISON_P (op
))
8903 cc_mode
= GET_MODE (XEXP (op
, 0));
8904 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
8907 validate_condition_mode (code
, cc_mode
);
8912 /* Return 1 if OP is a comparison operation that is valid for a branch
8913 insn and which is true if the corresponding bit in the CC register
8917 branch_positive_comparison_operator (rtx op
, enum machine_mode mode
)
8921 if (! branch_comparison_operator (op
, mode
))
8924 code
= GET_CODE (op
);
8925 return (code
== EQ
|| code
== LT
|| code
== GT
8926 || code
== LTU
|| code
== GTU
8927 || code
== UNORDERED
);
8930 /* Return 1 if OP is a comparison operation that is valid for an scc
8931 insn: it must be a positive comparison. */
8934 scc_comparison_operator (rtx op
, enum machine_mode mode
)
8936 return branch_positive_comparison_operator (op
, mode
);
8940 trap_comparison_operator (rtx op
, enum machine_mode mode
)
8942 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
8944 return COMPARISON_P (op
);
8948 boolean_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8950 enum rtx_code code
= GET_CODE (op
);
8951 return (code
== AND
|| code
== IOR
|| code
== XOR
);
8955 boolean_or_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8957 enum rtx_code code
= GET_CODE (op
);
8958 return (code
== IOR
|| code
== XOR
);
8962 min_max_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8964 enum rtx_code code
= GET_CODE (op
);
8965 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
8968 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8969 mask required to convert the result of a rotate insn into a shift
8970 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8973 includes_lshift_p (rtx shiftop
, rtx andop
)
8975 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
8977 shift_mask
<<= INTVAL (shiftop
);
8979 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
8982 /* Similar, but for right shift. */
8985 includes_rshift_p (rtx shiftop
, rtx andop
)
8987 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
8989 shift_mask
>>= INTVAL (shiftop
);
8991 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
8994 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8995 to perform a left shift. It must have exactly SHIFTOP least
8996 significant 0's, then one or more 1's, then zero or more 0's. */
8999 includes_rldic_lshift_p (rtx shiftop
, rtx andop
)
9001 if (GET_CODE (andop
) == CONST_INT
)
9003 HOST_WIDE_INT c
, lsb
, shift_mask
;
9006 if (c
== 0 || c
== ~0)
9010 shift_mask
<<= INTVAL (shiftop
);
9012 /* Find the least significant one bit. */
9015 /* It must coincide with the LSB of the shift mask. */
9016 if (-lsb
!= shift_mask
)
9019 /* Invert to look for the next transition (if any). */
9022 /* Remove the low group of ones (originally low group of zeros). */
9025 /* Again find the lsb, and check we have all 1's above. */
9029 else if (GET_CODE (andop
) == CONST_DOUBLE
9030 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9032 HOST_WIDE_INT low
, high
, lsb
;
9033 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
9035 low
= CONST_DOUBLE_LOW (andop
);
9036 if (HOST_BITS_PER_WIDE_INT
< 64)
9037 high
= CONST_DOUBLE_HIGH (andop
);
9039 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
9040 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
9043 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9045 shift_mask_high
= ~0;
9046 if (INTVAL (shiftop
) > 32)
9047 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9051 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
9058 return high
== -lsb
;
9061 shift_mask_low
= ~0;
9062 shift_mask_low
<<= INTVAL (shiftop
);
9066 if (-lsb
!= shift_mask_low
)
9069 if (HOST_BITS_PER_WIDE_INT
< 64)
9074 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9077 return high
== -lsb
;
9081 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
9087 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9088 to perform a left shift. It must have SHIFTOP or more least
9089 significant 0's, with the remainder of the word 1's. */
9092 includes_rldicr_lshift_p (rtx shiftop
, rtx andop
)
9094 if (GET_CODE (andop
) == CONST_INT
)
9096 HOST_WIDE_INT c
, lsb
, shift_mask
;
9099 shift_mask
<<= INTVAL (shiftop
);
9102 /* Find the least significant one bit. */
9105 /* It must be covered by the shift mask.
9106 This test also rejects c == 0. */
9107 if ((lsb
& shift_mask
) == 0)
9110 /* Check we have all 1's above the transition, and reject all 1's. */
9111 return c
== -lsb
&& lsb
!= 1;
9113 else if (GET_CODE (andop
) == CONST_DOUBLE
9114 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9116 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
9118 low
= CONST_DOUBLE_LOW (andop
);
9120 if (HOST_BITS_PER_WIDE_INT
< 64)
9122 HOST_WIDE_INT high
, shift_mask_high
;
9124 high
= CONST_DOUBLE_HIGH (andop
);
9128 shift_mask_high
= ~0;
9129 if (INTVAL (shiftop
) > 32)
9130 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9134 if ((lsb
& shift_mask_high
) == 0)
9137 return high
== -lsb
;
9143 shift_mask_low
= ~0;
9144 shift_mask_low
<<= INTVAL (shiftop
);
9148 if ((lsb
& shift_mask_low
) == 0)
9151 return low
== -lsb
&& lsb
!= 1;
9157 /* Return 1 if operands will generate a valid arguments to rlwimi
9158 instruction for insert with right shift in 64-bit mode. The mask may
9159 not start on the first bit or stop on the last bit because wrap-around
9160 effects of instruction do not correspond to semantics of RTL insn. */
9163 insvdi_rshift_rlwimi_p (rtx sizeop
, rtx startop
, rtx shiftop
)
9165 if (INTVAL (startop
) < 64
9166 && INTVAL (startop
) > 32
9167 && (INTVAL (sizeop
) + INTVAL (startop
) < 64)
9168 && (INTVAL (sizeop
) + INTVAL (startop
) > 33)
9169 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) < 96)
9170 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) >= 64)
9171 && (64 - (INTVAL (shiftop
) & 63)) >= INTVAL (sizeop
))
9177 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9178 for lfq and stfq insns iff the registers are hard registers. */
9181 registers_ok_for_quad_peep (rtx reg1
, rtx reg2
)
9183 /* We might have been passed a SUBREG. */
9184 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
9187 /* We might have been passed non floating point registers. */
9188 if (!FP_REGNO_P (REGNO (reg1
))
9189 || !FP_REGNO_P (REGNO (reg2
)))
9192 return (REGNO (reg1
) == REGNO (reg2
) - 1);
9195 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9196 addr1 and addr2 must be in consecutive memory locations
9197 (addr2 == addr1 + 8). */
9200 mems_ok_for_quad_peep (rtx mem1
, rtx mem2
)
9206 /* The mems cannot be volatile. */
9207 if (MEM_VOLATILE_P (mem1
) || MEM_VOLATILE_P (mem2
))
9210 addr1
= XEXP (mem1
, 0);
9211 addr2
= XEXP (mem2
, 0);
9213 /* Extract an offset (if used) from the first addr. */
9214 if (GET_CODE (addr1
) == PLUS
)
9216 /* If not a REG, return zero. */
9217 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
9221 reg1
= REGNO (XEXP (addr1
, 0));
9222 /* The offset must be constant! */
9223 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
9225 offset1
= INTVAL (XEXP (addr1
, 1));
9228 else if (GET_CODE (addr1
) != REG
)
9232 reg1
= REGNO (addr1
);
9233 /* This was a simple (mem (reg)) expression. Offset is 0. */
9237 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9238 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9239 register as addr1. */
9240 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
9242 if (GET_CODE (addr2
) != PLUS
)
9245 if (GET_CODE (XEXP (addr2
, 0)) != REG
9246 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
9249 if (reg1
!= REGNO (XEXP (addr2
, 0)))
9252 /* The offset for the second addr must be 8 more than the first addr. */
9253 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
9256 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9261 /* Return the register class of a scratch register needed to copy IN into
9262 or out of a register in CLASS in MODE. If it can be done directly,
9263 NO_REGS is returned. */
9266 secondary_reload_class (enum reg_class
class,
9267 enum machine_mode mode ATTRIBUTE_UNUSED
,
9272 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
9274 && MACHOPIC_INDIRECT
9278 /* We cannot copy a symbolic operand directly into anything
9279 other than BASE_REGS for TARGET_ELF. So indicate that a
9280 register from BASE_REGS is needed as an intermediate
9283 On Darwin, pic addresses require a load from memory, which
9284 needs a base register. */
9285 if (class != BASE_REGS
9286 && (GET_CODE (in
) == SYMBOL_REF
9287 || GET_CODE (in
) == HIGH
9288 || GET_CODE (in
) == LABEL_REF
9289 || GET_CODE (in
) == CONST
))
9293 if (GET_CODE (in
) == REG
)
9296 if (regno
>= FIRST_PSEUDO_REGISTER
)
9298 regno
= true_regnum (in
);
9299 if (regno
>= FIRST_PSEUDO_REGISTER
)
9303 else if (GET_CODE (in
) == SUBREG
)
9305 regno
= true_regnum (in
);
9306 if (regno
>= FIRST_PSEUDO_REGISTER
)
9312 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9314 if (class == GENERAL_REGS
|| class == BASE_REGS
9315 || (regno
>= 0 && INT_REGNO_P (regno
)))
9318 /* Constants, memory, and FP registers can go into FP registers. */
9319 if ((regno
== -1 || FP_REGNO_P (regno
))
9320 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
9323 /* Memory, and AltiVec registers can go into AltiVec registers. */
9324 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
9325 && class == ALTIVEC_REGS
)
9328 /* We can copy among the CR registers. */
9329 if ((class == CR_REGS
|| class == CR0_REGS
)
9330 && regno
>= 0 && CR_REGNO_P (regno
))
9333 /* Otherwise, we need GENERAL_REGS. */
9334 return GENERAL_REGS
;
9337 /* Given a comparison operation, return the bit number in CCR to test. We
9338 know this is a valid comparison.
9340 SCC_P is 1 if this is for an scc. That means that %D will have been
9341 used instead of %C, so the bits will be in different places.
9343 Return -1 if OP isn't a valid comparison for some reason. */
9346 ccr_bit (rtx op
, int scc_p
)
9348 enum rtx_code code
= GET_CODE (op
);
9349 enum machine_mode cc_mode
;
9354 if (!COMPARISON_P (op
))
9359 if (GET_CODE (reg
) != REG
9360 || ! CR_REGNO_P (REGNO (reg
)))
9363 cc_mode
= GET_MODE (reg
);
9364 cc_regnum
= REGNO (reg
);
9365 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
9367 validate_condition_mode (code
, cc_mode
);
9369 /* When generating a sCOND operation, only positive conditions are
9371 if (scc_p
&& code
!= EQ
&& code
!= GT
&& code
!= LT
&& code
!= UNORDERED
9372 && code
!= GTU
&& code
!= LTU
)
9378 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
9380 return base_bit
+ 2;
9381 case GT
: case GTU
: case UNLE
:
9382 return base_bit
+ 1;
9383 case LT
: case LTU
: case UNGE
:
9385 case ORDERED
: case UNORDERED
:
9386 return base_bit
+ 3;
9389 /* If scc, we will have done a cror to put the bit in the
9390 unordered position. So test that bit. For integer, this is ! LT
9391 unless this is an scc insn. */
9392 return scc_p
? base_bit
+ 3 : base_bit
;
9395 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
9402 /* Return the GOT register. */
9405 rs6000_got_register (rtx value ATTRIBUTE_UNUSED
)
9407 /* The second flow pass currently (June 1999) can't update
9408 regs_ever_live without disturbing other parts of the compiler, so
9409 update it here to make the prolog/epilogue code happy. */
9410 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
9411 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
9413 current_function_uses_pic_offset_table
= 1;
9415 return pic_offset_table_rtx
;
9418 /* Function to init struct machine_function.
9419 This will be called, via a pointer variable,
9420 from push_function_context. */
9422 static struct machine_function
*
9423 rs6000_init_machine_status (void)
9425 return ggc_alloc_cleared (sizeof (machine_function
));
9428 /* These macros test for integers and extract the low-order bits. */
9430 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9431 && GET_MODE (X) == VOIDmode)
9433 #define INT_LOWPART(X) \
9434 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9440 unsigned long val
= INT_LOWPART (op
);
9442 /* If the high bit is zero, the value is the first 1 bit we find
9444 if ((val
& 0x80000000) == 0)
9446 if ((val
& 0xffffffff) == 0)
9450 while (((val
<<= 1) & 0x80000000) == 0)
9455 /* If the high bit is set and the low bit is not, or the mask is all
9456 1's, the value is zero. */
9457 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
9460 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9463 while (((val
>>= 1) & 1) != 0)
9473 unsigned long val
= INT_LOWPART (op
);
9475 /* If the low bit is zero, the value is the first 1 bit we find from
9479 if ((val
& 0xffffffff) == 0)
9483 while (((val
>>= 1) & 1) == 0)
9489 /* If the low bit is set and the high bit is not, or the mask is all
9490 1's, the value is 31. */
9491 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
9494 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9497 while (((val
<<= 1) & 0x80000000) != 0)
9503 /* Locate some local-dynamic symbol still in use by this function
9504 so that we can print its name in some tls_ld pattern. */
9507 rs6000_get_some_local_dynamic_name (void)
9511 if (cfun
->machine
->some_ld_name
)
9512 return cfun
->machine
->some_ld_name
;
9514 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9516 && for_each_rtx (&PATTERN (insn
),
9517 rs6000_get_some_local_dynamic_name_1
, 0))
9518 return cfun
->machine
->some_ld_name
;
9523 /* Helper function for rs6000_get_some_local_dynamic_name. */
9526 rs6000_get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
9530 if (GET_CODE (x
) == SYMBOL_REF
)
9532 const char *str
= XSTR (x
, 0);
9533 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
9535 cfun
->machine
->some_ld_name
= str
;
9543 /* Print an operand. Recognize special options, documented below. */
9546 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9547 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9549 #define SMALL_DATA_RELOC "sda21"
9550 #define SMALL_DATA_REG 0
9554 print_operand (FILE *file
, rtx x
, int code
)
9558 unsigned HOST_WIDE_INT uval
;
9563 /* Write out an instruction after the call which may be replaced
9564 with glue code by the loader. This depends on the AIX version. */
9565 asm_fprintf (file
, RS6000_CALL_GLUE
);
9568 /* %a is output_address. */
9571 /* If X is a constant integer whose low-order 5 bits are zero,
9572 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9573 in the AIX assembler where "sri" with a zero shift count
9574 writes a trash instruction. */
9575 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
9582 /* If constant, low-order 16 bits of constant, unsigned.
9583 Otherwise, write normally. */
9585 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
9587 print_operand (file
, x
, 0);
9591 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9592 for 64-bit mask direction. */
9593 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
9596 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9600 /* X is a CR register. Print the number of the GT bit of the CR. */
9601 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9602 output_operand_lossage ("invalid %%E value");
9604 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 1);
9608 /* Like 'J' but get to the GT bit. */
9609 if (GET_CODE (x
) != REG
)
9612 /* Bit 1 is GT bit. */
9613 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 1;
9615 /* If we want bit 31, write a shift count of zero, not 32. */
9616 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
9620 /* X is a CR register. Print the number of the EQ bit of the CR */
9621 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9622 output_operand_lossage ("invalid %%E value");
9624 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
9628 /* X is a CR register. Print the shift count needed to move it
9629 to the high-order four bits. */
9630 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9631 output_operand_lossage ("invalid %%f value");
9633 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
9637 /* Similar, but print the count for the rotate in the opposite
9639 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9640 output_operand_lossage ("invalid %%F value");
9642 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
9646 /* X is a constant integer. If it is negative, print "m",
9647 otherwise print "z". This is to make an aze or ame insn. */
9648 if (GET_CODE (x
) != CONST_INT
)
9649 output_operand_lossage ("invalid %%G value");
9650 else if (INTVAL (x
) >= 0)
9657 /* If constant, output low-order five bits. Otherwise, write
9660 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
9662 print_operand (file
, x
, 0);
9666 /* If constant, output low-order six bits. Otherwise, write
9669 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
9671 print_operand (file
, x
, 0);
9675 /* Print `i' if this is a constant, else nothing. */
9681 /* Write the bit number in CCR for jump. */
9684 output_operand_lossage ("invalid %%j code");
9686 fprintf (file
, "%d", i
);
9690 /* Similar, but add one for shift count in rlinm for scc and pass
9691 scc flag to `ccr_bit'. */
9694 output_operand_lossage ("invalid %%J code");
9696 /* If we want bit 31, write a shift count of zero, not 32. */
9697 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
9701 /* X must be a constant. Write the 1's complement of the
9704 output_operand_lossage ("invalid %%k value");
9706 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
9710 /* X must be a symbolic constant on ELF. Write an
9711 expression suitable for an 'addi' that adds in the low 16
9713 if (GET_CODE (x
) != CONST
)
9715 print_operand_address (file
, x
);
9720 if (GET_CODE (XEXP (x
, 0)) != PLUS
9721 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
9722 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
9723 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
9724 output_operand_lossage ("invalid %%K value");
9725 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
9727 /* For GNU as, there must be a non-alphanumeric character
9728 between 'l' and the number. The '-' is added by
9729 print_operand() already. */
9730 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
9732 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
9736 /* %l is output_asm_label. */
9739 /* Write second word of DImode or DFmode reference. Works on register
9740 or non-indexed memory only. */
9741 if (GET_CODE (x
) == REG
)
9742 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
9743 else if (GET_CODE (x
) == MEM
)
9745 /* Handle possible auto-increment. Since it is pre-increment and
9746 we have already done it, we can just use an offset of word. */
9747 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
9748 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
9749 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
9752 output_address (XEXP (adjust_address_nv (x
, SImode
,
9756 if (small_data_operand (x
, GET_MODE (x
)))
9757 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
9758 reg_names
[SMALL_DATA_REG
]);
9763 /* MB value for a mask operand. */
9764 if (! mask_operand (x
, SImode
))
9765 output_operand_lossage ("invalid %%m value");
9767 fprintf (file
, "%d", extract_MB (x
));
9771 /* ME value for a mask operand. */
9772 if (! mask_operand (x
, SImode
))
9773 output_operand_lossage ("invalid %%M value");
9775 fprintf (file
, "%d", extract_ME (x
));
9778 /* %n outputs the negative of its operand. */
9781 /* Write the number of elements in the vector times 4. */
9782 if (GET_CODE (x
) != PARALLEL
)
9783 output_operand_lossage ("invalid %%N value");
9785 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
9789 /* Similar, but subtract 1 first. */
9790 if (GET_CODE (x
) != PARALLEL
)
9791 output_operand_lossage ("invalid %%O value");
9793 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
9797 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9799 || INT_LOWPART (x
) < 0
9800 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
9801 output_operand_lossage ("invalid %%p value");
9803 fprintf (file
, "%d", i
);
9807 /* The operand must be an indirect memory reference. The result
9808 is the register name. */
9809 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
9810 || REGNO (XEXP (x
, 0)) >= 32)
9811 output_operand_lossage ("invalid %%P value");
9813 fprintf (file
, "%s", reg_names
[REGNO (XEXP (x
, 0))]);
9817 /* This outputs the logical code corresponding to a boolean
9818 expression. The expression may have one or both operands
9819 negated (if one, only the first one). For condition register
9820 logical operations, it will also treat the negated
9821 CR codes as NOTs, but not handle NOTs of them. */
9823 const char *const *t
= 0;
9825 enum rtx_code code
= GET_CODE (x
);
9826 static const char * const tbl
[3][3] = {
9827 { "and", "andc", "nor" },
9828 { "or", "orc", "nand" },
9829 { "xor", "eqv", "xor" } };
9833 else if (code
== IOR
)
9835 else if (code
== XOR
)
9838 output_operand_lossage ("invalid %%q value");
9840 if (GET_CODE (XEXP (x
, 0)) != NOT
)
9844 if (GET_CODE (XEXP (x
, 1)) == NOT
)
9862 /* X is a CR register. Print the mask for `mtcrf'. */
9863 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9864 output_operand_lossage ("invalid %%R value");
9866 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
9870 /* Low 5 bits of 32 - value */
9872 output_operand_lossage ("invalid %%s value");
9874 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
9878 /* PowerPC64 mask position. All 0's is excluded.
9879 CONST_INT 32-bit mask is considered sign-extended so any
9880 transition must occur within the CONST_INT, not on the boundary. */
9881 if (! mask64_operand (x
, DImode
))
9882 output_operand_lossage ("invalid %%S value");
9884 uval
= INT_LOWPART (x
);
9886 if (uval
& 1) /* Clear Left */
9888 #if HOST_BITS_PER_WIDE_INT > 64
9889 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
9893 else /* Clear Right */
9896 #if HOST_BITS_PER_WIDE_INT > 64
9897 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
9905 fprintf (file
, "%d", i
);
9909 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9910 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
9913 /* Bit 3 is OV bit. */
9914 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
9916 /* If we want bit 31, write a shift count of zero, not 32. */
9917 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
9921 /* Print the symbolic name of a branch target register. */
9922 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
9923 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
9924 output_operand_lossage ("invalid %%T value");
9925 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
9926 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
9928 fputs ("ctr", file
);
9932 /* High-order 16 bits of constant for use in unsigned operand. */
9934 output_operand_lossage ("invalid %%u value");
9936 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
9937 (INT_LOWPART (x
) >> 16) & 0xffff);
9941 /* High-order 16 bits of constant for use in signed operand. */
9943 output_operand_lossage ("invalid %%v value");
9945 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
9946 (INT_LOWPART (x
) >> 16) & 0xffff);
9950 /* Print `u' if this has an auto-increment or auto-decrement. */
9951 if (GET_CODE (x
) == MEM
9952 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
9953 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
9958 /* Print the trap code for this operand. */
9959 switch (GET_CODE (x
))
9962 fputs ("eq", file
); /* 4 */
9965 fputs ("ne", file
); /* 24 */
9968 fputs ("lt", file
); /* 16 */
9971 fputs ("le", file
); /* 20 */
9974 fputs ("gt", file
); /* 8 */
9977 fputs ("ge", file
); /* 12 */
9980 fputs ("llt", file
); /* 2 */
9983 fputs ("lle", file
); /* 6 */
9986 fputs ("lgt", file
); /* 1 */
9989 fputs ("lge", file
); /* 5 */
9997 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10000 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
10001 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
10003 print_operand (file
, x
, 0);
10007 /* MB value for a PowerPC64 rldic operand. */
10008 val
= (GET_CODE (x
) == CONST_INT
10009 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
10014 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
10015 if ((val
<<= 1) < 0)
10018 #if HOST_BITS_PER_WIDE_INT == 32
10019 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
10020 i
+= 32; /* zero-extend high-part was all 0's */
10021 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
10023 val
= CONST_DOUBLE_LOW (x
);
10030 for ( ; i
< 64; i
++)
10031 if ((val
<<= 1) < 0)
10036 fprintf (file
, "%d", i
+ 1);
10040 if (GET_CODE (x
) == MEM
10041 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
10046 /* Like 'L', for third word of TImode */
10047 if (GET_CODE (x
) == REG
)
10048 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
10049 else if (GET_CODE (x
) == MEM
)
10051 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10052 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10053 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
10055 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
10056 if (small_data_operand (x
, GET_MODE (x
)))
10057 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10058 reg_names
[SMALL_DATA_REG
]);
10063 /* X is a SYMBOL_REF. Write out the name preceded by a
10064 period and without any trailing data in brackets. Used for function
10065 names. If we are configured for System V (or the embedded ABI) on
10066 the PowerPC, do not emit the period, since those systems do not use
10067 TOCs and the like. */
10068 if (GET_CODE (x
) != SYMBOL_REF
)
10071 /* Mark the decl as referenced so that cgraph will output the function. */
10072 if (SYMBOL_REF_DECL (x
))
10073 mark_decl_referenced (SYMBOL_REF_DECL (x
));
10075 if (XSTR (x
, 0)[0] != '.')
10077 switch (DEFAULT_ABI
)
10091 /* For macho, we need to check it see if we need a stub. */
10094 const char *name
= XSTR (x
, 0);
10096 if (MACHOPIC_INDIRECT
10097 && machopic_classify_name (name
) == MACHOPIC_UNDEFINED_FUNCTION
)
10098 name
= machopic_stub_name (name
);
10100 assemble_name (file
, name
);
10102 else if (TARGET_AIX
)
10103 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
10105 assemble_name (file
, XSTR (x
, 0));
10109 /* Like 'L', for last word of TImode. */
10110 if (GET_CODE (x
) == REG
)
10111 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
10112 else if (GET_CODE (x
) == MEM
)
10114 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10115 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10116 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
10118 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
10119 if (small_data_operand (x
, GET_MODE (x
)))
10120 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10121 reg_names
[SMALL_DATA_REG
]);
10125 /* Print AltiVec or SPE memory operand. */
10130 if (GET_CODE (x
) != MEM
)
10137 /* Handle [reg]. */
10138 if (GET_CODE (tmp
) == REG
)
10140 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
10143 /* Handle [reg+UIMM]. */
10144 else if (GET_CODE (tmp
) == PLUS
&&
10145 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
10149 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
10152 x
= INTVAL (XEXP (tmp
, 1));
10153 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
10157 /* Fall through. Must be [reg+reg]. */
10159 if (GET_CODE (tmp
) == REG
)
10160 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
10161 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
10163 if (REGNO (XEXP (tmp
, 0)) == 0)
10164 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
10165 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
10167 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
10168 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
10176 if (GET_CODE (x
) == REG
)
10177 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
10178 else if (GET_CODE (x
) == MEM
)
10180 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10181 know the width from the mode. */
10182 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
10183 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
10184 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10185 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10186 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
10187 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10189 output_address (XEXP (x
, 0));
10192 output_addr_const (file
, x
);
10196 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
10200 output_operand_lossage ("invalid %%xn code");
10204 /* Print the address of an operand. */
10207 print_operand_address (FILE *file
, rtx x
)
10209 if (GET_CODE (x
) == REG
)
10210 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
10211 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
10212 || GET_CODE (x
) == LABEL_REF
)
10214 output_addr_const (file
, x
);
10215 if (small_data_operand (x
, GET_MODE (x
)))
10216 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10217 reg_names
[SMALL_DATA_REG
]);
10218 else if (TARGET_TOC
)
10221 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
10223 if (REGNO (XEXP (x
, 0)) == 0)
10224 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
10225 reg_names
[ REGNO (XEXP (x
, 0)) ]);
10227 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
10228 reg_names
[ REGNO (XEXP (x
, 1)) ]);
10230 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
10231 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
"(%s)",
10232 INTVAL (XEXP (x
, 1)), reg_names
[ REGNO (XEXP (x
, 0)) ]);
10234 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10235 && CONSTANT_P (XEXP (x
, 1)))
10237 output_addr_const (file
, XEXP (x
, 1));
10238 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10242 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10243 && CONSTANT_P (XEXP (x
, 1)))
10245 fprintf (file
, "lo16(");
10246 output_addr_const (file
, XEXP (x
, 1));
10247 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10250 else if (legitimate_constant_pool_address_p (x
))
10252 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
10254 rtx contains_minus
= XEXP (x
, 1);
10258 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10259 turn it into (sym) for output_addr_const. */
10260 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
10261 contains_minus
= XEXP (contains_minus
, 0);
10263 minus
= XEXP (contains_minus
, 0);
10264 symref
= XEXP (minus
, 0);
10265 XEXP (contains_minus
, 0) = symref
;
10270 name
= XSTR (symref
, 0);
10271 newname
= alloca (strlen (name
) + sizeof ("@toc"));
10272 strcpy (newname
, name
);
10273 strcat (newname
, "@toc");
10274 XSTR (symref
, 0) = newname
;
10276 output_addr_const (file
, XEXP (x
, 1));
10278 XSTR (symref
, 0) = name
;
10279 XEXP (contains_minus
, 0) = minus
;
10282 output_addr_const (file
, XEXP (x
, 1));
10284 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
10290 /* Target hook for assembling integer objects. The PowerPC version has
10291 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10292 is defined. It also needs to handle DI-mode objects on 64-bit
10296 rs6000_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
10298 #ifdef RELOCATABLE_NEEDS_FIXUP
10299 /* Special handling for SI values. */
10300 if (size
== 4 && aligned_p
)
10302 extern int in_toc_section (void);
10303 static int recurse
= 0;
10305 /* For -mrelocatable, we mark all addresses that need to be fixed up
10306 in the .fixup section. */
10307 if (TARGET_RELOCATABLE
10308 && !in_toc_section ()
10309 && !in_text_section ()
10310 && !in_unlikely_text_section ()
10312 && GET_CODE (x
) != CONST_INT
10313 && GET_CODE (x
) != CONST_DOUBLE
10319 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
10321 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
10322 fprintf (asm_out_file
, "\t.long\t(");
10323 output_addr_const (asm_out_file
, x
);
10324 fprintf (asm_out_file
, ")@fixup\n");
10325 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
10326 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
10327 fprintf (asm_out_file
, "\t.long\t");
10328 assemble_name (asm_out_file
, buf
);
10329 fprintf (asm_out_file
, "\n\t.previous\n");
10333 /* Remove initial .'s to turn a -mcall-aixdesc function
10334 address into the address of the descriptor, not the function
10336 else if (GET_CODE (x
) == SYMBOL_REF
10337 && XSTR (x
, 0)[0] == '.'
10338 && DEFAULT_ABI
== ABI_AIX
)
10340 const char *name
= XSTR (x
, 0);
10341 while (*name
== '.')
10344 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
10348 #endif /* RELOCATABLE_NEEDS_FIXUP */
10349 return default_assemble_integer (x
, size
, aligned_p
);
10352 #ifdef HAVE_GAS_HIDDEN
10353 /* Emit an assembler directive to set symbol visibility for DECL to
10354 VISIBILITY_TYPE. */
10357 rs6000_assemble_visibility (tree decl
, int vis
)
10359 /* Functions need to have their entry point symbol visibility set as
10360 well as their descriptor symbol visibility. */
10361 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
10363 static const char * const visibility_types
[] = {
10364 NULL
, "internal", "hidden", "protected"
10367 const char *name
, *type
;
10369 name
= ((* targetm
.strip_name_encoding
)
10370 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
10371 type
= visibility_types
[vis
];
10373 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
10374 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
10377 default_assemble_visibility (decl
, vis
);
10382 rs6000_reverse_condition (enum machine_mode mode
, enum rtx_code code
)
10384 /* Reversal of FP compares takes care -- an ordered compare
10385 becomes an unordered compare and vice versa. */
10386 if (mode
== CCFPmode
10387 && (!flag_finite_math_only
10388 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
10389 || code
== UNEQ
|| code
== LTGT
))
10390 return reverse_condition_maybe_unordered (code
);
10392 return reverse_condition (code
);
10395 /* Generate a compare for CODE. Return a brand-new rtx that
10396 represents the result of the compare. */
10399 rs6000_generate_compare (enum rtx_code code
)
10401 enum machine_mode comp_mode
;
10402 rtx compare_result
;
10404 if (rs6000_compare_fp_p
)
10405 comp_mode
= CCFPmode
;
10406 else if (code
== GTU
|| code
== LTU
10407 || code
== GEU
|| code
== LEU
)
10408 comp_mode
= CCUNSmode
;
10410 comp_mode
= CCmode
;
10412 /* First, the compare. */
10413 compare_result
= gen_reg_rtx (comp_mode
);
10415 /* SPE FP compare instructions on the GPRs. Yuck! */
10416 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10417 && rs6000_compare_fp_p
)
10419 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
10421 /* Note: The E500 comparison instructions set the GT bit (x +
10422 1), on success. This explains the mess. */
10426 case EQ
: case UNEQ
: case NE
: case LTGT
:
10427 cmp
= flag_finite_math_only
10428 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
10429 rs6000_compare_op1
)
10430 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
10431 rs6000_compare_op1
);
10433 case GT
: case GTU
: case UNGT
: case UNGE
: case GE
: case GEU
:
10434 cmp
= flag_finite_math_only
10435 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
10436 rs6000_compare_op1
)
10437 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
10438 rs6000_compare_op1
);
10440 case LT
: case LTU
: case UNLT
: case UNLE
: case LE
: case LEU
:
10441 cmp
= flag_finite_math_only
10442 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
10443 rs6000_compare_op1
)
10444 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
10445 rs6000_compare_op1
);
10451 /* Synthesize LE and GE from LT/GT || EQ. */
10452 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
10458 case LE
: code
= LT
; break;
10459 case GE
: code
= GT
; break;
10460 case LEU
: code
= LT
; break;
10461 case GEU
: code
= GT
; break;
10465 or1
= gen_reg_rtx (SImode
);
10466 or2
= gen_reg_rtx (SImode
);
10467 or_result
= gen_reg_rtx (CCEQmode
);
10468 compare_result2
= gen_reg_rtx (CCFPmode
);
10471 cmp
= flag_finite_math_only
10472 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10473 rs6000_compare_op1
)
10474 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10475 rs6000_compare_op1
);
10478 or1
= gen_rtx_GT (SImode
, compare_result
, const0_rtx
);
10479 or2
= gen_rtx_GT (SImode
, compare_result2
, const0_rtx
);
10481 /* OR them together. */
10482 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
10483 gen_rtx_COMPARE (CCEQmode
,
10484 gen_rtx_IOR (SImode
, or1
, or2
),
10486 compare_result
= or_result
;
10491 if (code
== NE
|| code
== LTGT
)
10500 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
10501 gen_rtx_COMPARE (comp_mode
,
10502 rs6000_compare_op0
,
10503 rs6000_compare_op1
)));
10505 /* Some kinds of FP comparisons need an OR operation;
10506 under flag_finite_math_only we don't bother. */
10507 if (rs6000_compare_fp_p
10508 && ! flag_finite_math_only
10509 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
10510 && (code
== LE
|| code
== GE
10511 || code
== UNEQ
|| code
== LTGT
10512 || code
== UNGT
|| code
== UNLT
))
10514 enum rtx_code or1
, or2
;
10515 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
10516 rtx or_result
= gen_reg_rtx (CCEQmode
);
10520 case LE
: or1
= LT
; or2
= EQ
; break;
10521 case GE
: or1
= GT
; or2
= EQ
; break;
10522 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
10523 case LTGT
: or1
= LT
; or2
= GT
; break;
10524 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
10525 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
10528 validate_condition_mode (or1
, comp_mode
);
10529 validate_condition_mode (or2
, comp_mode
);
10530 or1_rtx
= gen_rtx_fmt_ee (or1
, SImode
, compare_result
, const0_rtx
);
10531 or2_rtx
= gen_rtx_fmt_ee (or2
, SImode
, compare_result
, const0_rtx
);
10532 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
10533 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
10535 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
10537 compare_result
= or_result
;
10541 validate_condition_mode (code
, GET_MODE (compare_result
));
10543 return gen_rtx_fmt_ee (code
, VOIDmode
, compare_result
, const0_rtx
);
10547 /* Emit the RTL for an sCOND pattern. */
10550 rs6000_emit_sCOND (enum rtx_code code
, rtx result
)
10553 enum machine_mode op_mode
;
10554 enum rtx_code cond_code
;
10556 condition_rtx
= rs6000_generate_compare (code
);
10557 cond_code
= GET_CODE (condition_rtx
);
10559 if (TARGET_E500
&& rs6000_compare_fp_p
10560 && !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10564 PUT_MODE (condition_rtx
, SImode
);
10565 t
= XEXP (condition_rtx
, 0);
10567 if (cond_code
!= NE
&& cond_code
!= EQ
)
10570 if (cond_code
== NE
)
10571 emit_insn (gen_e500_flip_gt_bit (t
, t
));
10573 emit_insn (gen_move_from_CR_gt_bit (result
, t
));
10577 if (cond_code
== NE
10578 || cond_code
== GE
|| cond_code
== LE
10579 || cond_code
== GEU
|| cond_code
== LEU
10580 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
10582 rtx not_result
= gen_reg_rtx (CCEQmode
);
10583 rtx not_op
, rev_cond_rtx
;
10584 enum machine_mode cc_mode
;
10586 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
10588 rev_cond_rtx
= gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode
, cond_code
),
10589 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
10590 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
10591 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
10592 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
10595 op_mode
= GET_MODE (rs6000_compare_op0
);
10596 if (op_mode
== VOIDmode
)
10597 op_mode
= GET_MODE (rs6000_compare_op1
);
10599 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
10601 PUT_MODE (condition_rtx
, DImode
);
10602 convert_move (result
, condition_rtx
, 0);
10606 PUT_MODE (condition_rtx
, SImode
);
10607 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
10611 /* Emit a branch of kind CODE to location LOC. */
10614 rs6000_emit_cbranch (enum rtx_code code
, rtx loc
)
10616 rtx condition_rtx
, loc_ref
;
10618 condition_rtx
= rs6000_generate_compare (code
);
10619 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
10620 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
10621 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
10622 loc_ref
, pc_rtx
)));
10625 /* Return the string to output a conditional branch to LABEL, which is
10626 the operand number of the label, or -1 if the branch is really a
10627 conditional return.
10629 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10630 condition code register and its mode specifies what kind of
10631 comparison we made.
10633 REVERSED is nonzero if we should reverse the sense of the comparison.
10635 INSN is the insn. */
10638 output_cbranch (rtx op
, const char *label
, int reversed
, rtx insn
)
10640 static char string
[64];
10641 enum rtx_code code
= GET_CODE (op
);
10642 rtx cc_reg
= XEXP (op
, 0);
10643 enum machine_mode mode
= GET_MODE (cc_reg
);
10644 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
10645 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
10646 int really_reversed
= reversed
^ need_longbranch
;
10652 validate_condition_mode (code
, mode
);
10654 /* Work out which way this really branches. We could use
10655 reverse_condition_maybe_unordered here always but this
10656 makes the resulting assembler clearer. */
10657 if (really_reversed
)
10659 /* Reversal of FP compares takes care -- an ordered compare
10660 becomes an unordered compare and vice versa. */
10661 if (mode
== CCFPmode
)
10662 code
= reverse_condition_maybe_unordered (code
);
10664 code
= reverse_condition (code
);
10667 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
10669 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10672 /* Opposite of GT. */
10674 else if (code
== NE
)
10682 /* Not all of these are actually distinct opcodes, but
10683 we distinguish them for clarity of the resulting assembler. */
10684 case NE
: case LTGT
:
10685 ccode
= "ne"; break;
10686 case EQ
: case UNEQ
:
10687 ccode
= "eq"; break;
10689 ccode
= "ge"; break;
10690 case GT
: case GTU
: case UNGT
:
10691 ccode
= "gt"; break;
10693 ccode
= "le"; break;
10694 case LT
: case LTU
: case UNLT
:
10695 ccode
= "lt"; break;
10696 case UNORDERED
: ccode
= "un"; break;
10697 case ORDERED
: ccode
= "nu"; break;
10698 case UNGE
: ccode
= "nl"; break;
10699 case UNLE
: ccode
= "ng"; break;
10704 /* Maybe we have a guess as to how likely the branch is.
10705 The old mnemonics don't have a way to specify this information. */
10707 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
10708 if (note
!= NULL_RTX
)
10710 /* PROB is the difference from 50%. */
10711 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
10713 /* Only hint for highly probable/improbable branches on newer
10714 cpus as static prediction overrides processor dynamic
10715 prediction. For older cpus we may as well always hint, but
10716 assume not taken for branches that are very close to 50% as a
10717 mispredicted taken branch is more expensive than a
10718 mispredicted not-taken branch. */
10719 if (rs6000_always_hint
10720 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
10722 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
10723 && ((prob
> 0) ^ need_longbranch
))
10731 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
10733 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
10735 /* We need to escape any '%' characters in the reg_names string.
10736 Assume they'd only be the first character.... */
10737 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
10739 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
10743 /* If the branch distance was too far, we may have to use an
10744 unconditional branch to go the distance. */
10745 if (need_longbranch
)
10746 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
10748 s
+= sprintf (s
, ",%s", label
);
10754 /* Return the string to flip the GT bit on a CR. */
10756 output_e500_flip_gt_bit (rtx dst
, rtx src
)
10758 static char string
[64];
10761 if (GET_CODE (dst
) != REG
|| ! CR_REGNO_P (REGNO (dst
))
10762 || GET_CODE (src
) != REG
|| ! CR_REGNO_P (REGNO (src
)))
10766 a
= 4 * (REGNO (dst
) - CR0_REGNO
) + 1;
10767 b
= 4 * (REGNO (src
) - CR0_REGNO
) + 1;
10769 sprintf (string
, "crnot %d,%d", a
, b
);
10773 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10774 operands of the last comparison is nonzero/true, FALSE_COND if it
10775 is zero/false. Return 0 if the hardware has no such operation. */
10778 rs6000_emit_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
10780 enum rtx_code code
= GET_CODE (op
);
10781 rtx op0
= rs6000_compare_op0
;
10782 rtx op1
= rs6000_compare_op1
;
10783 REAL_VALUE_TYPE c1
;
10784 enum machine_mode compare_mode
= GET_MODE (op0
);
10785 enum machine_mode result_mode
= GET_MODE (dest
);
10788 /* These modes should always match. */
10789 if (GET_MODE (op1
) != compare_mode
10790 /* In the isel case however, we can use a compare immediate, so
10791 op1 may be a small constant. */
10792 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
10794 if (GET_MODE (true_cond
) != result_mode
)
10796 if (GET_MODE (false_cond
) != result_mode
)
10799 /* First, work out if the hardware can do this at all, or
10800 if it's too slow.... */
10801 if (! rs6000_compare_fp_p
)
10804 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
10807 else if (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
10808 && GET_MODE_CLASS (compare_mode
) == MODE_FLOAT
)
10811 /* Eliminate half of the comparisons by switching operands, this
10812 makes the remaining code simpler. */
10813 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
10814 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
10816 code
= reverse_condition_maybe_unordered (code
);
10818 true_cond
= false_cond
;
10822 /* UNEQ and LTGT take four instructions for a comparison with zero,
10823 it'll probably be faster to use a branch here too. */
10824 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
10827 if (GET_CODE (op1
) == CONST_DOUBLE
)
10828 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
10830 /* We're going to try to implement comparisons by performing
10831 a subtract, then comparing against zero. Unfortunately,
10832 Inf - Inf is NaN which is not zero, and so if we don't
10833 know that the operand is finite and the comparison
10834 would treat EQ different to UNORDERED, we can't do it. */
10835 if (HONOR_INFINITIES (compare_mode
)
10836 && code
!= GT
&& code
!= UNGE
10837 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
10838 /* Constructs of the form (a OP b ? a : b) are safe. */
10839 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
10840 || (! rtx_equal_p (op0
, true_cond
)
10841 && ! rtx_equal_p (op1
, true_cond
))))
10843 /* At this point we know we can use fsel. */
10845 /* Reduce the comparison to a comparison against zero. */
10846 temp
= gen_reg_rtx (compare_mode
);
10847 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
10848 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
10850 op1
= CONST0_RTX (compare_mode
);
10852 /* If we don't care about NaNs we can reduce some of the comparisons
10853 down to faster ones. */
10854 if (! HONOR_NANS (compare_mode
))
10860 true_cond
= false_cond
;
10873 /* Now, reduce everything down to a GE. */
10880 temp
= gen_reg_rtx (compare_mode
);
10881 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
10886 temp
= gen_reg_rtx (compare_mode
);
10887 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
10892 temp
= gen_reg_rtx (compare_mode
);
10893 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
10894 gen_rtx_NEG (compare_mode
,
10895 gen_rtx_ABS (compare_mode
, op0
))));
10900 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10901 temp
= gen_reg_rtx (result_mode
);
10902 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
10903 gen_rtx_IF_THEN_ELSE (result_mode
,
10904 gen_rtx_GE (VOIDmode
,
10906 true_cond
, false_cond
)));
10907 false_cond
= true_cond
;
10910 temp
= gen_reg_rtx (compare_mode
);
10911 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
10916 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10917 temp
= gen_reg_rtx (result_mode
);
10918 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
10919 gen_rtx_IF_THEN_ELSE (result_mode
,
10920 gen_rtx_GE (VOIDmode
,
10922 true_cond
, false_cond
)));
10923 true_cond
= false_cond
;
10926 temp
= gen_reg_rtx (compare_mode
);
10927 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
10935 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
10936 gen_rtx_IF_THEN_ELSE (result_mode
,
10937 gen_rtx_GE (VOIDmode
,
10939 true_cond
, false_cond
)));
10943 /* Same as above, but for ints (isel). */
10946 rs6000_emit_int_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
10948 rtx condition_rtx
, cr
;
10950 /* All isel implementations thus far are 32-bits. */
10951 if (GET_MODE (rs6000_compare_op0
) != SImode
)
10954 /* We still have to do the compare, because isel doesn't do a
10955 compare, it just looks at the CRx bits set by a previous compare
10957 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
10958 cr
= XEXP (condition_rtx
, 0);
10960 if (GET_MODE (cr
) == CCmode
)
10961 emit_insn (gen_isel_signed (dest
, condition_rtx
,
10962 true_cond
, false_cond
, cr
));
10964 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
10965 true_cond
, false_cond
, cr
));
10971 output_isel (rtx
*operands
)
10973 enum rtx_code code
;
10975 code
= GET_CODE (operands
[1]);
10976 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
10978 PUT_CODE (operands
[1], reverse_condition (code
));
10979 return "isel %0,%3,%2,%j1";
10982 return "isel %0,%2,%3,%j1";
10986 rs6000_emit_minmax (rtx dest
, enum rtx_code code
, rtx op0
, rtx op1
)
10988 enum machine_mode mode
= GET_MODE (op0
);
10992 if (code
== SMAX
|| code
== SMIN
)
10997 if (code
== SMAX
|| code
== UMAX
)
10998 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
10999 op0
, op1
, mode
, 0);
11001 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
11002 op1
, op0
, mode
, 0);
11003 if (target
== NULL_RTX
)
11005 if (target
!= dest
)
11006 emit_move_insn (dest
, target
);
11009 /* Emit instructions to move SRC to DST. Called by splitters for
11010 multi-register moves. It will emit at most one instruction for
11011 each register that is accessed; that is, it won't emit li/lis pairs
11012 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11016 rs6000_split_multireg_move (rtx dst
, rtx src
)
11018 /* The register number of the first register being moved. */
11020 /* The mode that is to be moved. */
11021 enum machine_mode mode
;
11022 /* The mode that the move is being done in, and its size. */
11023 enum machine_mode reg_mode
;
11025 /* The number of registers that will be moved. */
11028 reg
= REG_P (dst
) ? REGNO (dst
) : REGNO (src
);
11029 mode
= GET_MODE (dst
);
11030 nregs
= HARD_REGNO_NREGS (reg
, mode
);
11031 if (FP_REGNO_P (reg
))
11033 else if (ALTIVEC_REGNO_P (reg
))
11034 reg_mode
= V16QImode
;
11036 reg_mode
= word_mode
;
11037 reg_mode_size
= GET_MODE_SIZE (reg_mode
);
11039 if (reg_mode_size
* nregs
!= GET_MODE_SIZE (mode
))
11042 if (REG_P (src
) && REG_P (dst
) && (REGNO (src
) < REGNO (dst
)))
11044 /* Move register range backwards, if we might have destructive
11047 for (i
= nregs
- 1; i
>= 0; i
--)
11048 emit_insn (gen_rtx_SET (VOIDmode
,
11049 simplify_gen_subreg (reg_mode
, dst
, mode
,
11050 i
* reg_mode_size
),
11051 simplify_gen_subreg (reg_mode
, src
, mode
,
11052 i
* reg_mode_size
)));
11058 bool used_update
= false;
11060 if (GET_CODE (src
) == MEM
&& INT_REGNO_P (reg
))
11064 if (GET_CODE (XEXP (src
, 0)) == PRE_INC
11065 || GET_CODE (XEXP (src
, 0)) == PRE_DEC
)
11068 breg
= XEXP (XEXP (src
, 0), 0);
11069 delta_rtx
= GET_CODE (XEXP (src
, 0)) == PRE_INC
11070 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src
)))
11071 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src
)));
11072 emit_insn (TARGET_32BIT
11073 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11074 : gen_adddi3 (breg
, breg
, delta_rtx
));
11075 src
= gen_rtx_MEM (mode
, breg
);
11078 /* We have now address involving an base register only.
11079 If we use one of the registers to address memory,
11080 we have change that register last. */
11082 breg
= (GET_CODE (XEXP (src
, 0)) == PLUS
11083 ? XEXP (XEXP (src
, 0), 0)
11089 if (REGNO (breg
) >= REGNO (dst
)
11090 && REGNO (breg
) < REGNO (dst
) + nregs
)
11091 j
= REGNO (breg
) - REGNO (dst
);
11094 if (GET_CODE (dst
) == MEM
&& INT_REGNO_P (reg
))
11098 if (GET_CODE (XEXP (dst
, 0)) == PRE_INC
11099 || GET_CODE (XEXP (dst
, 0)) == PRE_DEC
)
11102 breg
= XEXP (XEXP (dst
, 0), 0);
11103 delta_rtx
= GET_CODE (XEXP (dst
, 0)) == PRE_INC
11104 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst
)))
11105 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst
)));
11107 /* We have to update the breg before doing the store.
11108 Use store with update, if available. */
11112 rtx nsrc
= simplify_gen_subreg (reg_mode
, src
, mode
, 0);
11113 emit_insn (TARGET_32BIT
11114 ? gen_movsi_update (breg
, breg
, delta_rtx
, nsrc
)
11115 : gen_movdi_update (breg
, breg
, delta_rtx
, nsrc
));
11116 used_update
= true;
11119 emit_insn (TARGET_32BIT
11120 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11121 : gen_adddi3 (breg
, breg
, delta_rtx
));
11122 dst
= gen_rtx_MEM (mode
, breg
);
11126 for (i
= 0; i
< nregs
; i
++)
11128 /* Calculate index to next subword. */
11133 /* If compiler already emited move of first word by
11134 store with update, no need to do anything. */
11135 if (j
== 0 && used_update
)
11138 emit_insn (gen_rtx_SET (VOIDmode
,
11139 simplify_gen_subreg (reg_mode
, dst
, mode
,
11140 j
* reg_mode_size
),
11141 simplify_gen_subreg (reg_mode
, src
, mode
,
11142 j
* reg_mode_size
)));
11148 /* This page contains routines that are used to determine what the
11149 function prologue and epilogue code will do and write them out. */
11151 /* Return the first fixed-point register that is required to be
11152 saved. 32 if none. */
11155 first_reg_to_save (void)
11159 /* Find lowest numbered live register. */
11160 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
11161 if (regs_ever_live
[first_reg
]
11162 && (! call_used_regs
[first_reg
]
11163 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
11164 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11165 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
11166 || (TARGET_TOC
&& TARGET_MINIMAL_TOC
)))))
11171 && current_function_uses_pic_offset_table
11172 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
11173 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
11179 /* Similar, for FP regs. */
11182 first_fp_reg_to_save (void)
11186 /* Find lowest numbered live register. */
11187 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
11188 if (regs_ever_live
[first_reg
])
11194 /* Similar, for AltiVec regs. */
11197 first_altivec_reg_to_save (void)
11201 /* Stack frame remains as is unless we are in AltiVec ABI. */
11202 if (! TARGET_ALTIVEC_ABI
)
11203 return LAST_ALTIVEC_REGNO
+ 1;
11205 /* Find lowest numbered live register. */
11206 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11207 if (regs_ever_live
[i
])
11213 /* Return a 32-bit mask of the AltiVec registers we need to set in
11214 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11215 the 32-bit word is 0. */
11217 static unsigned int
11218 compute_vrsave_mask (void)
11220 unsigned int i
, mask
= 0;
11222 /* First, find out if we use _any_ altivec registers. */
11223 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11224 if (regs_ever_live
[i
])
11225 mask
|= ALTIVEC_REG_BIT (i
);
11230 /* Next, remove the argument registers from the set. These must
11231 be in the VRSAVE mask set by the caller, so we don't need to add
11232 them in again. More importantly, the mask we compute here is
11233 used to generate CLOBBERs in the set_vrsave insn, and we do not
11234 wish the argument registers to die. */
11235 for (i
= cfun
->args_info
.vregno
- 1; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
11236 mask
&= ~ALTIVEC_REG_BIT (i
);
11238 /* Similarly, remove the return value from the set. */
11241 diddle_return_value (is_altivec_return_reg
, &yes
);
11243 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
11250 is_altivec_return_reg (rtx reg
, void *xyes
)
11252 bool *yes
= (bool *) xyes
;
11253 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
11258 /* Calculate the stack information for the current function. This is
11259 complicated by having two separate calling sequences, the AIX calling
11260 sequence and the V.4 calling sequence.
11262 AIX (and Darwin/Mac OS X) stack frames look like:
11264 SP----> +---------------------------------------+
11265 | back chain to caller | 0 0
11266 +---------------------------------------+
11267 | saved CR | 4 8 (8-11)
11268 +---------------------------------------+
11270 +---------------------------------------+
11271 | reserved for compilers | 12 24
11272 +---------------------------------------+
11273 | reserved for binders | 16 32
11274 +---------------------------------------+
11275 | saved TOC pointer | 20 40
11276 +---------------------------------------+
11277 | Parameter save area (P) | 24 48
11278 +---------------------------------------+
11279 | Alloca space (A) | 24+P etc.
11280 +---------------------------------------+
11281 | Local variable space (L) | 24+P+A
11282 +---------------------------------------+
11283 | Float/int conversion temporary (X) | 24+P+A+L
11284 +---------------------------------------+
11285 | Save area for AltiVec registers (W) | 24+P+A+L+X
11286 +---------------------------------------+
11287 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11288 +---------------------------------------+
11289 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11290 +---------------------------------------+
11291 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
11292 +---------------------------------------+
11293 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
11294 +---------------------------------------+
11295 old SP->| back chain to caller's caller |
11296 +---------------------------------------+
11298 The required alignment for AIX configurations is two words (i.e., 8
11302 V.4 stack frames look like:
11304 SP----> +---------------------------------------+
11305 | back chain to caller | 0
11306 +---------------------------------------+
11307 | caller's saved LR | 4
11308 +---------------------------------------+
11309 | Parameter save area (P) | 8
11310 +---------------------------------------+
11311 | Alloca space (A) | 8+P
11312 +---------------------------------------+
11313 | Varargs save area (V) | 8+P+A
11314 +---------------------------------------+
11315 | Local variable space (L) | 8+P+A+V
11316 +---------------------------------------+
11317 | Float/int conversion temporary (X) | 8+P+A+V+L
11318 +---------------------------------------+
11319 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11320 +---------------------------------------+
11321 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11322 +---------------------------------------+
11323 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11324 +---------------------------------------+
11325 | SPE: area for 64-bit GP registers |
11326 +---------------------------------------+
11327 | SPE alignment padding |
11328 +---------------------------------------+
11329 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11330 +---------------------------------------+
11331 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11332 +---------------------------------------+
11333 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11334 +---------------------------------------+
11335 old SP->| back chain to caller's caller |
11336 +---------------------------------------+
11338 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11339 given. (But note below and in sysv4.h that we require only 8 and
11340 may round up the size of our stack frame anyways. The historical
11341 reason is early versions of powerpc-linux which didn't properly
11342 align the stack at program startup. A happy side-effect is that
11343 -mno-eabi libraries can be used with -meabi programs.)
11345 The EABI configuration defaults to the V.4 layout. However,
11346 the stack alignment requirements may differ. If -mno-eabi is not
11347 given, the required stack alignment is 8 bytes; if -mno-eabi is
11348 given, the required alignment is 16 bytes. (But see V.4 comment
11351 #ifndef ABI_STACK_BOUNDARY
11352 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11355 static rs6000_stack_t
*
11356 rs6000_stack_info (void)
11358 static rs6000_stack_t info
, zero_info
;
11359 rs6000_stack_t
*info_ptr
= &info
;
11360 int reg_size
= TARGET_32BIT
? 4 : 8;
11362 HOST_WIDE_INT non_fixed_size
;
11364 /* Zero all fields portably. */
11369 /* Cache value so we don't rescan instruction chain over and over. */
11370 if (cfun
->machine
->insn_chain_scanned_p
== 0)
11372 cfun
->machine
->insn_chain_scanned_p
= 1;
11373 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
11377 /* Select which calling sequence. */
11378 info_ptr
->abi
= DEFAULT_ABI
;
11380 /* Calculate which registers need to be saved & save area size. */
11381 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
11382 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11383 even if it currently looks like we won't. */
11384 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
11385 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
11386 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
11387 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
11388 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
11390 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
11392 /* For the SPE, we have an additional upper 32-bits on each GPR.
11393 Ideally we should save the entire 64-bits only when the upper
11394 half is used in SIMD instructions. Since we only record
11395 registers live (not the size they are used in), this proves
11396 difficult because we'd have to traverse the instruction chain at
11397 the right time, taking reload into account. This is a real pain,
11398 so we opt to save the GPRs in 64-bits always if but one register
11399 gets used in 64-bits. Otherwise, all the registers in the frame
11400 get saved in 32-bits.
11402 So... since when we save all GPRs (except the SP) in 64-bits, the
11403 traditional GP save area will be empty. */
11404 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
11405 info_ptr
->gp_size
= 0;
11407 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
11408 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
11410 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
11411 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
11412 - info_ptr
->first_altivec_reg_save
);
11414 /* Does this function call anything? */
11415 info_ptr
->calls_p
= (! current_function_is_leaf
11416 || cfun
->machine
->ra_needs_full_frame
);
11418 /* Determine if we need to save the link register. */
11419 if (rs6000_ra_ever_killed ()
11420 || (DEFAULT_ABI
== ABI_AIX
11421 && current_function_profile
11422 && !TARGET_PROFILE_KERNEL
)
11423 #ifdef TARGET_RELOCATABLE
11424 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
11426 || (info_ptr
->first_fp_reg_save
!= 64
11427 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
11428 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
11429 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
11430 || (DEFAULT_ABI
== ABI_DARWIN
11432 && current_function_uses_pic_offset_table
)
11433 || info_ptr
->calls_p
)
11435 info_ptr
->lr_save_p
= 1;
11436 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
11439 /* Determine if we need to save the condition code registers. */
11440 if (regs_ever_live
[CR2_REGNO
]
11441 || regs_ever_live
[CR3_REGNO
]
11442 || regs_ever_live
[CR4_REGNO
])
11444 info_ptr
->cr_save_p
= 1;
11445 if (DEFAULT_ABI
== ABI_V4
)
11446 info_ptr
->cr_size
= reg_size
;
11449 /* If the current function calls __builtin_eh_return, then we need
11450 to allocate stack space for registers that will hold data for
11451 the exception handler. */
11452 if (current_function_calls_eh_return
)
11455 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
11458 /* SPE saves EH registers in 64-bits. */
11459 ehrd_size
= i
* (TARGET_SPE_ABI
11460 && info_ptr
->spe_64bit_regs_used
!= 0
11461 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
11466 /* Determine various sizes. */
11467 info_ptr
->reg_size
= reg_size
;
11468 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
11469 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
11470 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
11471 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
11472 TARGET_ALTIVEC
? 16 : 8);
11474 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
11475 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
11477 info_ptr
->spe_gp_size
= 0;
11479 if (TARGET_ALTIVEC_ABI
)
11480 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
11482 info_ptr
->vrsave_mask
= 0;
11484 if (TARGET_ALTIVEC_VRSAVE
&& info_ptr
->vrsave_mask
)
11485 info_ptr
->vrsave_size
= 4;
11487 info_ptr
->vrsave_size
= 0;
11489 /* Calculate the offsets. */
11490 switch (DEFAULT_ABI
)
11498 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
11499 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
11501 if (TARGET_ALTIVEC_ABI
)
11503 info_ptr
->vrsave_save_offset
11504 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
11506 /* Align stack so vector save area is on a quadword boundary. */
11507 if (info_ptr
->altivec_size
!= 0)
11508 info_ptr
->altivec_padding_size
11509 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
11511 info_ptr
->altivec_padding_size
= 0;
11513 info_ptr
->altivec_save_offset
11514 = info_ptr
->vrsave_save_offset
11515 - info_ptr
->altivec_padding_size
11516 - info_ptr
->altivec_size
;
11518 /* Adjust for AltiVec case. */
11519 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
11522 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
11523 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
11524 info_ptr
->lr_save_offset
= 2*reg_size
;
11528 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
11529 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
11530 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
11532 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
11534 /* Align stack so SPE GPR save area is aligned on a
11535 double-word boundary. */
11536 if (info_ptr
->spe_gp_size
!= 0)
11537 info_ptr
->spe_padding_size
11538 = 8 - (-info_ptr
->cr_save_offset
% 8);
11540 info_ptr
->spe_padding_size
= 0;
11542 info_ptr
->spe_gp_save_offset
11543 = info_ptr
->cr_save_offset
11544 - info_ptr
->spe_padding_size
11545 - info_ptr
->spe_gp_size
;
11547 /* Adjust for SPE case. */
11548 info_ptr
->toc_save_offset
11549 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
11551 else if (TARGET_ALTIVEC_ABI
)
11553 info_ptr
->vrsave_save_offset
11554 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
11556 /* Align stack so vector save area is on a quadword boundary. */
11557 if (info_ptr
->altivec_size
!= 0)
11558 info_ptr
->altivec_padding_size
11559 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
11561 info_ptr
->altivec_padding_size
= 0;
11563 info_ptr
->altivec_save_offset
11564 = info_ptr
->vrsave_save_offset
11565 - info_ptr
->altivec_padding_size
11566 - info_ptr
->altivec_size
;
11568 /* Adjust for AltiVec case. */
11569 info_ptr
->toc_save_offset
11570 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
11573 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
11574 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
11575 info_ptr
->lr_save_offset
= reg_size
;
11579 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
11580 + info_ptr
->gp_size
11581 + info_ptr
->altivec_size
11582 + info_ptr
->altivec_padding_size
11583 + info_ptr
->spe_gp_size
11584 + info_ptr
->spe_padding_size
11586 + info_ptr
->cr_size
11587 + info_ptr
->lr_size
11588 + info_ptr
->vrsave_size
11589 + info_ptr
->toc_size
,
11590 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
11593 non_fixed_size
= (info_ptr
->vars_size
11594 + info_ptr
->parm_size
11595 + info_ptr
->save_size
11596 + info_ptr
->varargs_size
);
11598 info_ptr
->total_size
= RS6000_ALIGN (non_fixed_size
+ info_ptr
->fixed_size
,
11599 ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
11601 /* Determine if we need to allocate any stack frame:
11603 For AIX we need to push the stack if a frame pointer is needed
11604 (because the stack might be dynamically adjusted), if we are
11605 debugging, if we make calls, or if the sum of fp_save, gp_save,
11606 and local variables are more than the space needed to save all
11607 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11608 + 18*8 = 288 (GPR13 reserved).
11610 For V.4 we don't have the stack cushion that AIX uses, but assume
11611 that the debugger can handle stackless frames. */
11613 if (info_ptr
->calls_p
)
11614 info_ptr
->push_p
= 1;
11616 else if (DEFAULT_ABI
== ABI_V4
)
11617 info_ptr
->push_p
= non_fixed_size
!= 0;
11619 else if (frame_pointer_needed
)
11620 info_ptr
->push_p
= 1;
11622 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
11623 info_ptr
->push_p
= 1;
11626 info_ptr
->push_p
= non_fixed_size
> (TARGET_32BIT
? 220 : 288);
11628 /* Zero offsets if we're not saving those registers. */
11629 if (info_ptr
->fp_size
== 0)
11630 info_ptr
->fp_save_offset
= 0;
11632 if (info_ptr
->gp_size
== 0)
11633 info_ptr
->gp_save_offset
= 0;
11635 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
11636 info_ptr
->altivec_save_offset
= 0;
11638 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
11639 info_ptr
->vrsave_save_offset
= 0;
11641 if (! TARGET_SPE_ABI
11642 || info_ptr
->spe_64bit_regs_used
== 0
11643 || info_ptr
->spe_gp_size
== 0)
11644 info_ptr
->spe_gp_save_offset
= 0;
11646 if (! info_ptr
->lr_save_p
)
11647 info_ptr
->lr_save_offset
= 0;
11649 if (! info_ptr
->cr_save_p
)
11650 info_ptr
->cr_save_offset
= 0;
11652 if (! info_ptr
->toc_save_p
)
11653 info_ptr
->toc_save_offset
= 0;
11658 /* Return true if the current function uses any GPRs in 64-bit SIMD
11662 spe_func_has_64bit_regs_p (void)
11666 /* Functions that save and restore all the call-saved registers will
11667 need to save/restore the registers in 64-bits. */
11668 if (current_function_calls_eh_return
11669 || current_function_calls_setjmp
11670 || current_function_has_nonlocal_goto
)
11673 insns
= get_insns ();
11675 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
11681 i
= PATTERN (insn
);
11682 if (GET_CODE (i
) == SET
11683 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
11692 debug_stack_info (rs6000_stack_t
*info
)
11694 const char *abi_string
;
11697 info
= rs6000_stack_info ();
11699 fprintf (stderr
, "\nStack information for function %s:\n",
11700 ((current_function_decl
&& DECL_NAME (current_function_decl
))
11701 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
11706 default: abi_string
= "Unknown"; break;
11707 case ABI_NONE
: abi_string
= "NONE"; break;
11708 case ABI_AIX
: abi_string
= "AIX"; break;
11709 case ABI_DARWIN
: abi_string
= "Darwin"; break;
11710 case ABI_V4
: abi_string
= "V.4"; break;
11713 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
11715 if (TARGET_ALTIVEC_ABI
)
11716 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
11718 if (TARGET_SPE_ABI
)
11719 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
11721 if (info
->first_gp_reg_save
!= 32)
11722 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
11724 if (info
->first_fp_reg_save
!= 64)
11725 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
11727 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
11728 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
11729 info
->first_altivec_reg_save
);
11731 if (info
->lr_save_p
)
11732 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
11734 if (info
->cr_save_p
)
11735 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
11737 if (info
->toc_save_p
)
11738 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
11740 if (info
->vrsave_mask
)
11741 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
11744 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
11747 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
11749 if (info
->gp_save_offset
)
11750 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
11752 if (info
->fp_save_offset
)
11753 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
11755 if (info
->altivec_save_offset
)
11756 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
11757 info
->altivec_save_offset
);
11759 if (info
->spe_gp_save_offset
)
11760 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
11761 info
->spe_gp_save_offset
);
11763 if (info
->vrsave_save_offset
)
11764 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
11765 info
->vrsave_save_offset
);
11767 if (info
->lr_save_offset
)
11768 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
11770 if (info
->cr_save_offset
)
11771 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
11773 if (info
->toc_save_offset
)
11774 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
11776 if (info
->varargs_save_offset
)
11777 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
11779 if (info
->total_size
)
11780 fprintf (stderr
, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
11783 if (info
->varargs_size
)
11784 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
11786 if (info
->vars_size
)
11787 fprintf (stderr
, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
11790 if (info
->parm_size
)
11791 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
11793 if (info
->fixed_size
)
11794 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
11797 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
11799 if (info
->spe_gp_size
)
11800 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
11803 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
11805 if (info
->altivec_size
)
11806 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
11808 if (info
->vrsave_size
)
11809 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
11811 if (info
->altivec_padding_size
)
11812 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
11813 info
->altivec_padding_size
);
11815 if (info
->spe_padding_size
)
11816 fprintf (stderr
, "\tspe_padding_size = %5d\n",
11817 info
->spe_padding_size
);
11820 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
11823 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
11825 if (info
->toc_size
)
11826 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
11828 if (info
->save_size
)
11829 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
11831 if (info
->reg_size
!= 4)
11832 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
11834 fprintf (stderr
, "\n");
11838 rs6000_return_addr (int count
, rtx frame
)
11840 /* Currently we don't optimize very well between prolog and body
11841 code and for PIC code the code can be actually quite bad, so
11842 don't try to be too clever here. */
11843 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
11845 cfun
->machine
->ra_needs_full_frame
= 1;
11852 plus_constant (copy_to_reg
11853 (gen_rtx_MEM (Pmode
,
11854 memory_address (Pmode
, frame
))),
11855 RETURN_ADDRESS_OFFSET
)));
11858 cfun
->machine
->ra_need_lr
= 1;
11859 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
11862 /* Say whether a function is a candidate for sibcall handling or not.
11863 We do not allow indirect calls to be optimized into sibling calls.
11864 Also, we can't do it if there are any vector parameters; there's
11865 nowhere to put the VRsave code so it works; note that functions with
11866 vector parameters are required to have a prototype, so the argument
11867 type info must be available here. (The tail recursion case can work
11868 with vector parameters, but there's no way to distinguish here.) */
11870 rs6000_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
11875 if (TARGET_ALTIVEC_VRSAVE
)
11877 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
11878 type
; type
= TREE_CHAIN (type
))
11880 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
11884 if (DEFAULT_ABI
== ABI_DARWIN
11885 || (*targetm
.binds_local_p
) (decl
))
11887 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
11889 if (!lookup_attribute ("longcall", attr_list
)
11890 || lookup_attribute ("shortcall", attr_list
))
11898 rs6000_ra_ever_killed (void)
11904 if (current_function_is_thunk
)
11907 /* regs_ever_live has LR marked as used if any sibcalls are present,
11908 but this should not force saving and restoring in the
11909 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11910 clobbers LR, so that is inappropriate. */
11912 /* Also, the prologue can generate a store into LR that
11913 doesn't really count, like this:
11916 bcl to set PIC register
11920 When we're called from the epilogue, we need to avoid counting
11921 this as a store. */
11923 push_topmost_sequence ();
11924 top
= get_insns ();
11925 pop_topmost_sequence ();
11926 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11928 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
11932 if (FIND_REG_INC_NOTE (insn
, reg
))
11934 else if (GET_CODE (insn
) == CALL_INSN
11935 && !SIBLING_CALL_P (insn
))
11937 else if (set_of (reg
, insn
) != NULL_RTX
11938 && !prologue_epilogue_contains (insn
))
11945 /* Add a REG_MAYBE_DEAD note to the insn. */
11947 rs6000_maybe_dead (rtx insn
)
11949 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
11954 /* Emit instructions needed to load the TOC register.
11955 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11956 a constant pool; or for SVR4 -fpic. */
11959 rs6000_emit_load_toc_table (int fromprolog
)
11962 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
11964 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
11966 rtx temp
= (fromprolog
11967 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
11968 : gen_reg_rtx (Pmode
));
11969 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
11971 rs6000_maybe_dead (insn
);
11972 insn
= emit_move_insn (dest
, temp
);
11974 rs6000_maybe_dead (insn
);
11976 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
11979 rtx tempLR
= (fromprolog
11980 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
11981 : gen_reg_rtx (Pmode
));
11982 rtx temp0
= (fromprolog
11983 ? gen_rtx_REG (Pmode
, 0)
11984 : gen_reg_rtx (Pmode
));
11987 /* possibly create the toc section */
11988 if (! toc_initialized
)
11991 function_section (current_function_decl
);
11998 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
11999 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12001 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
12002 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12004 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
12006 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
12007 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
12014 static int reload_toc_labelno
= 0;
12016 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
12018 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
12019 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12021 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
12022 emit_move_insn (dest
, tempLR
);
12023 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
12025 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
12027 rs6000_maybe_dead (insn
);
12029 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
12031 /* This is for AIX code running in non-PIC ELF32. */
12034 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
12035 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12037 insn
= emit_insn (gen_elf_high (dest
, realsym
));
12039 rs6000_maybe_dead (insn
);
12040 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
12042 rs6000_maybe_dead (insn
);
12044 else if (DEFAULT_ABI
== ABI_AIX
)
12047 insn
= emit_insn (gen_load_toc_aix_si (dest
));
12049 insn
= emit_insn (gen_load_toc_aix_di (dest
));
12051 rs6000_maybe_dead (insn
);
12057 /* Emit instructions to restore the link register after determining where
12058 its value has been stored. */
12061 rs6000_emit_eh_reg_restore (rtx source
, rtx scratch
)
12063 rs6000_stack_t
*info
= rs6000_stack_info ();
12066 operands
[0] = source
;
12067 operands
[1] = scratch
;
12069 if (info
->lr_save_p
)
12071 rtx frame_rtx
= stack_pointer_rtx
;
12072 HOST_WIDE_INT sp_offset
= 0;
12075 if (frame_pointer_needed
12076 || current_function_calls_alloca
12077 || info
->total_size
> 32767)
12079 emit_move_insn (operands
[1], gen_rtx_MEM (Pmode
, frame_rtx
));
12080 frame_rtx
= operands
[1];
12082 else if (info
->push_p
)
12083 sp_offset
= info
->total_size
;
12085 tmp
= plus_constant (frame_rtx
, info
->lr_save_offset
+ sp_offset
);
12086 tmp
= gen_rtx_MEM (Pmode
, tmp
);
12087 emit_move_insn (tmp
, operands
[0]);
12090 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
), operands
[0]);
12093 static GTY(()) int set
= -1;
12096 get_TOC_alias_set (void)
12099 set
= new_alias_set ();
12103 /* This returns nonzero if the current function uses the TOC. This is
12104 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12105 is generated by the ABI_V4 load_toc_* patterns. */
12112 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
12115 rtx pat
= PATTERN (insn
);
12118 if (GET_CODE (pat
) == PARALLEL
)
12119 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
12121 rtx sub
= XVECEXP (pat
, 0, i
);
12122 if (GET_CODE (sub
) == USE
)
12124 sub
= XEXP (sub
, 0);
12125 if (GET_CODE (sub
) == UNSPEC
12126 && XINT (sub
, 1) == UNSPEC_TOC
)
12136 create_TOC_reference (rtx symbol
)
12138 return gen_rtx_PLUS (Pmode
,
12139 gen_rtx_REG (Pmode
, TOC_REGISTER
),
12140 gen_rtx_CONST (Pmode
,
12141 gen_rtx_MINUS (Pmode
, symbol
,
12142 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
12145 /* If _Unwind_* has been called from within the same module,
12146 toc register is not guaranteed to be saved to 40(1) on function
12147 entry. Save it there in that case. */
12150 rs6000_aix_emit_builtin_unwind_init (void)
12153 rtx stack_top
= gen_reg_rtx (Pmode
);
12154 rtx opcode_addr
= gen_reg_rtx (Pmode
);
12155 rtx opcode
= gen_reg_rtx (SImode
);
12156 rtx tocompare
= gen_reg_rtx (SImode
);
12157 rtx no_toc_save_needed
= gen_label_rtx ();
12159 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
12160 emit_move_insn (stack_top
, mem
);
12162 mem
= gen_rtx_MEM (Pmode
,
12163 gen_rtx_PLUS (Pmode
, stack_top
,
12164 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
12165 emit_move_insn (opcode_addr
, mem
);
12166 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
12167 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
12168 : 0xE8410028, SImode
));
12170 do_compare_rtx_and_jump (opcode
, tocompare
, EQ
, 1,
12171 SImode
, NULL_RTX
, NULL_RTX
,
12172 no_toc_save_needed
);
12174 mem
= gen_rtx_MEM (Pmode
,
12175 gen_rtx_PLUS (Pmode
, stack_top
,
12176 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
12177 emit_move_insn (mem
, gen_rtx_REG (Pmode
, 2));
12178 emit_label (no_toc_save_needed
);
12181 /* This ties together stack memory (MEM with an alias set of
12182 rs6000_sr_alias_set) and the change to the stack pointer. */
12185 rs6000_emit_stack_tie (void)
12187 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
12189 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12190 emit_insn (gen_stack_tie (mem
));
12193 /* Emit the correct code for allocating stack space, as insns.
12194 If COPY_R12, make sure a copy of the old frame is left in r12.
12195 The generated code may use hard register 0 as a temporary. */
12198 rs6000_emit_allocate_stack (HOST_WIDE_INT size
, int copy_r12
)
12201 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
12202 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
12203 rtx todec
= GEN_INT (-size
);
12205 if (current_function_limit_stack
)
12207 if (REG_P (stack_limit_rtx
)
12208 && REGNO (stack_limit_rtx
) > 1
12209 && REGNO (stack_limit_rtx
) <= 31)
12211 emit_insn (TARGET_32BIT
12212 ? gen_addsi3 (tmp_reg
,
12215 : gen_adddi3 (tmp_reg
,
12219 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
12222 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
12224 && DEFAULT_ABI
== ABI_V4
)
12226 rtx toload
= gen_rtx_CONST (VOIDmode
,
12227 gen_rtx_PLUS (Pmode
,
12231 emit_insn (gen_elf_high (tmp_reg
, toload
));
12232 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
12233 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
12237 warning ("stack limit expression is not supported");
12240 if (copy_r12
|| ! TARGET_UPDATE
)
12241 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
12247 /* Need a note here so that try_split doesn't get confused. */
12248 if (get_last_insn() == NULL_RTX
)
12249 emit_note (NOTE_INSN_DELETED
);
12250 insn
= emit_move_insn (tmp_reg
, todec
);
12251 try_split (PATTERN (insn
), insn
, 0);
12255 insn
= emit_insn (TARGET_32BIT
12256 ? gen_movsi_update (stack_reg
, stack_reg
,
12258 : gen_movdi_update (stack_reg
, stack_reg
,
12259 todec
, stack_reg
));
12263 insn
= emit_insn (TARGET_32BIT
12264 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
12265 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
12266 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
12267 gen_rtx_REG (Pmode
, 12));
12270 RTX_FRAME_RELATED_P (insn
) = 1;
12272 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12273 gen_rtx_SET (VOIDmode
, stack_reg
,
12274 gen_rtx_PLUS (Pmode
, stack_reg
,
12279 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12280 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12281 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12282 deduce these equivalences by itself so it wasn't necessary to hold
12283 its hand so much. */
12286 rs6000_frame_related (rtx insn
, rtx reg
, HOST_WIDE_INT val
,
12287 rtx reg2
, rtx rreg
)
12291 /* copy_rtx will not make unique copies of registers, so we need to
12292 ensure we don't have unwanted sharing here. */
12294 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
12297 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
12299 real
= copy_rtx (PATTERN (insn
));
12301 if (reg2
!= NULL_RTX
)
12302 real
= replace_rtx (real
, reg2
, rreg
);
12304 real
= replace_rtx (real
, reg
,
12305 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
12306 STACK_POINTER_REGNUM
),
12309 /* We expect that 'real' is either a SET or a PARALLEL containing
12310 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12311 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12313 if (GET_CODE (real
) == SET
)
12317 temp
= simplify_rtx (SET_SRC (set
));
12319 SET_SRC (set
) = temp
;
12320 temp
= simplify_rtx (SET_DEST (set
));
12322 SET_DEST (set
) = temp
;
12323 if (GET_CODE (SET_DEST (set
)) == MEM
)
12325 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
12327 XEXP (SET_DEST (set
), 0) = temp
;
12330 else if (GET_CODE (real
) == PARALLEL
)
12333 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
12334 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
12336 rtx set
= XVECEXP (real
, 0, i
);
12338 temp
= simplify_rtx (SET_SRC (set
));
12340 SET_SRC (set
) = temp
;
12341 temp
= simplify_rtx (SET_DEST (set
));
12343 SET_DEST (set
) = temp
;
12344 if (GET_CODE (SET_DEST (set
)) == MEM
)
12346 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
12348 XEXP (SET_DEST (set
), 0) = temp
;
12350 RTX_FRAME_RELATED_P (set
) = 1;
12357 real
= spe_synthesize_frame_save (real
);
12359 RTX_FRAME_RELATED_P (insn
) = 1;
12360 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12365 /* Given an SPE frame note, return a PARALLEL of SETs with the
12366 original note, plus a synthetic register save. */
12369 spe_synthesize_frame_save (rtx real
)
12371 rtx synth
, offset
, reg
, real2
;
12373 if (GET_CODE (real
) != SET
12374 || GET_MODE (SET_SRC (real
)) != V2SImode
)
12377 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12378 frame related note. The parallel contains a set of the register
12379 being saved, and another set to a synthetic register (n+1200).
12380 This is so we can differentiate between 64-bit and 32-bit saves.
12381 Words cannot describe this nastiness. */
12383 if (GET_CODE (SET_DEST (real
)) != MEM
12384 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
12385 || GET_CODE (SET_SRC (real
)) != REG
)
12389 (set (mem (plus (reg x) (const y)))
12392 (set (mem (plus (reg x) (const y+4)))
12396 real2
= copy_rtx (real
);
12397 PUT_MODE (SET_DEST (real2
), SImode
);
12398 reg
= SET_SRC (real2
);
12399 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
12400 synth
= copy_rtx (real2
);
12402 if (BYTES_BIG_ENDIAN
)
12404 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
12405 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
12408 reg
= SET_SRC (synth
);
12410 synth
= replace_rtx (synth
, reg
,
12411 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
12413 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
12414 synth
= replace_rtx (synth
, offset
,
12415 GEN_INT (INTVAL (offset
)
12416 + (BYTES_BIG_ENDIAN
? 0 : 4)));
12418 RTX_FRAME_RELATED_P (synth
) = 1;
12419 RTX_FRAME_RELATED_P (real2
) = 1;
12420 if (BYTES_BIG_ENDIAN
)
12421 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
12423 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
12428 /* Returns an insn that has a vrsave set operation with the
12429 appropriate CLOBBERs. */
12432 generate_set_vrsave (rtx reg
, rs6000_stack_t
*info
, int epiloguep
)
12435 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
12436 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
12439 = gen_rtx_SET (VOIDmode
,
12441 gen_rtx_UNSPEC_VOLATILE (SImode
,
12442 gen_rtvec (2, reg
, vrsave
),
12447 /* We need to clobber the registers in the mask so the scheduler
12448 does not move sets to VRSAVE before sets of AltiVec registers.
12450 However, if the function receives nonlocal gotos, reload will set
12451 all call saved registers live. We will end up with:
12453 (set (reg 999) (mem))
12454 (parallel [ (set (reg vrsave) (unspec blah))
12455 (clobber (reg 999))])
12457 The clobber will cause the store into reg 999 to be dead, and
12458 flow will attempt to delete an epilogue insn. In this case, we
12459 need an unspec use/set of the register. */
12461 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
12462 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
12464 if (!epiloguep
|| call_used_regs
[i
])
12465 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
12466 gen_rtx_REG (V4SImode
, i
));
12469 rtx reg
= gen_rtx_REG (V4SImode
, i
);
12472 = gen_rtx_SET (VOIDmode
,
12474 gen_rtx_UNSPEC (V4SImode
,
12475 gen_rtvec (1, reg
), 27));
12479 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
12481 for (i
= 0; i
< nclobs
; ++i
)
12482 XVECEXP (insn
, 0, i
) = clobs
[i
];
12487 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12488 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12491 emit_frame_save (rtx frame_reg
, rtx frame_ptr
, enum machine_mode mode
,
12492 unsigned int regno
, int offset
, HOST_WIDE_INT total_size
)
12494 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
12495 rtx replacea
, replaceb
;
12497 int_rtx
= GEN_INT (offset
);
12499 /* Some cases that need register indexed addressing. */
12500 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
12502 && SPE_VECTOR_MODE (mode
)
12503 && !SPE_CONST_OFFSET_OK (offset
)))
12505 /* Whomever calls us must make sure r11 is available in the
12506 flow path of instructions in the prologue. */
12507 offset_rtx
= gen_rtx_REG (Pmode
, 11);
12508 emit_move_insn (offset_rtx
, int_rtx
);
12510 replacea
= offset_rtx
;
12511 replaceb
= int_rtx
;
12515 offset_rtx
= int_rtx
;
12516 replacea
= NULL_RTX
;
12517 replaceb
= NULL_RTX
;
12520 reg
= gen_rtx_REG (mode
, regno
);
12521 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
12522 mem
= gen_rtx_MEM (mode
, addr
);
12523 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12525 insn
= emit_move_insn (mem
, reg
);
12527 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
12530 /* Emit an offset memory reference suitable for a frame store, while
12531 converting to a valid addressing mode. */
12534 gen_frame_mem_offset (enum machine_mode mode
, rtx reg
, int offset
)
12536 rtx int_rtx
, offset_rtx
;
12538 int_rtx
= GEN_INT (offset
);
12540 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
12542 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
12543 emit_move_insn (offset_rtx
, int_rtx
);
12546 offset_rtx
= int_rtx
;
12548 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
12551 /* Emit function prologue as insns. */
12554 rs6000_emit_prologue (void)
12556 rs6000_stack_t
*info
= rs6000_stack_info ();
12557 enum machine_mode reg_mode
= Pmode
;
12558 int reg_size
= TARGET_32BIT
? 4 : 8;
12559 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
12560 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
12561 rtx frame_reg_rtx
= sp_reg_rtx
;
12562 rtx cr_save_rtx
= NULL_RTX
;
12564 int saving_FPRs_inline
;
12565 int using_store_multiple
;
12566 HOST_WIDE_INT sp_offset
= 0;
12568 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
12570 reg_mode
= V2SImode
;
12574 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
12575 && (!TARGET_SPE_ABI
12576 || info
->spe_64bit_regs_used
== 0)
12577 && info
->first_gp_reg_save
< 31);
12578 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
12579 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
12580 || current_function_calls_eh_return
12581 || cfun
->machine
->ra_need_lr
);
12583 /* For V.4, update stack before we do any saving and set back pointer. */
12585 && (DEFAULT_ABI
== ABI_V4
12586 || current_function_calls_eh_return
))
12588 if (info
->total_size
< 32767)
12589 sp_offset
= info
->total_size
;
12591 frame_reg_rtx
= frame_ptr_rtx
;
12592 rs6000_emit_allocate_stack (info
->total_size
,
12593 (frame_reg_rtx
!= sp_reg_rtx
12594 && (info
->cr_save_p
12596 || info
->first_fp_reg_save
< 64
12597 || info
->first_gp_reg_save
< 32
12599 if (frame_reg_rtx
!= sp_reg_rtx
)
12600 rs6000_emit_stack_tie ();
12603 /* Save AltiVec registers if needed. */
12604 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
12608 /* There should be a non inline version of this, for when we
12609 are saving lots of vector registers. */
12610 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
12611 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
12613 rtx areg
, savereg
, mem
;
12616 offset
= info
->altivec_save_offset
+ sp_offset
12617 + 16 * (i
- info
->first_altivec_reg_save
);
12619 savereg
= gen_rtx_REG (V4SImode
, i
);
12621 areg
= gen_rtx_REG (Pmode
, 0);
12622 emit_move_insn (areg
, GEN_INT (offset
));
12624 /* AltiVec addressing mode is [reg+reg]. */
12625 mem
= gen_rtx_MEM (V4SImode
,
12626 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
12628 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12630 insn
= emit_move_insn (mem
, savereg
);
12632 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12633 areg
, GEN_INT (offset
));
12637 /* VRSAVE is a bit vector representing which AltiVec registers
12638 are used. The OS uses this to determine which vector
12639 registers to save on a context switch. We need to save
12640 VRSAVE on the stack frame, add whatever AltiVec registers we
12641 used in this function, and do the corresponding magic in the
12644 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
12645 && info
->vrsave_mask
!= 0)
12647 rtx reg
, mem
, vrsave
;
12650 /* Get VRSAVE onto a GPR. */
12651 reg
= gen_rtx_REG (SImode
, 12);
12652 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
12654 emit_insn (gen_get_vrsave_internal (reg
));
12656 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
12659 offset
= info
->vrsave_save_offset
+ sp_offset
;
12661 = gen_rtx_MEM (SImode
,
12662 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
12663 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12664 insn
= emit_move_insn (mem
, reg
);
12666 /* Include the registers in the mask. */
12667 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
12669 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
12672 /* If we use the link register, get it into r0. */
12673 if (info
->lr_save_p
)
12675 insn
= emit_move_insn (gen_rtx_REG (Pmode
, 0),
12676 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
12677 RTX_FRAME_RELATED_P (insn
) = 1;
12680 /* If we need to save CR, put it into r12. */
12681 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
12685 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
12686 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
12687 RTX_FRAME_RELATED_P (insn
) = 1;
12688 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12689 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12690 But that's OK. All we have to do is specify that _one_ condition
12691 code register is saved in this stack slot. The thrower's epilogue
12692 will then restore all the call-saved registers.
12693 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12694 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
,
12695 gen_rtx_REG (SImode
, CR2_REGNO
));
12696 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12701 /* Do any required saving of fpr's. If only one or two to save, do
12702 it ourselves. Otherwise, call function. */
12703 if (saving_FPRs_inline
)
12706 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
12707 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
12708 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
12709 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
12710 info
->first_fp_reg_save
+ i
,
12711 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
12714 else if (info
->first_fp_reg_save
!= 64)
12718 const char *alloc_rname
;
12720 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
12722 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
12723 gen_rtx_REG (Pmode
,
12724 LINK_REGISTER_REGNUM
));
12725 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
12726 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
12727 alloc_rname
= ggc_strdup (rname
);
12728 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
12729 gen_rtx_SYMBOL_REF (Pmode
,
12731 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
12733 rtx addr
, reg
, mem
;
12734 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
12735 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12736 GEN_INT (info
->fp_save_offset
12737 + sp_offset
+ 8*i
));
12738 mem
= gen_rtx_MEM (DFmode
, addr
);
12739 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12741 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
12743 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
12744 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12745 NULL_RTX
, NULL_RTX
);
12748 /* Save GPRs. This is done as a PARALLEL if we are using
12749 the store-multiple instructions. */
12750 if (using_store_multiple
)
12754 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
12755 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
12757 rtx addr
, reg
, mem
;
12758 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
12759 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12760 GEN_INT (info
->gp_save_offset
12763 mem
= gen_rtx_MEM (reg_mode
, addr
);
12764 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12766 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
12768 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
12769 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12770 NULL_RTX
, NULL_RTX
);
12775 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
12776 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
12777 && (! call_used_regs
[info
->first_gp_reg_save
+i
]
12778 || (i
+info
->first_gp_reg_save
12779 == RS6000_PIC_OFFSET_TABLE_REGNUM
12780 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
12781 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
12782 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
12783 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
12785 rtx addr
, reg
, mem
;
12786 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
12788 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
12790 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
12793 if (!SPE_CONST_OFFSET_OK (offset
))
12795 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
12796 emit_move_insn (b
, GEN_INT (offset
));
12799 b
= GEN_INT (offset
);
12801 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
12802 mem
= gen_rtx_MEM (V2SImode
, addr
);
12803 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12804 insn
= emit_move_insn (mem
, reg
);
12806 if (GET_CODE (b
) == CONST_INT
)
12807 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12808 NULL_RTX
, NULL_RTX
);
12810 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12811 b
, GEN_INT (offset
));
12815 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12816 GEN_INT (info
->gp_save_offset
12819 mem
= gen_rtx_MEM (reg_mode
, addr
);
12820 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12822 insn
= emit_move_insn (mem
, reg
);
12823 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12824 NULL_RTX
, NULL_RTX
);
12829 /* ??? There's no need to emit actual instructions here, but it's the
12830 easiest way to get the frame unwind information emitted. */
12831 if (current_function_calls_eh_return
)
12833 unsigned int i
, regno
;
12835 /* In AIX ABI we need to pretend we save r2 here. */
12838 rtx addr
, reg
, mem
;
12840 reg
= gen_rtx_REG (reg_mode
, 2);
12841 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12842 GEN_INT (sp_offset
+ 5 * reg_size
));
12843 mem
= gen_rtx_MEM (reg_mode
, addr
);
12844 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12846 insn
= emit_move_insn (mem
, reg
);
12847 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12848 NULL_RTX
, NULL_RTX
);
12849 PATTERN (insn
) = gen_blockage ();
12854 regno
= EH_RETURN_DATA_REGNO (i
);
12855 if (regno
== INVALID_REGNUM
)
12858 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
12859 info
->ehrd_offset
+ sp_offset
12860 + reg_size
* (int) i
,
12865 /* Save lr if we used it. */
12866 if (info
->lr_save_p
)
12868 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12869 GEN_INT (info
->lr_save_offset
+ sp_offset
));
12870 rtx reg
= gen_rtx_REG (Pmode
, 0);
12871 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
12872 /* This should not be of rs6000_sr_alias_set, because of
12873 __builtin_return_address. */
12875 insn
= emit_move_insn (mem
, reg
);
12876 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12877 NULL_RTX
, NULL_RTX
);
12880 /* Save CR if we use any that must be preserved. */
12881 if (info
->cr_save_p
)
12883 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12884 GEN_INT (info
->cr_save_offset
+ sp_offset
));
12885 rtx mem
= gen_rtx_MEM (SImode
, addr
);
12886 /* See the large comment above about why CR2_REGNO is used. */
12887 rtx magic_eh_cr_reg
= gen_rtx_REG (SImode
, CR2_REGNO
);
12889 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12891 /* If r12 was used to hold the original sp, copy cr into r0 now
12893 if (REGNO (frame_reg_rtx
) == 12)
12897 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
12898 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
12899 RTX_FRAME_RELATED_P (insn
) = 1;
12900 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
, magic_eh_cr_reg
);
12901 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12906 insn
= emit_move_insn (mem
, cr_save_rtx
);
12908 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
12909 NULL_RTX
, NULL_RTX
);
12912 /* Update stack and set back pointer unless this is V.4,
12913 for which it was done previously. */
12915 && !(DEFAULT_ABI
== ABI_V4
|| current_function_calls_eh_return
))
12916 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
12918 /* Set frame pointer, if needed. */
12919 if (frame_pointer_needed
)
12921 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
12923 RTX_FRAME_RELATED_P (insn
) = 1;
12926 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12927 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
12928 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
12929 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
12931 /* If emit_load_toc_table will use the link register, we need to save
12932 it. We use R12 for this purpose because emit_load_toc_table
12933 can use register 0. This allows us to use a plain 'blr' to return
12934 from the procedure more often. */
12935 int save_LR_around_toc_setup
= (TARGET_ELF
12936 && DEFAULT_ABI
!= ABI_AIX
12938 && ! info
->lr_save_p
12939 && EXIT_BLOCK_PTR
->pred
!= NULL
);
12940 if (save_LR_around_toc_setup
)
12942 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
12944 insn
= emit_move_insn (frame_ptr_rtx
, lr
);
12945 rs6000_maybe_dead (insn
);
12946 RTX_FRAME_RELATED_P (insn
) = 1;
12948 rs6000_emit_load_toc_table (TRUE
);
12950 insn
= emit_move_insn (lr
, frame_ptr_rtx
);
12951 rs6000_maybe_dead (insn
);
12952 RTX_FRAME_RELATED_P (insn
) = 1;
12955 rs6000_emit_load_toc_table (TRUE
);
12959 if (DEFAULT_ABI
== ABI_DARWIN
12960 && flag_pic
&& current_function_uses_pic_offset_table
)
12962 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
12963 const char *picbase
= machopic_function_base_name ();
12964 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
12966 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr
, src
)));
12968 insn
= emit_move_insn (gen_rtx_REG (Pmode
,
12969 RS6000_PIC_OFFSET_TABLE_REGNUM
),
12971 rs6000_maybe_dead (insn
);
12976 /* Write function prologue. */
12979 rs6000_output_function_prologue (FILE *file
,
12980 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
12982 rs6000_stack_t
*info
= rs6000_stack_info ();
12984 if (TARGET_DEBUG_STACK
)
12985 debug_stack_info (info
);
12987 /* Write .extern for any function we will call to save and restore
12989 if (info
->first_fp_reg_save
< 64
12990 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
12991 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12992 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
12993 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
12994 RESTORE_FP_SUFFIX
);
12996 /* Write .extern for AIX common mode routines, if needed. */
12997 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
12999 fputs ("\t.extern __mulh\n", file
);
13000 fputs ("\t.extern __mull\n", file
);
13001 fputs ("\t.extern __divss\n", file
);
13002 fputs ("\t.extern __divus\n", file
);
13003 fputs ("\t.extern __quoss\n", file
);
13004 fputs ("\t.extern __quous\n", file
);
13005 common_mode_defined
= 1;
13008 if (! HAVE_prologue
)
13012 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13013 the "toplevel" insn chain. */
13014 emit_note (NOTE_INSN_DELETED
);
13015 rs6000_emit_prologue ();
13016 emit_note (NOTE_INSN_DELETED
);
13018 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13022 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
13024 INSN_ADDRESSES_NEW (insn
, addr
);
13029 if (TARGET_DEBUG_STACK
)
13030 debug_rtx_list (get_insns (), 100);
13031 final (get_insns (), file
, FALSE
, FALSE
);
13035 rs6000_pic_labelno
++;
13038 /* Emit function epilogue as insns.
13040 At present, dwarf2out_frame_debug_expr doesn't understand
13041 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13042 anywhere in the epilogue. Most of the insns below would in any case
13043 need special notes to explain where r11 is in relation to the stack. */
13046 rs6000_emit_epilogue (int sibcall
)
13048 rs6000_stack_t
*info
;
13049 int restoring_FPRs_inline
;
13050 int using_load_multiple
;
13051 int using_mfcr_multiple
;
13052 int use_backchain_to_restore_sp
;
13054 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
13055 rtx frame_reg_rtx
= sp_reg_rtx
;
13056 enum machine_mode reg_mode
= Pmode
;
13057 int reg_size
= TARGET_32BIT
? 4 : 8;
13060 info
= rs6000_stack_info ();
13062 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13064 reg_mode
= V2SImode
;
13068 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
13069 && (!TARGET_SPE_ABI
13070 || info
->spe_64bit_regs_used
== 0)
13071 && info
->first_gp_reg_save
< 31);
13072 restoring_FPRs_inline
= (sibcall
13073 || current_function_calls_eh_return
13074 || info
->first_fp_reg_save
== 64
13075 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
13076 use_backchain_to_restore_sp
= (frame_pointer_needed
13077 || current_function_calls_alloca
13078 || info
->total_size
> 32767);
13079 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
13080 || rs6000_cpu
== PROCESSOR_PPC603
13081 || rs6000_cpu
== PROCESSOR_PPC750
13084 /* If we have a frame pointer, a call to alloca, or a large stack
13085 frame, restore the old stack pointer using the backchain. Otherwise,
13086 we know what size to update it with. */
13087 if (use_backchain_to_restore_sp
)
13089 /* Under V.4, don't reset the stack pointer until after we're done
13090 loading the saved registers. */
13091 if (DEFAULT_ABI
== ABI_V4
)
13092 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
13094 emit_move_insn (frame_reg_rtx
,
13095 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
13098 else if (info
->push_p
)
13100 if (DEFAULT_ABI
== ABI_V4
13101 || current_function_calls_eh_return
)
13102 sp_offset
= info
->total_size
;
13105 emit_insn (TARGET_32BIT
13106 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
13107 GEN_INT (info
->total_size
))
13108 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
13109 GEN_INT (info
->total_size
)));
13113 /* Restore AltiVec registers if needed. */
13114 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
13118 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
13119 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
13121 rtx addr
, areg
, mem
;
13123 areg
= gen_rtx_REG (Pmode
, 0);
13125 (areg
, GEN_INT (info
->altivec_save_offset
13127 + 16 * (i
- info
->first_altivec_reg_save
)));
13129 /* AltiVec addressing mode is [reg+reg]. */
13130 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
13131 mem
= gen_rtx_MEM (V4SImode
, addr
);
13132 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13134 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
13138 /* Restore VRSAVE if needed. */
13139 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
13140 && info
->vrsave_mask
!= 0)
13142 rtx addr
, mem
, reg
;
13144 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13145 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
13146 mem
= gen_rtx_MEM (SImode
, addr
);
13147 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13148 reg
= gen_rtx_REG (SImode
, 12);
13149 emit_move_insn (reg
, mem
);
13151 emit_insn (generate_set_vrsave (reg
, info
, 1));
13154 /* Get the old lr if we saved it. */
13155 if (info
->lr_save_p
)
13157 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
13158 info
->lr_save_offset
+ sp_offset
);
13160 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13162 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
13165 /* Get the old cr if we saved it. */
13166 if (info
->cr_save_p
)
13168 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13169 GEN_INT (info
->cr_save_offset
+ sp_offset
));
13170 rtx mem
= gen_rtx_MEM (SImode
, addr
);
13172 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13174 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
13177 /* Set LR here to try to overlap restores below. */
13178 if (info
->lr_save_p
)
13179 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
13180 gen_rtx_REG (Pmode
, 0));
13182 /* Load exception handler data registers, if needed. */
13183 if (current_function_calls_eh_return
)
13185 unsigned int i
, regno
;
13189 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13190 GEN_INT (sp_offset
+ 5 * reg_size
));
13191 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13193 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13195 emit_move_insn (gen_rtx_REG (reg_mode
, 2), mem
);
13202 regno
= EH_RETURN_DATA_REGNO (i
);
13203 if (regno
== INVALID_REGNUM
)
13206 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
13207 info
->ehrd_offset
+ sp_offset
13208 + reg_size
* (int) i
);
13209 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13211 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
13215 /* Restore GPRs. This is done as a PARALLEL if we are using
13216 the load-multiple instructions. */
13217 if (using_load_multiple
)
13220 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
13221 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13223 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13224 GEN_INT (info
->gp_save_offset
13227 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13229 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13232 gen_rtx_SET (VOIDmode
,
13233 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
13236 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13239 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13240 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
13241 && (! call_used_regs
[info
->first_gp_reg_save
+i
]
13242 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13243 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
13244 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13245 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
13246 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
13248 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13249 GEN_INT (info
->gp_save_offset
13252 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13254 /* Restore 64-bit quantities for SPE. */
13255 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13257 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
13260 if (!SPE_CONST_OFFSET_OK (offset
))
13262 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13263 emit_move_insn (b
, GEN_INT (offset
));
13266 b
= GEN_INT (offset
);
13268 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
13269 mem
= gen_rtx_MEM (V2SImode
, addr
);
13272 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13274 emit_move_insn (gen_rtx_REG (reg_mode
,
13275 info
->first_gp_reg_save
+ i
), mem
);
13278 /* Restore fpr's if we need to do it without calling a function. */
13279 if (restoring_FPRs_inline
)
13280 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13281 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
13282 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
13285 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13286 GEN_INT (info
->fp_save_offset
13289 mem
= gen_rtx_MEM (DFmode
, addr
);
13290 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13292 emit_move_insn (gen_rtx_REG (DFmode
,
13293 info
->first_fp_reg_save
+ i
),
13297 /* If we saved cr, restore it here. Just those that were used. */
13298 if (info
->cr_save_p
)
13300 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
13303 if (using_mfcr_multiple
)
13305 for (i
= 0; i
< 8; i
++)
13306 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
13312 if (using_mfcr_multiple
&& count
> 1)
13317 p
= rtvec_alloc (count
);
13320 for (i
= 0; i
< 8; i
++)
13321 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
13323 rtvec r
= rtvec_alloc (2);
13324 RTVEC_ELT (r
, 0) = r12_rtx
;
13325 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
13326 RTVEC_ELT (p
, ndx
) =
13327 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
13328 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
13331 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13336 for (i
= 0; i
< 8; i
++)
13337 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
13339 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
13345 /* If this is V.4, unwind the stack pointer after all of the loads
13346 have been done. We need to emit a block here so that sched
13347 doesn't decide to move the sp change before the register restores
13348 (which may not have any obvious dependency on the stack). This
13349 doesn't hurt performance, because there is no scheduling that can
13350 be done after this point. */
13351 if (DEFAULT_ABI
== ABI_V4
13352 || current_function_calls_eh_return
)
13354 if (frame_reg_rtx
!= sp_reg_rtx
)
13355 rs6000_emit_stack_tie ();
13357 if (use_backchain_to_restore_sp
)
13359 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
13361 else if (sp_offset
!= 0)
13363 emit_insn (TARGET_32BIT
13364 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
13365 GEN_INT (sp_offset
))
13366 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
13367 GEN_INT (sp_offset
)));
13371 if (current_function_calls_eh_return
)
13373 rtx sa
= EH_RETURN_STACKADJ_RTX
;
13374 emit_insn (TARGET_32BIT
13375 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
13376 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
13382 if (! restoring_FPRs_inline
)
13383 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
13385 p
= rtvec_alloc (2);
13387 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
13388 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
13389 gen_rtx_REG (Pmode
,
13390 LINK_REGISTER_REGNUM
));
13392 /* If we have to restore more than two FP registers, branch to the
13393 restore function. It will return to our caller. */
13394 if (! restoring_FPRs_inline
)
13398 const char *alloc_rname
;
13400 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
13401 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
13402 alloc_rname
= ggc_strdup (rname
);
13403 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
13404 gen_rtx_SYMBOL_REF (Pmode
,
13407 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13410 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
13411 GEN_INT (info
->fp_save_offset
+ 8*i
));
13412 mem
= gen_rtx_MEM (DFmode
, addr
);
13413 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13415 RTVEC_ELT (p
, i
+3) =
13416 gen_rtx_SET (VOIDmode
,
13417 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
13422 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13426 /* Write function epilogue. */
13429 rs6000_output_function_epilogue (FILE *file
,
13430 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
13432 rs6000_stack_t
*info
= rs6000_stack_info ();
13434 if (! HAVE_epilogue
)
13436 rtx insn
= get_last_insn ();
13437 /* If the last insn was a BARRIER, we don't have to write anything except
13438 the trace table. */
13439 if (GET_CODE (insn
) == NOTE
)
13440 insn
= prev_nonnote_insn (insn
);
13441 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
13443 /* This is slightly ugly, but at least we don't have two
13444 copies of the epilogue-emitting code. */
13447 /* A NOTE_INSN_DELETED is supposed to be at the start
13448 and end of the "toplevel" insn chain. */
13449 emit_note (NOTE_INSN_DELETED
);
13450 rs6000_emit_epilogue (FALSE
);
13451 emit_note (NOTE_INSN_DELETED
);
13453 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13457 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
13459 INSN_ADDRESSES_NEW (insn
, addr
);
13464 if (TARGET_DEBUG_STACK
)
13465 debug_rtx_list (get_insns (), 100);
13466 final (get_insns (), file
, FALSE
, FALSE
);
13472 macho_branch_islands ();
13473 /* Mach-O doesn't support labels at the end of objects, so if
13474 it looks like we might want one, insert a NOP. */
13476 rtx insn
= get_last_insn ();
13479 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED_LABEL
)
13480 insn
= PREV_INSN (insn
);
13484 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_DELETED_LABEL
)))
13485 fputs ("\tnop\n", file
);
13489 /* Output a traceback table here. See /usr/include/sys/debug.h for info
13492 We don't output a traceback table if -finhibit-size-directive was
13493 used. The documentation for -finhibit-size-directive reads
13494 ``don't output a @code{.size} assembler directive, or anything
13495 else that would cause trouble if the function is split in the
13496 middle, and the two halves are placed at locations far apart in
13497 memory.'' The traceback table has this property, since it
13498 includes the offset from the start of the function to the
13499 traceback table itself.
13501 System V.4 Powerpc's (and the embedded ABI derived from it) use a
13502 different traceback table. */
13503 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
13504 && rs6000_traceback
!= traceback_none
)
13506 const char *fname
= NULL
;
13507 const char *language_string
= lang_hooks
.name
;
13508 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
13510 int optional_tbtab
;
13512 if (rs6000_traceback
== traceback_full
)
13513 optional_tbtab
= 1;
13514 else if (rs6000_traceback
== traceback_part
)
13515 optional_tbtab
= 0;
13517 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
13519 if (optional_tbtab
)
13521 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
13522 while (*fname
== '.') /* V.4 encodes . in the name */
13525 /* Need label immediately before tbtab, so we can compute
13526 its offset from the function start. */
13527 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
13528 ASM_OUTPUT_LABEL (file
, fname
);
13531 /* The .tbtab pseudo-op can only be used for the first eight
13532 expressions, since it can't handle the possibly variable
13533 length fields that follow. However, if you omit the optional
13534 fields, the assembler outputs zeros for all optional fields
13535 anyways, giving each variable length field is minimum length
13536 (as defined in sys/debug.h). Thus we can not use the .tbtab
13537 pseudo-op at all. */
13539 /* An all-zero word flags the start of the tbtab, for debuggers
13540 that have to find it by searching forward from the entry
13541 point or from the current pc. */
13542 fputs ("\t.long 0\n", file
);
13544 /* Tbtab format type. Use format type 0. */
13545 fputs ("\t.byte 0,", file
);
13547 /* Language type. Unfortunately, there does not seem to be any
13548 official way to discover the language being compiled, so we
13549 use language_string.
13550 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
13551 Java is 13. Objective-C is 14. */
13552 if (! strcmp (language_string
, "GNU C"))
13554 else if (! strcmp (language_string
, "GNU F77")
13555 || ! strcmp (language_string
, "GNU F95"))
13557 else if (! strcmp (language_string
, "GNU Pascal"))
13559 else if (! strcmp (language_string
, "GNU Ada"))
13561 else if (! strcmp (language_string
, "GNU C++"))
13563 else if (! strcmp (language_string
, "GNU Java"))
13565 else if (! strcmp (language_string
, "GNU Objective-C"))
13569 fprintf (file
, "%d,", i
);
13571 /* 8 single bit fields: global linkage (not set for C extern linkage,
13572 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13573 from start of procedure stored in tbtab, internal function, function
13574 has controlled storage, function has no toc, function uses fp,
13575 function logs/aborts fp operations. */
13576 /* Assume that fp operations are used if any fp reg must be saved. */
13577 fprintf (file
, "%d,",
13578 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
13580 /* 6 bitfields: function is interrupt handler, name present in
13581 proc table, function calls alloca, on condition directives
13582 (controls stack walks, 3 bits), saves condition reg, saves
13584 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13585 set up as a frame pointer, even when there is no alloca call. */
13586 fprintf (file
, "%d,",
13587 ((optional_tbtab
<< 6)
13588 | ((optional_tbtab
& frame_pointer_needed
) << 5)
13589 | (info
->cr_save_p
<< 1)
13590 | (info
->lr_save_p
)));
13592 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13594 fprintf (file
, "%d,",
13595 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
13597 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13598 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
13600 if (optional_tbtab
)
13602 /* Compute the parameter info from the function decl argument
13605 int next_parm_info_bit
= 31;
13607 for (decl
= DECL_ARGUMENTS (current_function_decl
);
13608 decl
; decl
= TREE_CHAIN (decl
))
13610 rtx parameter
= DECL_INCOMING_RTL (decl
);
13611 enum machine_mode mode
= GET_MODE (parameter
);
13613 if (GET_CODE (parameter
) == REG
)
13615 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
13621 if (mode
== SFmode
)
13623 else if (mode
== DFmode
|| mode
== TFmode
)
13628 /* If only one bit will fit, don't or in this entry. */
13629 if (next_parm_info_bit
> 0)
13630 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
13631 next_parm_info_bit
-= 2;
13635 fixed_parms
+= ((GET_MODE_SIZE (mode
)
13636 + (UNITS_PER_WORD
- 1))
13638 next_parm_info_bit
-= 1;
13644 /* Number of fixed point parameters. */
13645 /* This is actually the number of words of fixed point parameters; thus
13646 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13647 fprintf (file
, "%d,", fixed_parms
);
13649 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13651 /* This is actually the number of fp registers that hold parameters;
13652 and thus the maximum value is 13. */
13653 /* Set parameters on stack bit if parameters are not in their original
13654 registers, regardless of whether they are on the stack? Xlc
13655 seems to set the bit when not optimizing. */
13656 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
13658 if (! optional_tbtab
)
13661 /* Optional fields follow. Some are variable length. */
13663 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13664 11 double float. */
13665 /* There is an entry for each parameter in a register, in the order that
13666 they occur in the parameter list. Any intervening arguments on the
13667 stack are ignored. If the list overflows a long (max possible length
13668 34 bits) then completely leave off all elements that don't fit. */
13669 /* Only emit this long if there was at least one parameter. */
13670 if (fixed_parms
|| float_parms
)
13671 fprintf (file
, "\t.long %d\n", parm_info
);
13673 /* Offset from start of code to tb table. */
13674 fputs ("\t.long ", file
);
13675 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
13677 RS6000_OUTPUT_BASENAME (file
, fname
);
13679 assemble_name (file
, fname
);
13681 fputs ("-.", file
);
13683 RS6000_OUTPUT_BASENAME (file
, fname
);
13685 assemble_name (file
, fname
);
13689 /* Interrupt handler mask. */
13690 /* Omit this long, since we never set the interrupt handler bit
13693 /* Number of CTL (controlled storage) anchors. */
13694 /* Omit this long, since the has_ctl bit is never set above. */
13696 /* Displacement into stack of each CTL anchor. */
13697 /* Omit this list of longs, because there are no CTL anchors. */
13699 /* Length of function name. */
13702 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
13704 /* Function name. */
13705 assemble_string (fname
, strlen (fname
));
13707 /* Register for alloca automatic storage; this is always reg 31.
13708 Only emit this if the alloca bit was set above. */
13709 if (frame_pointer_needed
)
13710 fputs ("\t.byte 31\n", file
);
13712 fputs ("\t.align 2\n", file
);
13716 /* A C compound statement that outputs the assembler code for a thunk
13717 function, used to implement C++ virtual function calls with
13718 multiple inheritance. The thunk acts as a wrapper around a virtual
13719 function, adjusting the implicit object parameter before handing
13720 control off to the real function.
13722 First, emit code to add the integer DELTA to the location that
13723 contains the incoming first argument. Assume that this argument
13724 contains a pointer, and is the one used to pass the `this' pointer
13725 in C++. This is the incoming argument *before* the function
13726 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13727 values of all other incoming arguments.
13729 After the addition, emit code to jump to FUNCTION, which is a
13730 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13731 not touch the return address. Hence returning from FUNCTION will
13732 return to whoever called the current `thunk'.
13734 The effect must be as if FUNCTION had been called directly with the
13735 adjusted first argument. This macro is responsible for emitting
13736 all of the code for a thunk function; output_function_prologue()
13737 and output_function_epilogue() are not invoked.
13739 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13740 been extracted from it.) It might possibly be useful on some
13741 targets, but probably not.
13743 If you do not define this macro, the target-independent code in the
13744 C++ frontend will generate a less efficient heavyweight thunk that
13745 calls FUNCTION instead of jumping to it. The generic approach does
13746 not support varargs. */
13749 rs6000_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
13750 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
13753 rtx
this, insn
, funexp
;
13755 reload_completed
= 1;
13756 epilogue_completed
= 1;
13757 no_new_pseudos
= 1;
13758 reset_block_changes ();
13760 /* Mark the end of the (empty) prologue. */
13761 emit_note (NOTE_INSN_PROLOGUE_END
);
13763 /* Find the "this" pointer. If the function returns a structure,
13764 the structure return pointer is in r3. */
13765 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
13766 this = gen_rtx_REG (Pmode
, 4);
13768 this = gen_rtx_REG (Pmode
, 3);
13770 /* Apply the constant offset, if required. */
13773 rtx delta_rtx
= GEN_INT (delta
);
13774 emit_insn (TARGET_32BIT
13775 ? gen_addsi3 (this, this, delta_rtx
)
13776 : gen_adddi3 (this, this, delta_rtx
));
13779 /* Apply the offset from the vtable, if required. */
13782 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
13783 rtx tmp
= gen_rtx_REG (Pmode
, 12);
13785 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
13786 if (((unsigned HOST_WIDE_INT
) vcall_offset
) + 0x8000 >= 0x10000)
13788 emit_insn (TARGET_32BIT
13789 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
13790 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
13791 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
13795 rtx loc
= gen_rtx_PLUS (Pmode
, tmp
, vcall_offset_rtx
);
13797 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, loc
));
13799 emit_insn (TARGET_32BIT
13800 ? gen_addsi3 (this, this, tmp
)
13801 : gen_adddi3 (this, this, tmp
));
13804 /* Generate a tail call to the target function. */
13805 if (!TREE_USED (function
))
13807 assemble_external (function
);
13808 TREE_USED (function
) = 1;
13810 funexp
= XEXP (DECL_RTL (function
), 0);
13811 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
13814 if (MACHOPIC_INDIRECT
)
13815 funexp
= machopic_indirect_call_target (funexp
);
13818 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13819 generate sibcall RTL explicitly to avoid constraint abort. */
13820 insn
= emit_call_insn (
13821 gen_rtx_PARALLEL (VOIDmode
,
13823 gen_rtx_CALL (VOIDmode
,
13824 funexp
, const0_rtx
),
13825 gen_rtx_USE (VOIDmode
, const0_rtx
),
13826 gen_rtx_USE (VOIDmode
,
13827 gen_rtx_REG (SImode
,
13828 LINK_REGISTER_REGNUM
)),
13829 gen_rtx_RETURN (VOIDmode
))));
13830 SIBLING_CALL_P (insn
) = 1;
13833 /* Run just enough of rest_of_compilation to get the insns emitted.
13834 There's not really enough bulk here to make other passes such as
13835 instruction scheduling worth while. Note that use_thunk calls
13836 assemble_start_function and assemble_end_function. */
13837 insn
= get_insns ();
13838 insn_locators_initialize ();
13839 shorten_branches (insn
);
13840 final_start_function (insn
, file
, 1);
13841 final (insn
, file
, 1, 0);
13842 final_end_function ();
13844 reload_completed
= 0;
13845 epilogue_completed
= 0;
13846 no_new_pseudos
= 0;
13849 /* A quick summary of the various types of 'constant-pool tables'
13852 Target Flags Name One table per
13853 AIX (none) AIX TOC object file
13854 AIX -mfull-toc AIX TOC object file
13855 AIX -mminimal-toc AIX minimal TOC translation unit
13856 SVR4/EABI (none) SVR4 SDATA object file
13857 SVR4/EABI -fpic SVR4 pic object file
13858 SVR4/EABI -fPIC SVR4 PIC translation unit
13859 SVR4/EABI -mrelocatable EABI TOC function
13860 SVR4/EABI -maix AIX TOC object file
13861 SVR4/EABI -maix -mminimal-toc
13862 AIX minimal TOC translation unit
13864 Name Reg. Set by entries contains:
13865 made by addrs? fp? sum?
13867 AIX TOC 2 crt0 as Y option option
13868 AIX minimal TOC 30 prolog gcc Y Y option
13869 SVR4 SDATA 13 crt0 gcc N Y N
13870 SVR4 pic 30 prolog ld Y not yet N
13871 SVR4 PIC 30 prolog gcc Y option option
13872 EABI TOC 30 prolog gcc Y option option
13876 /* Hash functions for the hash table. */
/* Hash the constant rtx K for the TOC hash table: mix the rtx code and
   machine mode, then fold in every operand (strings, nested rtxes,
   ints, wide ints) using the multipliers 613 and 1231.
   NOTE(review): this listing lost interior lines in extraction
   (declarations of flen/fidx/len/i, braces, switch case labels, the
   final return) -- verify against upstream rs6000.c before editing.  */
13879 rs6000_hash_constant (rtx k
)
13881 enum rtx_code code
= GET_CODE (k
);
13882 enum machine_mode mode
= GET_MODE (k
);
13883 unsigned result
= (code
<< 3) ^ mode
;
13884 const char *format
;
13887 format
= GET_RTX_FORMAT (code
);
13888 flen
= strlen (format
);
/* Insn-like operands hash by their unique insn uid.  */
13894 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
/* A CONST_DOUBLE with a non-VOID mode is a floating-point value:
   hash it via its REAL_VALUE contents.  */
13897 if (mode
!= VOIDmode
)
13898 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
/* Walk the remaining operands by format character.  */
13910 for (; fidx
< flen
; fidx
++)
13911 switch (format
[fidx
])
13916 const char *str
= XSTR (k
, fidx
);
13917 len
= strlen (str
);
13918 result
= result
* 613 + len
;
13919 for (i
= 0; i
< len
; i
++)
13920 result
= result
* 613 + (unsigned) str
[i
];
/* Sub-rtx operand: recurse.  */
13925 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
13929 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
/* Wide-int operand: fold in one chunk if it fits in an unsigned,
   otherwise fold in each unsigned-sized piece.  */
13932 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
13933 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
13937 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
13938 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
/* Hash callback for toc_hash_table: hash a toc_hash_struct by the hash
   of its rtx key combined with its machine mode, so identical constants
   in different modes occupy different slots.  */
13952 toc_hash_function (const void *hash_entry
)
13954 const struct toc_hash_struct
*thc
=
13955 (const struct toc_hash_struct
*) hash_entry
;
13956 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
13959 /* Compare H1 and H2 for equivalence. */
/* Equality callback for toc_hash_table: two entries match only when
   their machine modes are equal and their rtx keys are rtx_equal_p.
   NOTE(review): the early 'return 0' for the mode mismatch was lost in
   extraction -- verify against upstream rs6000.c.  */
13962 toc_hash_eq (const void *h1
, const void *h2
)
13964 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
13965 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
13967 if (((const struct toc_hash_struct
*) h1
)->key_mode
13968 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
13971 return rtx_equal_p (r1
, r2
);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NAME must be a 'const char *'.  The macro tests whether NAME begins
   with any of the vtable-related prefixes: "_vt." (old g++ mangling)
   or the Itanium-ABI prefixes _ZTV (vtable), _ZTT (VTT), _ZTI
   (typeinfo), _ZTC (construction vtable).

   Bug fix: the macro previously ignored its NAME parameter and
   referenced a caller-scope variable 'name', silently coupling it to
   call sites that happened to use that identifier.  It now evaluates
   its argument.  Note NAME is evaluated multiple times, so pass an
   expression without side effects.  */
#define VTABLE_NAME_P(NAME)				       \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	       \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	       \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	       \
   || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	       \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output assembler text for SYMBOL_REF X to FILE.  Vtable symbols (per
   VTABLE_NAME_P) are printed via RS6000_OUTPUT_BASENAME so the
   reference names the symbol itself; all other symbols go through
   assemble_name.  */
13987 rs6000_output_symbol_ref (FILE *file
, rtx x
)
13989 /* Currently C++ toc references to vtables can be emitted before it
13990 is decided whether the vtable is public or private. If this is
13991 the case, then the linker will eventually complain that there is
13992 a reference to an unknown section. Thus, for vtables only,
13993 we emit the TOC reference to reference the symbol and not the
   section.  */
13995 const char *name
= XSTR (x
, 0);
13997 if (VTABLE_NAME_P (name
))
13999 RS6000_OUTPUT_BASENAME (file
, name
);
14002 assemble_name (file
, name
);
14005 /* Output a TOC entry. We derive the entry name from what is being
   written.  */
/* Emit one TOC entry for constant X (mode MODE) under internal label
   number LABELNO into FILE.  Handles duplicate suppression through
   toc_hash_table, FP constants (TF/DF/SF), integer constants, and
   symbolic addresses (SYMBOL_REF/LABEL_REF, possibly plus an offset).
   NOTE(review): many interior lines (buf/found/base/offset/l/k
   declarations, braces, #else/#endif, goto labels) were lost in
   extraction -- verify against upstream rs6000.c before editing.  */
14009 output_toc (FILE *file
, rtx x
, int labelno
, enum machine_mode mode
)
14012 const char *name
= buf
;
14013 const char *real_name
;
14020 /* When the linker won't eliminate them, don't output duplicate
14021 TOC entries (this happens on AIX if there is any kind of TOC,
14022 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
   label references.  */
14024 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
14026 struct toc_hash_struct
*h
;
14029 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14030 time because GGC is not initialized at that point. */
14031 if (toc_hash_table
== NULL
)
14032 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
14033 toc_hash_eq
, NULL
);
14035 h
= ggc_alloc (sizeof (*h
));
14037 h
->key_mode
= mode
;
14038 h
->labelno
= labelno
;
14040 found
= htab_find_slot (toc_hash_table
, h
, 1);
14041 if (*found
== NULL
)
14043 else /* This is indeed a duplicate.
14044 Set this label equal to that label. */
14046 fputs ("\t.set ", file
);
14047 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
14048 fprintf (file
, "%d,", labelno
);
14049 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
14050 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
14056 /* If we're going to put a double constant in the TOC, make sure it's
14057 aligned properly when strict alignment is on. */
14058 if (GET_CODE (x
) == CONST_DOUBLE
14059 && STRICT_ALIGNMENT
14060 && GET_MODE_BITSIZE (mode
) >= 64
14061 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
14062 ASM_OUTPUT_ALIGN (file
, 3);
14065 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
14067 /* Handle FP constants specially. Note that if we have a minimal
14068 TOC, things we put here aren't actually in the TOC, so we can allow
   FP constants.  */
/* 128-bit long double (TFmode): four 32-bit words.  */
14070 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
14072 REAL_VALUE_TYPE rv
;
14075 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14076 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
14080 if (TARGET_MINIMAL_TOC
)
14081 fputs (DOUBLE_INT_ASM_OP
, file
);
14083 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14084 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14085 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14086 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
14087 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14088 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14093 if (TARGET_MINIMAL_TOC
)
14094 fputs ("\t.long ", file
);
14096 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14097 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14098 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14099 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14100 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14101 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
/* 64-bit double (DFmode): two 32-bit words.  */
14105 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
14107 REAL_VALUE_TYPE rv
;
14110 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14111 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
14115 if (TARGET_MINIMAL_TOC
)
14116 fputs (DOUBLE_INT_ASM_OP
, file
);
14118 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
14119 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14120 fprintf (file
, "0x%lx%08lx\n",
14121 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14126 if (TARGET_MINIMAL_TOC
)
14127 fputs ("\t.long ", file
);
14129 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
14130 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14131 fprintf (file
, "0x%lx,0x%lx\n",
14132 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
/* 32-bit float (SFmode): one word.  */
14136 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
14138 REAL_VALUE_TYPE rv
;
14141 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14142 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
14146 if (TARGET_MINIMAL_TOC
)
14147 fputs (DOUBLE_INT_ASM_OP
, file
);
14149 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
14150 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
14155 if (TARGET_MINIMAL_TOC
)
14156 fputs ("\t.long ", file
);
14158 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
14159 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
/* VOIDmode integer constants (CONST_INT or integral CONST_DOUBLE):
   split into low/high halves, pad to Pmode width.  */
14163 else if (GET_MODE (x
) == VOIDmode
14164 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
14166 unsigned HOST_WIDE_INT low
;
14167 HOST_WIDE_INT high
;
14169 if (GET_CODE (x
) == CONST_DOUBLE
)
14171 low
= CONST_DOUBLE_LOW (x
);
14172 high
= CONST_DOUBLE_HIGH (x
);
14175 #if HOST_BITS_PER_WIDE_INT == 32
/* Sign-extend the 32-bit host word into the high half.  */
14178 high
= (low
& 0x80000000) ? ~0 : 0;
14182 low
= INTVAL (x
) & 0xffffffff;
14183 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
14187 /* TOC entries are always Pmode-sized, but since this
14188 is a bigendian machine then if we're putting smaller
14189 integer constants in the TOC we have to pad them.
14190 (This is still a win over putting the constants in
14191 a separate constant pool, because then we'd have
14192 to have both a TOC entry _and_ the actual constant.)
14194 For a 32-bit target, CONST_INT values are loaded and shifted
14195 entirely within `low' and can be stored in one TOC entry. */
14197 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
14198 abort ();/* It would be easy to make this work, but it doesn't now. */
14200 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
14202 #if HOST_BITS_PER_WIDE_INT == 32
14203 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
14204 POINTER_SIZE
, &low
, &high
, 0);
14207 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
14208 high
= (HOST_WIDE_INT
) low
>> 32;
14215 if (TARGET_MINIMAL_TOC
)
14216 fputs (DOUBLE_INT_ASM_OP
, file
);
14218 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
14219 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14220 fprintf (file
, "0x%lx%08lx\n",
14221 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14226 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
14228 if (TARGET_MINIMAL_TOC
)
14229 fputs ("\t.long ", file
);
14231 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
14232 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14233 fprintf (file
, "0x%lx,0x%lx\n",
14234 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14238 if (TARGET_MINIMAL_TOC
)
14239 fputs ("\t.long ", file
);
14241 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
14242 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
/* Symbolic constants: strip a CONST(PLUS(base, offset)) wrapper, then
   derive the entry name from the base symbol or label.  */
14248 if (GET_CODE (x
) == CONST
)
14250 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
14253 base
= XEXP (XEXP (x
, 0), 0);
14254 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
14257 if (GET_CODE (base
) == SYMBOL_REF
)
14258 name
= XSTR (base
, 0);
14259 else if (GET_CODE (base
) == LABEL_REF
)
14260 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
14261 else if (GET_CODE (base
) == CODE_LABEL
)
14262 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
14266 real_name
= (*targetm
.strip_name_encoding
) (name
);
14267 if (TARGET_MINIMAL_TOC
)
14268 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
14271 fprintf (file
, "\t.tc %s", real_name
);
14274 fprintf (file
, ".N%d", - offset
);
14276 fprintf (file
, ".P%d", offset
);
14278 fputs ("[TC],", file
);
14281 /* Currently C++ toc references to vtables can be emitted before it
14282 is decided whether the vtable is public or private. If this is
14283 the case, then the linker will eventually complain that there is
14284 a TOC reference to an unknown section. Thus, for vtables only,
14285 we emit the TOC reference to reference the symbol and not the
   section.  */
14287 if (VTABLE_NAME_P (name
))
14289 RS6000_OUTPUT_BASENAME (file
, name
);
14291 fprintf (file
, "%d", offset
);
14292 else if (offset
> 0)
14293 fprintf (file
, "+%d", offset
);
14296 output_addr_const (file
, x
);
14300 /* Output an assembler pseudo-op to write an ASCII string of N characters
14301 starting at P to FILE.
14303 On the RS/6000, we have to do this using the .byte operation and
14304 write out special characters outside the quoted string.
14305 Also, the assembler is broken; very long strings are truncated,
14306 so we must artificially break them up early. */
/* NOTE(review): extraction dropped interior lines here (declaration of
   'c', braces, several state transitions) -- verify against upstream
   rs6000.c before editing.  */
14309 output_ascii (FILE *file
, const char *p
, int n
)
14312 int i
, count_string
;
14313 const char *for_string
= "\t.byte \"";
14314 const char *for_decimal
= "\t.byte ";
14315 const char *to_close
= NULL
;
14318 for (i
= 0; i
< n
; i
++)
/* Printable ASCII goes inside a quoted ".byte" string ...  */
14321 if (c
>= ' ' && c
< 0177)
14324 fputs (for_string
, file
);
14327 /* Write two quotes to get one. */
14335 for_decimal
= "\"\n\t.byte ";
/* ... but break the quoted run before the assembler's length limit.  */
14339 if (count_string
>= 512)
14341 fputs (to_close
, file
);
14343 for_string
= "\t.byte \"";
14344 for_decimal
= "\t.byte ";
/* Non-printable characters are emitted as decimal .byte values.  */
14352 fputs (for_decimal
, file
);
14353 fprintf (file
, "%d", c
);
14355 for_string
= "\n\t.byte \"";
14356 for_decimal
= ", ";
14362 /* Now close the string if we have written one. Then end the line. */
14364 fputs (to_close
, file
);
14367 /* Generate a unique section name for FILENAME for a section type
14368 represented by SECTION_DESC. Output goes into BUF.
14370 SECTION_DESC can be any string, as long as it is different for each
14371 possible section type.
14373 We name the section in the same manner as xlc. The name begins with an
14374 underscore followed by the filename (after stripping any leading directory
14375 names) with the last period replaced by the string SECTION_DESC. If
14376 FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */
/* NOTE(review): extraction dropped interior lines here (declarations of
   p/len, the '/' test, character-copy statements) -- verify against
   upstream rs6000.c.  The result buffer is xmalloc'd into *BUF; the
   caller owns and must free it.  */
14380 rs6000_gen_section_name (char **buf
, const char *filename
,
14381 const char *section_desc
)
14383 const char *q
, *after_last_slash
, *last_period
= 0;
/* First pass: find the basename start and the last '.' in it.  */
14387 after_last_slash
= filename
;
14388 for (q
= filename
; *q
; q
++)
14391 after_last_slash
= q
+ 1;
14392 else if (*q
== '.')
14396 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
14397 *buf
= (char *) xmalloc (len
);
/* Second pass: copy the basename, substituting SECTION_DESC for the
   last period and dropping non-alphanumeric characters.  */
14402 for (q
= after_last_slash
; *q
; q
++)
14404 if (q
== last_period
)
14406 strcpy (p
, section_desc
);
14407 p
+= strlen (section_desc
);
14411 else if (ISALNUM (*q
))
/* No period found: append SECTION_DESC at the end.  */
14415 if (last_period
== 0)
14416 strcpy (p
, section_desc
);
14421 /* Emit profile function. */
/* Emit the RTL call to the mcount profiling routine for the current
   function, label LABELNO.  AIX passes the address of a per-function
   profile counter label unless NO_PROFILE_COUNTERS; Darwin passes the
   caller's address and may route the call through a Mach-O PIC stub.
   Kernel profiling (TARGET_PROFILE_KERNEL) emits nothing here.  */
14424 output_profile_hook (int labelno ATTRIBUTE_UNUSED
)
14426 if (TARGET_PROFILE_KERNEL
)
14429 if (DEFAULT_ABI
== ABI_AIX
)
14431 #ifndef NO_PROFILE_COUNTERS
14432 # define NO_PROFILE_COUNTERS 0
14434 if (NO_PROFILE_COUNTERS
)
14435 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
14439 const char *label_name
;
14442 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
14443 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
14444 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
14446 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
14450 else if (DEFAULT_ABI
== ABI_DARWIN
)
14452 const char *mcount_name
= RS6000_MCOUNT
;
14453 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
14455 /* Be conservative and always set this, at least for now. */
14456 current_function_uses_pic_offset_table
= 1;
14459 /* For PIC code, set up a stub and collect the caller's address
14460 from r0, which is where the prologue puts it. */
14461 if (MACHOPIC_INDIRECT
)
14463 mcount_name
= machopic_stub_name (mcount_name
);
14464 if (current_function_uses_pic_offset_table
)
14465 caller_addr_regno
= 0;
14468 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
14470 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
14474 /* Write function profiler code. */
/* Emit the textual assembly for the profiler prologue of function
   LABELNO into FILE.  ABI_V4 saves LR and materializes the address of
   the LP counter label (via GOT, PIC bl-trick, or lis/la depending on
   flag_pic) before calling mcount; ABI_AIX/ABI_DARWIN profiling is
   normally done in output_profile_hook, except for kernel profiling,
   which saves LR (and the static chain, r11) around the mcount call.
   NOTE(review): extraction dropped interior lines (case labels, buf and
   save_lr declarations, braces) -- verify against upstream rs6000.c.  */
14477 output_function_profiler (FILE *file
, int labelno
)
14482 switch (DEFAULT_ABI
)
14491 warning ("no profiling of 64-bit code for this ABI");
14494 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
14495 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
/* Small-PIC: load the counter address from the GOT.  */
14498 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
14499 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
14500 reg_names
[0], save_lr
, reg_names
[1]);
14501 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
14502 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
14503 assemble_name (file
, buf
);
14504 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
14506 else if (flag_pic
> 1)
14508 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
14509 reg_names
[0], save_lr
, reg_names
[1]);
14510 /* Now, we need to get the address of the label. */
14511 fputs ("\tbl 1f\n\t.long ", file
);
14512 assemble_name (file
, buf
);
14513 fputs ("-.\n1:", file
);
14514 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
14515 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
14516 reg_names
[0], reg_names
[11]);
14517 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
14518 reg_names
[0], reg_names
[0], reg_names
[11]);
/* Non-PIC: build the counter address with lis/la.  */
14522 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
14523 assemble_name (file
, buf
);
14524 fputs ("@ha\n", file
);
14525 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
14526 reg_names
[0], save_lr
, reg_names
[1]);
14527 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
14528 assemble_name (file
, buf
);
14529 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
14532 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
14533 fprintf (file
, "\tbl %s%s\n",
14534 RS6000_MCOUNT
, flag_pic
? "@plt" : "");
14539 if (!TARGET_PROFILE_KERNEL
)
14541 /* Don't do anything, done in output_profile_hook (). */
/* Kernel profiling: save LR, preserve the static chain around the
   mcount call.  */
14548 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
14549 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
14551 if (cfun
->static_chain_decl
!= NULL
)
14553 asm_fprintf (file
, "\tstd %s,24(%s)\n",
14554 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
14555 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
14556 asm_fprintf (file
, "\tld %s,24(%s)\n",
14557 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
14560 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
14567 /* Power4 load update and store update instructions are cracked into a
14568 load or store and an integer insn which are executed in the same cycle.
14569 Branches have their own dispatch slot which does not count against the
14570 GCC issue rate, but it changes the program flow so there are no other
14571 instructions to issue in this cycle. */
/* TARGET_SCHED_VARIABLE_ISSUE hook: return how many more insns can be
   issued this cycle after issuing INSN, given MORE slots remained.
   USE/CLOBBER patterns consume no slot; on dispatch-group CPUs a
   microcoded insn ends the cycle and a cracked insn costs two slots.
   NOTE(review): the return statements for the USE/CLOBBER and
   microcoded cases were lost in extraction.  */
14574 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED
,
14575 int verbose ATTRIBUTE_UNUSED
,
14576 rtx insn
, int more
)
14578 if (GET_CODE (PATTERN (insn
)) == USE
14579 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
14582 if (rs6000_sched_groups
)
14584 if (is_microcoded_insn (insn
))
14586 else if (is_cracked_insn (insn
))
14587 return more
> 2 ? more
- 2 : 0;
14593 /* Adjust the cost of a scheduling dependency. Return the new cost of
14594 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* TARGET_SCHED_ADJUST_COST hook.  Only true data dependences
   (REG_NOTE_KIND == 0) are adjusted: mtctr/mtlr feeding a branch gets
   an explicit latency, and a compare feeding a dependent branch gets
   extra slack on CPUs with expensive mispredicts.
   NOTE(review): the cost parameter, case labels, and several returns
   were lost in extraction -- verify against upstream rs6000.c.  */
14597 rs6000_adjust_cost (rtx insn
, rtx link
, rtx dep_insn ATTRIBUTE_UNUSED
,
14600 if (! recog_memoized (insn
))
14603 if (REG_NOTE_KIND (link
) != 0)
14606 if (REG_NOTE_KIND (link
) == 0)
14608 /* Data dependency; DEP_INSN writes a register that INSN reads
14609 some cycles later. */
14610 switch (get_attr_type (insn
))
14613 /* Tell the first scheduling pass about the latency between
14614 a mtctr and bctr (and mtlr and br/blr). The first
14615 scheduling pass will not know about this latency since
14616 the mtctr instruction, which has the latency associated
14617 to it, will be generated by reload. */
14618 return TARGET_POWER
? 5 : 4;
14620 /* Leave some extra cycles between a compare and its
14621 dependent branch, to inhibit expensive mispredicts. */
14622 if ((rs6000_cpu_attr
== CPU_PPC603
14623 || rs6000_cpu_attr
== CPU_PPC604
14624 || rs6000_cpu_attr
== CPU_PPC604E
14625 || rs6000_cpu_attr
== CPU_PPC620
14626 || rs6000_cpu_attr
== CPU_PPC630
14627 || rs6000_cpu_attr
== CPU_PPC750
14628 || rs6000_cpu_attr
== CPU_PPC7400
14629 || rs6000_cpu_attr
== CPU_PPC7450
14630 || rs6000_cpu_attr
== CPU_POWER4
14631 || rs6000_cpu_attr
== CPU_POWER5
)
14632 && recog_memoized (dep_insn
)
14633 && (INSN_CODE (dep_insn
) >= 0)
14634 && (get_attr_type (dep_insn
) == TYPE_CMP
14635 || get_attr_type (dep_insn
) == TYPE_COMPARE
14636 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
14637 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
14638 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
14639 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
14640 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
14641 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
14646 /* Fall out to return default cost. */
14652 /* The function returns true if INSN is microcoded.
14653 Return false otherwise.  On dispatch-group CPUs (rs6000_sched_groups)
   the microcoded insns are the update-form loads/stores with extension
   and mfcr; USE/CLOBBER patterns never are.  */
14656 is_microcoded_insn (rtx insn
)
14658 if (!insn
|| !INSN_P (insn
)
14659 || GET_CODE (PATTERN (insn
)) == USE
14660 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
14663 if (rs6000_sched_groups
)
14665 enum attr_type type
= get_attr_type (insn
);
14666 if (type
== TYPE_LOAD_EXT_U
14667 || type
== TYPE_LOAD_EXT_UX
14668 || type
== TYPE_LOAD_UX
14669 || type
== TYPE_STORE_UX
14670 || type
== TYPE_MFCR
)
14677 /* The function returns a nonzero value if INSN can be scheduled only
14678 as the first insn in a dispatch group ("dispatch-slot restricted").
14679 In this case, the returned value indicates how many dispatch slots
14680 the insn occupies (at the beginning of the group).
14681 Return 0 otherwise. */
/* NOTE(review): the switch statement and its other case labels/returns
   were lost in extraction -- verify against upstream rs6000.c.  */
14684 is_dispatch_slot_restricted (rtx insn
)
14686 enum attr_type type
;
14688 if (!rs6000_sched_groups
)
14692 || insn
== NULL_RTX
14693 || GET_CODE (insn
) == NOTE
14694 || GET_CODE (PATTERN (insn
)) == USE
14695 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
14698 type
= get_attr_type (insn
);
14705 case TYPE_DELAYED_CR
:
14706 case TYPE_CR_LOGICAL
:
/* POWER5 treats cracked insns as slot-restricted too.  */
14714 if (rs6000_cpu
== PROCESSOR_POWER5
14715 && is_cracked_insn (insn
))
14721 /* The function returns true if INSN is cracked into 2 instructions
14722 by the processor (and therefore occupies 2 issue slots). */
14725 is_cracked_insn (rtx insn
)
14727 if (!insn
|| !INSN_P (insn
)
14728 || GET_CODE (PATTERN (insn
)) == USE
14729 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
14732 if (rs6000_sched_groups
)
14734 enum attr_type type
= get_attr_type (insn
);
/* Update-form memory ops, sign-extending loads, delayed CR ops,
   record-form compares/multiplies, divides, and insert-word all
   crack into two internal ops on these CPUs.  */
14735 if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
14736 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
14737 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
14738 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
14739 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
14740 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
14741 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
14742 || type
== TYPE_INSERT_WORD
)
14749 /* The function returns true if INSN can be issued only from
14750 the branch slot. */
14753 is_branch_slot_insn (rtx insn
)
14755 if (!insn
|| !INSN_P (insn
)
14756 || GET_CODE (PATTERN (insn
)) == USE
14757 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
14760 if (rs6000_sched_groups
)
14762 enum attr_type type
= get_attr_type (insn
);
14763 if (type
== TYPE_BRANCH
|| type
== TYPE_JMPREG
)
14771 /* A C statement (sans semicolon) to update the integer scheduling
14772 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14773 INSN earlier, reduce the priority to execute INSN later. Do not
14774 define this macro if you do not need to adjust the scheduling
14775 priorities of insns. */
/* NOTE(review): the case labels for the rs6000_cpu_attr/type switches
   and several returns were lost in extraction.  */
14778 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
14780 /* On machines (like the 750) which have asymmetric integer units,
14781 where one integer unit can do multiply and divides and the other
14782 can't, reduce the priority of multiply/divide so it is scheduled
14783 before other integer operations. */
14786 if (! INSN_P (insn
))
14789 if (GET_CODE (PATTERN (insn
)) == USE
)
14792 switch (rs6000_cpu_attr
) {
14794 switch (get_attr_type (insn
))
/* Debug trace of the adjustment (appears to be dead/diagnostic code;
   guarded elsewhere in the lost lines -- confirm upstream).  */
14801 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
14802 priority
, priority
);
14803 if (priority
>= 0 && priority
< 0x01000000)
/* After reload, boost insns restricted to the first dispatch slot.  */
14810 if (is_dispatch_slot_restricted (insn
)
14811 && reload_completed
14812 && current_sched_info
->sched_max_insns_priority
14813 && rs6000_sched_restricted_insns_priority
)
14816 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14817 if (rs6000_sched_restricted_insns_priority
== 1)
14818 /* Attach highest priority to insn. This means that in
14819 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14820 precede 'priority' (critical path) considerations. */
14821 return current_sched_info
->sched_max_insns_priority
;
14822 else if (rs6000_sched_restricted_insns_priority
== 2)
14823 /* Increase priority of insn by a minimal amount. This means that in
14824 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14825 precede dispatch-slot restriction considerations. */
14826 return (priority
+ 1);
14832 /* Return how many instructions the machine can issue per cycle. */
/* TARGET_SCHED_ISSUE_RATE hook.  NOTE(review): the per-CPU returns and
   remaining case labels were lost in extraction.  */
14835 rs6000_issue_rate (void)
14837 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14838 if (!reload_completed
)
14841 switch (rs6000_cpu_attr
) {
14842 case CPU_RIOS1
: /* ? */
14844 case CPU_PPC601
: /* ? */
14867 /* Return how many instructions to look ahead for better insn
   scheduling.  */
/* TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD hook.
   NOTE(review): the return values were lost in extraction.  */
14871 rs6000_use_sched_lookahead (void)
14873 if (rs6000_cpu_attr
== CPU_PPC8540
)
14878 /* Determine if PAT refers to memory, i.e. contains a MEM rtx anywhere
   in its operand tree.  */
/* NOTE(review): declarations of fmt/i/j/ret, the 'e' format test, and
   the final return were lost in extraction.  */
14881 is_mem_ref (rtx pat
)
14887 if (GET_CODE (pat
) == MEM
)
14890 /* Recursively process the pattern. */
14891 fmt
= GET_RTX_FORMAT (GET_CODE (pat
));
14893 for (i
= GET_RTX_LENGTH (GET_CODE (pat
)) - 1; i
>= 0 && !ret
; i
--)
14896 ret
|= is_mem_ref (XEXP (pat
, i
));
14897 else if (fmt
[i
] == 'E')
14898 for (j
= XVECLEN (pat
, i
) - 1; j
>= 0; j
--)
14899 ret
|= is_mem_ref (XVECEXP (pat
, i
, j
));
14905 /* Determine if PAT is a PATTERN of a load insn.  A SET loads if its
   source references memory; a PARALLEL loads if any element does.  */
14908 is_load_insn1 (rtx pat
)
14910 if (!pat
|| pat
== NULL_RTX
)
14913 if (GET_CODE (pat
) == SET
)
14914 return is_mem_ref (SET_SRC (pat
));
14916 if (GET_CODE (pat
) == PARALLEL
)
14920 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
14921 if (is_load_insn1 (XVECEXP (pat
, 0, i
)))
14928 /* Determine if INSN loads from memory.  Non-insns and CALL_INSNs are
   excluded before delegating to is_load_insn1 on the pattern.  */
14931 is_load_insn (rtx insn
)
14933 if (!insn
|| !INSN_P (insn
))
14936 if (GET_CODE (insn
) == CALL_INSN
)
14939 return is_load_insn1 (PATTERN (insn
));
14942 /* Determine if PAT is a PATTERN of a store insn.  A SET stores if its
   destination references memory; a PARALLEL stores if any element does.  */
14945 is_store_insn1 (rtx pat
)
14947 if (!pat
|| pat
== NULL_RTX
)
14950 if (GET_CODE (pat
) == SET
)
14951 return is_mem_ref (SET_DEST (pat
));
14953 if (GET_CODE (pat
) == PARALLEL
)
14957 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
14958 if (is_store_insn1 (XVECEXP (pat
, 0, i
)))
14965 /* Determine if INSN stores to memory, delegating to is_store_insn1 on
   the insn's pattern.  */
14968 is_store_insn (rtx insn
)
14970 if (!insn
|| !INSN_P (insn
))
14973 return is_store_insn1 (PATTERN (insn
));
14976 /* Returns whether the dependence between INSN and NEXT is considered
14977 costly by the given target.  Behavior is selected by the
   -msched-costly-dep setting (rs6000_sched_costly_dep).  */
14980 rs6000_is_costly_dependence (rtx insn
, rtx next
, rtx link
, int cost
, int distance
)
14982 /* If the flag is not enabled - no dependence is considered costly;
14983 allow all dependent insns in the same group.
14984 This is the most aggressive option. */
14985 if (rs6000_sched_costly_dep
== no_dep_costly
)
14988 /* If the flag is set to 1 - a dependence is always considered costly;
14989 do not allow dependent instructions in the same group.
14990 This is the most conservative option. */
14991 if (rs6000_sched_costly_dep
== all_deps_costly
)
14994 if (rs6000_sched_costly_dep
== store_to_load_dep_costly
14995 && is_load_insn (next
)
14996 && is_store_insn (insn
))
14997 /* Prevent load after store in the same group. */
15000 if (rs6000_sched_costly_dep
== true_store_to_load_dep_costly
15001 && is_load_insn (next
)
15002 && is_store_insn (insn
)
15003 && (!link
|| (int) REG_NOTE_KIND (link
) == 0))
15004 /* Prevent load after store in the same group if it is a true dependence. */
15007 /* The flag is set to X; dependences with latency >= X are considered costly,
15008 and will not be scheduled in the same group. */
15009 if (rs6000_sched_costly_dep
<= max_dep_latency
15010 && ((cost
- distance
) >= (int)rs6000_sched_costly_dep
))
15016 /* Return the next insn after INSN that is found before TAIL is reached,
15017 skipping any "non-active" insns - insns that will not actually occupy
15018 an issue slot. Return NULL_RTX if such an insn is not found. */
/* NOTE(review): the loop header/returns around the skip condition were
   partly lost in extraction -- verify against upstream rs6000.c.  */
15021 get_next_active_insn (rtx insn
, rtx tail
)
15025 if (!insn
|| insn
== tail
)
15028 next_insn
= NEXT_INSN (insn
);
/* Skip NOTEs and USE/CLOBBER patterns; they occupy no issue slot.  */
15031 && next_insn
!= tail
15032 && (GET_CODE(next_insn
) == NOTE
15033 || GET_CODE (PATTERN (next_insn
)) == USE
15034 || GET_CODE (PATTERN (next_insn
)) == CLOBBER
))
15036 next_insn
= NEXT_INSN (next_insn
);
15039 if (!next_insn
|| next_insn
== tail
)
15045 /* Return whether the presence of INSN causes a dispatch group termination
15046 of group WHICH_GROUP.
15048 If WHICH_GROUP == current_group, this function will return true if INSN
15049 causes the termination of the current group (i.e, the dispatch group to
15050 which INSN belongs). This means that INSN will be the last insn in the
15051 group it belongs to.
15053 If WHICH_GROUP == previous_group, this function will return true if INSN
15054 causes the termination of the previous group (i.e, the dispatch group that
15055 precedes the group to which INSN belongs). This means that INSN will be
15056 the first insn in the group it belongs to). */
15059 insn_terminates_group_p (rtx insn
, enum group_termination which_group
)
15061 enum attr_type type
;
15066 type
= get_attr_type (insn
);
/* Microcoded insns always force a group boundary.  */
15068 if (is_microcoded_insn (insn
))
15071 if (which_group
== current_group
)
15073 if (is_branch_slot_insn (insn
))
15077 else if (which_group
== previous_group
)
15079 if (is_dispatch_slot_restricted (insn
))
15087 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15088 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
/* GROUP_INSNS holds up to issue_rate insns of the current group; the
   group is costly if any of them has a dependence link to NEXT_INSN
   that rs6000_is_costly_dependence flags at its insn_cost.
   NOTE(review): declarations of i/link/cost and the returns were lost
   in extraction.  */
15091 is_costly_group (rtx
*group_insns
, rtx next_insn
)
15096 int issue_rate
= rs6000_issue_rate ();
15098 for (i
= 0; i
< issue_rate
; i
++)
15100 rtx insn
= group_insns
[i
];
15103 for (link
= INSN_DEPEND (insn
); link
!= 0; link
= XEXP (link
, 1))
15105 rtx next
= XEXP (link
, 0);
15106 if (next
== next_insn
)
15108 cost
= insn_cost (insn
, link
, next_insn
);
15109 if (rs6000_is_costly_dependence (insn
, next_insn
, link
, cost
, 0))
15118 /* Utility of the function redefine_groups.
15119 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15120 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15121 to keep it "far" (in a separate group) from GROUP_INSNS, following
15122 one of the following schemes, depending on the value of the flag
15123 -minsert_sched_nops = X:
15124 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15125 in order to force NEXT_INSN into a separate group.
15126 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15127 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15128 insertion (has a group just ended, how many vacant issue slots remain in the
15129 last group, and how many dispatch groups were encountered so far). */
/* Returns the updated can_issue_more.  NOTE(review): several interior
   lines (nop generation, nn loop header, braces, group_count updates)
   were lost in extraction -- verify against upstream rs6000.c.  */
15132 force_new_group (int sched_verbose
, FILE *dump
, rtx
*group_insns
, rtx next_insn
,
15133 bool *group_end
, int can_issue_more
, int *group_count
)
15137 int issue_rate
= rs6000_issue_rate ();
15138 bool end
= *group_end
;
15141 if (next_insn
== NULL_RTX
)
15142 return can_issue_more
;
15144 if (rs6000_sched_insert_nops
> sched_finish_regroup_exact
)
15145 return can_issue_more
;
15147 force
= is_costly_group (group_insns
, next_insn
);
15149 return can_issue_more
;
15151 if (sched_verbose
> 6)
15152 fprintf (dump
,"force: group count = %d, can_issue_more = %d\n",
15153 *group_count
,can_issue_more
);
/* Scheme (1): fill every remaining slot with nops so NEXT_INSN opens
   a fresh group.  */
15155 if (rs6000_sched_insert_nops
== sched_finish_regroup_exact
)
15158 can_issue_more
= 0;
15160 /* Since only a branch can be issued in the last issue_slot, it is
15161 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15162 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15163 in this case the last nop will start a new group and the branch will be
15164 forced to the new group. */
15165 if (can_issue_more
&& !is_branch_slot_insn (next_insn
))
15168 while (can_issue_more
> 0)
15171 emit_insn_before (nop
, next_insn
);
/* Scheme (2): insert exactly rs6000_sched_insert_nops nops.  */
15179 if (rs6000_sched_insert_nops
< sched_finish_regroup_exact
)
15181 int n_nops
= rs6000_sched_insert_nops
;
15183 /* Nops can't be issued from the branch slot, so the effective
15184 issue_rate for nops is 'issue_rate - 1'. */
15185 if (can_issue_more
== 0)
15186 can_issue_more
= issue_rate
;
15188 if (can_issue_more
== 0)
15190 can_issue_more
= issue_rate
- 1;
15193 for (i
= 0; i
< issue_rate
; i
++)
15195 group_insns
[i
] = 0;
15202 emit_insn_before (nop
, next_insn
);
15203 if (can_issue_more
== issue_rate
- 1) /* new group begins */
15206 if (can_issue_more
== 0)
15208 can_issue_more
= issue_rate
- 1;
15211 for (i
= 0; i
< issue_rate
; i
++)
15213 group_insns
[i
] = 0;
15219 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
15222 *group_end
= /* Is next_insn going to start a new group? */
15224 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
15225 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
15226 || (can_issue_more
< issue_rate
&&
15227 insn_terminates_group_p (next_insn
, previous_group
)));
15228 if (*group_end
&& end
)
15231 if (sched_verbose
> 6)
15232 fprintf (dump
, "done force: group count = %d, can_issue_more = %d\n",
15233 *group_count
, can_issue_more
);
15234 return can_issue_more
;
15237 return can_issue_more
;
15240 /* This function tries to synch the dispatch groups that the compiler "sees"
15241 with the dispatch groups that the processor dispatcher is expected to
15242 form in practice. It tries to achieve this synchronization by forcing the
15243 estimated processor grouping on the compiler (as opposed to the function
15244 'pad_groups' which tries to force the scheduler's grouping on the processor).
15246 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
15247 examines the (estimated) dispatch groups that will be formed by the processor
15248 dispatcher. It marks these group boundaries to reflect the estimated
15249 processor grouping, overriding the grouping that the scheduler had marked.
15250 Depending on the value of the flag '-minsert-sched-nops' this function can
15251 force certain insns into separate groups or force a certain distance between
15252 them by inserting nops, for example, if there exists a "costly dependence"
15255 The function estimates the group boundaries that the processor will form as
15256 folllows: It keeps track of how many vacant issue slots are available after
15257 each insn. A subsequent insn will start a new group if one of the following
15259 - no more vacant issue slots remain in the current dispatch group.
15260 - only the last issue slot, which is the branch slot, is vacant, but the next
15261 insn is not a branch.
15262 - only the last 2 or less issue slots, including the branch slot, are vacant,
15263 which means that a cracked insn (which occupies two issue slots) can't be
15264 issued in this group.
15265 - less than 'issue_rate' slots are vacant, and the next insn always needs to
15266 start a new group. */
15269 redefine_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
15271 rtx insn
, next_insn
;
15273 int can_issue_more
;
15276 int group_count
= 0;
15280 issue_rate
= rs6000_issue_rate ();
15281 group_insns
= alloca (issue_rate
* sizeof (rtx
));
15282 for (i
= 0; i
< issue_rate
; i
++)
15284 group_insns
[i
] = 0;
15286 can_issue_more
= issue_rate
;
15288 insn
= get_next_active_insn (prev_head_insn
, tail
);
15291 while (insn
!= NULL_RTX
)
15293 slot
= (issue_rate
- can_issue_more
);
15294 group_insns
[slot
] = insn
;
15296 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
15297 if (insn_terminates_group_p (insn
, current_group
))
15298 can_issue_more
= 0;
15300 next_insn
= get_next_active_insn (insn
, tail
);
15301 if (next_insn
== NULL_RTX
)
15302 return group_count
+ 1;
15304 group_end
= /* Is next_insn going to start a new group? */
15305 (can_issue_more
== 0
15306 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
15307 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
15308 || (can_issue_more
< issue_rate
&&
15309 insn_terminates_group_p (next_insn
, previous_group
)));
15311 can_issue_more
= force_new_group (sched_verbose
, dump
, group_insns
,
15312 next_insn
, &group_end
, can_issue_more
, &group_count
);
15317 can_issue_more
= 0;
15318 for (i
= 0; i
< issue_rate
; i
++)
15320 group_insns
[i
] = 0;
15324 if (GET_MODE (next_insn
) == TImode
&& can_issue_more
)
15325 PUT_MODE(next_insn
, VOIDmode
);
15326 else if (!can_issue_more
&& GET_MODE (next_insn
) != TImode
)
15327 PUT_MODE (next_insn
, TImode
);
15330 if (can_issue_more
== 0)
15331 can_issue_more
= issue_rate
;
15334 return group_count
;
15337 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
15338 dispatch group boundaries that the scheduler had marked. Pad with nops
15339 any dispatch groups which have vacant issue slots, in order to force the
15340 scheduler's grouping on the processor dispatcher. The function
15341 returns the number of dispatch groups found. */
15344 pad_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
15346 rtx insn
, next_insn
;
15349 int can_issue_more
;
15351 int group_count
= 0;
15353 /* Initialize issue_rate. */
15354 issue_rate
= rs6000_issue_rate ();
15355 can_issue_more
= issue_rate
;
15357 insn
= get_next_active_insn (prev_head_insn
, tail
);
15358 next_insn
= get_next_active_insn (insn
, tail
);
15360 while (insn
!= NULL_RTX
)
15363 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
15365 group_end
= (next_insn
== NULL_RTX
|| GET_MODE (next_insn
) == TImode
);
15367 if (next_insn
== NULL_RTX
)
15372 /* If the scheduler had marked group termination at this location
15373 (between insn and next_indn), and neither insn nor next_insn will
15374 force group termination, pad the group with nops to force group
15377 && (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
15378 && !insn_terminates_group_p (insn
, current_group
)
15379 && !insn_terminates_group_p (next_insn
, previous_group
))
15381 if (!is_branch_slot_insn(next_insn
))
15384 while (can_issue_more
)
15387 emit_insn_before (nop
, next_insn
);
15392 can_issue_more
= issue_rate
;
15397 next_insn
= get_next_active_insn (insn
, tail
);
15400 return group_count
;
15403 /* The following function is called at the end of scheduling BB.
15404 After reload, it inserts nops at insn group bundling. */
15407 rs6000_sched_finish (FILE *dump
, int sched_verbose
)
15412 fprintf (dump
, "=== Finishing schedule.\n");
15414 if (reload_completed
&& rs6000_sched_groups
)
15416 if (rs6000_sched_insert_nops
== sched_finish_none
)
15419 if (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
15420 n_groups
= pad_groups (dump
, sched_verbose
,
15421 current_sched_info
->prev_head
,
15422 current_sched_info
->next_tail
);
15424 n_groups
= redefine_groups (dump
, sched_verbose
,
15425 current_sched_info
->prev_head
,
15426 current_sched_info
->next_tail
);
15428 if (sched_verbose
>= 6)
15430 fprintf (dump
, "ngroups = %d\n", n_groups
);
15431 print_rtl (dump
, current_sched_info
->prev_head
);
15432 fprintf (dump
, "Done finish_sched\n");
15437 /* Length in units of the trampoline for entering a nested function. */
15440 rs6000_trampoline_size (void)
15444 switch (DEFAULT_ABI
)
15450 ret
= (TARGET_32BIT
) ? 12 : 24;
15455 ret
= (TARGET_32BIT
) ? 40 : 48;
15462 /* Emit RTL insns to initialize the variable parts of a trampoline.
15463 FNADDR is an RTX for the address of the function's pure code.
15464 CXT is an RTX for the static chain value for the function. */
15467 rs6000_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
15469 enum machine_mode pmode
= Pmode
;
15470 int regsize
= (TARGET_32BIT
) ? 4 : 8;
15471 rtx ctx_reg
= force_reg (pmode
, cxt
);
15473 switch (DEFAULT_ABI
)
15478 /* Macros to shorten the code expansions below. */
15479 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
15480 #define MEM_PLUS(addr,offset) \
15481 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
15483 /* Under AIX, just build the 3 word function descriptor */
15486 rtx fn_reg
= gen_reg_rtx (pmode
);
15487 rtx toc_reg
= gen_reg_rtx (pmode
);
15488 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
15489 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
15490 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
15491 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
15492 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
15496 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
15499 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
15500 FALSE
, VOIDmode
, 4,
15502 GEN_INT (rs6000_trampoline_size ()), SImode
,
15512 /* Table of valid machine attributes. */
15514 const struct attribute_spec rs6000_attribute_table
[] =
15516 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
15517 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute
},
15518 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
15519 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
15520 { NULL
, 0, 0, false, false, false, NULL
}
15523 /* Handle the "altivec" attribute. The attribute may have
15524 arguments as follows:
15526 __attribute__((altivec(vector__)))
15527 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
15528 __attribute__((altivec(bool__))) (always followed by 'unsigned')
15530 and may appear more than once (e.g., 'vector bool char') in a
15531 given declaration. */
15534 rs6000_handle_altivec_attribute (tree
*node
, tree name
, tree args
,
15535 int flags ATTRIBUTE_UNUSED
,
15536 bool *no_add_attrs
)
15538 tree type
= *node
, result
= NULL_TREE
;
15539 enum machine_mode mode
;
15542 = ((args
&& TREE_CODE (args
) == TREE_LIST
&& TREE_VALUE (args
)
15543 && TREE_CODE (TREE_VALUE (args
)) == IDENTIFIER_NODE
)
15544 ? *IDENTIFIER_POINTER (TREE_VALUE (args
))
15547 while (POINTER_TYPE_P (type
)
15548 || TREE_CODE (type
) == FUNCTION_TYPE
15549 || TREE_CODE (type
) == METHOD_TYPE
15550 || TREE_CODE (type
) == ARRAY_TYPE
)
15551 type
= TREE_TYPE (type
);
15553 mode
= TYPE_MODE (type
);
15555 if (rs6000_warn_altivec_long
15556 && (type
== long_unsigned_type_node
|| type
== long_integer_type_node
))
15557 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
15559 switch (altivec_type
)
15562 unsigned_p
= TYPE_UNSIGNED (type
);
15566 result
= (unsigned_p
? unsigned_V4SI_type_node
: V4SI_type_node
);
15569 result
= (unsigned_p
? unsigned_V8HI_type_node
: V8HI_type_node
);
15572 result
= (unsigned_p
? unsigned_V16QI_type_node
: V16QI_type_node
);
15574 case SFmode
: result
= V4SF_type_node
; break;
15575 /* If the user says 'vector int bool', we may be handed the 'bool'
15576 attribute _before_ the 'vector' attribute, and so select the proper
15577 type in the 'b' case below. */
15578 case V4SImode
: case V8HImode
: case V16QImode
: result
= type
;
15585 case SImode
: case V4SImode
: result
= bool_V4SI_type_node
; break;
15586 case HImode
: case V8HImode
: result
= bool_V8HI_type_node
; break;
15587 case QImode
: case V16QImode
: result
= bool_V16QI_type_node
;
15594 case V8HImode
: result
= pixel_V8HI_type_node
;
15600 if (result
&& result
!= type
&& TYPE_READONLY (type
))
15601 result
= build_qualified_type (result
, TYPE_QUAL_CONST
);
15603 *no_add_attrs
= true; /* No need to hang on to the attribute. */
15606 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name
));
15608 *node
= reconstruct_complex_type (*node
, result
);
15613 /* AltiVec defines four built-in scalar types that serve as vector
15614 elements; we must teach the compiler how to mangle them. */
15616 static const char *
15617 rs6000_mangle_fundamental_type (tree type
)
15619 if (type
== bool_char_type_node
) return "U6__boolc";
15620 if (type
== bool_short_type_node
) return "U6__bools";
15621 if (type
== pixel_type_node
) return "u7__pixel";
15622 if (type
== bool_int_type_node
) return "U6__booli";
15624 /* For all other types, use normal C++ mangling. */
15628 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15629 struct attribute_spec.handler. */
15632 rs6000_handle_longcall_attribute (tree
*node
, tree name
,
15633 tree args ATTRIBUTE_UNUSED
,
15634 int flags ATTRIBUTE_UNUSED
,
15635 bool *no_add_attrs
)
15637 if (TREE_CODE (*node
) != FUNCTION_TYPE
15638 && TREE_CODE (*node
) != FIELD_DECL
15639 && TREE_CODE (*node
) != TYPE_DECL
)
15641 warning ("`%s' attribute only applies to functions",
15642 IDENTIFIER_POINTER (name
));
15643 *no_add_attrs
= true;
15649 /* Set longcall attributes on all functions declared when
15650 rs6000_default_long_calls is true. */
15652 rs6000_set_default_type_attributes (tree type
)
15654 if (rs6000_default_long_calls
15655 && (TREE_CODE (type
) == FUNCTION_TYPE
15656 || TREE_CODE (type
) == METHOD_TYPE
))
15657 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
15659 TYPE_ATTRIBUTES (type
));
15662 /* Return a reference suitable for calling a function with the
15663 longcall attribute. */
15666 rs6000_longcall_ref (rtx call_ref
)
15668 const char *call_name
;
15671 if (GET_CODE (call_ref
) != SYMBOL_REF
)
15674 /* System V adds '.' to the internal name, so skip them. */
15675 call_name
= XSTR (call_ref
, 0);
15676 if (*call_name
== '.')
15678 while (*call_name
== '.')
15681 node
= get_identifier (call_name
);
15682 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
15685 return force_reg (Pmode
, call_ref
);
15688 #ifdef USING_ELFOS_H
15690 /* A C statement or statements to switch to the appropriate section
15691 for output of RTX in mode MODE. You can assume that RTX is some
15692 kind of constant in RTL. The argument MODE is redundant except in
15693 the case of a `const_int' rtx. Select the section by calling
15694 `text_section' or one of the alternatives for other sections.
15696 Do not define this macro if you put all constants in the read-only
15700 rs6000_elf_select_rtx_section (enum machine_mode mode
, rtx x
,
15701 unsigned HOST_WIDE_INT align
)
15703 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
15706 default_elf_select_rtx_section (mode
, x
, align
);
15709 /* A C statement or statements to switch to the appropriate
15710 section for output of DECL. DECL is either a `VAR_DECL' node
15711 or a constant of some sort. RELOC indicates whether forming
15712 the initial value of DECL requires link-time relocations. */
15715 rs6000_elf_select_section (tree decl
, int reloc
,
15716 unsigned HOST_WIDE_INT align
)
15718 /* Pretend that we're always building for a shared library when
15719 ABI_AIX, because otherwise we end up with dynamic relocations
15720 in read-only sections. This happens for function pointers,
15721 references to vtables in typeinfo, and probably other cases. */
15722 default_elf_select_section_1 (decl
, reloc
, align
,
15723 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
15726 /* A C statement to build up a unique section name, expressed as a
15727 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15728 RELOC indicates whether the initial value of EXP requires
15729 link-time relocations. If you do not define this macro, GCC will use
15730 the symbol name prefixed by `.' as the section name. Note - this
15731 macro can now be called for uninitialized data items as well as
15732 initialized data and functions. */
15735 rs6000_elf_unique_section (tree decl
, int reloc
)
15737 /* As above, pretend that we're always building for a shared library
15738 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15739 default_unique_section_1 (decl
, reloc
,
15740 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
15743 /* For a SYMBOL_REF, set generic flags and then perform some
15744 target-specific processing.
15746 When the AIX ABI is requested on a non-AIX system, replace the
15747 function name with the real name (with a leading .) rather than the
15748 function descriptor name. This saves a lot of overriding code to
15749 read the prefixes. */
15752 rs6000_elf_encode_section_info (tree decl
, rtx rtl
, int first
)
15754 default_encode_section_info (decl
, rtl
, first
);
15757 && TREE_CODE (decl
) == FUNCTION_DECL
15759 && DEFAULT_ABI
== ABI_AIX
)
15761 rtx sym_ref
= XEXP (rtl
, 0);
15762 size_t len
= strlen (XSTR (sym_ref
, 0));
15763 char *str
= alloca (len
+ 2);
15765 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
15766 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
15771 rs6000_elf_in_small_data_p (tree decl
)
15773 if (rs6000_sdata
== SDATA_NONE
)
15776 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
15778 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
15779 if (strcmp (section
, ".sdata") == 0
15780 || strcmp (section
, ".sdata2") == 0
15781 || strcmp (section
, ".sbss") == 0
15782 || strcmp (section
, ".sbss2") == 0
15783 || strcmp (section
, ".PPC.EMB.sdata0") == 0
15784 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
15789 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
15792 && (unsigned HOST_WIDE_INT
) size
<= g_switch_value
15793 /* If it's not public, and we're not going to reference it there,
15794 there's no need to put it in the small data section. */
15795 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
15802 #endif /* USING_ELFOS_H */
15805 /* Return a REG that occurs in ADDR with coefficient 1.
15806 ADDR can be effectively incremented by incrementing REG.
15808 r0 is special and we must not select it as an address
15809 register by this routine since our caller will try to
15810 increment the returned register via an "la" instruction. */
15813 find_addr_reg (rtx addr
)
15815 while (GET_CODE (addr
) == PLUS
)
15817 if (GET_CODE (XEXP (addr
, 0)) == REG
15818 && REGNO (XEXP (addr
, 0)) != 0)
15819 addr
= XEXP (addr
, 0);
15820 else if (GET_CODE (XEXP (addr
, 1)) == REG
15821 && REGNO (XEXP (addr
, 1)) != 0)
15822 addr
= XEXP (addr
, 1);
15823 else if (CONSTANT_P (XEXP (addr
, 0)))
15824 addr
= XEXP (addr
, 1);
15825 else if (CONSTANT_P (XEXP (addr
, 1)))
15826 addr
= XEXP (addr
, 0);
15830 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
15836 rs6000_fatal_bad_address (rtx op
)
15838 fatal_insn ("bad address", op
);
15844 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15845 reference and a constant. */
15848 symbolic_operand (rtx op
)
15850 switch (GET_CODE (op
))
15857 return (GET_CODE (op
) == SYMBOL_REF
||
15858 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
15859 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
15860 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
15869 static tree branch_island_list
= 0;
15871 /* Remember to generate a branch island for far calls to the given
15875 add_compiler_branch_island (tree label_name
, tree function_name
, int line_number
)
15877 tree branch_island
= build_tree_list (function_name
, label_name
);
15878 TREE_TYPE (branch_island
) = build_int_2 (line_number
, 0);
15879 TREE_CHAIN (branch_island
) = branch_island_list
;
15880 branch_island_list
= branch_island
;
15883 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15884 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15885 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15886 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15888 /* Generate far-jump branch islands for everything on the
15889 branch_island_list. Invoked immediately after the last instruction
15890 of the epilogue has been emitted; the branch-islands must be
15891 appended to, and contiguous with, the function body. Mach-O stubs
15892 are generated in machopic_output_stub(). */
15895 macho_branch_islands (void)
15898 tree branch_island
;
15900 for (branch_island
= branch_island_list
;
15902 branch_island
= TREE_CHAIN (branch_island
))
15904 const char *label
=
15905 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island
));
15907 darwin_strip_name_encoding (
15908 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island
)));
15909 char name_buf
[512];
15910 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15911 if (name
[0] == '*' || name
[0] == '&')
15912 strcpy (name_buf
, name
+1);
15916 strcpy (name_buf
+1, name
);
15918 strcpy (tmp_buf
, "\n");
15919 strcat (tmp_buf
, label
);
15920 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15921 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
15922 fprintf (asm_out_file
, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
15923 BRANCH_ISLAND_LINE_NUMBER(branch_island
));
15924 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15927 strcat (tmp_buf
, ":\n\tmflr r0\n\tbcl 20,31,");
15928 strcat (tmp_buf
, label
);
15929 strcat (tmp_buf
, "_pic\n");
15930 strcat (tmp_buf
, label
);
15931 strcat (tmp_buf
, "_pic:\n\tmflr r11\n");
15933 strcat (tmp_buf
, "\taddis r11,r11,ha16(");
15934 strcat (tmp_buf
, name_buf
);
15935 strcat (tmp_buf
, " - ");
15936 strcat (tmp_buf
, label
);
15937 strcat (tmp_buf
, "_pic)\n");
15939 strcat (tmp_buf
, "\tmtlr r0\n");
15941 strcat (tmp_buf
, "\taddi r12,r11,lo16(");
15942 strcat (tmp_buf
, name_buf
);
15943 strcat (tmp_buf
, " - ");
15944 strcat (tmp_buf
, label
);
15945 strcat (tmp_buf
, "_pic)\n");
15947 strcat (tmp_buf
, "\tmtctr r12\n\tbctr\n");
15951 strcat (tmp_buf
, ":\nlis r12,hi16(");
15952 strcat (tmp_buf
, name_buf
);
15953 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
15954 strcat (tmp_buf
, name_buf
);
15955 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
15957 output_asm_insn (tmp_buf
, 0);
15958 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15959 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
15960 fprintf(asm_out_file
, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
15961 BRANCH_ISLAND_LINE_NUMBER (branch_island
));
15962 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15965 branch_island_list
= 0;
15968 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15969 already there or not. */
15972 no_previous_def (tree function_name
)
15974 tree branch_island
;
15975 for (branch_island
= branch_island_list
;
15977 branch_island
= TREE_CHAIN (branch_island
))
15978 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
15983 /* GET_PREV_LABEL gets the label name from the previous definition of
15987 get_prev_label (tree function_name
)
15989 tree branch_island
;
15990 for (branch_island
= branch_island_list
;
15992 branch_island
= TREE_CHAIN (branch_island
))
15993 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
15994 return BRANCH_ISLAND_LABEL_NAME (branch_island
);
15998 /* INSN is either a function call or a millicode call. It may have an
15999 unconditional jump in its delay slot.
16001 CALL_DEST is the routine we are calling. */
16004 output_call (rtx insn
, rtx
*operands
, int dest_operand_number
, int cookie_operand_number
)
16006 static char buf
[256];
16007 if (GET_CODE (operands
[dest_operand_number
]) == SYMBOL_REF
16008 && (INTVAL (operands
[cookie_operand_number
]) & CALL_LONG
))
16011 tree funname
= get_identifier (XSTR (operands
[dest_operand_number
], 0));
16013 if (no_previous_def (funname
))
16015 int line_number
= 0;
16016 rtx label_rtx
= gen_label_rtx ();
16017 char *label_buf
, temp_buf
[256];
16018 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
16019 CODE_LABEL_NUMBER (label_rtx
));
16020 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
16021 labelname
= get_identifier (label_buf
);
16022 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
16024 line_number
= NOTE_LINE_NUMBER (insn
);
16025 add_compiler_branch_island (labelname
, funname
, line_number
);
16028 labelname
= get_prev_label (funname
);
16030 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16031 instruction will reach 'foo', otherwise link as 'bl L42'".
16032 "L42" should be a 'branch island', that will do a far jump to
16033 'foo'. Branch islands are generated in
16034 macho_branch_islands(). */
16035 sprintf (buf
, "jbsr %%z%d,%.246s",
16036 dest_operand_number
, IDENTIFIER_POINTER (labelname
));
16039 sprintf (buf
, "bl %%z%d", dest_operand_number
);
16043 #endif /* TARGET_MACHO */
16045 /* Generate PIC and indirect symbol stubs. */
16048 machopic_output_stub (FILE *file
, const char *symb
, const char *stub
)
16050 unsigned int length
;
16051 char *symbol_name
, *lazy_ptr_name
;
16052 char *local_label_0
;
16053 static int label
= 0;
16055 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16056 symb
= (*targetm
.strip_name_encoding
) (symb
);
16059 length
= strlen (symb
);
16060 symbol_name
= alloca (length
+ 32);
16061 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
16063 lazy_ptr_name
= alloca (length
+ 32);
16064 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
16067 machopic_picsymbol_stub1_section ();
16069 machopic_symbol_stub1_section ();
16070 fprintf (file
, "\t.align 2\n");
16072 fprintf (file
, "%s:\n", stub
);
16073 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
16078 local_label_0
= alloca (sizeof("\"L0000000000$spb\""));
16079 sprintf (local_label_0
, "\"L%011d$spb\"", label
);
16081 fprintf (file
, "\tmflr r0\n");
16082 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
16083 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
16084 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
16085 lazy_ptr_name
, local_label_0
);
16086 fprintf (file
, "\tmtlr r0\n");
16087 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
16088 lazy_ptr_name
, local_label_0
);
16089 fprintf (file
, "\tmtctr r12\n");
16090 fprintf (file
, "\tbctr\n");
16094 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
16095 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
16096 fprintf (file
, "\tmtctr r12\n");
16097 fprintf (file
, "\tbctr\n");
16100 machopic_lazy_symbol_ptr_section ();
16101 fprintf (file
, "%s:\n", lazy_ptr_name
);
16102 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
16103 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
16106 /* Legitimize PIC addresses. If the address is already
16107 position-independent, we return ORIG. Newly generated
16108 position-independent addresses go into a reg. This is REG if non
16109 zero, otherwise we allocate register(s) as necessary. */
16111 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
16114 rs6000_machopic_legitimize_pic_address (rtx orig
, enum machine_mode mode
,
16119 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
16120 reg
= gen_reg_rtx (Pmode
);
16122 if (GET_CODE (orig
) == CONST
)
16124 if (GET_CODE (XEXP (orig
, 0)) == PLUS
16125 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
16128 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
16130 /* Use a different reg for the intermediate value, as
16131 it will be marked UNCHANGING. */
16132 rtx reg_temp
= no_new_pseudos
? reg
: gen_reg_rtx (Pmode
);
16135 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
16138 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
16144 if (GET_CODE (offset
) == CONST_INT
)
16146 if (SMALL_INT (offset
))
16147 return plus_constant (base
, INTVAL (offset
));
16148 else if (! reload_in_progress
&& ! reload_completed
)
16149 offset
= force_reg (Pmode
, offset
);
16152 rtx mem
= force_const_mem (Pmode
, orig
);
16153 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
16156 return gen_rtx_PLUS (Pmode
, base
, offset
);
16159 /* Fall back on generic machopic code. */
16160 return machopic_legitimize_pic_address (orig
, mode
, reg
);
16163 /* This is just a placeholder to make linking work without having to
16164 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
16165 ever needed for Darwin (not too likely!) this would have to get a
16166 real definition. */
16173 #endif /* TARGET_MACHO */
16176 static unsigned int
16177 rs6000_elf_section_type_flags (tree decl
, const char *name
, int reloc
)
16179 return default_section_type_flags_1 (decl
, name
, reloc
,
16180 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16183 /* Record an element in the table of global constructors. SYMBOL is
16184 a SYMBOL_REF of the function to be called; PRIORITY is a number
16185 between 0 and MAX_INIT_PRIORITY.
16187 This differs from default_named_section_asm_out_constructor in
16188 that we have special handling for -mrelocatable. */
16191 rs6000_elf_asm_out_constructor (rtx symbol
, int priority
)
16193 const char *section
= ".ctors";
16196 if (priority
!= DEFAULT_INIT_PRIORITY
)
16198 sprintf (buf
, ".ctors.%.5u",
16199 /* Invert the numbering so the linker puts us in the proper
16200 order; constructors are run from right to left, and the
16201 linker sorts in increasing order. */
16202 MAX_INIT_PRIORITY
- priority
);
16206 named_section_flags (section
, SECTION_WRITE
);
16207 assemble_align (POINTER_SIZE
);
16209 if (TARGET_RELOCATABLE
)
16211 fputs ("\t.long (", asm_out_file
);
16212 output_addr_const (asm_out_file
, symbol
);
16213 fputs (")@fixup\n", asm_out_file
);
16216 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
16220 rs6000_elf_asm_out_destructor (rtx symbol
, int priority
)
16222 const char *section
= ".dtors";
16225 if (priority
!= DEFAULT_INIT_PRIORITY
)
16227 sprintf (buf
, ".dtors.%.5u",
16228 /* Invert the numbering so the linker puts us in the proper
16229 order; constructors are run from right to left, and the
16230 linker sorts in increasing order. */
16231 MAX_INIT_PRIORITY
- priority
);
16235 named_section_flags (section
, SECTION_WRITE
);
16236 assemble_align (POINTER_SIZE
);
16238 if (TARGET_RELOCATABLE
)
16240 fputs ("\t.long (", asm_out_file
);
16241 output_addr_const (asm_out_file
, symbol
);
16242 fputs (")@fixup\n", asm_out_file
);
16245 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
16249 rs6000_elf_declare_function_name (FILE *file
, const char *name
, tree decl
)
16253 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file
);
16254 ASM_OUTPUT_LABEL (file
, name
);
16255 fputs (DOUBLE_INT_ASM_OP
, file
);
16257 assemble_name (file
, name
);
16258 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file
);
16259 assemble_name (file
, name
);
16260 fputs (",24\n\t.type\t.", file
);
16261 assemble_name (file
, name
);
16262 fputs (",@function\n", file
);
16263 if (TREE_PUBLIC (decl
) && ! DECL_WEAK (decl
))
16265 fputs ("\t.globl\t.", file
);
16266 assemble_name (file
, name
);
16269 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
16271 ASM_OUTPUT_LABEL (file
, name
);
16275 if (TARGET_RELOCATABLE
16276 && (get_pool_size () != 0 || current_function_profile
)
16281 (*targetm
.asm_out
.internal_label
) (file
, "LCL", rs6000_pic_labelno
);
16283 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
16284 fprintf (file
, "\t.long ");
16285 assemble_name (file
, buf
);
16287 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
16288 assemble_name (file
, buf
);
16292 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
16293 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
16295 if (DEFAULT_ABI
== ABI_AIX
)
16297 const char *desc_name
, *orig_name
;
16299 orig_name
= (*targetm
.strip_name_encoding
) (name
);
16300 desc_name
= orig_name
;
16301 while (*desc_name
== '.')
16304 if (TREE_PUBLIC (decl
))
16305 fprintf (file
, "\t.globl %s\n", desc_name
);
16307 fprintf (file
, "%s\n", MINIMAL_TOC_SECTION_ASM_OP
);
16308 fprintf (file
, "%s:\n", desc_name
);
16309 fprintf (file
, "\t.long %s\n", orig_name
);
16310 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file
);
16311 if (DEFAULT_ABI
== ABI_AIX
)
16312 fputs ("\t.long 0\n", file
);
16313 fprintf (file
, "\t.previous\n");
16315 ASM_OUTPUT_LABEL (file
, name
);
16321 rs6000_xcoff_asm_globalize_label (FILE *stream
, const char *name
)
16323 fputs (GLOBAL_ASM_OP
, stream
);
16324 RS6000_OUTPUT_BASENAME (stream
, name
);
16325 putc ('\n', stream
);
16329 rs6000_xcoff_asm_named_section (const char *name
, unsigned int flags
)
16332 static const char * const suffix
[3] = { "PR", "RO", "RW" };
16334 if (flags
& SECTION_CODE
)
16336 else if (flags
& SECTION_WRITE
)
16341 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
16342 (flags
& SECTION_CODE
) ? "." : "",
16343 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
16347 rs6000_xcoff_select_section (tree decl
, int reloc
,
16348 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
16350 if (decl_readonly_section_1 (decl
, reloc
, 1))
16352 if (TREE_PUBLIC (decl
))
16353 read_only_data_section ();
16355 read_only_private_data_section ();
16359 if (TREE_PUBLIC (decl
))
16362 private_data_section ();
16367 rs6000_xcoff_unique_section (tree decl
, int reloc ATTRIBUTE_UNUSED
)
16371 /* Use select_section for private and uninitialized data. */
16372 if (!TREE_PUBLIC (decl
)
16373 || DECL_COMMON (decl
)
16374 || DECL_INITIAL (decl
) == NULL_TREE
16375 || DECL_INITIAL (decl
) == error_mark_node
16376 || (flag_zero_initialized_in_bss
16377 && initializer_zerop (DECL_INITIAL (decl
))))
16380 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
16381 name
= (*targetm
.strip_name_encoding
) (name
);
16382 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
16385 /* Select section for constant in constant pool.
16387 On RS/6000, all constants are in the private read-only data area.
16388 However, if this is being placed in the TOC it must be output as a
16392 rs6000_xcoff_select_rtx_section (enum machine_mode mode
, rtx x
,
16393 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
16395 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
16398 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* Drop a trailing 4-character mapping-class suffix such as "[DS]".  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
16416 /* Section attributes. AIX is always PIC. */
16418 static unsigned int
16419 rs6000_xcoff_section_type_flags (tree decl
, const char *name
, int reloc
)
16421 unsigned int align
;
16422 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
16424 /* Align to at least UNIT size. */
16425 if (flags
& SECTION_CODE
)
16426 align
= MIN_UNITS_PER_WORD
;
16428 /* Increase alignment of large objects if not already stricter. */
16429 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
16430 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
16431 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
16433 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
16436 /* Output at beginning of assembler file.
16438 Initialize the section names for the RS/6000 at this point.
16440 Specify filename, including full path, to assembler.
16442 We want to go into the TOC section so at least one .toc will be emitted.
16443 Also, in order to output proper .bs/.es pairs, we need at least one static
16444 [RW] section emitted.
16446 Finally, declare mcount when profiling to make the assembler happy. */
16449 rs6000_xcoff_file_start (void)
16451 rs6000_gen_section_name (&xcoff_bss_section_name
,
16452 main_input_filename
, ".bss_");
16453 rs6000_gen_section_name (&xcoff_private_data_section_name
,
16454 main_input_filename
, ".rw_");
16455 rs6000_gen_section_name (&xcoff_read_only_section_name
,
16456 main_input_filename
, ".ro_");
16458 fputs ("\t.file\t", asm_out_file
);
16459 output_quoted_string (asm_out_file
, main_input_filename
);
16460 fputc ('\n', asm_out_file
);
16462 if (write_symbols
!= NO_DEBUG
)
16463 private_data_section ();
16466 fprintf (asm_out_file
, "\t.extern %s\n", RS6000_MCOUNT
);
16467 rs6000_file_start ();
16470 /* Output at end of assembler file.
16471 On the RS/6000, referencing data should automatically pull in text. */
16474 rs6000_xcoff_file_end (void)
16477 fputs ("_section_.text:\n", asm_out_file
);
16479 fputs (TARGET_32BIT
16480 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
16483 #endif /* TARGET_XCOFF */
16486 /* Cross-module name binding. Darwin does not support overriding
16487 functions at dynamic-link time. */
16490 rs6000_binds_local_p (tree decl
)
16492 return default_binds_local_p_1 (decl
, 0);
16496 /* Compute a (partial) cost for rtx X. Return true if the complete
16497 cost has been computed, and false if subexpressions should be
16498 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): this function is extraction-garbled — the switch header,
   most `case` labels, `break` statements and the final return logic were
   dropped, so the fragments below cannot be made compilable without
   guessing.  Left byte-identical; restore from upstream
   gcc/config/rs6000/rs6000.c (TARGET_RTX_COSTS hook).  */
16501 rs6000_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
16504 enum machine_mode mode
= GET_MODE (x
);
16508 /* On the RS/6000, if it is valid in the insn, it is free.
16509 So this always returns 0. */
/* NOTE(review): the fragment below appears to be the PLUS case —
   FP adds cost fp/dmul, shift-add decomposes, and non-addi-range
   constants cost an extra insn.  Case label missing; confirm.  */
16520 if (mode
== DFmode
)
16521 *total
= GET_CODE (XEXP (x
, 0)) == MULT
16522 ? rs6000_cost
->dmul
16524 else if (mode
== SFmode
)
16525 *total
= rs6000_cost
->fp
;
16526 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
16528 /* The rs6000 doesn't have shift-and-add instructions. */
16529 rs6000_rtx_costs (XEXP (x
, 0), MULT
, PLUS
, total
);
16530 *total
+= COSTS_N_INSNS (1);
16533 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
16534 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
16535 + 0x8000) >= 0x10000)
16536 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
16537 ? COSTS_N_INSNS (2)
16538 : COSTS_N_INSNS (1));
/* NOTE(review): fragment below mirrors the one above with MINUS —
   presumably the MINUS case.  Case label missing; confirm.  */
16542 if (mode
== DFmode
)
16543 *total
= GET_CODE (XEXP (x
, 0)) == MULT
16544 ? rs6000_cost
->dmul
16546 else if (mode
== SFmode
)
16547 *total
= rs6000_cost
->fp
;
16548 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
16550 /* The rs6000 doesn't have shift-and-sub instructions. */
16551 rs6000_rtx_costs (XEXP (x
, 0), MULT
, MINUS
, total
);
16552 *total
+= COSTS_N_INSNS (1);
16555 *total
= COSTS_N_INSNS (1);
16561 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
16562 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
16563 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
16564 ? COSTS_N_INSNS (2)
16565 : COSTS_N_INSNS (1));
/* NOTE(review): fragment below distinguishes 9-bit and 16-bit constant
   multiplies and DF/SF/DI modes — presumably the MULT case.  */
16569 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
16571 if (INTVAL (XEXP (x
, 1)) >= -256
16572 && INTVAL (XEXP (x
, 1)) <= 255)
16573 *total
= rs6000_cost
->mulsi_const9
;
16575 *total
= rs6000_cost
->mulsi_const
;
16577 else if (mode
== DFmode
)
16578 *total
= rs6000_cost
->dmul
;
16579 else if (mode
== SFmode
)
16580 *total
= rs6000_cost
->fp
;
16581 else if (mode
== DImode
)
16582 *total
= rs6000_cost
->muldi
;
16584 *total
= rs6000_cost
->mulsi
;
/* NOTE(review): fragment below is division/modulus costing — FP uses
   ddiv/sdiv, power-of-two integer divisors cost 2 insns, otherwise
   divdi/divsi.  Case labels missing.  */
16589 if (FLOAT_MODE_P (mode
))
16591 *total
= mode
== DFmode
? rs6000_cost
->ddiv
16592 : rs6000_cost
->sdiv
;
16595 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
16596 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
16598 *total
= COSTS_N_INSNS (2);
16605 if (GET_MODE (XEXP (x
, 1)) == DImode
)
16606 *total
= rs6000_cost
->divdi
;
16608 *total
= rs6000_cost
->divsi
;
16612 *total
= COSTS_N_INSNS (4);
16617 if (FLOAT_MODE_P (mode
))
16618 *total
= rs6000_cost
->fp
;
16620 *total
= COSTS_N_INSNS (1);
16624 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16625 *total
= COSTS_N_INSNS (1) + 1;
16632 *total
= COSTS_N_INSNS (1);
16635 case FLOAT_TRUNCATE
:
16636 *total
= rs6000_cost
->fp
;
/* NOTE(review): inner switch on XINT (x, 1) — presumably an UNSPEC
   case; its labels were dropped.  */
16640 switch (XINT (x
, 1))
16643 *total
= rs6000_cost
->fp
;
16655 *total
= COSTS_N_INSNS (1);
16667 /* A C expression returning the cost of moving data from a register of class
16668 CLASS1 to one of CLASS2. */
16671 rs6000_register_move_cost (enum machine_mode mode
,
16672 enum reg_class from
, enum reg_class to
)
16674 /* Moves from/to GENERAL_REGS. */
16675 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
16676 || reg_classes_intersect_p (from
, GENERAL_REGS
))
16678 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
16681 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
16682 return (rs6000_memory_move_cost (mode
, from
, 0)
16683 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
16685 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16686 else if (from
== CR_REGS
)
16690 /* A move will cost one instruction per GPR moved. */
16691 return 2 * HARD_REGNO_NREGS (0, mode
);
16694 /* Moving between two similar registers is just one instruction. */
16695 else if (reg_classes_intersect_p (to
, from
))
16696 return mode
== TFmode
? 4 : 2;
16698 /* Everything else has to go through GENERAL_REGS. */
16700 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
16701 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
16704 /* A C expressions returning the cost of moving data of MODE from a register to
16708 rs6000_memory_move_cost (enum machine_mode mode
, enum reg_class
class,
16709 int in ATTRIBUTE_UNUSED
)
16711 if (reg_classes_intersect_p (class, GENERAL_REGS
))
16712 return 4 * HARD_REGNO_NREGS (0, mode
);
16713 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
16714 return 4 * HARD_REGNO_NREGS (32, mode
);
16715 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
16716 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
16718 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
16721 /* Return an RTX representing where to find the function value of a
16722 function returning MODE. */
16724 rs6000_complex_function_value (enum machine_mode mode
)
16726 unsigned int regno
;
16728 enum machine_mode inner
= GET_MODE_INNER (mode
);
16729 unsigned int inner_bytes
= GET_MODE_SIZE (inner
);
16731 if (FLOAT_MODE_P (mode
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
16732 regno
= FP_ARG_RETURN
;
16735 regno
= GP_ARG_RETURN
;
16737 /* 32-bit is OK since it'll go in r3/r4. */
16738 if (TARGET_32BIT
&& inner_bytes
>= 4)
16739 return gen_rtx_REG (mode
, regno
);
16742 if (inner_bytes
>= 8)
16743 return gen_rtx_REG (mode
, regno
);
16745 r1
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
),
16747 r2
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
+ 1),
16748 GEN_INT (inner_bytes
));
16749 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
16752 /* Define how to find the value returned by a function.
16753 VALTYPE is the data type of the value (as a tree).
16754 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16755 otherwise, FUNC is 0.
16757 On the SPE, both FPs and vectors are returned in r3.
16759 On RS/6000 an integer value is in r3 and a floating-point value is in
16760 fp1, unless -msoft-float. */
16763 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
16765 enum machine_mode mode
;
16766 unsigned int regno
;
16768 if (TARGET_32BIT
&& TARGET_POWERPC64
&& TYPE_MODE (valtype
) == DImode
)
16770 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16771 return gen_rtx_PARALLEL (DImode
,
16773 gen_rtx_EXPR_LIST (VOIDmode
,
16774 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
16776 gen_rtx_EXPR_LIST (VOIDmode
,
16777 gen_rtx_REG (SImode
,
16778 GP_ARG_RETURN
+ 1),
16782 if ((INTEGRAL_TYPE_P (valtype
)
16783 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
16784 || POINTER_TYPE_P (valtype
))
16785 mode
= TARGET_32BIT
? SImode
: DImode
;
16787 mode
= TYPE_MODE (valtype
);
16789 if (SCALAR_FLOAT_TYPE_P (valtype
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
16790 regno
= FP_ARG_RETURN
;
16791 else if (TREE_CODE (valtype
) == COMPLEX_TYPE
16792 && targetm
.calls
.split_complex_arg
)
16793 return rs6000_complex_function_value (mode
);
16794 else if (TREE_CODE (valtype
) == VECTOR_TYPE
16795 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
)
16796 regno
= ALTIVEC_ARG_RETURN
;
16798 regno
= GP_ARG_RETURN
;
16800 return gen_rtx_REG (mode
, regno
);
16803 /* Define how to find the value returned by a library function
16804 assuming the value has mode MODE. */
16806 rs6000_libcall_value (enum machine_mode mode
)
16808 unsigned int regno
;
16810 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
16811 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
16812 regno
= FP_ARG_RETURN
;
16813 else if (ALTIVEC_VECTOR_MODE (mode
)
16814 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
)
16815 regno
= ALTIVEC_ARG_RETURN
;
16816 else if (COMPLEX_MODE_P (mode
) && targetm
.calls
.split_complex_arg
)
16817 return rs6000_complex_function_value (mode
);
16819 regno
= GP_ARG_RETURN
;
16821 return gen_rtx_REG (mode
, regno
);
16824 /* Define the offset between two registers, FROM to be eliminated and its
16825 replacement TO, at the start of a routine. */
16827 rs6000_initial_elimination_offset (int from
, int to
)
16829 rs6000_stack_t
*info
= rs6000_stack_info ();
16830 HOST_WIDE_INT offset
;
16832 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
16833 offset
= info
->push_p
? 0 : -info
->total_size
;
16834 else if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
16835 offset
= info
->total_size
;
16836 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
16837 offset
= info
->push_p
? info
->total_size
: 0;
16838 else if (from
== RS6000_PIC_OFFSET_TABLE_REGNUM
)
16846 /* Return true if TYPE is of type __ev64_opaque__. */
16849 is_ev64_opaque_type (tree type
)
16852 && (type
== opaque_V2SI_type_node
16853 || type
== opaque_V2SF_type_node
16854 || type
== opaque_p_V2SI_type_node
));
16858 rs6000_dwarf_register_span (rtx reg
)
16862 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
16865 regno
= REGNO (reg
);
16867 /* The duality of the SPE register size wreaks all kinds of havoc.
16868 This is a way of distinguishing r0 in 32-bits from r0 in
16871 gen_rtx_PARALLEL (VOIDmode
,
16874 gen_rtx_REG (SImode
, regno
+ 1200),
16875 gen_rtx_REG (SImode
, regno
))
16877 gen_rtx_REG (SImode
, regno
),
16878 gen_rtx_REG (SImode
, regno
+ 1200)));
16881 /* Map internal gcc register numbers to DWARF2 register numbers. */
16884 rs6000_dbx_register_number (unsigned int regno
)
16886 if (regno
<= 63 || write_symbols
!= DWARF2_DEBUG
)
16888 if (regno
== MQ_REGNO
)
16890 if (regno
== LINK_REGISTER_REGNUM
)
16892 if (regno
== COUNT_REGISTER_REGNUM
)
16894 if (CR_REGNO_P (regno
))
16895 return regno
- CR0_REGNO
+ 86;
16896 if (regno
== XER_REGNO
)
16898 if (ALTIVEC_REGNO_P (regno
))
16899 return regno
- FIRST_ALTIVEC_REGNO
+ 1124;
16900 if (regno
== VRSAVE_REGNO
)
16902 if (regno
== VSCR_REGNO
)
16904 if (regno
== SPE_ACC_REGNO
)
16906 if (regno
== SPEFSCR_REGNO
)
16908 /* SPE high reg number. We get these values of regno from
16909 rs6000_dwarf_register_span. */
16910 if (regno
>= 1200 && regno
< 1232)
16916 #include "gt-rs6000.h"