/* Subroutines used for code generation on the Tilera TILEPro.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "langhooks.h"
#include "insn-codes.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "sched-int.h"
#include "sel-sched.h"
#include "tm-constrs.h"
#include "target-def.h"
#include "hard-reg-set.h"
#include "fold-const.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tilepro-builtins.h"
#include "tilepro-multiply.h"
#include "diagnostic.h"
/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
/* Implement TARGET_OPTION_OVERRIDE.  */
tilepro_option_override (void)
  /* When modulo scheduling is enabled, we still rely on the regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
tilepro_scalar_mode_supported_p (machine_mode mode)

/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
tile_vector_mode_supported_p (machine_mode mode)
  return mode == V4QImode || mode == V2HImode;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx x ATTRIBUTE_UNUSED)

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
                           machine_mode mode ATTRIBUTE_UNUSED,
                           const_tree type, bool named ATTRIBUTE_UNUSED)
  return (type && TYPE_SIZE (type)
          && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);

/* Implement TARGET_RETURN_IN_MEMORY.  */
tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
  return !IN_RANGE (int_size_in_bytes (type),
                    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
tilepro_function_arg_boundary (machine_mode mode, const_tree type)
  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
/* Implement TARGET_FUNCTION_ARG.  */
tilepro_function_arg (cumulative_args_t cum_v,
                      const_tree type, bool named ATTRIBUTE_UNUSED)
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  if (cum >= TILEPRO_NUM_ARG_REGS)

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* The ABI does not allow parameters to be passed partially in
     registers and partially on the stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEPRO_NUM_ARG_REGS)

  return gen_rtx_REG (mode, cum);
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
tilepro_function_arg_advance (cumulative_args_t cum_v,
                              const_tree type, bool named ATTRIBUTE_UNUSED)
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
                   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEPRO_NUM_ARG_REGS
      && *cum + word_size > TILEPRO_NUM_ARG_REGS)
    *cum = TILEPRO_NUM_ARG_REGS;
/* Implement TARGET_FUNCTION_VALUE.  */
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
                        bool outgoing ATTRIBUTE_UNUSED)
  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,

  return gen_rtx_REG (mode, 0);

/* Implement TARGET_LIBCALL_VALUE.  */
tilepro_libcall_value (machine_mode mode,
                       const_rtx fun ATTRIBUTE_UNUSED)
  return gen_rtx_REG (mode, 0);

/* Implement FUNCTION_VALUE_REGNO_P.  */
tilepro_function_value_regno_p (const unsigned int regno)
  return regno < TILEPRO_NUM_RETURN_REGS;
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
tilepro_build_builtin_va_list (void)
  tree f_args, f_skip, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
                          get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
                       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it that way.  It is an
     internal type so hide the warnings from the user.  */

  layout_type (record);

  /* The correct type is an array type of one element.  */
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
                                   UNITS_PER_WORD *
                                   (crtl->args.info - TILEPRO_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
tilepro_setup_incoming_varargs (cumulative_args_t cum,
                                tree type, int *pretend_args, int no_rtl)
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),

  first_reg = local_cum;

  if (local_cum < TILEPRO_NUM_ARG_REGS)

      *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);

      alias_set_type set = get_varargs_alias_set ();
      rtx tmp =
        gen_rtx_MEM (BLKmode, plus_constant (Pmode,
                                             virtual_incoming_args_rtx,
                                             -STACK_POINTER_OFFSET -
                                             (TILEPRO_NUM_ARG_REGS -
      MEM_NOTRAP_P (tmp) = 1;
      set_mem_alias_set (tmp, set);
      move_block_from_reg (first_reg, tmp,
                           TILEPRO_NUM_ARG_REGS - first_reg);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

   paddedsize = (sizeof(TYPE) + 3) & -4;
   if ((VALIST.__args + paddedsize > VALIST.__skip)
       & (VALIST.__args <= VALIST.__skip))
     addr = VALIST.__skip + STACK_POINTER_OFFSET;
   else
     addr = VALIST.__args;
   VALIST.__args = addr + paddedsize;
   ret = *(TYPE *)addr;  */
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                              gimple_seq *post_p ATTRIBUTE_UNUSED)
  HOST_WIDE_INT size, rsize;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* If an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
      /* Assert the only case we generate code for: when
         stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
                    fold_convert (sizetype, unshare_expr (args)),
                    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
                build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
                build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
                        unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
                build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
                        size_int (STACK_POINTER_OFFSET)),
                unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
/* Implement TARGET_RTX_COSTS.  */
tilepro_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,

      /* If this is an 8-bit constant, return zero since it can be
         used nearly anywhere with no cost.  If it is a valid operand
         for an ADD or AND, likewise return 0 if we know it will be
         used in that context.  Otherwise, return 2 since it might be
         used there later.  All other constants take at least two
         insns.  */
      if (satisfies_constraint_I (x))
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
          /* Slightly penalize large constants even though we can add
             them in one instruction, because it forces the use of
             2-wide bundling mode.  */
      else if (move_operand (x, SImode))
          /* We can materialize in one move.  */
          *total = COSTS_N_INSNS (1);
          /* We can materialize in two moves.  */
          *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (4);

      /* If outer-code was a sign or zero extension, a cost of
         COSTS_N_INSNS (1) was already added in, so account for
         that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
        *total = COSTS_N_INSNS (1);
        *total = COSTS_N_INSNS (2);

      /* Convey that s[123]a are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
          && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
          *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
                              (enum rtx_code) outer_code, opno, speed)
                    + rtx_cost (XEXP (x, 1),
                                (enum rtx_code) outer_code, opno, speed)
                    + COSTS_N_INSNS (1));

      *total = COSTS_N_INSNS (2);

      if (outer_code == MULT)
        *total = COSTS_N_INSNS (1);

      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);

    case UNSPEC_VOLATILE:
        int num = XINT (x, 1);

        if (num <= TILEPRO_LAST_LATENCY_1_INSN)
          *total = COSTS_N_INSNS (1);
        else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
          *total = COSTS_N_INSNS (2);
        else if (num > TILEPRO_LAST_LATENCY_INSN)
            if (outer_code == PLUS)
              *total = COSTS_N_INSNS (1);

          case UNSPEC_BLOCKAGE:
          case UNSPEC_NETWORK_BARRIER:
          case UNSPEC_LNK_AND_LABEL:
          case UNSPEC_NETWORK_RECEIVE:
          case UNSPEC_NETWORK_SEND:
          case UNSPEC_TLS_GD_ADD:
            *total = COSTS_N_INSNS (1);

          case UNSPEC_TLS_IE_LOAD:
            *total = COSTS_N_INSNS (2);

            *total = COSTS_N_INSNS (3);

            *total = COSTS_N_INSNS (4);

          case UNSPEC_LATENCY_L2:
            *total = COSTS_N_INSNS (8);

          case UNSPEC_TLS_GD_CALL:
            *total = COSTS_N_INSNS (30);

          case UNSPEC_LATENCY_MISS:
            *total = COSTS_N_INSNS (80);

      *total = COSTS_N_INSNS (1);
/* Returns an SImode integer rtx with value VAL.  */
gen_int_si (HOST_WIDE_INT val)
  return gen_int_mode (val, SImode);

/* Create a temporary variable to hold a partial result, to enable
   CSE.  */
create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
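/* Editorial note (not in the original source): gen_reg_rtx can only
   create a fresh pseudo register before reload, which is what
   can_create_pseudo_p tests.  After that point the helper above falls
   back to DEFAULT_REG, which callers conventionally pass as the final
   destination, trading some CSE opportunities for correctness.  */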
/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilepro_init_machine_status (void)
  return ggc_cleared_alloc<machine_function> ();

/* Do anything needed before RTL is emitted for each function.  */
tilepro_init_expanders (void)
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilepro_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
        gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
        gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
/* Return true if X contains a thread-local symbol.  */
tilepro_tls_referenced_p (rtx x)
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))

  /* That's all we handle in tilepro_legitimize_tls_address for
     now.  */
/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
tilepro_pic_address_needs_scratch (rtx x)
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
          || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))

/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
  switch (GET_CODE (x))
      return !tilepro_tls_referenced_p (x);

/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
tilepro_legitimate_pic_operand_p (rtx x)
  if (tilepro_pic_address_needs_scratch (x))

  if (tilepro_tls_referenced_p (x))
/* Return true if the rtx X can be used as an address operand.  */
tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,

  if (GET_CODE (x) == SUBREG)

  switch (GET_CODE (x))
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_CODE (XEXP (x, 1)) != PLUS)

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))

  /* Check if x is a valid reg.  */

  return REGNO_OK_FOR_BASE_P (REGNO (x));
/* Return the rtx containing SYMBOL_REF to the text label.  */
tilepro_text_label_symbol (void)
  return cfun->machine->text_label_symbol;

/* Return the register storing the value of the text label.  */
tilepro_text_label_rtx (void)
  return cfun->machine->text_label_rtx;

/* Return the register storing the value of the global offset
   table.  */
tilepro_got_rtx (void)
  return cfun->machine->got_rtx;

/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
tilepro_got_symbol (void)
  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
/* Return a reference to the got to be used by tls references.  */
tilepro_tls_got (void)
      crtl->uses_pic_offset_table = 1;
      return tilepro_got_rtx ();

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilepro_got_symbol ());
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
tilepro_legitimize_tls_address (rtx addr)
  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
          rtx r0, temp1, temp2, temp3, got;

          ret = gen_reg_rtx (Pmode);
          r0 = gen_rtx_REG (Pmode, 0);
          temp1 = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilepro_tls_got ();
          emit_insn (gen_tls_gd_addhi (temp1, got, addr));
          emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
          emit_move_insn (r0, temp2);
          emit_insn (gen_tls_gd_call (addr));
          emit_move_insn (temp3, r0);
          last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_INITIAL_EXEC:
          rtx temp1, temp2, temp3, got;

          ret = gen_reg_rtx (Pmode);
          temp1 = gen_reg_rtx (Pmode);
          temp2 = gen_reg_rtx (Pmode);
          temp3 = gen_reg_rtx (Pmode);

          got = tilepro_tls_got ();
          emit_insn (gen_tls_ie_addhi (temp1, got, addr));
          emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
          emit_insn (gen_tls_ie_load (temp3, temp2, addr));
                                         THREAD_POINTER_REGNUM),
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_LOCAL_EXEC:
          ret = gen_reg_rtx (Pmode);
          temp1 = gen_reg_rtx (Pmode);

          emit_insn (gen_tls_le_addhi (temp1,
                                         THREAD_POINTER_REGNUM),
          last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
          set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

  else if (GET_CODE (addr) == CONST)
      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
tilepro_legitimize_pic_address (rtx orig,
                                machine_mode mode ATTRIBUTE_UNUSED,

  if (GET_CODE (orig) == SYMBOL_REF)
      rtx address, pic_ref;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      if (SYMBOL_REF_LOCAL_P (orig))
          /* If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
          rtx text_label_symbol = tilepro_text_label_symbol ();
          rtx text_label_rtx = tilepro_text_label_rtx ();

          emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
                                      text_label_symbol));
          emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
                                     text_label_symbol));

          /* Note: this is conservative.  We use the text_label but we
             don't use the pic_offset_table.  However, in some cases
             we may need the pic_offset_table (see
             tilepro_fixup_pcrel_references).  */
          crtl->uses_pic_offset_table = 1;

          emit_move_insn (reg, address);

          /* If not during reload, allocate another temp reg here for
             loading in the address, so that these instructions can be
             optimized properly.  */
          rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

          gcc_assert (flag_pic);

            emit_insn (gen_add_got16 (temp_reg,
                                      tilepro_got_rtx (), orig));

              rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
              emit_insn (gen_addhi_got32 (temp_reg2,
                                          tilepro_got_rtx (), orig));
              emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));

          pic_ref = gen_const_mem (Pmode, address);
          crtl->uses_pic_offset_table = 1;
          emit_move_insn (reg, pic_ref);
          /* The following would put a REG_EQUAL note on this insn, so
             that it could be optimized by loop.  But it causes the
             label to be optimized away.  */
          /* set_unique_reg_note (insn, REG_EQUAL, orig); */

  else if (GET_CODE (orig) == CONST)
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
        tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                        base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
          if (can_create_pseudo_p ())
            offset = force_reg (Pmode, offset);
            /* If we reach here, then something is seriously
               wrong.  */
          if (can_create_pseudo_p ())
            return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));

  else if (GET_CODE (orig) == LABEL_REF)
      rtx address, temp_reg;
      rtx text_label_symbol;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      /* If not during reload, allocate another temp reg here for
         loading in the address, so that these instructions can be
         optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
                                  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
                                 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
         don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      emit_move_insn (reg, address);
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,

  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
    return tilepro_legitimize_tls_address (x);

    return tilepro_legitimize_pic_address (x, mode, 0);
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
tilepro_delegitimize_address (rtx x)
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
      switch (XINT (XEXP (x, 0), 1))
        case UNSPEC_PCREL_SYM:
        case UNSPEC_GOT16_SYM:
        case UNSPEC_GOT32_SYM:
          x = XVECEXP (XEXP (x, 0), 0, 0);
/* Emit code to load the PIC register.  */
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilepro_got_symbol ();
  rtx text_label_symbol = tilepro_text_label_symbol ();
  rtx text_label_rtx = tilepro_text_label_rtx ();

  emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
                              text_label_rtx, got_symbol, text_label_symbol));

  emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
                             got_symbol, text_label_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilepro_got_rtx ());
/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode SImode.  NUM is first
   truncated to fit in MODE.  */
tilepro_simd_int (rtx num, machine_mode mode)
  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

      n = 0x01010101 * (n & 0x000000FF);

      n = 0x00010001 * (n & 0x0000FFFF);

  return gen_int_si (n);
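/* Editorial example (not in the original source): with MODE == QImode
   and NUM == 0x5A, the 0x01010101 multiply replicates the byte into
   all four lanes, giving 0x5A5A5A5A; with MODE == HImode and NUM ==
   0x1234, the 0x00010001 multiply gives 0x12341234.  */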
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant,
   or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
   to split and "num" is its length.  lo_half and hi_half are output
   arrays that parallel "operands".  */
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
         but we still have to handle it.  */
          lo_half[num] = adjust_address (op, SImode, 0);
          hi_half[num] = adjust_address (op, SImode, 4);

          lo_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 0);
          hi_half[num] = simplify_gen_subreg (SImode, op,
                                              GET_MODE (op) == VOIDmode
                                              ? DImode : GET_MODE (op), 4);
/* Returns true iff val can be moved into a register in one
   instruction.  And if it can, it emits the code to move the
   constant.

   If three_wide_only is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
expand_set_cint32_one_inst (rtx dest_reg,
                            HOST_WIDE_INT val, bool three_wide_only)
  val = trunc_int_for_mode (val, SImode);

  if (val == trunc_int_for_mode (val, QImode))
      emit_move_insn (dest_reg, GEN_INT (val));
  else if (!three_wide_only)
      rtx imm_op = GEN_INT (val);

      if (satisfies_constraint_J (imm_op)
          || satisfies_constraint_K (imm_op)
          || satisfies_constraint_N (imm_op)
          || satisfies_constraint_P (imm_op))
          emit_move_insn (dest_reg, imm_op);
/* Implement SImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
  return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
  for (i = 0; i < 32; i++)
      unsigned HOST_WIDE_INT x = rotate_right (n, i);

      /* See if x is a power of two minus one, i.e. only consecutive 1
         bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
          if (first_bit != NULL)

          if (last_bit != NULL)
            *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
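/* Editorial example (not in the original source): for N == 0x000FF000,
   rotating right by 12 yields 0xFF, which is one less than a power of
   two, so the mask is accepted with *first_bit == 12 and *last_bit ==
   12 + 7 == 19.  A wrapped mask such as 0xF000000F is also accepted:
   rotating right by 28 yields 0xFF, giving *first_bit == 28 and
   *last_bit == (28 + 7) & 31 == 3.  */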
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
expand_set_cint32 (rtx dest_reg, rtx src_val)
  int leading_zeroes, trailing_zeroes;

  int three_wide_only;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))

  /* Create a temporary variable to hold a partial result, to enable
     CSE.  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  trailing_zeroes = exact_log2 (val & -val);

  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,

          /* 0xFFFFA500 becomes:
             movei temp, 0xFFFFFFA5
             shli dest, temp, 8  */
          emit_move_insn (dest_reg,
                          gen_rtx_ASHIFT (SImode, temp,
                                          GEN_INT (trailing_zeroes)));

      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,

          /* 0x7FFFFFFF becomes:
             movei temp, -2
             shri dest, temp, 1  */
          emit_move_insn (dest_reg,
                          gen_rtx_LSHIFTRT (SImode, temp,
                                            GEN_INT (leading_zeroes)));

      /* Try rotating a one-instruction immediate, since rotate is
         3-wide.  */
      for (count = 1; count < 32; count++)
          HOST_WIDE_INT r = rotate_right (val, count);
          if (expand_set_cint32_one_inst (temp, r, three_wide_only))
              /* 0xFFA5FFFF becomes:
                 movei temp, 0xFFFFFFA5
                 rli dest, temp, 16  */
              emit_move_insn (dest_reg,
                              gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));

  if (lower == trunc_int_for_mode (lower, QImode))
      /* We failed to use two 3-wide instructions, but the low 16
         bits are a small number so just use a 2-wide + 3-wide
         auli + addi pair rather than anything more exotic.

         0x12340056 becomes:
         auli temp, zero, 0x1234
         addi dest, temp, 0x56  */

  /* Fallback case: use an auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
/* Load OP1, a 32-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander
   guarantees this.  */
tilepro_expand_set_const32 (rtx op0, rtx op1)
  machine_mode mode = GET_MODE (op0);

  if (CONST_INT_P (op1))
      /* TODO: I don't know if we want to split large constants now,
         or wait until later (with a define_split).

         Does splitting early help CSE?  Does it harm other
         optimizations that might fold loads?  */
      expand_set_cint32 (op0, op1);

      temp = create_temp_reg_if_possible (mode, op0);

      /* A symbol, emit in the traditional way.  */
      emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
      emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
/* Expand a move instruction.  Return true if all work is done.  */
tilepro_expand_mov (machine_mode mode, rtx *operands)
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
      if (can_create_pseudo_p ())
        operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))

      if (!reload_in_progress)
        operands[1] = force_reg (mode, operands[1]);

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
      operands[1] = tilepro_legitimize_tls_address (operands[1]);

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
      if (tilepro_pic_address_needs_scratch (operands[1]))
        operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
          operands[1] = tilepro_legitimize_pic_address (operands[1],
                                                        (reload_in_progress ?

  /* Fixup for UNSPEC addresses.  */
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)
          emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
                                     XVECEXP (unspec, 0, 0),
                                     XVECEXP (unspec, 0, 1)));
      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
          emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
                                      XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
          emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
          emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
          emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
                                       XVECEXP (unspec, 0, 0)));

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)
      tilepro_expand_set_const32 (operands[0], operands[1]);
/* Expand the "insv" pattern.  */
tilepro_expand_insv (rtx operands[4])
  rtx first_rtx = operands[2];
  HOST_WIDE_INT first = INTVAL (first_rtx);
  HOST_WIDE_INT width = INTVAL (operands[1]);
  rtx v = operands[3];

  /* Shift the inserted bits into position.  */
  if (CONST_INT_P (v))
      /* Shift the constant into mm position.  */
      v = gen_int_si (INTVAL (v) << first);

      /* Shift over the value to be inserted.  */
      rtx tmp = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp, v, first_rtx));

  /* Insert the shifted bits using an 'mm' insn.  */
  emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
                          GEN_INT (first + width - 1)));
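/* Editorial example (not in the original source): inserting an 8-bit
   value at bit 8 shifts V left by 8 and then emits a single bitfield
   instruction covering bits <8, 15>, roughly

     mm dest, v, dest, 8, 15

   assuming the usual operand order of the insn_mm pattern.  */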
/* Expand unaligned loads.  */
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
                               HOST_WIDE_INT bit_offset, bool sign)
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
      /* When just loading a two byte value, we can load the two bytes
         individually and combine them efficiently.  */
      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

          rtx tmp = gen_reg_rtx (mode);

          /* Do a signed load of the second byte then shift and OR it
             in.  */
          emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
          emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
                                  gen_lowpart (SImode, hi), GEN_INT (8)));
          emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
                                 gen_lowpart (SImode, lo),
                                 gen_lowpart (SImode, tmp)));

          /* Do two unsigned loads and use intlb to interleave
             them.  */
          emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
          emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
                                     gen_lowpart (SImode, hi),
                                     gen_lowpart (SImode, lo)));

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
                           gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
                           gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;

      wide_result = gen_reg_rtx (mode);

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
                                   gen_lowpart (SImode, wide_result),
                                   gen_lowpart (SImode, hi), addr_lo));

  extracted =
    extract_bit_field (gen_lowpart (SImode, wide_result),
                       bitsize, bit_offset % BITS_PER_UNIT,
                       !sign, gen_lowpart (SImode, dest_reg),

  if (extracted != dest_reg)
    emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
/* Expand unaligned stores.  */
tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
                                HOST_WIDE_INT bit_offset)
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_amt;

  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
      mem_addr = adjust_address (mem, QImode, byte_offset + i);

          store_val = expand_simple_binop (SImode, LSHIFTRT,
                                           gen_lowpart (SImode, src),
                                           GEN_INT (shift_amt), NULL, 1,

          store_val = gen_lowpart (QImode, store_val);

          store_val = gen_lowpart (QImode, src);

      emit_move_insn (mem_addr, store_val);
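/* Editorial example (not in the original source): an unaligned SImode
   store thus becomes four QImode stores of SRC >> 0, SRC >> 8,
   SRC >> 16 and SRC >> 24 at consecutive byte offsets, so no
   alignment assumption about MEM is ever needed.  */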
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load
   it.  */
tilepro_expand_movmisalign (machine_mode mode, rtx *operands)
  if (MEM_P (operands[1]))
      if (register_operand (operands[0], mode))

        tmp = gen_reg_rtx (mode);

      tilepro_expand_unaligned_load (tmp, operands[1],
                                     GET_MODE_BITSIZE (mode), 0, true);

      if (tmp != operands[0])
        emit_move_insn (operands[0], tmp);
  else if (MEM_P (operands[0]))
      if (!reg_or_0_operand (operands[1], mode))
        operands[1] = force_reg (mode, operands[1]);

      tilepro_expand_unaligned_store (operands[0], operands[1],
                                      GET_MODE_BITSIZE (mode), 0);
/* Implement the addsi3 pattern.  */
tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)
  /* Skip anything that only takes one instruction.  */
  if (add_operand (op2, SImode))

  /* We can only optimize ints here (it should be impossible to get
     here with any other type, but it is harmless to check).  */
  if (!CONST_INT_P (op2))

  temp = create_temp_reg_if_possible (SImode, op0);

  high = (n + (n & 0x8000)) & ~0xffff;

  emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
  emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
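/* Editorial worked example (not in the original source): for op2 ==
   0x1234f678, high == (0x1234f678 + 0x8000) & ~0xffff == 0x12350000,
   and the residue 0x1234f678 - 0x12350000 == -0x988 fits a signed
   16-bit immediate.  The rounding by (n & 0x8000) is what keeps the
   residue in signed 16-bit range, so the addition can be done as an
   add of the high part followed by an add of the small residue.  */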
/* Implement the allocate_stack pattern (alloca).  */
tilepro_allocate_stack (rtx op0, rtx op1)
  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on some unsafe assumption about when the chaining
   * update is done, so we need to be conservative about reordering
   * the chaining instructions.  */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                         GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
                  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                         GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];

/* Returns the length of the 'op' array.  */
tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of the
     last valid opcode plus one.  */
  for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)

  /* An empty array is not allowed.  */
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  */
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
                                                 tilepro_multiply_insn_seq

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
              && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)
          /* Handle shift by immediate.  This is a special case because
             the meaning of the second operand is a constant shift
             count rather than an operand index.  */

          /* Make sure the shift count is in range.  Zero should not
             happen.  */
          const int shift_count = entry->rhs;
          gcc_assert (shift_count > 0 && shift_count < 32);

          /* Emit the actual instruction.  */
          emit_insn (GEN_FCN (opcode)
                     (out, subexprs[entry->lhs],
                      gen_rtx_CONST_INT (SImode, shift_count)));

          /* Handle a normal two-operand instruction, such as add or
             s1a.  */

          /* Make sure we are referring to a previously computed
             subexpression.  */
          gcc_assert (entry->rhs < num_subexprs);

          /* Emit the actual instruction.  */
          emit_insn (GEN_FCN (opcode)
                     (out, subexprs[entry->lhs], subexprs[entry->rhs]));

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
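/* Editorial illustration (not in the original source): a multiplier
   of 5 could be encoded as a single shift-and-add entry, e.g.

     s2a out, src, src   ;; out = (src << 2) + src == 5 * src

   with both lhs and rhs referring to subexprs[1], the value being
   multiplied.  The actual entries live in the precomputed
   tilepro_multiply_insn_seq_table.  */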
/* bsearch helper function.  */
tilepro_compare_multipliers (const void *key, const void *t)
  return *(const int *) key -
    ((const struct tilepro_multiply_insn_seq *) t)->multiplier;

/* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
   none exists.  */
static const struct tilepro_multiply_insn_seq *
tilepro_find_multiply_insn_seq_for_constant (int multiplier)
  return ((const struct tilepro_multiply_insn_seq *)
          bsearch (&multiplier, tilepro_multiply_insn_seq_table,
                   tilepro_multiply_insn_seq_table_size,
                   sizeof tilepro_multiply_insn_seq_table[0],
                   tilepro_compare_multipliers));
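/* Editorial note (not in the original source): bsearch only works if
   tilepro_multiply_insn_seq_table is sorted by its multiplier field
   in increasing order; tilepro_compare_multipliers above subtracts
   the entry's multiplier from the key to match that ordering.  */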
/* Try to expand a constant multiply in SImode by looking it up in a
   precompiled table.  OP0 is the result operand, OP1 is the source
   operand, and MULTIPLIER is the value of the constant.  Return true
   if it succeeds.  */
tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
  /* See if we have precomputed an efficient way to multiply by this
     constant.  */
  const struct tilepro_multiply_insn_seq *seq =
    tilepro_find_multiply_insn_seq_for_constant (multiplier);
      tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
/* Expand the mulsi pattern.  */
tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
  if (CONST_INT_P (op2))
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
      return tilepro_expand_const_mulsi (op0, op1, n);
/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands; SIGN is true for a signed multiply and false for an
   unsigned multiply.  */
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));

      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));

  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));

      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
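/* Editorial sketch of the math (not in the original source): writing
   a = ah * 2^16 + al and b = bh * 2^16 + bl, the full product is

     a * b = ah*bh * 2^32 + (ah*bl + al*bh) * 2^16 + al*bl

   so the high word is ah*bh plus the top halves of the two cross
   products plus the carries out of the low word.  The mulhl/mulll/
   mulhh instructions above supply the partial products, and the two
   LTU comparisons recover the carries from the low-word sums.  */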
/* Implement smulsi3_highpart.  */
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
  tilepro_expand_high_multiply (op0, op1, op2, true);

/* Implement umulsi3_highpart.  */
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
  tilepro_expand_high_multiply (op0, op1, op2, false);
/* Compare and branches  */

/* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
  rtx operands[2], lo_half[2], hi_half[2];
  rtx tmp, tmp0, tmp1, tmp2;

  /* Reduce the number of cases we need to handle by reversing the
     operands.  */

      /* We handle these compares directly.  */

      /* Reverse the operands.  */

      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  split_di (operands, 2, lo_half, hi_half);

  if (!reg_or_0_operand (lo_half[0], SImode))
    lo_half[0] = force_reg (SImode, lo_half[0]);

  if (!reg_or_0_operand (hi_half[0], SImode))
    hi_half[0] = force_reg (SImode, hi_half[0]);

  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
    lo_half[1] = force_reg (SImode, lo_half[1]);

  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
    hi_half[1] = force_reg (SImode, hi_half[1]);

  tmp0 = gen_reg_rtx (SImode);
  tmp1 = gen_reg_rtx (SImode);
  tmp2 = gen_reg_rtx (SImode);

      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_andsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      if (operands[1] == const0_rtx)
          emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));

          emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
          emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
          emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
          emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Return the final comparison, or NULL if we can't
   simplify it.  */
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
                             machine_mode cmp_mode)
  if (cmp_mode == DImode)
      return tilepro_emit_setcc_internal_di (res, code, op0, op1);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

      /* We have these compares.  */

      /* We do not have these compares, so we reverse the
         operands.  */

      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  if (!reg_or_0_operand (op0, SImode))
    op0 = force_reg (SImode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
    op1 = force_reg (SImode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (VOIDmode, res,
                          gen_rtx_fmt_ee (code, SImode, op0, op1)));
/* Implement cstore patterns.  */
tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)
    tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
                                 operands[2], operands[3], cmp_mode);

/* Return whether CODE is a signed comparison.  */
signed_compare_p (enum rtx_code code)
  return (code == EQ || code == NE || code == LT || code == LE
          || code == GT || code == GE);
/* Generate the comparison for an SImode conditional branch.  */
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
                      machine_mode cmp_mode, bool eq_ne_only)
  enum rtx_code branch_code;

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
          || (!eq_ne_only && signed_compare_p (code))))
      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

      /* We have these compares.  */

      /* These must be reversed (except NE, but let's
         canonicalize).  */
      code = reverse_condition (code);

  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

          /* Subtract off the value we want to compare against and see
             if we get zero.  This is cheaper than creating a constant
             in a register.  Except that subtracting -128 is more
             expensive than seqi to -128, so we leave that alone.  */
          /* ??? Don't do this when comparing against symbols,
             otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
             0), which will be declared false out of hand (at least
             for non-weak symbols).  */
          if (!(symbolic_operand (op0, VOIDmode)
                || (REG_P (op0) && REG_POINTER (op0))))
              /* To compare against MIN_INT, we add MIN_INT and check
                 for 0.  */
              if (n != -2147483647 - 1)

              op0 = force_reg (SImode, op0);
              temp = gen_reg_rtx (SImode);
              emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
              return gen_rtx_fmt_ee (reverse_condition (branch_code),
                                     VOIDmode, temp, const0_rtx);

          /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
             etc.  */
          int first = exact_log2 (code == LTU ? n : n + 1);

              op0 = force_reg (SImode, op0);
              temp = gen_reg_rtx (SImode);
              emit_move_insn (temp,
                              gen_rtx_LSHIFTRT (SImode, op0,
                                                gen_int_si (first)));
              return gen_rtx_fmt_ee (reverse_condition (branch_code),
                                     VOIDmode, temp, const0_rtx);

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
/* Generate the comparison for a conditional branch.  */
tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
    tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],

  rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
                                gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,

  emit_jump_insn (branch_rtx);

/* Implement the movsicc pattern.  */
tilepro_emit_conditional_move (rtx cmp)
    tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
                          GET_MODE (XEXP (cmp, 0)), true);
/* Return true if INSN is annotated with a REG_BR_PROB note that
   indicates it's a branch that's predicted taken.  */
cbranch_predicted_p (rtx_insn *insn)
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
tilepro_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
                                           int regop, bool netreg_p,
                                           bool reverse_predicted)
  static char buf[64];
  sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
           (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
           netreg_p ? 'N' : 'r', regop);
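/* Editorial example (not in the original source): for OPCODE "bnz",
   REGOP 0 and a branch predicted taken, the sprintf above yields the
   assembler template "bnzt\t%r0, %l0", later expanded by
   output_asm_insn; an unpredicted branch omits the "t" suffix.  */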
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilepro_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
                                    const char *opcode,
                                    const char *rev_opcode,
                                    int regop, bool netreg_p)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
        tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
                                                   netreg_p, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
                                               netreg_p, true);
  output_asm_insn (branch_if_false, operands);

  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
                                  CODE_LABEL_NUMBER (not_taken));
  return "";
}

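/* Sketch of the fallback (illustrative): when the label is outside the
   +/- 512K simple-branch range checked above, a branch such as
       bnz  r2, .Lfar
   is emitted roughly as
       bz   r2, .Lnot_taken
       j    .Lfar
   .Lnot_taken:  */
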
/* Output assembly code for a conditional branch instruction.  */
static const char *
tilepro_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
{
  enum rtx_code code = GET_CODE (operands[1]);
  const char *opcode;
  const char *rev_opcode;

  if (reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case NE:
      opcode = "bnz";
      rev_opcode = "bz";
      break;
    case EQ:
      opcode = "bz";
      rev_opcode = "bnz";
      break;
    case GE:
      opcode = "bgez";
      rev_opcode = "blz";
      break;
    case GT:
      opcode = "bgz";
      rev_opcode = "blez";
      break;
    case LE:
      opcode = "blez";
      rev_opcode = "bgz";
      break;
    case LT:
      opcode = "blz";
      rev_opcode = "bgez";
      break;
    default:
      gcc_unreachable ();
    }

  return
    tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
                                        2, false);
}

/* Implement the tablejump pattern.  */
void
tilepro_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx table = gen_rtx_LABEL_REF (Pmode, op1);
      rtx temp = gen_reg_rtx (Pmode);
      rtx text_label_symbol = tilepro_text_label_symbol ();
      rtx text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp, text_label_rtx,
                                  table, text_label_symbol));
      emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
      emit_move_insn (temp,
                      gen_rtx_PLUS (Pmode,
                                    convert_to_mode (Pmode, op0, false),
                                    temp));
      op0 = temp;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}

/* Expand a builtin vector binary op, by calling gen function GEN with
   operands in the proper modes.  DEST is converted to DEST_MODE, and
   src0 and src1 (if DO_SRC1 is true) are converted to SRC_MODE.  */
void
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
                                     machine_mode dest_mode,
                                     rtx dest,
                                     machine_mode src_mode,
                                     rtx src0, rtx src1, bool do_src1)
{
  dest = gen_lowpart (dest_mode, dest);

  if (src0 == const0_rtx)
    src0 = CONST0_RTX (src_mode);
  else
    src0 = gen_lowpart (src_mode, src0);

  if (do_src1)
    {
      if (src1 == const0_rtx)
        src1 = CONST0_RTX (src_mode);
      else
        src1 = gen_lowpart (src_mode, src1);
    }

  emit_insn ((*gen) (dest, src0, src1));
}

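/* Usage sketch (illustrative; gen_fn stands for whichever generator the
   caller selects, e.g. a V4QImode add pattern):

       tilepro_expand_builtin_vector_binop (gen_fn, V4QImode, dest,
                                            V4QImode, src0, src1, true);

   DEST, SRC0 and SRC1 are word-sized rtxes that gen_lowpart above
   reinterprets in the vector modes.  */
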
struct tile_builtin_info
{
  enum insn_code icode;
  tree fndecl;
};

static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
  { CODE_FOR_addsi3, NULL }, /* add */
  { CODE_FOR_insn_addb, NULL }, /* addb */
  { CODE_FOR_insn_addbs_u, NULL }, /* addbs_u */
  { CODE_FOR_insn_addh, NULL }, /* addh */
  { CODE_FOR_insn_addhs, NULL }, /* addhs */
  { CODE_FOR_insn_addib, NULL }, /* addib */
  { CODE_FOR_insn_addih, NULL }, /* addih */
  { CODE_FOR_insn_addlis, NULL }, /* addlis */
  { CODE_FOR_ssaddsi3, NULL }, /* adds */
  { CODE_FOR_insn_adiffb_u, NULL }, /* adiffb_u */
  { CODE_FOR_insn_adiffh, NULL }, /* adiffh */
  { CODE_FOR_andsi3, NULL }, /* and */
  { CODE_FOR_insn_auli, NULL }, /* auli */
  { CODE_FOR_insn_avgb_u, NULL }, /* avgb_u */
  { CODE_FOR_insn_avgh, NULL }, /* avgh */
  { CODE_FOR_insn_bitx, NULL }, /* bitx */
  { CODE_FOR_bswapsi2, NULL }, /* bytex */
  { CODE_FOR_clzsi2, NULL }, /* clz */
  { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
  { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
  { CODE_FOR_ctzsi2, NULL }, /* ctz */
  { CODE_FOR_insn_drain, NULL }, /* drain */
  { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
  { CODE_FOR_insn_dword_align, NULL }, /* dword_align */
  { CODE_FOR_insn_finv, NULL }, /* finv */
  { CODE_FOR_insn_flush, NULL }, /* flush */
  { CODE_FOR_insn_fnop, NULL }, /* fnop */
  { CODE_FOR_insn_icoh, NULL }, /* icoh */
  { CODE_FOR_insn_ill, NULL }, /* ill */
  { CODE_FOR_insn_info, NULL }, /* info */
  { CODE_FOR_insn_infol, NULL }, /* infol */
  { CODE_FOR_insn_inthb, NULL }, /* inthb */
  { CODE_FOR_insn_inthh, NULL }, /* inthh */
  { CODE_FOR_insn_intlb, NULL }, /* intlb */
  { CODE_FOR_insn_intlh, NULL }, /* intlh */
  { CODE_FOR_insn_inv, NULL }, /* inv */
  { CODE_FOR_insn_lb, NULL }, /* lb */
  { CODE_FOR_insn_lb_u, NULL }, /* lb_u */
  { CODE_FOR_insn_lh, NULL }, /* lh */
  { CODE_FOR_insn_lh_u, NULL }, /* lh_u */
  { CODE_FOR_insn_lnk, NULL }, /* lnk */
  { CODE_FOR_insn_lw, NULL }, /* lw */
  { CODE_FOR_insn_lw_na, NULL }, /* lw_na */
  { CODE_FOR_insn_lb_L2, NULL }, /* lb_L2 */
  { CODE_FOR_insn_lb_u_L2, NULL }, /* lb_u_L2 */
  { CODE_FOR_insn_lh_L2, NULL }, /* lh_L2 */
  { CODE_FOR_insn_lh_u_L2, NULL }, /* lh_u_L2 */
  { CODE_FOR_insn_lw_L2, NULL }, /* lw_L2 */
  { CODE_FOR_insn_lw_na_L2, NULL }, /* lw_na_L2 */
  { CODE_FOR_insn_lb_miss, NULL }, /* lb_miss */
  { CODE_FOR_insn_lb_u_miss, NULL }, /* lb_u_miss */
  { CODE_FOR_insn_lh_miss, NULL }, /* lh_miss */
  { CODE_FOR_insn_lh_u_miss, NULL }, /* lh_u_miss */
  { CODE_FOR_insn_lw_miss, NULL }, /* lw_miss */
  { CODE_FOR_insn_lw_na_miss, NULL }, /* lw_na_miss */
  { CODE_FOR_insn_maxb_u, NULL }, /* maxb_u */
  { CODE_FOR_insn_maxh, NULL }, /* maxh */
  { CODE_FOR_insn_maxib_u, NULL }, /* maxib_u */
  { CODE_FOR_insn_maxih, NULL }, /* maxih */
  { CODE_FOR_memory_barrier, NULL }, /* mf */
  { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
  { CODE_FOR_insn_minb_u, NULL }, /* minb_u */
  { CODE_FOR_insn_minh, NULL }, /* minh */
  { CODE_FOR_insn_minib_u, NULL }, /* minib_u */
  { CODE_FOR_insn_minih, NULL }, /* minih */
  { CODE_FOR_insn_mm, NULL }, /* mm */
  { CODE_FOR_insn_mnz, NULL }, /* mnz */
  { CODE_FOR_insn_mnzb, NULL }, /* mnzb */
  { CODE_FOR_insn_mnzh, NULL }, /* mnzh */
  { CODE_FOR_movsi, NULL }, /* move */
  { CODE_FOR_insn_movelis, NULL }, /* movelis */
  { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
  { CODE_FOR_insn_mulhh_ss, NULL }, /* mulhh_ss */
  { CODE_FOR_insn_mulhh_su, NULL }, /* mulhh_su */
  { CODE_FOR_insn_mulhh_uu, NULL }, /* mulhh_uu */
  { CODE_FOR_insn_mulhha_ss, NULL }, /* mulhha_ss */
  { CODE_FOR_insn_mulhha_su, NULL }, /* mulhha_su */
  { CODE_FOR_insn_mulhha_uu, NULL }, /* mulhha_uu */
  { CODE_FOR_insn_mulhhsa_uu, NULL }, /* mulhhsa_uu */
  { CODE_FOR_insn_mulhl_ss, NULL }, /* mulhl_ss */
  { CODE_FOR_insn_mulhl_su, NULL }, /* mulhl_su */
  { CODE_FOR_insn_mulhl_us, NULL }, /* mulhl_us */
  { CODE_FOR_insn_mulhl_uu, NULL }, /* mulhl_uu */
  { CODE_FOR_insn_mulhla_ss, NULL }, /* mulhla_ss */
  { CODE_FOR_insn_mulhla_su, NULL }, /* mulhla_su */
  { CODE_FOR_insn_mulhla_us, NULL }, /* mulhla_us */
  { CODE_FOR_insn_mulhla_uu, NULL }, /* mulhla_uu */
  { CODE_FOR_insn_mulhlsa_uu, NULL }, /* mulhlsa_uu */
  { CODE_FOR_insn_mulll_ss, NULL }, /* mulll_ss */
  { CODE_FOR_insn_mulll_su, NULL }, /* mulll_su */
  { CODE_FOR_insn_mulll_uu, NULL }, /* mulll_uu */
  { CODE_FOR_insn_mullla_ss, NULL }, /* mullla_ss */
  { CODE_FOR_insn_mullla_su, NULL }, /* mullla_su */
  { CODE_FOR_insn_mullla_uu, NULL }, /* mullla_uu */
  { CODE_FOR_insn_mulllsa_uu, NULL }, /* mulllsa_uu */
  { CODE_FOR_insn_mvnz, NULL }, /* mvnz */
  { CODE_FOR_insn_mvz, NULL }, /* mvz */
  { CODE_FOR_insn_mz, NULL }, /* mz */
  { CODE_FOR_insn_mzb, NULL }, /* mzb */
  { CODE_FOR_insn_mzh, NULL }, /* mzh */
  { CODE_FOR_insn_nap, NULL }, /* nap */
  { CODE_FOR_nop, NULL }, /* nop */
  { CODE_FOR_insn_nor, NULL }, /* nor */
  { CODE_FOR_iorsi3, NULL }, /* or */
  { CODE_FOR_insn_packbs_u, NULL }, /* packbs_u */
  { CODE_FOR_insn_packhb, NULL }, /* packhb */
  { CODE_FOR_insn_packhs, NULL }, /* packhs */
  { CODE_FOR_insn_packlb, NULL }, /* packlb */
  { CODE_FOR_popcountsi2, NULL }, /* pcnt */
  { CODE_FOR_insn_prefetch, NULL }, /* prefetch */
  { CODE_FOR_insn_prefetch_L1, NULL }, /* prefetch_L1 */
  { CODE_FOR_rotlsi3, NULL }, /* rl */
  { CODE_FOR_insn_s1a, NULL }, /* s1a */
  { CODE_FOR_insn_s2a, NULL }, /* s2a */
  { CODE_FOR_insn_s3a, NULL }, /* s3a */
  { CODE_FOR_insn_sadab_u, NULL }, /* sadab_u */
  { CODE_FOR_insn_sadah, NULL }, /* sadah */
  { CODE_FOR_insn_sadah_u, NULL }, /* sadah_u */
  { CODE_FOR_insn_sadb_u, NULL }, /* sadb_u */
  { CODE_FOR_insn_sadh, NULL }, /* sadh */
  { CODE_FOR_insn_sadh_u, NULL }, /* sadh_u */
  { CODE_FOR_insn_sb, NULL }, /* sb */
  { CODE_FOR_insn_seq, NULL }, /* seq */
  { CODE_FOR_insn_seqb, NULL }, /* seqb */
  { CODE_FOR_insn_seqh, NULL }, /* seqh */
  { CODE_FOR_insn_seqib, NULL }, /* seqib */
  { CODE_FOR_insn_seqih, NULL }, /* seqih */
  { CODE_FOR_insn_sh, NULL }, /* sh */
  { CODE_FOR_ashlsi3, NULL }, /* shl */
  { CODE_FOR_insn_shlb, NULL }, /* shlb */
  { CODE_FOR_insn_shlh, NULL }, /* shlh */
  { CODE_FOR_insn_shlb, NULL }, /* shlib */
  { CODE_FOR_insn_shlh, NULL }, /* shlih */
  { CODE_FOR_lshrsi3, NULL }, /* shr */
  { CODE_FOR_insn_shrb, NULL }, /* shrb */
  { CODE_FOR_insn_shrh, NULL }, /* shrh */
  { CODE_FOR_insn_shrb, NULL }, /* shrib */
  { CODE_FOR_insn_shrh, NULL }, /* shrih */
  { CODE_FOR_insn_slt, NULL }, /* slt */
  { CODE_FOR_insn_slt_u, NULL }, /* slt_u */
  { CODE_FOR_insn_sltb, NULL }, /* sltb */
  { CODE_FOR_insn_sltb_u, NULL }, /* sltb_u */
  { CODE_FOR_insn_slte, NULL }, /* slte */
  { CODE_FOR_insn_slte_u, NULL }, /* slte_u */
  { CODE_FOR_insn_slteb, NULL }, /* slteb */
  { CODE_FOR_insn_slteb_u, NULL }, /* slteb_u */
  { CODE_FOR_insn_slteh, NULL }, /* slteh */
  { CODE_FOR_insn_slteh_u, NULL }, /* slteh_u */
  { CODE_FOR_insn_slth, NULL }, /* slth */
  { CODE_FOR_insn_slth_u, NULL }, /* slth_u */
  { CODE_FOR_insn_sltib, NULL }, /* sltib */
  { CODE_FOR_insn_sltib_u, NULL }, /* sltib_u */
  { CODE_FOR_insn_sltih, NULL }, /* sltih */
  { CODE_FOR_insn_sltih_u, NULL }, /* sltih_u */
  { CODE_FOR_insn_sne, NULL }, /* sne */
  { CODE_FOR_insn_sneb, NULL }, /* sneb */
  { CODE_FOR_insn_sneh, NULL }, /* sneh */
  { CODE_FOR_ashrsi3, NULL }, /* sra */
  { CODE_FOR_insn_srab, NULL }, /* srab */
  { CODE_FOR_insn_srah, NULL }, /* srah */
  { CODE_FOR_insn_srab, NULL }, /* sraib */
  { CODE_FOR_insn_srah, NULL }, /* sraih */
  { CODE_FOR_subsi3, NULL }, /* sub */
  { CODE_FOR_insn_subb, NULL }, /* subb */
  { CODE_FOR_insn_subbs_u, NULL }, /* subbs_u */
  { CODE_FOR_insn_subh, NULL }, /* subh */
  { CODE_FOR_insn_subhs, NULL }, /* subhs */
  { CODE_FOR_sssubsi3, NULL }, /* subs */
  { CODE_FOR_insn_sw, NULL }, /* sw */
  { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
  { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
  { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
  { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
  { CODE_FOR_insn_tns, NULL }, /* tns */
  { CODE_FOR_insn_wh64, NULL }, /* wh64 */
  { CODE_FOR_xorsi3, NULL }, /* xor */
  { CODE_FOR_tilepro_network_barrier, NULL }, /* network_barrier */
  { CODE_FOR_tilepro_idn0_receive, NULL }, /* idn0_receive */
  { CODE_FOR_tilepro_idn1_receive, NULL }, /* idn1_receive */
  { CODE_FOR_tilepro_idn_send, NULL }, /* idn_send */
  { CODE_FOR_tilepro_sn_receive, NULL }, /* sn_receive */
  { CODE_FOR_tilepro_sn_send, NULL }, /* sn_send */
  { CODE_FOR_tilepro_udn0_receive, NULL }, /* udn0_receive */
  { CODE_FOR_tilepro_udn1_receive, NULL }, /* udn1_receive */
  { CODE_FOR_tilepro_udn2_receive, NULL }, /* udn2_receive */
  { CODE_FOR_tilepro_udn3_receive, NULL }, /* udn3_receive */
  { CODE_FOR_tilepro_udn_send, NULL }, /* udn_send */
};

struct tilepro_builtin_def
{
  const char *name;
  enum tilepro_builtin code;
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};

static const struct tilepro_builtin_def tilepro_builtins[] = {
  { "__insn_add", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addb", TILEPRO_INSN_ADDB, true, "lll" },
  { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U, false, "lll" },
  { "__insn_addh", TILEPRO_INSN_ADDH, true, "lll" },
  { "__insn_addhs", TILEPRO_INSN_ADDHS, false, "lll" },
  { "__insn_addi", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addib", TILEPRO_INSN_ADDIB, true, "lll" },
  { "__insn_addih", TILEPRO_INSN_ADDIH, true, "lll" },
  { "__insn_addli", TILEPRO_INSN_ADD, true, "lll" },
  { "__insn_addlis", TILEPRO_INSN_ADDLIS, false, "lll" },
  { "__insn_adds", TILEPRO_INSN_ADDS, false, "lll" },
  { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U, true, "lll" },
  { "__insn_adiffh", TILEPRO_INSN_ADIFFH, true, "lll" },
  { "__insn_and", TILEPRO_INSN_AND, true, "lll" },
  { "__insn_andi", TILEPRO_INSN_AND, true, "lll" },
  { "__insn_auli", TILEPRO_INSN_AULI, true, "lll" },
  { "__insn_avgb_u", TILEPRO_INSN_AVGB_U, true, "lll" },
  { "__insn_avgh", TILEPRO_INSN_AVGH, true, "lll" },
  { "__insn_bitx", TILEPRO_INSN_BITX, true, "ll" },
  { "__insn_bytex", TILEPRO_INSN_BYTEX, true, "ll" },
  { "__insn_clz", TILEPRO_INSN_CLZ, true, "ll" },
  { "__insn_crc32_32", TILEPRO_INSN_CRC32_32, true, "lll" },
  { "__insn_crc32_8", TILEPRO_INSN_CRC32_8, true, "lll" },
  { "__insn_ctz", TILEPRO_INSN_CTZ, true, "ll" },
  { "__insn_drain", TILEPRO_INSN_DRAIN, false, "v" },
  { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR, false, "vl" },
  { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN, true, "lllk" },
  { "__insn_finv", TILEPRO_INSN_FINV, false, "vk" },
  { "__insn_flush", TILEPRO_INSN_FLUSH, false, "vk" },
  { "__insn_fnop", TILEPRO_INSN_FNOP, false, "v" },
  { "__insn_icoh", TILEPRO_INSN_ICOH, false, "vk" },
  { "__insn_ill", TILEPRO_INSN_ILL, false, "v" },
  { "__insn_info", TILEPRO_INSN_INFO, false, "vl" },
  { "__insn_infol", TILEPRO_INSN_INFOL, false, "vl" },
  { "__insn_inthb", TILEPRO_INSN_INTHB, true, "lll" },
  { "__insn_inthh", TILEPRO_INSN_INTHH, true, "lll" },
  { "__insn_intlb", TILEPRO_INSN_INTLB, true, "lll" },
  { "__insn_intlh", TILEPRO_INSN_INTLH, true, "lll" },
  { "__insn_inv", TILEPRO_INSN_INV, false, "vp" },
  { "__insn_lb", TILEPRO_INSN_LB, false, "lk" },
  { "__insn_lb_u", TILEPRO_INSN_LB_U, false, "lk" },
  { "__insn_lh", TILEPRO_INSN_LH, false, "lk" },
  { "__insn_lh_u", TILEPRO_INSN_LH_U, false, "lk" },
  { "__insn_lnk", TILEPRO_INSN_LNK, true, "l" },
  { "__insn_lw", TILEPRO_INSN_LW, false, "lk" },
  { "__insn_lw_na", TILEPRO_INSN_LW_NA, false, "lk" },
  { "__insn_lb_L2", TILEPRO_INSN_LB_L2, false, "lk" },
  { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2, false, "lk" },
  { "__insn_lh_L2", TILEPRO_INSN_LH_L2, false, "lk" },
  { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2, false, "lk" },
  { "__insn_lw_L2", TILEPRO_INSN_LW_L2, false, "lk" },
  { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2, false, "lk" },
  { "__insn_lb_miss", TILEPRO_INSN_LB_MISS, false, "lk" },
  { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS, false, "lk" },
  { "__insn_lh_miss", TILEPRO_INSN_LH_MISS, false, "lk" },
  { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS, false, "lk" },
  { "__insn_lw_miss", TILEPRO_INSN_LW_MISS, false, "lk" },
  { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS, false, "lk" },
  { "__insn_maxb_u", TILEPRO_INSN_MAXB_U, true, "lll" },
  { "__insn_maxh", TILEPRO_INSN_MAXH, true, "lll" },
  { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U, true, "lll" },
  { "__insn_maxih", TILEPRO_INSN_MAXIH, true, "lll" },
  { "__insn_mf", TILEPRO_INSN_MF, false, "v" },
  { "__insn_mfspr", TILEPRO_INSN_MFSPR, false, "ll" },
  { "__insn_minb_u", TILEPRO_INSN_MINB_U, true, "lll" },
  { "__insn_minh", TILEPRO_INSN_MINH, true, "lll" },
  { "__insn_minib_u", TILEPRO_INSN_MINIB_U, true, "lll" },
  { "__insn_minih", TILEPRO_INSN_MINIH, true, "lll" },
  { "__insn_mm", TILEPRO_INSN_MM, true, "lllll" },
  { "__insn_mnz", TILEPRO_INSN_MNZ, true, "lll" },
  { "__insn_mnzb", TILEPRO_INSN_MNZB, true, "lll" },
  { "__insn_mnzh", TILEPRO_INSN_MNZH, true, "lll" },
  { "__insn_move", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_movei", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_moveli", TILEPRO_INSN_MOVE, true, "ll" },
  { "__insn_movelis", TILEPRO_INSN_MOVELIS, false, "ll" },
  { "__insn_mtspr", TILEPRO_INSN_MTSPR, false, "vll" },
  { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS, true, "lll" },
  { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU, true, "lll" },
  { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU, true, "lll" },
  { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS, true, "llll" },
  { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU, true, "llll" },
  { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU, true, "llll" },
  { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU, true, "llll" },
  { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS, true, "lll" },
  { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU, true, "lll" },
  { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US, true, "lll" },
  { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU, true, "lll" },
  { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS, true, "llll" },
  { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU, true, "llll" },
  { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US, true, "llll" },
  { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU, true, "llll" },
  { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU, true, "llll" },
  { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS, true, "lll" },
  { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU, true, "lll" },
  { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU, true, "lll" },
  { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS, true, "llll" },
  { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU, true, "llll" },
  { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU, true, "llll" },
  { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU, true, "llll" },
  { "__insn_mvnz", TILEPRO_INSN_MVNZ, true, "llll" },
  { "__insn_mvz", TILEPRO_INSN_MVZ, true, "llll" },
  { "__insn_mz", TILEPRO_INSN_MZ, true, "lll" },
  { "__insn_mzb", TILEPRO_INSN_MZB, true, "lll" },
  { "__insn_mzh", TILEPRO_INSN_MZH, true, "lll" },
  { "__insn_nap", TILEPRO_INSN_NAP, false, "v" },
  { "__insn_nop", TILEPRO_INSN_NOP, true, "v" },
  { "__insn_nor", TILEPRO_INSN_NOR, true, "lll" },
  { "__insn_or", TILEPRO_INSN_OR, true, "lll" },
  { "__insn_ori", TILEPRO_INSN_OR, true, "lll" },
  { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U, false, "lll" },
  { "__insn_packhb", TILEPRO_INSN_PACKHB, true, "lll" },
  { "__insn_packhs", TILEPRO_INSN_PACKHS, false, "lll" },
  { "__insn_packlb", TILEPRO_INSN_PACKLB, true, "lll" },
  { "__insn_pcnt", TILEPRO_INSN_PCNT, true, "ll" },
  { "__insn_prefetch", TILEPRO_INSN_PREFETCH, false, "vk" },
  { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1, false, "vk" },
  { "__insn_rl", TILEPRO_INSN_RL, true, "lll" },
  { "__insn_rli", TILEPRO_INSN_RL, true, "lll" },
  { "__insn_s1a", TILEPRO_INSN_S1A, true, "lll" },
  { "__insn_s2a", TILEPRO_INSN_S2A, true, "lll" },
  { "__insn_s3a", TILEPRO_INSN_S3A, true, "lll" },
  { "__insn_sadab_u", TILEPRO_INSN_SADAB_U, true, "llll" },
  { "__insn_sadah", TILEPRO_INSN_SADAH, true, "llll" },
  { "__insn_sadah_u", TILEPRO_INSN_SADAH_U, true, "llll" },
  { "__insn_sadb_u", TILEPRO_INSN_SADB_U, true, "lll" },
  { "__insn_sadh", TILEPRO_INSN_SADH, true, "lll" },
  { "__insn_sadh_u", TILEPRO_INSN_SADH_U, true, "lll" },
  { "__insn_sb", TILEPRO_INSN_SB, false, "vpl" },
  { "__insn_seq", TILEPRO_INSN_SEQ, true, "lll" },
  { "__insn_seqb", TILEPRO_INSN_SEQB, true, "lll" },
  { "__insn_seqh", TILEPRO_INSN_SEQH, true, "lll" },
  { "__insn_seqi", TILEPRO_INSN_SEQ, true, "lll" },
  { "__insn_seqib", TILEPRO_INSN_SEQIB, true, "lll" },
  { "__insn_seqih", TILEPRO_INSN_SEQIH, true, "lll" },
  { "__insn_sh", TILEPRO_INSN_SH, false, "vpl" },
  { "__insn_shl", TILEPRO_INSN_SHL, true, "lll" },
  { "__insn_shlb", TILEPRO_INSN_SHLB, true, "lll" },
  { "__insn_shlh", TILEPRO_INSN_SHLH, true, "lll" },
  { "__insn_shli", TILEPRO_INSN_SHL, true, "lll" },
  { "__insn_shlib", TILEPRO_INSN_SHLIB, true, "lll" },
  { "__insn_shlih", TILEPRO_INSN_SHLIH, true, "lll" },
  { "__insn_shr", TILEPRO_INSN_SHR, true, "lll" },
  { "__insn_shrb", TILEPRO_INSN_SHRB, true, "lll" },
  { "__insn_shrh", TILEPRO_INSN_SHRH, true, "lll" },
  { "__insn_shri", TILEPRO_INSN_SHR, true, "lll" },
  { "__insn_shrib", TILEPRO_INSN_SHRIB, true, "lll" },
  { "__insn_shrih", TILEPRO_INSN_SHRIH, true, "lll" },
  { "__insn_slt", TILEPRO_INSN_SLT, true, "lll" },
  { "__insn_slt_u", TILEPRO_INSN_SLT_U, true, "lll" },
  { "__insn_sltb", TILEPRO_INSN_SLTB, true, "lll" },
  { "__insn_sltb_u", TILEPRO_INSN_SLTB_U, true, "lll" },
  { "__insn_slte", TILEPRO_INSN_SLTE, true, "lll" },
  { "__insn_slte_u", TILEPRO_INSN_SLTE_U, true, "lll" },
  { "__insn_slteb", TILEPRO_INSN_SLTEB, true, "lll" },
  { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U, true, "lll" },
  { "__insn_slteh", TILEPRO_INSN_SLTEH, true, "lll" },
  { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U, true, "lll" },
  { "__insn_slth", TILEPRO_INSN_SLTH, true, "lll" },
  { "__insn_slth_u", TILEPRO_INSN_SLTH_U, true, "lll" },
  { "__insn_slti", TILEPRO_INSN_SLT, true, "lll" },
  { "__insn_slti_u", TILEPRO_INSN_SLT_U, true, "lll" },
  { "__insn_sltib", TILEPRO_INSN_SLTIB, true, "lll" },
  { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U, true, "lll" },
  { "__insn_sltih", TILEPRO_INSN_SLTIH, true, "lll" },
  { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U, true, "lll" },
  { "__insn_sne", TILEPRO_INSN_SNE, true, "lll" },
  { "__insn_sneb", TILEPRO_INSN_SNEB, true, "lll" },
  { "__insn_sneh", TILEPRO_INSN_SNEH, true, "lll" },
  { "__insn_sra", TILEPRO_INSN_SRA, true, "lll" },
  { "__insn_srab", TILEPRO_INSN_SRAB, true, "lll" },
  { "__insn_srah", TILEPRO_INSN_SRAH, true, "lll" },
  { "__insn_srai", TILEPRO_INSN_SRA, true, "lll" },
  { "__insn_sraib", TILEPRO_INSN_SRAIB, true, "lll" },
  { "__insn_sraih", TILEPRO_INSN_SRAIH, true, "lll" },
  { "__insn_sub", TILEPRO_INSN_SUB, true, "lll" },
  { "__insn_subb", TILEPRO_INSN_SUBB, true, "lll" },
  { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U, false, "lll" },
  { "__insn_subh", TILEPRO_INSN_SUBH, true, "lll" },
  { "__insn_subhs", TILEPRO_INSN_SUBHS, false, "lll" },
  { "__insn_subs", TILEPRO_INSN_SUBS, false, "lll" },
  { "__insn_sw", TILEPRO_INSN_SW, false, "vpl" },
  { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0, true, "lll" },
  { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1, true, "lll" },
  { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2, true, "lll" },
  { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3, true, "lll" },
  { "__insn_tns", TILEPRO_INSN_TNS, false, "lp" },
  { "__insn_wh64", TILEPRO_INSN_WH64, false, "vp" },
  { "__insn_xor", TILEPRO_INSN_XOR, true, "lll" },
  { "__insn_xori", TILEPRO_INSN_XOR, true, "lll" },
  { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER, false, "v" },
  { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE, false, "l" },
  { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE, false, "l" },
  { "__tile_idn_send", TILEPRO_IDN_SEND, false, "vl" },
  { "__tile_sn_receive", TILEPRO_SN_RECEIVE, false, "l" },
  { "__tile_sn_send", TILEPRO_SN_SEND, false, "vl" },
  { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE, false, "l" },
  { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE, false, "l" },
  { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE, false, "l" },
  { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE, false, "l" },
  { "__tile_udn_send", TILEPRO_UDN_SEND, false, "vl" },
};

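/* For example, the entry { "__insn_add", TILEPRO_INSN_ADD, true, "lll" }
   describes a builtin marked const (TREE_READONLY) that takes two 'l'
   arguments and returns 'l', while a signature like "vp" describes a
   void builtin taking a single volatile pointer.  */
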
/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_CONST
                                                  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'l':
      return long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}

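/* Summary of the type letters used in tilepro_builtins above:
   'v' -> void, 'l' -> long unsigned, 'p' -> volatile void *,
   'k' -> const volatile void *.  */
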
/* Implement TARGET_INIT_BUILTINS.  */
static void
tilepro_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
    {
      const struct tilepro_builtin_def *p = &tilepro_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

      for (j = strlen (p->type) - 1; j > 0; j--)
        {
          arg_type_list =
            tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
        }

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
                                   NULL, NULL);

      if (p->is_const)
        TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      if (tilepro_builtin_info[p->code].fndecl == NULL)
        tilepro_builtin_info[p->code].fndecl = decl;
    }
}

/* Implement TARGET_EXPAND_BUILTIN.  */
static rtx
tilepro_expand_builtin (tree exp,
                        rtx target,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEPRO_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilepro_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (opnum > MAX_BUILTIN_ARGS)
        return NULL_RTX;

      insn_op = &insn_data[icode].operand[opnum];

      op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
        op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
        {
          /* We still failed to meet the predicate even after moving
             into a register.  Assume we needed an immediate.  */
          error_at (EXPR_LOCATION (exp),
                    "operand must be an immediate of the right size");
          return const0_rtx;
        }

      opnum++;
    }

  if (nonvoid)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
      op[0] = target;
    }

  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;

  /* If we are generating a prefetch, tell the scheduler not to move
     it around.  */
  if (GET_CODE (pat) == PREFETCH)
    PREFETCH_SCHEDULE_BARRIER_P (pat) = true;

  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}

/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEPRO_BUILTIN_max)
    return error_mark_node;

  return tilepro_builtin_info[code].fndecl;
}

/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!fixed_regs[regno] && !call_used_regs[regno]
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
          || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
        {
          if (regno == EH_RETURN_DATA_REGNO (i))
            return true;
        }
    }

  return false;
}

/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilepro_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}

/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
          & -STACK_BOUNDARY / BITS_PER_UNIT);
}

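/* Worked example (illustration, assuming the usual 64-bit
   STACK_BOUNDARY): STACK_BOUNDARY / BITS_PER_UNIT is 8, so
   round_frame_size (20) computes (20 + 7) & -8 == 24.  */
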
/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx_insn *
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
                  int cfa_offset)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  rtx mov = gen_movsi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (Pmode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
  rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}

/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx_insn *
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movsi (reg, mem));
}

/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  */
static rtx_insn *
set_frame_related_p (void)
{
  rtx_insn *seq = get_insns ();
  rtx_insn *insn;

  end_sequence ();

  if (!seq)
    return NULL;

  if (INSN_P (seq))
    {
      insn = seq;
      while (insn != NULL_RTX)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = NEXT_INSN (insn);
        }
      seq = emit_insn (seq);
    }
  else
    {
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
    }
  return seq;
}

#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())

/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it.  Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.  */
static rtx_insn *
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
                rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = gen_int_si (offset);
  rtx_insn *insn;

  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single addi or addli.  */
      to_add = imm_rtx;
    }
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilepro_expand_set_const32 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
                                   to_add));
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}

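/* For instance (illustration): a small adjustment like 'sp += -64'
   satisfies constraint J and becomes a single addi/addli, while a
   large one such as 'sp += -0x12345' is first built in a scratch
   register with tilepro_expand_set_const32 and then applied with a
   register-register add.  */
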
/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilepro_current_function_is_leaf (void)
{
  return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
}

/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilepro_saved_regs_size ()
                    + crtl->outgoing_args_size
                    + crtl->args.pretend_args_size);

  if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
bool
tilepro_can_use_return_insn_p (void)
{
  return (reload_completed
          && cfun->static_chain_decl == 0
          && compute_total_frame_size () == 0
          && tilepro_current_function_is_leaf ()
          && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
}

/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that.  Otherwise it uses the stack pointer.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      base_reg_rtx = stack_pointer_rtx;
      offset_from_base = offset_from_sp;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the new value of the stack pointer.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = gen_int_si (offset_from_base);

  if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
    {
      emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
                              gen_rtx_PLUS (Pmode, base_reg_rtx,
                                            offset_rtx)));
    }

  return tmp_reg_rtx;
}

/* The stack frame looks like this:

   AP -> +-------------+
         |     ...     |
  HFP -> +-------------+
         |  reg save   | crtl->args.pretend_args_size bytes
         +-------------+
         |     ...     |
         | saved regs  | tilepro_saved_regs_size() bytes
   FP -> +-------------+
         |     ...     |
         |    vars     | get_frame_size() bytes
         +-------------+
         |     ...     |
         | stack args  | crtl->outgoing_args_size bytes
         +-------------+
         |     HFP     | 4 bytes (only here if nonleaf / alloca)
         +-------------+
         |  callee lr  | 4 bytes (only here if nonleaf / alloca)
   SP -> +-------------+

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
void
tilepro_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx_insn *insn;
  rtx cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
    FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
                           stack_pointer_rtx, stack_pointer_rtx, 0));

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
        load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
                                  hard_frame_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
         it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilepro_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
                      stack_pointer_rtx);
    }

  if (tilepro_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
                      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
        {
          /* Expose more parallelism by computing this value from the
             original stack pointer, not the one after we have pushed
             the frame.  */
          rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
        }
      else
        {
          /* The stack frame is large, so just store the incoming sp
             value at *(new_sp + UNITS_PER_WORD).  */
          rtx p;
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
          p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                            GEN_INT (UNITS_PER_WORD));
          emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
        }

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movsi (gen_frame_mem (SImode, chain_addr),
                            gen_rtx_REG (SImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        int from_regno;
        int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

        if (r == NULL_RTX)
          {
            rtx p = compute_frame_addr (offset, &next_scratch_regno);
            r = gen_rtx_REG (word_mode, next_scratch_regno--);
            reg_save_addr[which_scratch] = r;

            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }
        else
          {
            /* Advance to the next stack slot to store this register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        /* Save this register to the stack (but use the old fp value
           we copied aside if appropriate).  */
        from_regno = (fp_copy_regno >= 0
                      && regno == HARD_FRAME_POINTER_REGNUM)
          ? fp_copy_regno : regno;
        FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}

/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  */
void
tilepro_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx_insn *last_insn, *insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEPRO_LINK_REGNUM,
                              compute_frame_addr (0, &next_scratch_regno),
                              &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          REG_NOTES (insn) = cfa_restores;
        }
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        if (r == NULL_RTX)
          {
            r = compute_frame_addr (offset, &next_scratch_regno);
            reg_save_addr[which_scratch] = r;
          }
        else
          {
            /* Advance to the next stack slot to store this
               register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (VOIDmode, r, p));
          }

        if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          frame_emit_load (fp_copy_regno, r, NULL);
        else
          frame_emit_load (regno, r, &cfa_restores);

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilepro_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
         pointer.  */
      insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
                                        hard_frame_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
                             cfa_restores);
    }

  if (crtl->calls_eh_return)
    emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
                              EH_RETURN_STACKADJ_RTX));

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx,
                             gen_rtx_REG (Pmode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      /* Emit the actual 'return' instruction.  */
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
tilepro_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
              - tilepro_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}

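/* These offsets follow directly from the frame layout pictured above:
   the arg pointer sits STACK_POINTER_OFFSET + total_size above the
   stack pointer, and the soft frame pointer sits pretend-args plus
   saved-regs bytes below the hard frame pointer.  */
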
/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
}

/* Implement EH_RETURN_HANDLER_RTX.  */
rtx
tilepro_eh_return_handler_rtx (void)
{
  /* The MEM needs to be volatile to prevent it from being
     deleted.  */
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}

/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}

/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
static bool
tilepro_frame_pointer_required (void)
{
  return crtl->calls_eh_return || cfun->calls_alloca;
}

/* Scheduling and reorg  */

/* Return the length of INSN.  LENGTH is the initial length computed
   by attributes in the machine-description file.  This is where we
   account for bundles.  */
int
tilepro_adjust_insn_length (rtx_insn *insn, int length)
{
  machine_mode mode = GET_MODE (insn);

  /* A non-terminating instruction in a bundle has length 0.  */
  if (mode == SImode)
    return 0;

  /* By default, there is no length adjustment.  */
  return length;
}


/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}

/* Return the rtx for the jump target.  */
static rtx
get_jump_target (rtx branch)
{
  if (CALL_P (branch))
    {
      rtx call;
      call = PATTERN (branch);

      if (GET_CODE (call) == PARALLEL)
        call = XVECEXP (call, 0, 0);

      if (GET_CODE (call) == SET)
        call = SET_SRC (call);

      if (GET_CODE (call) == CALL)
        return XEXP (XEXP (call, 0), 0);
    }
  return 0;
}

/* Implement TARGET_SCHED_ADJUST_COST.  */
static int
tilepro_sched_adjust_cost (rtx_insn *insn, rtx link, rtx_insn *dep_insn,
                           int cost)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
        return 0;
    }

  return cost;
}

/* Skip over irrelevant NOTEs and such and look for the next insn we
   would consider bundling.  */
static rtx_insn *
next_insn_to_bundle (rtx_insn *r, rtx_insn *end)
{
  for (; r != end; r = NEXT_INSN (r))
    {
      if (NONDEBUG_INSN_P (r)
          && GET_CODE (PATTERN (r)) != USE
          && GET_CODE (PATTERN (r)) != CLOBBER)
        return r;
    }

  return NULL;
}

/* Go through all insns, and use the information generated during
   scheduling to generate SEQUENCEs to represent bundles of
   instructions issued simultaneously.  */
static void
tilepro_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *end = NEXT_INSN (BB_END (bb));

      for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
        {
          next = next_insn_to_bundle (NEXT_INSN (insn), end);

          /* Never wrap {} around inline asm.  */
          if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
            {
              if (next == NULL_RTX || GET_MODE (next) == TImode
                  /* NOTE: The scheduler incorrectly believes a call
                     insn can execute in the same cycle as the insn
                     after the call.  This is of course impossible.
                     Really we need to fix the scheduler somehow, so
                     the code after the call gets scheduled
                     optimally.  */
                  || CALL_P (insn))
                {
                  /* Mark current insn as the end of a bundle.  */
                  PUT_MODE (insn, QImode);
                }
              else
                {
                  /* Mark it as part of a bundle.  */
                  PUT_MODE (insn, SImode);
                }
            }
        }
    }
}

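/* Note (added): the insn's machine mode is reused here as a bundling
   flag -- QImode marks the final instruction of a bundle, SImode an
   inner one -- which is exactly what tilepro_adjust_insn_length and
   reorder_var_tracking_notes test later on.  */
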
/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_pc_relative_symbol_ref (rtx_insn *insn, rtx opnds[4],
                                bool first_insn_p)
{
  rtx_insn *new_insns;

  start_sequence ();

  if (flag_pic == 1)
    {
      if (!first_insn_p)
        {
          emit_insn (gen_add_got16 (opnds[0], tilepro_got_rtx (),
                                    opnds[2]));
          emit_insn (gen_insn_lw (opnds[0], opnds[0]));
        }
    }
  else
    {
      if (first_insn_p)
        {
          emit_insn (gen_addhi_got32 (opnds[0], tilepro_got_rtx (),
                                      opnds[2]));
        }
      else
        {
          emit_insn (gen_addlo_got32 (opnds[0], opnds[1], opnds[2]));
          emit_insn (gen_insn_lw (opnds[0], opnds[0]));
        }
    }

  new_insns = get_insns ();
  end_sequence ();

  if (new_insns)
    emit_insn_before (new_insns, insn);

  delete_insn (insn);
}

/* Returns whether INSN is a pc-relative addli insn.  */
static bool
match_addli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != LO_SUM)
    return false;

  if (GET_CODE (XEXP (SET_SRC (pattern), 1)) != CONST)
    return false;

  unspec = XEXP (XEXP (SET_SRC (pattern), 1), 0);

  return (GET_CODE (unspec) == UNSPEC
          && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}

/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_addli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == LO_SUM);
  gcc_assert (GET_CODE (XEXP (set_src, 1)) == CONST);
  opnds[1] = XEXP (set_src, 0);

  unspec = XEXP (XEXP (set_src, 1), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}

/* Returns whether INSN is a pc-relative auli insn.  */
static bool
match_auli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx high;
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != PLUS)
    return false;

  high = XEXP (SET_SRC (pattern), 1);

  if (GET_CODE (high) != HIGH
      || GET_CODE (XEXP (high, 0)) != CONST)
    return false;

  unspec = XEXP (XEXP (high, 0), 0);

  return (GET_CODE (unspec) == UNSPEC
          && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}

/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_auli_pcrel (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx high;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == PLUS);
  opnds[1] = XEXP (set_src, 0);

  high = XEXP (set_src, 1);
  gcc_assert (GET_CODE (high) == HIGH);
  gcc_assert (GET_CODE (XEXP (high, 0)) == CONST);

  unspec = XEXP (XEXP (high, 0), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}

/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
   going through the GOT when the symbol is local to the compilation
   unit.  But such a symbol requires that the common text_label that
   we generate at the beginning of the function be in the same section
   as the reference to the SYMBOL_REF.  This may not be true if we
   generate hot/cold sections.  This function looks for such cases and
   replaces such references with the longer sequence going through the
   GOT.

   We expect one of the following two instruction sequences:
   addli tmp1, txt_label_reg, lo16(sym - txt_label)
   auli  tmp2, tmp1, ha16(sym - txt_label)

   auli  tmp1, txt_label_reg, ha16(sym - txt_label)
   addli tmp2, tmp1, lo16(sym - txt_label)

   If we're compiling -fpic, we replace the first instruction with
   nothing, and the second instruction with:

   addli tmp2, got_rtx, got(sym)
   lw    tmp2, tmp2

   If we're compiling -fPIC, we replace the first instruction with:

   auli  tmp1, got_rtx, got_ha16(sym)

   and the second instruction with:

   addli tmp2, tmp1, got_lo16(sym)
   lw    tmp2, tmp2

   Note that we're careful to disturb the instruction sequence as
   little as possible, since it's very late in the compilation
   process.  */
static void
tilepro_fixup_pcrel_references (void)
{
  rtx_insn *insn, *next_insn;
  bool same_section_as_entry = true;

  for (insn = get_insns (); insn; insn = next_insn)
    {
      next_insn = NEXT_INSN (insn);

      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
        {
          same_section_as_entry = !same_section_as_entry;
          continue;
        }

      if (same_section_as_entry)
        continue;

      if (!(INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
        continue;

      if (match_addli_pcrel (insn))
        replace_addli_pcrel (insn);
      else if (match_auli_pcrel (insn))
        replace_auli_pcrel (insn);
    }
}

/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */
static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *queue = NULL;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a
                 bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx_insn *next_queue = PREV_INSN (queue);
                      SET_PREV_INSN (NEXT_INSN (insn)) = queue;
                      SET_NEXT_INSN (queue) = NEXT_INSN (insn);
                      SET_NEXT_INSN (insn) = queue;
                      SET_PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx_insn *prev = PREV_INSN (insn);
                  SET_PREV_INSN (next) = prev;
                  SET_NEXT_INSN (prev) = next;

                  SET_PREV_INSN (insn) = queue;
                  queue = insn;
                }
            }
        }
    }
}

/* Perform machine dependent operations on the rtl chain INSNS.  */
static void
tilepro_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it
     now.  */
  compute_bb_for_insn ();

  if (flag_reorder_blocks_and_partition)
    {
      tilepro_fixup_pcrel_references ();
    }

  if (flag_schedule_insns_after_reload)
    {
      split_all_insns ();

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule to group into bundles.  */
      tilepro_gen_bundles ();
    }

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);
}

/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
}

/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                             tree function)
{
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
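
/* In pseudo-C, the thunk emitted above computes (an illustrative
   sketch, not from the original sources; offsets are in bytes):

       this += delta;
       if (vcall_offset != 0)
         this += *(ptrdiff_t *) (*(char **) this + vcall_offset);
       return function (this, ...);   // emitted as a sibling call

   with r29 serving as the scratch register for the vtable loads.  */
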
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilepro_asm_trampoline_template (FILE *file)
{
  fprintf (file, "\tlnk r10\n");
  fprintf (file, "\taddi r10, r10, 32\n");
  fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
  fprintf (file, "\tlw r10, r10\n");
  fprintf (file, "\tjr r11\n");
  fprintf (file, "\t.word 0 # <function address>\n");
  fprintf (file, "\t.word 0 # <static chain value>\n");
}
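
/* Runtime walkthrough of the template above (an illustrative sketch,
   not from the original sources; it assumes 8-byte instruction
   bundles, a 4-byte ptr_mode, and that lnk yields the address of the
   following bundle):

       lnk   r10            # r10 = trampoline address + 8
       addi  r10, r10, 32   # r10 = first data word (code is 5 bundles)
       lwadd r11, r10, 4    # r11 = function address; r10 += 4
       lw    r10, r10       # r10 = static chain value
       jr    r11            # jump to the nested function

   tilepro_trampoline_init below fills in the two data words.  */
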
/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
                                              TRAMPOLINE_SIZE));

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
                     end_addr, Pmode);
}
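
/* The __clear_cache call above flushes the instruction cache over
   [begin_addr, end_addr) so that the freshly written trampoline code
   is visible to instruction fetch before it is first executed (added
   commentary, not in the original sources).  */
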
/* Implement TARGET_PRINT_OPERAND.  */
static void
tilepro_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("z", file);
          break;
        case NE:
          fputs ("nz", file);
          break;
        default:
          output_operand_lossage ("invalid %%c operand");
        }
      return;

    case 'C':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("nz", file);
          break;
        case NE:
          fputs ("z", file);
          break;
        default:
          output_operand_lossage ("invalid %%C operand");
        }
      return;

    case 'h':
      {
        /* Print the high 16 bits of a 32-bit constant.  */
        HOST_WIDE_INT i;

        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else
          {
            output_operand_lossage ("invalid %%h operand");
            return;
          }
        i = trunc_int_for_mode (i >> 16, HImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'H':
      {
        rtx addr = NULL;
        const char *opstr = NULL;
        bool pcrel = false;

        if (GET_CODE (x) == CONST
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
          {
            addr = XVECEXP (XEXP (x, 0), 0, 0);
            switch (XINT (XEXP (x, 0), 1))
              {
              case UNSPEC_GOT32_SYM:
                opstr = "got_ha16";
                break;
              case UNSPEC_PCREL_SYM:
                opstr = "ha16";
                pcrel = true;
                break;
              case UNSPEC_TLS_GD:
                opstr = "tls_gd_ha16";
                break;
              case UNSPEC_TLS_IE:
                opstr = "tls_ie_ha16";
                break;
              case UNSPEC_TLS_LE:
                opstr = "tls_le_ha16";
                break;
              default:
                output_operand_lossage ("invalid %%H operand");
              }
          }
        else
          {
            addr = x;
            opstr = "ha16";
          }

        fputs (opstr, file);
        fputc ('(', file);
        output_addr_const (file, addr);

        if (pcrel)
          {
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
            fputs (" - " , file);
            output_addr_const (file, addr2);
          }

        fputc (')', file);
        return;
      }

    case 'I':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%I operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = true;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'i':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%i operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = false;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'j':
      {
        /* Print the low 8 bits of a constant.  */
        HOST_WIDE_INT i;

        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else if (GET_CODE (x) == CONST_VECTOR
                 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
          i = INTVAL (CONST_VECTOR_ELT (x, 0));
        else
          {
            output_operand_lossage ("invalid %%j operand");
            return;
          }
        i = trunc_int_for_mode (i, QImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'L':
      {
        rtx addr = NULL;
        const char *opstr = NULL;
        bool pcrel = false;

        if (GET_CODE (x) == CONST
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
          {
            addr = XVECEXP (XEXP (x, 0), 0, 0);
            switch (XINT (XEXP (x, 0), 1))
              {
              case UNSPEC_GOT16_SYM:
                opstr = "got";
                break;
              case UNSPEC_GOT32_SYM:
                opstr = "got_lo16";
                break;
              case UNSPEC_PCREL_SYM:
                opstr = "lo16";
                pcrel = true;
                break;
              case UNSPEC_TLS_GD:
                opstr = "tls_gd_lo16";
                break;
              case UNSPEC_TLS_IE:
                opstr = "tls_ie_lo16";
                break;
              case UNSPEC_TLS_LE:
                opstr = "tls_le_lo16";
                break;
              default:
                output_operand_lossage ("invalid %%L operand");
              }
          }
        else
          {
            addr = x;
            opstr = "lo16";
          }

        fputs (opstr, file);
        fputc ('(', file);
        output_addr_const (file, addr);

        if (pcrel)
          {
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
            fputs (" - " , file);
            output_addr_const (file, addr2);
          }

        fputc (')', file);
        return;
      }

    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, "plt(");
          output_addr_const (file, x);
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, ")");
        }
      else
        output_addr_const (file, x);
      return;

    case 'P':
      {
        /* Print a 32-bit constant plus one.  */
        HOST_WIDE_INT i;

        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%P operand");
            return;
          }
        i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'M':
      {
        /* Print an mm-style bit range.  */
        int first_bit, last_bit;

        if (!CONST_INT_P (x)
            || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
                                            &last_bit))
          {
            output_operand_lossage ("invalid %%M operand");
            return;
          }

        fprintf (file, "%d, %d", first_bit, last_bit);
        return;
      }

    case 'N':
      {
        const char *reg = NULL;

        /* Print a network register.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%N operand");
            return;
          }

        switch (INTVAL (x))
          {
          case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
          case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
          case TILEPRO_NETREG_SN:   reg = "sn";   break;
          case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
          case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
          case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
          case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
          default: gcc_unreachable ();
          }

        fprintf (file, reg);
        return;
      }

    case 't':
      {
        /* Log base 2 of a power of two.  */
        HOST_WIDE_INT i;
        HOST_WIDE_INT n;

        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%t operand");
            return;
          }
        n = trunc_int_for_mode (INTVAL (x), SImode);
        i = exact_log2 (n);
        if (i < 0)
          {
            output_operand_lossage ("invalid %%t operand '"
                                    HOST_WIDE_INT_PRINT_DEC "'", n);
            return;
          }

        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'r':
      /* In this case we need a register.  Use 'zero' if the
         operand is const0_rtx.  */
      if (x == const0_rtx
          || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
        {
          fputs ("zero", file);
          return;
        }
      else if (!REG_P (x))
        {
          output_operand_lossage ("invalid %%r operand");
          return;
        }
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
        {
          fprintf (file, "%s", reg_names[REGNO (x)]);
          return;
        }
      else if (MEM_P (x))
        {
          output_memory_reference_mode = VOIDmode;
          output_address (XEXP (x, 0));
          return;
        }
      else
        {
          output_addr_const (file, x);
          return;
        }
    }

  debug_rtx (x);
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
                          code, code);
}
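
/* Worked examples for the operand codes above (illustrative, not
   from the original sources): with x == 0x12345678, '%h' prints 4660
   (0x1234, the high 16 bits); with x == 0x1234, '%j' prints 52
   (0x34, the low 8 bits); with x == 7, '%P' prints 8; with x == 64,
   '%t' prints 6, since 64 == 1 << 6; and '%r' prints "zero" for
   const0_rtx instead of requiring a hard register.  */
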
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilepro_print_operand_address (FILE *file, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (output_memory_reference_mode);

      gcc_assert (output_memory_reference_mode != VOIDmode);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, "%d",
                 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (output_memory_reference_mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilepro_print_operand (file, addr, 'r');
}
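
/* Example (illustrative, not from the original sources): for an
   SImode POST_INC reference through r3, the '%I' fragment prints
   "r3" (output_memory_autoinc_first is true) and the '%i' fragment
   prints "4" (the mode size, negated for POST_DEC), letting a single
   insn template name the base register and the post-increment amount
   separately.  */
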
/* Machine mode of current insn, for determining curly brace
   placement.  */
static machine_mode insn_mode;


/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx_insn *insn)
{
  /* Record this for tilepro_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}


/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilepro_in_bundle = false;

/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilepro_gen_bundles.  */
const char *
tilepro_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilepro_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilepro_in_bundle = true;
    }

  if (tilepro_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilepro_in_bundle = false;

      return buf;
    }
  else
    return pseudo ? "" : code;
}
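
/* Example output (illustrative, not from the original sources): for
   a two-insn bundle whose first insn was marked SImode and whose
   last was marked QImode, the emitted assembly looks like:

       {
       add r3, r4, r5
       lw r6, r7
       }

   A single-insn bundle (QImode with no preceding SImode insn) is
   printed without braces.  */
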
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilepro_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\tplt(%s)\n"
               "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\t%s\n"
               "\t}\n", MCOUNT_NAME);
    }

  tilepro_in_bundle = false;
}


/* Implement TARGET_ASM_FILE_END.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilepro_option_override

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilepro_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilepro_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilepro_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilepro_rtx_costs

/* Limit to what we can reach in one addli.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilepro_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilepro_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required

#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilepro_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilepro_file_end

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilepro.h"