1 /* Subroutines used for code generation on the Tilera TILEPro.
2 Copyright (C) 2011-2015 Free Software Foundation, Inc.
3 Contributed by Walter Lee (walt@tilera.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "insn-config.h"
32 #include "insn-attr.h"
44 #include "langhooks.h"
45 #include "insn-codes.h"
51 #include "cfgcleanup.h"
52 #include "sched-int.h"
53 #include "sel-sched.h"
55 #include "tm-constrs.h"
59 #include "fold-const.h"
60 #include "internal-fn.h"
61 #include "gimple-fold.h"
63 #include "stringpool.h"
64 #include "stor-layout.h"
67 #include "tilepro-builtins.h"
68 #include "tilepro-multiply.h"
69 #include "diagnostic.h"
72 /* This file should be included last. */
73 #include "target-def.h"
75 /* SYMBOL_REF for GOT */
76 static GTY(()) rtx g_got_symbol
= NULL
;
78 /* In case of a POST_INC or POST_DEC memory reference, we must report
79 the mode of the memory reference from TARGET_PRINT_OPERAND to
80 TARGET_PRINT_OPERAND_ADDRESS. */
81 static machine_mode output_memory_reference_mode
;
/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
92 /* Implement TARGET_OPTION_OVERRIDE. */
94 tilepro_option_override (void)
96 /* When modulo scheduling is enabled, we still rely on regular
97 scheduler for bundling. */
98 if (flag_modulo_sched
)
99 flag_resched_modulo_sched
= 1;
104 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
106 tilepro_scalar_mode_supported_p (machine_mode mode
)
126 /* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
128 tile_vector_mode_supported_p (machine_mode mode
)
130 return mode
== V4QImode
|| mode
== V2HImode
;
134 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
136 tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED
,
137 rtx x ATTRIBUTE_UNUSED
)
143 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
145 tilepro_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
151 /* Implement TARGET_PASS_BY_REFERENCE. Variable sized types are
152 passed by reference. */
154 tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
155 machine_mode mode ATTRIBUTE_UNUSED
,
156 const_tree type
, bool named ATTRIBUTE_UNUSED
)
158 return (type
&& TYPE_SIZE (type
)
159 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
);
163 /* Implement TARGET_RETURN_IN_MEMORY. */
165 tilepro_return_in_memory (const_tree type
, const_tree fndecl ATTRIBUTE_UNUSED
)
167 return !IN_RANGE (int_size_in_bytes (type
),
168 0, TILEPRO_NUM_RETURN_REGS
* UNITS_PER_WORD
);
172 /* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
174 tilepro_function_arg_boundary (machine_mode mode
, const_tree type
)
176 unsigned int alignment
;
178 alignment
= type
? TYPE_ALIGN (type
) : GET_MODE_ALIGNMENT (mode
);
179 if (alignment
< PARM_BOUNDARY
)
180 alignment
= PARM_BOUNDARY
;
181 if (alignment
> STACK_BOUNDARY
)
182 alignment
= STACK_BOUNDARY
;
187 /* Implement TARGET_FUNCTION_ARG. */
189 tilepro_function_arg (cumulative_args_t cum_v
,
191 const_tree type
, bool named ATTRIBUTE_UNUSED
)
193 CUMULATIVE_ARGS cum
= *get_cumulative_args (cum_v
);
194 int byte_size
= ((mode
== BLKmode
)
195 ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
));
196 bool doubleword_aligned_p
;
198 if (cum
>= TILEPRO_NUM_ARG_REGS
)
201 /* See whether the argument has doubleword alignment. */
202 doubleword_aligned_p
=
203 tilepro_function_arg_boundary (mode
, type
) > BITS_PER_WORD
;
205 if (doubleword_aligned_p
)
208 /* The ABI does not allow parameters to be passed partially in reg
209 and partially in stack. */
210 if ((cum
+ (byte_size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
211 > TILEPRO_NUM_ARG_REGS
)
214 return gen_rtx_REG (mode
, cum
);
218 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
220 tilepro_function_arg_advance (cumulative_args_t cum_v
,
222 const_tree type
, bool named ATTRIBUTE_UNUSED
)
224 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
226 int byte_size
= ((mode
== BLKmode
)
227 ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
));
228 int word_size
= (byte_size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
229 bool doubleword_aligned_p
;
231 /* See whether the argument has doubleword alignment. */
232 doubleword_aligned_p
=
233 tilepro_function_arg_boundary (mode
, type
) > BITS_PER_WORD
;
235 if (doubleword_aligned_p
)
238 /* If the current argument does not fit in the pretend_args space,
240 if (*cum
< TILEPRO_NUM_ARG_REGS
241 && *cum
+ word_size
> TILEPRO_NUM_ARG_REGS
)
242 *cum
= TILEPRO_NUM_ARG_REGS
;
248 /* Implement TARGET_FUNCTION_VALUE. */
250 tilepro_function_value (const_tree valtype
, const_tree fn_decl_or_type
,
251 bool outgoing ATTRIBUTE_UNUSED
)
256 mode
= TYPE_MODE (valtype
);
257 unsigned_p
= TYPE_UNSIGNED (valtype
);
259 mode
= promote_function_mode (valtype
, mode
, &unsigned_p
,
262 return gen_rtx_REG (mode
, 0);
266 /* Implement TARGET_LIBCALL_VALUE. */
268 tilepro_libcall_value (machine_mode mode
,
269 const_rtx fun ATTRIBUTE_UNUSED
)
271 return gen_rtx_REG (mode
, 0);
275 /* Implement FUNCTION_VALUE_REGNO_P. */
277 tilepro_function_value_regno_p (const unsigned int regno
)
279 return regno
< TILEPRO_NUM_RETURN_REGS
;
283 /* Implement TARGET_BUILD_BUILTIN_VA_LIST. */
285 tilepro_build_builtin_va_list (void)
287 tree f_args
, f_skip
, record
, type_decl
;
290 record
= lang_hooks
.types
.make_type (RECORD_TYPE
);
292 type_decl
= build_decl (BUILTINS_LOCATION
, TYPE_DECL
,
293 get_identifier ("__va_list_tag"), record
);
295 f_args
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
296 get_identifier ("__args"), ptr_type_node
);
297 f_skip
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
298 get_identifier ("__skip"), ptr_type_node
);
300 DECL_FIELD_CONTEXT (f_args
) = record
;
302 DECL_FIELD_CONTEXT (f_skip
) = record
;
304 TREE_CHAIN (record
) = type_decl
;
305 TYPE_NAME (record
) = type_decl
;
306 TYPE_FIELDS (record
) = f_args
;
307 TREE_CHAIN (f_args
) = f_skip
;
309 /* We know this is being padded and we want it too. It is an
310 internal type so hide the warnings from the user. */
314 layout_type (record
);
318 /* The correct type is an array type of one element. */
323 /* Implement TARGET_EXPAND_BUILTIN_VA_START. */
325 tilepro_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
330 f_args
= TYPE_FIELDS (TREE_TYPE (valist
));
331 f_skip
= TREE_CHAIN (f_args
);
334 build3 (COMPONENT_REF
, TREE_TYPE (f_args
), valist
, f_args
, NULL_TREE
);
336 build3 (COMPONENT_REF
, TREE_TYPE (f_skip
), valist
, f_skip
, NULL_TREE
);
338 /* Find the __args area. */
339 t
= make_tree (TREE_TYPE (args
), virtual_incoming_args_rtx
);
340 t
= fold_build_pointer_plus_hwi (t
,
342 (crtl
->args
.info
- TILEPRO_NUM_ARG_REGS
));
344 if (crtl
->args
.pretend_args_size
> 0)
345 t
= fold_build_pointer_plus_hwi (t
, -STACK_POINTER_OFFSET
);
347 t
= build2 (MODIFY_EXPR
, TREE_TYPE (args
), args
, t
);
348 TREE_SIDE_EFFECTS (t
) = 1;
349 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
351 /* Find the __skip area. */
352 t
= make_tree (TREE_TYPE (skip
), virtual_incoming_args_rtx
);
353 t
= fold_build_pointer_plus_hwi (t
, -STACK_POINTER_OFFSET
);
354 t
= build2 (MODIFY_EXPR
, TREE_TYPE (skip
), skip
, t
);
355 TREE_SIDE_EFFECTS (t
) = 1;
356 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
360 /* Implement TARGET_SETUP_INCOMING_VARARGS. */
362 tilepro_setup_incoming_varargs (cumulative_args_t cum
,
364 tree type
, int *pretend_args
, int no_rtl
)
366 CUMULATIVE_ARGS local_cum
= *get_cumulative_args (cum
);
369 /* The caller has advanced CUM up to, but not beyond, the last named
370 argument. Advance a local copy of CUM past the last "real" named
371 argument, to find out how many registers are left over. */
372 targetm
.calls
.function_arg_advance (pack_cumulative_args (&local_cum
),
374 first_reg
= local_cum
;
376 if (local_cum
< TILEPRO_NUM_ARG_REGS
)
378 *pretend_args
= UNITS_PER_WORD
* (TILEPRO_NUM_ARG_REGS
- first_reg
);
382 alias_set_type set
= get_varargs_alias_set ();
384 gen_rtx_MEM (BLKmode
, plus_constant (Pmode
, \
385 virtual_incoming_args_rtx
,
386 -STACK_POINTER_OFFSET
-
388 (TILEPRO_NUM_ARG_REGS
-
390 MEM_NOTRAP_P (tmp
) = 1;
391 set_mem_alias_set (tmp
, set
);
392 move_block_from_reg (first_reg
, tmp
,
393 TILEPRO_NUM_ARG_REGS
- first_reg
);
401 /* Implement TARGET_GIMPLIFY_VA_ARG_EXPR. Gimplify va_arg by updating
402 the va_list structure VALIST as required to retrieve an argument of
403 type TYPE, and returning that argument.
405 ret = va_arg(VALIST, TYPE);
407 generates code equivalent to:
409 paddedsize = (sizeof(TYPE) + 3) & -4;
410 if ((VALIST.__args + paddedsize > VALIST.__skip)
411 & (VALIST.__args <= VALIST.__skip))
412 addr = VALIST.__skip + STACK_POINTER_OFFSET;
414 addr = VALIST.__args;
415 VALIST.__args = addr + paddedsize;
416 ret = *(TYPE *)addr; */
418 tilepro_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
* pre_p
,
419 gimple_seq
* post_p ATTRIBUTE_UNUSED
)
423 HOST_WIDE_INT size
, rsize
;
425 bool pass_by_reference_p
;
427 f_args
= TYPE_FIELDS (va_list_type_node
);
428 f_skip
= TREE_CHAIN (f_args
);
431 build3 (COMPONENT_REF
, TREE_TYPE (f_args
), valist
, f_args
, NULL_TREE
);
433 build3 (COMPONENT_REF
, TREE_TYPE (f_skip
), valist
, f_skip
, NULL_TREE
);
435 addr
= create_tmp_var (ptr_type_node
, "va_arg");
437 /* if an object is dynamically sized, a pointer to it is passed
438 instead of the object itself. */
439 pass_by_reference_p
= pass_by_reference (NULL
, TYPE_MODE (type
), type
,
442 if (pass_by_reference_p
)
443 type
= build_pointer_type (type
);
445 size
= int_size_in_bytes (type
);
446 rsize
= ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
) * UNITS_PER_WORD
;
448 /* If the alignment of the type is greater than the default for a
449 parameter, align to STACK_BOUNDARY. */
450 if (TYPE_ALIGN (type
) > PARM_BOUNDARY
)
452 /* Assert the only case we generate code for: when
453 stack boundary = 2 * parm boundary. */
454 gcc_assert (STACK_BOUNDARY
== PARM_BOUNDARY
* 2);
456 tmp
= build2 (BIT_AND_EXPR
, sizetype
,
457 fold_convert (sizetype
, unshare_expr (args
)),
458 size_int (PARM_BOUNDARY
/ 8));
459 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
460 unshare_expr (args
), tmp
);
462 gimplify_assign (unshare_expr (args
), tmp
, pre_p
);
465 /* Build conditional expression to calculate addr. The expression
466 will be gimplified later. */
467 tmp
= fold_build_pointer_plus_hwi (unshare_expr (args
), rsize
);
468 tmp
= build2 (TRUTH_AND_EXPR
, boolean_type_node
,
469 build2 (GT_EXPR
, boolean_type_node
, tmp
, unshare_expr (skip
)),
470 build2 (LE_EXPR
, boolean_type_node
, unshare_expr (args
),
471 unshare_expr (skip
)));
473 tmp
= build3 (COND_EXPR
, ptr_type_node
, tmp
,
474 build2 (POINTER_PLUS_EXPR
, ptr_type_node
, unshare_expr (skip
),
475 size_int (STACK_POINTER_OFFSET
)),
476 unshare_expr (args
));
478 gimplify_assign (addr
, tmp
, pre_p
);
480 /* Update VALIST.__args. */
481 tmp
= fold_build_pointer_plus_hwi (addr
, rsize
);
482 gimplify_assign (unshare_expr (args
), tmp
, pre_p
);
484 addr
= fold_convert (build_pointer_type (type
), addr
);
486 if (pass_by_reference_p
)
487 addr
= build_va_arg_indirect_ref (addr
);
489 return build_va_arg_indirect_ref (addr
);
494 /* Implement TARGET_RTX_COSTS. */
496 tilepro_rtx_costs (rtx x
, int code
, int outer_code
, int opno
, int *total
,
502 /* If this is an 8-bit constant, return zero since it can be
503 used nearly anywhere with no cost. If it is a valid operand
504 for an ADD or AND, likewise return 0 if we know it will be
505 used in that context. Otherwise, return 2 since it might be
506 used there later. All other constants take at least two
508 if (satisfies_constraint_I (x
))
513 else if (outer_code
== PLUS
&& add_operand (x
, VOIDmode
))
515 /* Slightly penalize large constants even though we can add
516 them in one instruction, because it forces the use of
517 2-wide bundling mode. */
521 else if (move_operand (x
, SImode
))
523 /* We can materialize in one move. */
524 *total
= COSTS_N_INSNS (1);
529 /* We can materialize in two moves. */
530 *total
= COSTS_N_INSNS (2);
539 *total
= COSTS_N_INSNS (2);
543 *total
= COSTS_N_INSNS (4);
551 /* If outer-code was a sign or zero extension, a cost of
552 COSTS_N_INSNS (1) was already added in, so account for
554 if (outer_code
== ZERO_EXTEND
|| outer_code
== SIGN_EXTEND
)
555 *total
= COSTS_N_INSNS (1);
557 *total
= COSTS_N_INSNS (2);
561 /* Convey that s[123]a are efficient. */
562 if (GET_CODE (XEXP (x
, 0)) == MULT
563 && cint_248_operand (XEXP (XEXP (x
, 0), 1), VOIDmode
))
565 *total
= (rtx_cost (XEXP (XEXP (x
, 0), 0),
566 (enum rtx_code
) outer_code
, opno
, speed
)
567 + rtx_cost (XEXP (x
, 1),
568 (enum rtx_code
) outer_code
, opno
, speed
)
569 + COSTS_N_INSNS (1));
575 *total
= COSTS_N_INSNS (2);
580 if (outer_code
== MULT
)
583 *total
= COSTS_N_INSNS (1);
590 /* These are handled by software and are very expensive. */
591 *total
= COSTS_N_INSNS (100);
595 case UNSPEC_VOLATILE
:
597 int num
= XINT (x
, 1);
599 if (num
<= TILEPRO_LAST_LATENCY_1_INSN
)
600 *total
= COSTS_N_INSNS (1);
601 else if (num
<= TILEPRO_LAST_LATENCY_2_INSN
)
602 *total
= COSTS_N_INSNS (2);
603 else if (num
> TILEPRO_LAST_LATENCY_INSN
)
605 if (outer_code
== PLUS
)
608 *total
= COSTS_N_INSNS (1);
614 case UNSPEC_BLOCKAGE
:
615 case UNSPEC_NETWORK_BARRIER
:
619 case UNSPEC_LNK_AND_LABEL
:
621 case UNSPEC_NETWORK_RECEIVE
:
622 case UNSPEC_NETWORK_SEND
:
623 case UNSPEC_TLS_GD_ADD
:
624 *total
= COSTS_N_INSNS (1);
627 case UNSPEC_TLS_IE_LOAD
:
628 *total
= COSTS_N_INSNS (2);
632 *total
= COSTS_N_INSNS (3);
636 *total
= COSTS_N_INSNS (4);
639 case UNSPEC_LATENCY_L2
:
640 *total
= COSTS_N_INSNS (8);
643 case UNSPEC_TLS_GD_CALL
:
644 *total
= COSTS_N_INSNS (30);
647 case UNSPEC_LATENCY_MISS
:
648 *total
= COSTS_N_INSNS (80);
652 *total
= COSTS_N_INSNS (1);
665 /* Returns an SImode integer rtx with value VAL. */
667 gen_int_si (HOST_WIDE_INT val
)
669 return gen_int_mode (val
, SImode
);
673 /* Create a temporary variable to hold a partial result, to enable
676 create_temp_reg_if_possible (machine_mode mode
, rtx default_reg
)
678 return can_create_pseudo_p ()? gen_reg_rtx (mode
) : default_reg
;
682 /* Functions to save and restore machine-specific function data. */
683 static struct machine_function
*
684 tilepro_init_machine_status (void)
686 return ggc_cleared_alloc
<machine_function
> ();
690 /* Do anything needed before RTL is emitted for each function. */
692 tilepro_init_expanders (void)
694 /* Arrange to initialize and mark the machine per-function
696 init_machine_status
= tilepro_init_machine_status
;
698 if (cfun
&& cfun
->machine
&& flag_pic
)
700 static int label_num
= 0;
702 char text_label_name
[32];
704 struct machine_function
*machine
= cfun
->machine
;
706 ASM_GENERATE_INTERNAL_LABEL (text_label_name
, "L_PICLNK", label_num
++);
708 machine
->text_label_symbol
=
709 gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (text_label_name
));
711 machine
->text_label_rtx
=
712 gen_rtx_REG (Pmode
, TILEPRO_PIC_TEXT_LABEL_REGNUM
);
714 machine
->got_rtx
= gen_rtx_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
);
716 machine
->calls_tls_get_addr
= false;
721 /* Return true if X contains a thread-local symbol. */
723 tilepro_tls_referenced_p (rtx x
)
725 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == PLUS
)
726 x
= XEXP (XEXP (x
, 0), 0);
728 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (x
))
731 /* That's all we handle in tilepro_legitimize_tls_address for
737 /* Return true if X requires a scratch register. It is given that
738 flag_pic is on and that X satisfies CONSTANT_P. */
740 tilepro_pic_address_needs_scratch (rtx x
)
742 if (GET_CODE (x
) == CONST
743 && GET_CODE (XEXP (x
, 0)) == PLUS
744 && (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
745 || GET_CODE (XEXP (XEXP (x
, 0), 0)) == LABEL_REF
)
746 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
753 /* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for
754 which we are willing to load the value into a register via a move
755 pattern. TLS cannot be treated as a constant because it can
756 include a function call. */
758 tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
760 switch (GET_CODE (x
))
764 return !tilepro_tls_referenced_p (x
);
772 /* Return true if the constant value X is a legitimate general operand
773 when generating PIC code. It is given that flag_pic is on and that
774 X satisfies CONSTANT_P. */
776 tilepro_legitimate_pic_operand_p (rtx x
)
778 if (tilepro_pic_address_needs_scratch (x
))
781 if (tilepro_tls_referenced_p (x
))
788 /* Return true if the rtx X can be used as an address operand. */
790 tilepro_legitimate_address_p (machine_mode
ARG_UNUSED (mode
), rtx x
,
793 if (GET_CODE (x
) == SUBREG
)
796 switch (GET_CODE (x
))
800 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
807 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
810 if (GET_CODE (XEXP (x
, 1)) != PLUS
)
813 if (!rtx_equal_p (XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)))
816 if (!satisfies_constraint_I (XEXP (XEXP (x
, 1), 1)))
829 /* Check if x is a valid reg. */
834 return REGNO_OK_FOR_BASE_P (REGNO (x
));
840 /* Return the rtx containing SYMBOL_REF to the text label. */
842 tilepro_text_label_symbol (void)
844 return cfun
->machine
->text_label_symbol
;
848 /* Return the register storing the value of the text label. */
850 tilepro_text_label_rtx (void)
852 return cfun
->machine
->text_label_rtx
;
856 /* Return the register storing the value of the global offset
859 tilepro_got_rtx (void)
861 return cfun
->machine
->got_rtx
;
865 /* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_. */
867 tilepro_got_symbol (void)
869 if (g_got_symbol
== NULL
)
870 g_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
876 /* Return a reference to the got to be used by tls references. */
878 tilepro_tls_got (void)
883 crtl
->uses_pic_offset_table
= 1;
884 return tilepro_got_rtx ();
887 temp
= gen_reg_rtx (Pmode
);
888 emit_move_insn (temp
, tilepro_got_symbol ());
894 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
895 this (thread-local) address. */
897 tilepro_legitimize_tls_address (rtx addr
)
901 gcc_assert (can_create_pseudo_p ());
903 if (GET_CODE (addr
) == SYMBOL_REF
)
904 switch (SYMBOL_REF_TLS_MODEL (addr
))
906 case TLS_MODEL_GLOBAL_DYNAMIC
:
907 case TLS_MODEL_LOCAL_DYNAMIC
:
909 rtx r0
, temp1
, temp2
, temp3
, got
;
912 ret
= gen_reg_rtx (Pmode
);
913 r0
= gen_rtx_REG (Pmode
, 0);
914 temp1
= gen_reg_rtx (Pmode
);
915 temp2
= gen_reg_rtx (Pmode
);
916 temp3
= gen_reg_rtx (Pmode
);
918 got
= tilepro_tls_got ();
919 emit_insn (gen_tls_gd_addhi (temp1
, got
, addr
));
920 emit_insn (gen_tls_gd_addlo (temp2
, temp1
, addr
));
921 emit_move_insn (r0
, temp2
);
922 emit_insn (gen_tls_gd_call (addr
));
923 emit_move_insn (temp3
, r0
);
924 last
= emit_insn (gen_tls_gd_add (ret
, temp3
, addr
));
925 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
928 case TLS_MODEL_INITIAL_EXEC
:
930 rtx temp1
, temp2
, temp3
, got
;
933 ret
= gen_reg_rtx (Pmode
);
934 temp1
= gen_reg_rtx (Pmode
);
935 temp2
= gen_reg_rtx (Pmode
);
936 temp3
= gen_reg_rtx (Pmode
);
938 got
= tilepro_tls_got ();
939 emit_insn (gen_tls_ie_addhi (temp1
, got
, addr
));
940 emit_insn (gen_tls_ie_addlo (temp2
, temp1
, addr
));
941 emit_insn (gen_tls_ie_load (temp3
, temp2
, addr
));
946 THREAD_POINTER_REGNUM
),
948 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
951 case TLS_MODEL_LOCAL_EXEC
:
956 ret
= gen_reg_rtx (Pmode
);
957 temp1
= gen_reg_rtx (Pmode
);
959 emit_insn (gen_tls_le_addhi (temp1
,
961 THREAD_POINTER_REGNUM
),
963 last
= emit_insn (gen_tls_le_addlo (ret
, temp1
, addr
));
964 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
970 else if (GET_CODE (addr
) == CONST
)
974 gcc_assert (GET_CODE (XEXP (addr
, 0)) == PLUS
);
976 base
= tilepro_legitimize_tls_address (XEXP (XEXP (addr
, 0), 0));
977 offset
= XEXP (XEXP (addr
, 0), 1);
979 base
= force_operand (base
, NULL_RTX
);
980 ret
= force_reg (Pmode
, gen_rtx_PLUS (Pmode
, base
, offset
));
989 /* Legitimize PIC addresses. If the address is already
990 position-independent, we return ORIG. Newly generated
991 position-independent addresses go into a reg. This is REG if
992 nonzero, otherwise we allocate register(s) as necessary. */
994 tilepro_legitimize_pic_address (rtx orig
,
995 machine_mode mode ATTRIBUTE_UNUSED
,
998 if (GET_CODE (orig
) == SYMBOL_REF
)
1000 rtx address
, pic_ref
;
1004 gcc_assert (can_create_pseudo_p ());
1005 reg
= gen_reg_rtx (Pmode
);
1008 if (SYMBOL_REF_LOCAL_P (orig
))
1010 /* If not during reload, allocate another temp reg here for
1011 loading in the address, so that these instructions can be
1012 optimized properly. */
1013 rtx temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1014 rtx text_label_symbol
= tilepro_text_label_symbol ();
1015 rtx text_label_rtx
= tilepro_text_label_rtx ();
1017 emit_insn (gen_addli_pcrel (temp_reg
, text_label_rtx
, orig
,
1018 text_label_symbol
));
1019 emit_insn (gen_auli_pcrel (temp_reg
, temp_reg
, orig
,
1020 text_label_symbol
));
1022 /* Note: this is conservative. We use the text_label but we
1023 don't use the pic_offset_table. However, in some cases
1024 we may need the pic_offset_table (see
1025 tilepro_fixup_pcrel_references). */
1026 crtl
->uses_pic_offset_table
= 1;
1030 emit_move_insn (reg
, address
);
1035 /* If not during reload, allocate another temp reg here for
1036 loading in the address, so that these instructions can be
1037 optimized properly. */
1038 rtx temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1040 gcc_assert (flag_pic
);
1043 emit_insn (gen_add_got16 (temp_reg
,
1044 tilepro_got_rtx (), orig
));
1048 rtx temp_reg2
= create_temp_reg_if_possible (Pmode
, reg
);
1049 emit_insn (gen_addhi_got32 (temp_reg2
,
1050 tilepro_got_rtx (), orig
));
1051 emit_insn (gen_addlo_got32 (temp_reg
, temp_reg2
, orig
));
1056 pic_ref
= gen_const_mem (Pmode
, address
);
1057 crtl
->uses_pic_offset_table
= 1;
1058 emit_move_insn (reg
, pic_ref
);
1059 /* The following put a REG_EQUAL note on this insn, so that
1060 it can be optimized by loop. But it causes the label to
1061 be optimized away. */
1062 /* set_unique_reg_note (insn, REG_EQUAL, orig); */
1066 else if (GET_CODE (orig
) == CONST
)
1070 if (GET_CODE (XEXP (orig
, 0)) == PLUS
1071 && XEXP (XEXP (orig
, 0), 0) == tilepro_got_rtx ())
1076 gcc_assert (can_create_pseudo_p ());
1077 reg
= gen_reg_rtx (Pmode
);
1080 gcc_assert (GET_CODE (XEXP (orig
, 0)) == PLUS
);
1081 base
= tilepro_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0), Pmode
,
1084 tilepro_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1), Pmode
,
1085 base
== reg
? 0 : reg
);
1087 if (CONST_INT_P (offset
))
1089 if (can_create_pseudo_p ())
1090 offset
= force_reg (Pmode
, offset
);
1092 /* If we reach here, then something is seriously
1097 if (can_create_pseudo_p ())
1098 return force_reg (Pmode
, gen_rtx_PLUS (Pmode
, base
, offset
));
1102 else if (GET_CODE (orig
) == LABEL_REF
)
1104 rtx address
, temp_reg
;
1105 rtx text_label_symbol
;
1110 gcc_assert (can_create_pseudo_p ());
1111 reg
= gen_reg_rtx (Pmode
);
1114 /* If not during reload, allocate another temp reg here for
1115 loading in the address, so that these instructions can be
1116 optimized properly. */
1117 temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1118 text_label_symbol
= tilepro_text_label_symbol ();
1119 text_label_rtx
= tilepro_text_label_rtx ();
1121 emit_insn (gen_addli_pcrel (temp_reg
, text_label_rtx
, orig
,
1122 text_label_symbol
));
1123 emit_insn (gen_auli_pcrel (temp_reg
, temp_reg
, orig
,
1124 text_label_symbol
));
1126 /* Note: this is conservative. We use the text_label but we
1127 don't use the pic_offset_table. */
1128 crtl
->uses_pic_offset_table
= 1;
1132 emit_move_insn (reg
, address
);
1141 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1143 tilepro_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1146 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
1147 && symbolic_operand (x
, Pmode
) && tilepro_tls_referenced_p (x
))
1149 return tilepro_legitimize_tls_address (x
);
1153 return tilepro_legitimize_pic_address (x
, mode
, 0);
1160 /* Implement TARGET_DELEGITIMIZE_ADDRESS. */
1162 tilepro_delegitimize_address (rtx x
)
1164 x
= delegitimize_mem_from_attrs (x
);
1166 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
1168 switch (XINT (XEXP (x
, 0), 1))
1170 case UNSPEC_PCREL_SYM
:
1171 case UNSPEC_GOT16_SYM
:
1172 case UNSPEC_GOT32_SYM
:
1175 x
= XVECEXP (XEXP (x
, 0), 0, 0);
1184 /* Emit code to load the PIC register. */
1186 load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED
)
1188 int orig_flag_pic
= flag_pic
;
1190 rtx got_symbol
= tilepro_got_symbol ();
1191 rtx text_label_symbol
= tilepro_text_label_symbol ();
1192 rtx text_label_rtx
= tilepro_text_label_rtx ();
1195 emit_insn (gen_insn_lnk_and_label (text_label_rtx
, text_label_symbol
));
1197 emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
1198 text_label_rtx
, got_symbol
, text_label_symbol
));
1200 emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
1202 got_symbol
, text_label_symbol
));
1204 flag_pic
= orig_flag_pic
;
1206 /* Need to emit this whether or not we obey regdecls, since
1207 setjmp/longjmp can cause life info to screw up. ??? In the case
1208 where we don't obey regdecls, this is not sufficient since we may
1209 not fall out the bottom. */
1210 emit_use (tilepro_got_rtx ());
1214 /* Return the simd variant of the constant NUM of mode MODE, by
1215 replicating it to fill an interger of mode SImode. NUM is first
1216 truncated to fit in MODE. */
1218 tilepro_simd_int (rtx num
, machine_mode mode
)
1220 HOST_WIDE_INT n
= 0;
1222 gcc_assert (CONST_INT_P (num
));
1229 n
= 0x01010101 * (n
& 0x000000FF);
1232 n
= 0x00010001 * (n
& 0x0000FFFF);
1242 return gen_int_si (n
);
1246 /* Split one or more DImode RTL references into pairs of SImode
1247 references. The RTL can be REG, offsettable MEM, integer constant,
1248 or CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL
1249 to split and "num" is its length. lo_half and hi_half are output
1250 arrays that parallel "operands". */
1252 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1256 rtx op
= operands
[num
];
1258 /* simplify_subreg refuse to split volatile memory addresses,
1259 but we still have to handle it. */
1262 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1263 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1267 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1268 GET_MODE (op
) == VOIDmode
1269 ? DImode
: GET_MODE (op
), 0);
1270 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1271 GET_MODE (op
) == VOIDmode
1272 ? DImode
: GET_MODE (op
), 4);
1278 /* Returns true iff val can be moved into a register in one
1279 instruction. And if it can, it emits the code to move the
1282 If three_wide_only is true, this insists on an instruction that
1283 works in a bundle containing three instructions. */
1285 expand_set_cint32_one_inst (rtx dest_reg
,
1286 HOST_WIDE_INT val
, bool three_wide_only
)
1288 val
= trunc_int_for_mode (val
, SImode
);
1290 if (val
== trunc_int_for_mode (val
, QImode
))
1293 emit_move_insn (dest_reg
, GEN_INT (val
));
1296 else if (!three_wide_only
)
1298 rtx imm_op
= GEN_INT (val
);
1300 if (satisfies_constraint_J (imm_op
)
1301 || satisfies_constraint_K (imm_op
)
1302 || satisfies_constraint_N (imm_op
)
1303 || satisfies_constraint_P (imm_op
))
1305 emit_move_insn (dest_reg
, imm_op
);
1314 /* Implement SImode rotatert. */
1315 static HOST_WIDE_INT
1316 rotate_right (HOST_WIDE_INT n
, int count
)
1318 unsigned HOST_WIDE_INT x
= n
& 0xFFFFFFFF;
1321 return ((x
>> count
) | (x
<< (32 - count
))) & 0xFFFFFFFF;
1325 /* Return true iff n contains exactly one contiguous sequence of 1
1326 bits, possibly wrapping around from high bits to low bits. */
1328 tilepro_bitfield_operand_p (HOST_WIDE_INT n
, int *first_bit
, int *last_bit
)
1335 for (i
= 0; i
< 32; i
++)
1337 unsigned HOST_WIDE_INT x
= rotate_right (n
, i
);
1341 /* See if x is a power of two minus one, i.e. only consecutive 1
1342 bits starting from bit 0. */
1343 if ((x
& (x
+ 1)) == 0)
1345 if (first_bit
!= NULL
)
1347 if (last_bit
!= NULL
)
1348 *last_bit
= (i
+ exact_log2 (x
^ (x
>> 1))) & 31;
1358 /* Create code to move the CONST_INT value in src_val to dest_reg. */
1360 expand_set_cint32 (rtx dest_reg
, rtx src_val
)
1363 int leading_zeroes
, trailing_zeroes
;
1365 int three_wide_only
;
1368 gcc_assert (CONST_INT_P (src_val
));
1369 val
= trunc_int_for_mode (INTVAL (src_val
), SImode
);
1371 /* See if we can generate the constant in one instruction. */
1372 if (expand_set_cint32_one_inst (dest_reg
, val
, false))
1375 /* Create a temporary variable to hold a partial result, to enable
1377 temp
= create_temp_reg_if_possible (SImode
, dest_reg
);
1379 leading_zeroes
= 31 - floor_log2 (val
& 0xFFFFFFFF);
1380 trailing_zeroes
= exact_log2 (val
& -val
);
1382 lower
= trunc_int_for_mode (val
, HImode
);
1383 upper
= trunc_int_for_mode ((val
- lower
) >> 16, HImode
);
1385 /* First try all three-wide instructions that generate a constant
1386 (i.e. movei) followed by various shifts and rotates. If none of
1387 those work, try various two-wide ways of generating a constant
1388 followed by various shifts and rotates. */
1389 for (three_wide_only
= 1; three_wide_only
>= 0; three_wide_only
--)
1393 if (expand_set_cint32_one_inst (temp
, val
>> trailing_zeroes
,
1396 /* 0xFFFFA500 becomes:
1397 movei temp, 0xFFFFFFA5
1398 shli dest, temp, 8 */
1399 emit_move_insn (dest_reg
,
1400 gen_rtx_ASHIFT (SImode
, temp
,
1401 GEN_INT (trailing_zeroes
)));
1405 if (expand_set_cint32_one_inst (temp
, val
<< leading_zeroes
,
1408 /* 0x7FFFFFFF becomes:
1410 shri dest, temp, 1 */
1411 emit_move_insn (dest_reg
,
1412 gen_rtx_LSHIFTRT (SImode
, temp
,
1413 GEN_INT (leading_zeroes
)));
1417 /* Try rotating a one-instruction immediate, since rotate is
1419 for (count
= 1; count
< 32; count
++)
1421 HOST_WIDE_INT r
= rotate_right (val
, count
);
1422 if (expand_set_cint32_one_inst (temp
, r
, three_wide_only
))
1424 /* 0xFFA5FFFF becomes:
1425 movei temp, 0xFFFFFFA5
1426 rli dest, temp, 16 */
1427 emit_move_insn (dest_reg
,
1428 gen_rtx_ROTATE (SImode
, temp
, GEN_INT (count
)));
1433 if (lower
== trunc_int_for_mode (lower
, QImode
))
1435 /* We failed to use two 3-wide instructions, but the low 16
1436 bits are a small number so just use a 2-wide + 3-wide
1437 auli + addi pair rather than anything more exotic.
1440 auli temp, zero, 0x1234
1441 addi dest, temp, 0x56 */
1446 /* Fallback case: use a auli + addli/addi pair. */
1447 emit_move_insn (temp
, GEN_INT (upper
<< 16));
1448 emit_move_insn (dest_reg
, (gen_rtx_PLUS (SImode
, temp
, GEN_INT (lower
))));
1452 /* Load OP1, a 32-bit constant, into OP0, a register. We know it
1453 can't be done in one insn when we get here, the move expander
1456 tilepro_expand_set_const32 (rtx op0
, rtx op1
)
1458 machine_mode mode
= GET_MODE (op0
);
1461 if (CONST_INT_P (op1
))
1463 /* TODO: I don't know if we want to split large constants now,
1464 or wait until later (with a define_split).
1466 Does splitting early help CSE? Does it harm other
1467 optimizations that might fold loads? */
1468 expand_set_cint32 (op0
, op1
);
1472 temp
= create_temp_reg_if_possible (mode
, op0
);
1474 /* A symbol, emit in the traditional way. */
1475 emit_move_insn (temp
, gen_rtx_HIGH (mode
, op1
));
1476 emit_move_insn (op0
, gen_rtx_LO_SUM (mode
, temp
, op1
));
1481 /* Expand a move instruction. Return true if all work is done. */
1483 tilepro_expand_mov (machine_mode mode
, rtx
*operands
)
1485 /* Handle sets of MEM first. */
1486 if (MEM_P (operands
[0]))
1488 if (can_create_pseudo_p ())
1489 operands
[0] = validize_mem (operands
[0]);
1491 if (reg_or_0_operand (operands
[1], mode
))
1494 if (!reload_in_progress
)
1495 operands
[1] = force_reg (mode
, operands
[1]);
1498 /* Fixup TLS cases. */
1499 if (CONSTANT_P (operands
[1]) && tilepro_tls_referenced_p (operands
[1]))
1501 operands
[1] = tilepro_legitimize_tls_address (operands
[1]);
1505 /* Fixup PIC cases. */
1506 if (flag_pic
&& CONSTANT_P (operands
[1]))
1508 if (tilepro_pic_address_needs_scratch (operands
[1]))
1509 operands
[1] = tilepro_legitimize_pic_address (operands
[1], mode
, 0);
1511 if (symbolic_operand (operands
[1], mode
))
1513 operands
[1] = tilepro_legitimize_pic_address (operands
[1],
1515 (reload_in_progress
?
1522 /* Fixup for UNSPEC addresses. */
1524 && GET_CODE (operands
[1]) == HIGH
1525 && GET_CODE (XEXP (operands
[1], 0)) == CONST
1526 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == UNSPEC
)
1528 rtx unspec
= XEXP (XEXP (operands
[1], 0), 0);
1529 int unspec_num
= XINT (unspec
, 1);
1530 if (unspec_num
== UNSPEC_PCREL_SYM
)
1532 emit_insn (gen_auli_pcrel (operands
[0], const0_rtx
,
1533 XVECEXP (unspec
, 0, 0),
1534 XVECEXP (unspec
, 0, 1)));
1537 else if (flag_pic
== 2 && unspec_num
== UNSPEC_GOT32_SYM
)
1539 emit_insn (gen_addhi_got32 (operands
[0], const0_rtx
,
1540 XVECEXP (unspec
, 0, 0)));
1543 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_GD
)
1545 emit_insn (gen_tls_gd_addhi (operands
[0], const0_rtx
,
1546 XVECEXP (unspec
, 0, 0)));
1549 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_IE
)
1551 emit_insn (gen_tls_ie_addhi (operands
[0], const0_rtx
,
1552 XVECEXP (unspec
, 0, 0)));
1555 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_LE
)
1557 emit_insn (gen_tls_le_addhi (operands
[0], const0_rtx
,
1558 XVECEXP (unspec
, 0, 0)));
1563 /* Accept non-constants and valid constants unmodified. */
1564 if (!CONSTANT_P (operands
[1])
1565 || GET_CODE (operands
[1]) == HIGH
|| move_operand (operands
[1], mode
))
1568 /* Split large integers. */
1569 if (GET_MODE_SIZE (mode
) <= 4)
1571 tilepro_expand_set_const32 (operands
[0], operands
[1]);
1579 /* Expand the "insv" pattern. */
1581 tilepro_expand_insv (rtx operands
[4])
1583 rtx first_rtx
= operands
[2];
1584 HOST_WIDE_INT first
= INTVAL (first_rtx
);
1585 HOST_WIDE_INT width
= INTVAL (operands
[1]);
1586 rtx v
= operands
[3];
1588 /* Shift the inserted bits into position. */
1591 if (CONST_INT_P (v
))
1593 /* Shift the constant into mm position. */
1594 v
= gen_int_si (INTVAL (v
) << first
);
1598 /* Shift over the value to be inserted. */
1599 rtx tmp
= gen_reg_rtx (SImode
);
1600 emit_insn (gen_ashlsi3 (tmp
, v
, first_rtx
));
1605 /* Insert the shifted bits using an 'mm' insn. */
1606 emit_insn (gen_insn_mm (operands
[0], v
, operands
[0], first_rtx
,
1607 GEN_INT (first
+ width
- 1)));
1611 /* Expand unaligned loads. */
1613 tilepro_expand_unaligned_load (rtx dest_reg
, rtx mem
, HOST_WIDE_INT bitsize
,
1614 HOST_WIDE_INT bit_offset
, bool sign
)
1617 rtx addr_lo
, addr_hi
;
1618 rtx mem_lo
, mem_hi
, hi
;
1619 rtx mema
, wide_result
;
1620 int last_byte_offset
;
1621 HOST_WIDE_INT byte_offset
= bit_offset
/ BITS_PER_UNIT
;
1623 mode
= GET_MODE (dest_reg
);
1625 hi
= gen_reg_rtx (mode
);
1627 if (bitsize
== 2 * BITS_PER_UNIT
&& (bit_offset
% BITS_PER_UNIT
) == 0)
1631 /* When just loading a two byte value, we can load the two bytes
1632 individually and combine them efficiently. */
1634 mem_lo
= adjust_address (mem
, QImode
, byte_offset
);
1635 mem_hi
= adjust_address (mem
, QImode
, byte_offset
+ 1);
1637 lo
= gen_reg_rtx (mode
);
1638 emit_insn (gen_zero_extendqisi2 (lo
, mem_lo
));
1642 rtx tmp
= gen_reg_rtx (mode
);
1644 /* Do a signed load of the second byte then shift and OR it
1646 emit_insn (gen_extendqisi2 (gen_lowpart (SImode
, hi
), mem_hi
));
1647 emit_insn (gen_ashlsi3 (gen_lowpart (SImode
, tmp
),
1648 gen_lowpart (SImode
, hi
), GEN_INT (8)));
1649 emit_insn (gen_iorsi3 (gen_lowpart (SImode
, dest_reg
),
1650 gen_lowpart (SImode
, lo
),
1651 gen_lowpart (SImode
, tmp
)));
1655 /* Do two unsigned loads and use intlb to interleave
1657 emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode
, hi
), mem_hi
));
1658 emit_insn (gen_insn_intlb (gen_lowpart (SImode
, dest_reg
),
1659 gen_lowpart (SImode
, hi
),
1660 gen_lowpart (SImode
, lo
)));
1666 mema
= XEXP (mem
, 0);
1668 /* AND addresses cannot be in any alias set, since they may
1669 implicitly alias surrounding code. Ideally we'd have some alias
1670 set that covered all types except those with alignment 8 or
1672 addr_lo
= force_reg (Pmode
, plus_constant (Pmode
, mema
, byte_offset
));
1673 mem_lo
= change_address (mem
, mode
,
1674 gen_rtx_AND (Pmode
, addr_lo
, GEN_INT (-4)));
1675 set_mem_alias_set (mem_lo
, 0);
1677 /* Load the high word at an address that will not fault if the low
1678 address is aligned and at the very end of a page. */
1679 last_byte_offset
= (bit_offset
+ bitsize
- 1) / BITS_PER_UNIT
;
1680 addr_hi
= force_reg (Pmode
, plus_constant (Pmode
, mema
, last_byte_offset
));
1681 mem_hi
= change_address (mem
, mode
,
1682 gen_rtx_AND (Pmode
, addr_hi
, GEN_INT (-4)));
1683 set_mem_alias_set (mem_hi
, 0);
1687 addr_lo
= make_safe_from (addr_lo
, dest_reg
);
1688 wide_result
= dest_reg
;
1692 wide_result
= gen_reg_rtx (mode
);
1695 /* Load hi first in case dest_reg is used in mema. */
1696 emit_move_insn (hi
, mem_hi
);
1697 emit_move_insn (wide_result
, mem_lo
);
1699 emit_insn (gen_insn_dword_align (gen_lowpart (SImode
, wide_result
),
1700 gen_lowpart (SImode
, wide_result
),
1701 gen_lowpart (SImode
, hi
), addr_lo
));
1706 extract_bit_field (gen_lowpart (SImode
, wide_result
),
1707 bitsize
, bit_offset
% BITS_PER_UNIT
,
1708 !sign
, gen_lowpart (SImode
, dest_reg
),
1711 if (extracted
!= dest_reg
)
1712 emit_move_insn (dest_reg
, gen_lowpart (SImode
, extracted
));
1717 /* Expand unaligned stores. */
1719 tilepro_expand_unaligned_store (rtx mem
, rtx src
, HOST_WIDE_INT bitsize
,
1720 HOST_WIDE_INT bit_offset
)
1722 HOST_WIDE_INT byte_offset
= bit_offset
/ BITS_PER_UNIT
;
1723 HOST_WIDE_INT bytesize
= bitsize
/ BITS_PER_UNIT
;
1724 HOST_WIDE_INT shift_amt
;
1729 for (i
= 0, shift_amt
= 0; i
< bytesize
; i
++, shift_amt
+= BITS_PER_UNIT
)
1731 mem_addr
= adjust_address (mem
, QImode
, byte_offset
+ i
);
1735 store_val
= expand_simple_binop (SImode
, LSHIFTRT
,
1736 gen_lowpart (SImode
, src
),
1737 GEN_INT (shift_amt
), NULL
, 1,
1739 store_val
= gen_lowpart (QImode
, store_val
);
1743 store_val
= gen_lowpart (QImode
, src
);
1746 emit_move_insn (mem_addr
, store_val
);
1751 /* Implement the movmisalign patterns. One of the operands is a
1752 memory that is not naturally aligned. Emit instructions to load
1755 tilepro_expand_movmisalign (machine_mode mode
, rtx
*operands
)
1757 if (MEM_P (operands
[1]))
1761 if (register_operand (operands
[0], mode
))
1764 tmp
= gen_reg_rtx (mode
);
1766 tilepro_expand_unaligned_load (tmp
, operands
[1],
1767 GET_MODE_BITSIZE (mode
), 0, true);
1769 if (tmp
!= operands
[0])
1770 emit_move_insn (operands
[0], tmp
);
1772 else if (MEM_P (operands
[0]))
1774 if (!reg_or_0_operand (operands
[1], mode
))
1775 operands
[1] = force_reg (mode
, operands
[1]);
1777 tilepro_expand_unaligned_store (operands
[0], operands
[1],
1778 GET_MODE_BITSIZE (mode
), 0);
1785 /* Implement the addsi3 pattern. */
1787 tilepro_expand_addsi (rtx op0
, rtx op1
, rtx op2
)
1793 /* Skip anything that only takes one instruction. */
1794 if (add_operand (op2
, SImode
))
1797 /* We can only optimize ints here (it should be impossible to get
1798 here with any other type, but it is harmless to check. */
1799 if (!CONST_INT_P (op2
))
1802 temp
= create_temp_reg_if_possible (SImode
, op0
);
1804 high
= (n
+ (n
& 0x8000)) & ~0xffff;
1806 emit_move_insn (temp
, gen_rtx_PLUS (SImode
, op1
, gen_int_si (high
)));
1807 emit_move_insn (op0
, gen_rtx_PLUS (SImode
, temp
, gen_int_si (n
- high
)));
1813 /* Implement the allocate_stack pattern (alloca). */
1815 tilepro_allocate_stack (rtx op0
, rtx op1
)
1817 /* Technically the correct way to initialize chain_loc is with
1818 * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
1819 * sets the alias_set to that of a frame reference. Some of our
1820 * tests rely on some unsafe assumption about when the chaining
1821 * update is done, we need to be conservative about reordering the
1822 * chaining instructions.
1824 rtx fp_addr
= gen_reg_rtx (Pmode
);
1825 rtx fp_value
= gen_reg_rtx (Pmode
);
1828 emit_move_insn (fp_addr
, gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
1829 GEN_INT (UNITS_PER_WORD
)));
1831 fp_loc
= gen_frame_mem (Pmode
, fp_addr
);
1833 emit_move_insn (fp_value
, fp_loc
);
1835 op1
= force_reg (Pmode
, op1
);
1837 emit_move_insn (stack_pointer_rtx
,
1838 gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, op1
));
1840 emit_move_insn (fp_addr
, gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
1841 GEN_INT (UNITS_PER_WORD
)));
1843 fp_loc
= gen_frame_mem (Pmode
, fp_addr
);
1845 emit_move_insn (fp_loc
, fp_value
);
1847 emit_move_insn (op0
, virtual_stack_dynamic_rtx
);
1854 /* Returns the insn_code in ENTRY. */
1855 static enum insn_code
1856 tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
1859 return tilepro_multiply_insn_seq_decode_opcode
[entry
->compressed_opcode
];
1863 /* Returns the length of the 'op' array. */
1865 tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq
*seq
)
1867 /* The array either uses all of its allocated slots or is terminated
1868 by a bogus opcode. Either way, the array size is the index of the
1869 last valid opcode plus one. */
1871 for (i
= tilepro_multiply_insn_seq_MAX_OPERATIONS
- 1; i
>= 0; i
--)
1872 if (tilepro_multiply_get_opcode (&seq
->op
[i
]) != CODE_FOR_nothing
)
1875 /* An empty array is not allowed. */
1880 /* We precompute a number of expression trees for multiplying by
1881 constants. This generates code for such an expression tree by
1882 walking through the nodes in the tree (which are conveniently
1883 pre-linearized) and emitting an instruction for each one. */
1885 tilepro_expand_constant_multiply_given_sequence (rtx result
, rtx src
,
1887 tilepro_multiply_insn_seq
1893 /* Keep track of the subexpressions computed so far, so later
1894 instructions can refer to them. We seed the array with zero and
1895 the value being multiplied. */
1896 int num_subexprs
= 2;
1897 rtx subexprs
[tilepro_multiply_insn_seq_MAX_OPERATIONS
+ 2];
1898 subexprs
[0] = const0_rtx
;
1901 /* Determine how many instructions we are going to generate. */
1902 num_ops
= tilepro_multiply_get_num_ops (seq
);
1903 gcc_assert (num_ops
> 0
1904 && num_ops
<= tilepro_multiply_insn_seq_MAX_OPERATIONS
);
1906 for (i
= 0; i
< num_ops
; i
++)
1908 const struct tilepro_multiply_insn_seq_entry
*entry
= &seq
->op
[i
];
1910 /* Figure out where to store the output of this instruction. */
1911 const bool is_last_op
= (i
+ 1 == num_ops
);
1912 rtx out
= is_last_op
? result
: gen_reg_rtx (SImode
);
1914 enum insn_code opcode
= tilepro_multiply_get_opcode (entry
);
1915 if (opcode
== CODE_FOR_ashlsi3
)
1917 /* Handle shift by immediate. This is a special case because
1918 the meaning of the second operand is a constant shift
1919 count rather than an operand index. */
1921 /* Make sure the shift count is in range. Zero should not
1923 const int shift_count
= entry
->rhs
;
1924 gcc_assert (shift_count
> 0 && shift_count
< 32);
1926 /* Emit the actual instruction. */
1927 emit_insn (GEN_FCN (opcode
)
1928 (out
, subexprs
[entry
->lhs
],
1929 gen_rtx_CONST_INT (SImode
, shift_count
)));
1933 /* Handle a normal two-operand instruction, such as add or
1936 /* Make sure we are referring to a previously computed
1938 gcc_assert (entry
->rhs
< num_subexprs
);
1940 /* Emit the actual instruction. */
1941 emit_insn (GEN_FCN (opcode
)
1942 (out
, subexprs
[entry
->lhs
], subexprs
[entry
->rhs
]));
1945 /* Record this subexpression for use by later expressions. */
1946 subexprs
[num_subexprs
++] = out
;
1951 /* bsearch helper function. */
1953 tilepro_compare_multipliers (const void *key
, const void *t
)
1955 return *(const int *) key
-
1956 ((const struct tilepro_multiply_insn_seq
*) t
)->multiplier
;
1960 /* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
1962 static const struct tilepro_multiply_insn_seq
*
1963 tilepro_find_multiply_insn_seq_for_constant (int multiplier
)
1965 return ((const struct tilepro_multiply_insn_seq
*)
1966 bsearch (&multiplier
, tilepro_multiply_insn_seq_table
,
1967 tilepro_multiply_insn_seq_table_size
,
1968 sizeof tilepro_multiply_insn_seq_table
[0],
1969 tilepro_compare_multipliers
));
1973 /* Try to a expand constant multiply in SImode by looking it up in a
1974 precompiled table. OP0 is the result operand, OP1 is the source
1975 operand, and MULTIPLIER is the value of the constant. Return true
1978 tilepro_expand_const_mulsi (rtx op0
, rtx op1
, int multiplier
)
1980 /* See if we have precomputed an efficient way to multiply by this
1982 const struct tilepro_multiply_insn_seq
*seq
=
1983 tilepro_find_multiply_insn_seq_for_constant (multiplier
);
1986 tilepro_expand_constant_multiply_given_sequence (op0
, op1
, seq
);
1994 /* Expand the mulsi pattern. */
1996 tilepro_expand_mulsi (rtx op0
, rtx op1
, rtx op2
)
1998 if (CONST_INT_P (op2
))
2000 HOST_WIDE_INT n
= trunc_int_for_mode (INTVAL (op2
), SImode
);
2001 return tilepro_expand_const_mulsi (op0
, op1
, n
);
2007 /* Expand a high multiply pattern in SImode. RESULT, OP1, OP2 are the
2008 operands, and SIGN is true if it's a signed multiply, and false if
2009 it's an unsigned multiply. */
2011 tilepro_expand_high_multiply (rtx result
, rtx op1
, rtx op2
, bool sign
)
2013 rtx tmp0
= gen_reg_rtx (SImode
);
2014 rtx tmp1
= gen_reg_rtx (SImode
);
2015 rtx tmp2
= gen_reg_rtx (SImode
);
2016 rtx tmp3
= gen_reg_rtx (SImode
);
2017 rtx tmp4
= gen_reg_rtx (SImode
);
2018 rtx tmp5
= gen_reg_rtx (SImode
);
2019 rtx tmp6
= gen_reg_rtx (SImode
);
2020 rtx tmp7
= gen_reg_rtx (SImode
);
2021 rtx tmp8
= gen_reg_rtx (SImode
);
2022 rtx tmp9
= gen_reg_rtx (SImode
);
2023 rtx tmp10
= gen_reg_rtx (SImode
);
2024 rtx tmp11
= gen_reg_rtx (SImode
);
2025 rtx tmp12
= gen_reg_rtx (SImode
);
2026 rtx tmp13
= gen_reg_rtx (SImode
);
2027 rtx result_lo
= gen_reg_rtx (SImode
);
2031 emit_insn (gen_insn_mulhl_su (tmp0
, op1
, op2
));
2032 emit_insn (gen_insn_mulhl_su (tmp1
, op2
, op1
));
2033 emit_insn (gen_insn_mulll_uu (tmp2
, op1
, op2
));
2034 emit_insn (gen_insn_mulhh_ss (tmp3
, op1
, op2
));
2038 emit_insn (gen_insn_mulhl_uu (tmp0
, op1
, op2
));
2039 emit_insn (gen_insn_mulhl_uu (tmp1
, op2
, op1
));
2040 emit_insn (gen_insn_mulll_uu (tmp2
, op1
, op2
));
2041 emit_insn (gen_insn_mulhh_uu (tmp3
, op1
, op2
));
2044 emit_move_insn (tmp4
, (gen_rtx_ASHIFT (SImode
, tmp0
, GEN_INT (16))));
2046 emit_move_insn (tmp5
, (gen_rtx_ASHIFT (SImode
, tmp1
, GEN_INT (16))));
2048 emit_move_insn (tmp6
, (gen_rtx_PLUS (SImode
, tmp4
, tmp5
)));
2049 emit_move_insn (result_lo
, (gen_rtx_PLUS (SImode
, tmp2
, tmp6
)));
2051 emit_move_insn (tmp7
, gen_rtx_LTU (SImode
, tmp6
, tmp4
));
2052 emit_move_insn (tmp8
, gen_rtx_LTU (SImode
, result_lo
, tmp2
));
2056 emit_move_insn (tmp9
, (gen_rtx_ASHIFTRT (SImode
, tmp0
, GEN_INT (16))));
2057 emit_move_insn (tmp10
, (gen_rtx_ASHIFTRT (SImode
, tmp1
, GEN_INT (16))));
2061 emit_move_insn (tmp9
, (gen_rtx_LSHIFTRT (SImode
, tmp0
, GEN_INT (16))));
2062 emit_move_insn (tmp10
, (gen_rtx_LSHIFTRT (SImode
, tmp1
, GEN_INT (16))));
2065 emit_move_insn (tmp11
, (gen_rtx_PLUS (SImode
, tmp3
, tmp7
)));
2066 emit_move_insn (tmp12
, (gen_rtx_PLUS (SImode
, tmp8
, tmp9
)));
2067 emit_move_insn (tmp13
, (gen_rtx_PLUS (SImode
, tmp11
, tmp12
)));
2068 emit_move_insn (result
, (gen_rtx_PLUS (SImode
, tmp13
, tmp10
)));
2072 /* Implement smulsi3_highpart. */
2074 tilepro_expand_smulsi3_highpart (rtx op0
, rtx op1
, rtx op2
)
2076 tilepro_expand_high_multiply (op0
, op1
, op2
, true);
2080 /* Implement umulsi3_highpart. */
2082 tilepro_expand_umulsi3_highpart (rtx op0
, rtx op1
, rtx op2
)
2084 tilepro_expand_high_multiply (op0
, op1
, op2
, false);
2089 /* Compare and branches */
2091 /* Helper function to handle DImode for tilepro_emit_setcc_internal. */
2093 tilepro_emit_setcc_internal_di (rtx res
, enum rtx_code code
, rtx op0
, rtx op1
)
2095 rtx operands
[2], lo_half
[2], hi_half
[2];
2096 rtx tmp
, tmp0
, tmp1
, tmp2
;
2099 /* Reduce the number of cases we need to handle by reversing the
2109 /* We handle these compares directly. */
2116 /* Reverse the operands. */
2121 /* We should not have called this with any other code. */
2127 code
= swap_condition (code
);
2128 tmp
= op0
, op0
= op1
, op1
= tmp
;
2134 split_di (operands
, 2, lo_half
, hi_half
);
2136 if (!reg_or_0_operand (lo_half
[0], SImode
))
2137 lo_half
[0] = force_reg (SImode
, lo_half
[0]);
2139 if (!reg_or_0_operand (hi_half
[0], SImode
))
2140 hi_half
[0] = force_reg (SImode
, hi_half
[0]);
2142 if (!CONST_INT_P (lo_half
[1]) && !register_operand (lo_half
[1], SImode
))
2143 lo_half
[1] = force_reg (SImode
, lo_half
[1]);
2145 if (!CONST_INT_P (hi_half
[1]) && !register_operand (hi_half
[1], SImode
))
2146 hi_half
[1] = force_reg (SImode
, hi_half
[1]);
2148 tmp0
= gen_reg_rtx (SImode
);
2149 tmp1
= gen_reg_rtx (SImode
);
2150 tmp2
= gen_reg_rtx (SImode
);
2155 emit_insn (gen_insn_seq (tmp0
, lo_half
[0], lo_half
[1]));
2156 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2157 emit_insn (gen_andsi3 (res
, tmp0
, tmp1
));
2161 emit_insn (gen_insn_sne (tmp0
, lo_half
[0], lo_half
[1]));
2162 emit_insn (gen_insn_sne (tmp1
, hi_half
[0], hi_half
[1]));
2163 emit_insn (gen_iorsi3 (res
, tmp0
, tmp1
));
2167 emit_insn (gen_insn_slte (tmp0
, hi_half
[0], hi_half
[1]));
2168 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2169 emit_insn (gen_insn_slte_u (tmp2
, lo_half
[0], lo_half
[1]));
2170 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2173 if (operands
[1] == const0_rtx
)
2175 emit_insn (gen_lshrsi3 (res
, hi_half
[0], GEN_INT (31)));
2180 emit_insn (gen_insn_slt (tmp0
, hi_half
[0], hi_half
[1]));
2181 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2182 emit_insn (gen_insn_slt_u (tmp2
, lo_half
[0], lo_half
[1]));
2183 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2187 emit_insn (gen_insn_slte_u (tmp0
, hi_half
[0], hi_half
[1]));
2188 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2189 emit_insn (gen_insn_slte_u (tmp2
, lo_half
[0], lo_half
[1]));
2190 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2193 emit_insn (gen_insn_slt_u (tmp0
, hi_half
[0], hi_half
[1]));
2194 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2195 emit_insn (gen_insn_slt_u (tmp2
, lo_half
[0], lo_half
[1]));
2196 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2206 /* Certain simplifications can be done to make invalid setcc
2207 operations valid. Return the final comparison, or NULL if we can't
2210 tilepro_emit_setcc_internal (rtx res
, enum rtx_code code
, rtx op0
, rtx op1
,
2211 machine_mode cmp_mode
)
2216 if (cmp_mode
== DImode
)
2218 return tilepro_emit_setcc_internal_di (res
, code
, op0
, op1
);
2221 /* The general case: fold the comparison code to the types of
2222 compares that we have, choosing the branch as necessary. */
2232 /* We have these compares. */
2239 /* We do not have these compares, so we reverse the
2245 /* We should not have called this with any other code. */
2251 code
= swap_condition (code
);
2252 tmp
= op0
, op0
= op1
, op1
= tmp
;
2255 if (!reg_or_0_operand (op0
, SImode
))
2256 op0
= force_reg (SImode
, op0
);
2258 if (!CONST_INT_P (op1
) && !register_operand (op1
, SImode
))
2259 op1
= force_reg (SImode
, op1
);
2261 /* Return the setcc comparison. */
2262 emit_insn (gen_rtx_SET (res
, gen_rtx_fmt_ee (code
, SImode
, op0
, op1
)));
2268 /* Implement cstore patterns. */
2270 tilepro_emit_setcc (rtx operands
[], machine_mode cmp_mode
)
2273 tilepro_emit_setcc_internal (operands
[0], GET_CODE (operands
[1]),
2274 operands
[2], operands
[3], cmp_mode
);
2278 /* Return whether CODE is a signed comparison. */
2280 signed_compare_p (enum rtx_code code
)
2282 return (code
== EQ
|| code
== NE
|| code
== LT
|| code
== LE
2283 || code
== GT
|| code
== GE
);
2287 /* Generate the comparison for an SImode conditional branch. */
2289 tilepro_emit_cc_test (enum rtx_code code
, rtx op0
, rtx op1
,
2290 machine_mode cmp_mode
, bool eq_ne_only
)
2292 enum rtx_code branch_code
;
2295 /* Check for a compare against zero using a comparison we can do
2297 if (cmp_mode
!= DImode
2298 && op1
== const0_rtx
2299 && (code
== EQ
|| code
== NE
2300 || (!eq_ne_only
&& signed_compare_p (code
))))
2302 op0
= force_reg (SImode
, op0
);
2303 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, const0_rtx
);
2306 /* The general case: fold the comparison code to the types of
2307 compares that we have, choosing the branch as necessary. */
2315 /* We have these compares. */
2324 /* These must be reversed (except NE, but let's
2326 code
= reverse_condition (code
);
2334 if (cmp_mode
!= DImode
2335 && CONST_INT_P (op1
) && (!satisfies_constraint_I (op1
) || code
== LEU
))
2337 HOST_WIDE_INT n
= trunc_int_for_mode (INTVAL (op1
), SImode
);
2342 /* Subtract off the value we want to compare against and see
2343 if we get zero. This is cheaper than creating a constant
2344 in a register. Except that subtracting -128 is more
2345 expensive than seqi to -128, so we leave that alone. */
2346 /* ??? Don't do this when comparing against symbols,
2347 otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
2348 0), which will be declared false out of hand (at least
2350 if (!(symbolic_operand (op0
, VOIDmode
)
2351 || (REG_P (op0
) && REG_POINTER (op0
))))
2353 /* To compare against MIN_INT, we add MIN_INT and check
2356 if (n
!= -2147483647 - 1)
2361 op0
= force_reg (SImode
, op0
);
2362 temp
= gen_reg_rtx (SImode
);
2363 emit_insn (gen_addsi3 (temp
, op0
, gen_int_si (add
)));
2364 return gen_rtx_fmt_ee (reverse_condition (branch_code
),
2365 VOIDmode
, temp
, const0_rtx
);
2375 /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
2378 int first
= exact_log2 (code
== LTU
? n
: n
+ 1);
2381 op0
= force_reg (SImode
, op0
);
2382 temp
= gen_reg_rtx (SImode
);
2383 emit_move_insn (temp
,
2384 gen_rtx_LSHIFTRT (SImode
, op0
,
2385 gen_int_si (first
)));
2386 return gen_rtx_fmt_ee (reverse_condition (branch_code
),
2387 VOIDmode
, temp
, const0_rtx
);
2397 /* Compute a flag saying whether we should branch. */
2398 temp
= gen_reg_rtx (SImode
);
2399 tilepro_emit_setcc_internal (temp
, code
, op0
, op1
, cmp_mode
);
2401 /* Return the branch comparison. */
2402 return gen_rtx_fmt_ee (branch_code
, VOIDmode
, temp
, const0_rtx
);
2406 /* Generate the comparison for a conditional branch. */
2408 tilepro_emit_conditional_branch (rtx operands
[], machine_mode cmp_mode
)
2411 tilepro_emit_cc_test (GET_CODE (operands
[0]), operands
[1], operands
[2],
2413 rtx branch_rtx
= gen_rtx_SET (pc_rtx
,
2414 gen_rtx_IF_THEN_ELSE (VOIDmode
, cmp_rtx
,
2419 emit_jump_insn (branch_rtx
);
2423 /* Implement the movsicc pattern. */
2425 tilepro_emit_conditional_move (rtx cmp
)
2428 tilepro_emit_cc_test (GET_CODE (cmp
), XEXP (cmp
, 0), XEXP (cmp
, 1),
2429 GET_MODE (XEXP (cmp
, 0)), true);
2433 /* Return true if INSN is annotated with a REG_BR_PROB note that
2434 indicates it's a branch that's predicted taken. */
2436 cbranch_predicted_p (rtx_insn
*insn
)
2438 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2442 int pred_val
= XINT (x
, 0);
2444 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2451 /* Output assembly code for a specific branch instruction, appending
2452 the branch prediction flag to the opcode if appropriate. */
2454 tilepro_output_simple_cbranch_with_opcode (rtx_insn
*insn
, const char *opcode
,
2455 int regop
, bool netreg_p
,
2456 bool reverse_predicted
)
2458 static char buf
[64];
2459 sprintf (buf
, "%s%s\t%%%c%d, %%l0", opcode
,
2460 (cbranch_predicted_p (insn
) ^ reverse_predicted
) ? "t" : "",
2461 netreg_p
? 'N' : 'r', regop
);
2466 /* Output assembly code for a specific branch instruction, appending
2467 the branch prediction flag to the opcode if appropriate. */
2469 tilepro_output_cbranch_with_opcode (rtx_insn
*insn
, rtx
*operands
,
2471 const char *rev_opcode
,
2472 int regop
, bool netreg_p
)
2474 const char *branch_if_false
;
2475 rtx taken
, not_taken
;
2476 bool is_simple_branch
;
2478 gcc_assert (LABEL_P (operands
[0]));
2480 is_simple_branch
= true;
2481 if (INSN_ADDRESSES_SET_P ())
2483 int from_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2484 int to_addr
= INSN_ADDRESSES (INSN_UID (operands
[0]));
2485 int delta
= to_addr
- from_addr
;
2486 is_simple_branch
= IN_RANGE (delta
, -524288, 524280);
2489 if (is_simple_branch
)
2491 /* Just a simple conditional branch. */
2493 tilepro_output_simple_cbranch_with_opcode (insn
, opcode
, regop
,
2497 /* Generate a reversed branch around a direct jump. This fallback
2498 does not use branch-likely instructions. */
2499 not_taken
= gen_label_rtx ();
2500 taken
= operands
[0];
2502 /* Generate the reversed branch to NOT_TAKEN. */
2503 operands
[0] = not_taken
;
2505 tilepro_output_simple_cbranch_with_opcode (insn
, rev_opcode
, regop
,
2507 output_asm_insn (branch_if_false
, operands
);
2509 output_asm_insn ("j\t%l0", &taken
);
2511 /* Output NOT_TAKEN. */
2512 targetm
.asm_out
.internal_label (asm_out_file
, "L",
2513 CODE_LABEL_NUMBER (not_taken
));
2518 /* Output assembly code for a conditional branch instruction. */
2520 tilepro_output_cbranch (rtx_insn
*insn
, rtx
*operands
, bool reversed
)
2522 enum rtx_code code
= GET_CODE (operands
[1]);
2524 const char *rev_opcode
;
2527 code
= reverse_condition (code
);
2545 rev_opcode
= "blez";
2553 rev_opcode
= "bgez";
2560 tilepro_output_cbranch_with_opcode (insn
, operands
, opcode
, rev_opcode
,
2565 /* Implement the tablejump pattern. */
2567 tilepro_expand_tablejump (rtx op0
, rtx op1
)
2571 rtx table
= gen_rtx_LABEL_REF (Pmode
, op1
);
2572 rtx temp
= gen_reg_rtx (Pmode
);
2573 rtx text_label_symbol
= tilepro_text_label_symbol ();
2574 rtx text_label_rtx
= tilepro_text_label_rtx ();
2576 emit_insn (gen_addli_pcrel (temp
, text_label_rtx
,
2577 table
, text_label_symbol
));
2578 emit_insn (gen_auli_pcrel (temp
, temp
, table
, text_label_symbol
));
2579 emit_move_insn (temp
,
2580 gen_rtx_PLUS (Pmode
,
2581 convert_to_mode (Pmode
, op0
, false),
2586 emit_jump_insn (gen_tablejump_aux (op0
, op1
));
2590 /* Expand a builtin vector binary op, by calling gen function GEN with
2591 operands in the proper modes. DEST is converted to DEST_MODE, and
2592 src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
2594 tilepro_expand_builtin_vector_binop (rtx (*gen
) (rtx
, rtx
, rtx
),
2595 machine_mode dest_mode
,
2597 machine_mode src_mode
,
2598 rtx src0
, rtx src1
, bool do_src1
)
2600 dest
= gen_lowpart (dest_mode
, dest
);
2602 if (src0
== const0_rtx
)
2603 src0
= CONST0_RTX (src_mode
);
2605 src0
= gen_lowpart (src_mode
, src0
);
2609 if (src1
== const0_rtx
)
2610 src1
= CONST0_RTX (src_mode
);
2612 src1
= gen_lowpart (src_mode
, src1
);
2615 emit_insn ((*gen
) (dest
, src0
, src1
));
2622 struct tile_builtin_info
2624 enum insn_code icode
;
2628 static struct tile_builtin_info tilepro_builtin_info
[TILEPRO_BUILTIN_max
] = {
2629 { CODE_FOR_addsi3
, NULL
}, /* add */
2630 { CODE_FOR_insn_addb
, NULL
}, /* addb */
2631 { CODE_FOR_insn_addbs_u
, NULL
}, /* addbs_u */
2632 { CODE_FOR_insn_addh
, NULL
}, /* addh */
2633 { CODE_FOR_insn_addhs
, NULL
}, /* addhs */
2634 { CODE_FOR_insn_addib
, NULL
}, /* addib */
2635 { CODE_FOR_insn_addih
, NULL
}, /* addih */
2636 { CODE_FOR_insn_addlis
, NULL
}, /* addlis */
2637 { CODE_FOR_ssaddsi3
, NULL
}, /* adds */
2638 { CODE_FOR_insn_adiffb_u
, NULL
}, /* adiffb_u */
2639 { CODE_FOR_insn_adiffh
, NULL
}, /* adiffh */
2640 { CODE_FOR_andsi3
, NULL
}, /* and */
2641 { CODE_FOR_insn_auli
, NULL
}, /* auli */
2642 { CODE_FOR_insn_avgb_u
, NULL
}, /* avgb_u */
2643 { CODE_FOR_insn_avgh
, NULL
}, /* avgh */
2644 { CODE_FOR_insn_bitx
, NULL
}, /* bitx */
2645 { CODE_FOR_bswapsi2
, NULL
}, /* bytex */
2646 { CODE_FOR_clzsi2
, NULL
}, /* clz */
2647 { CODE_FOR_insn_crc32_32
, NULL
}, /* crc32_32 */
2648 { CODE_FOR_insn_crc32_8
, NULL
}, /* crc32_8 */
2649 { CODE_FOR_ctzsi2
, NULL
}, /* ctz */
2650 { CODE_FOR_insn_drain
, NULL
}, /* drain */
2651 { CODE_FOR_insn_dtlbpr
, NULL
}, /* dtlbpr */
2652 { CODE_FOR_insn_dword_align
, NULL
}, /* dword_align */
2653 { CODE_FOR_insn_finv
, NULL
}, /* finv */
2654 { CODE_FOR_insn_flush
, NULL
}, /* flush */
2655 { CODE_FOR_insn_fnop
, NULL
}, /* fnop */
2656 { CODE_FOR_insn_icoh
, NULL
}, /* icoh */
2657 { CODE_FOR_insn_ill
, NULL
}, /* ill */
2658 { CODE_FOR_insn_info
, NULL
}, /* info */
2659 { CODE_FOR_insn_infol
, NULL
}, /* infol */
2660 { CODE_FOR_insn_inthb
, NULL
}, /* inthb */
2661 { CODE_FOR_insn_inthh
, NULL
}, /* inthh */
2662 { CODE_FOR_insn_intlb
, NULL
}, /* intlb */
2663 { CODE_FOR_insn_intlh
, NULL
}, /* intlh */
2664 { CODE_FOR_insn_inv
, NULL
}, /* inv */
2665 { CODE_FOR_insn_lb
, NULL
}, /* lb */
2666 { CODE_FOR_insn_lb_u
, NULL
}, /* lb_u */
2667 { CODE_FOR_insn_lh
, NULL
}, /* lh */
2668 { CODE_FOR_insn_lh_u
, NULL
}, /* lh_u */
2669 { CODE_FOR_insn_lnk
, NULL
}, /* lnk */
2670 { CODE_FOR_insn_lw
, NULL
}, /* lw */
2671 { CODE_FOR_insn_lw_na
, NULL
}, /* lw_na */
2672 { CODE_FOR_insn_lb_L2
, NULL
}, /* lb_L2 */
2673 { CODE_FOR_insn_lb_u_L2
, NULL
}, /* lb_u_L2 */
2674 { CODE_FOR_insn_lh_L2
, NULL
}, /* lh_L2 */
2675 { CODE_FOR_insn_lh_u_L2
, NULL
}, /* lh_u_L2 */
2676 { CODE_FOR_insn_lw_L2
, NULL
}, /* lw_L2 */
2677 { CODE_FOR_insn_lw_na_L2
, NULL
}, /* lw_na_L2 */
2678 { CODE_FOR_insn_lb_miss
, NULL
}, /* lb_miss */
2679 { CODE_FOR_insn_lb_u_miss
, NULL
}, /* lb_u_miss */
2680 { CODE_FOR_insn_lh_miss
, NULL
}, /* lh_miss */
2681 { CODE_FOR_insn_lh_u_miss
, NULL
}, /* lh_u_miss */
2682 { CODE_FOR_insn_lw_miss
, NULL
}, /* lw_miss */
2683 { CODE_FOR_insn_lw_na_miss
, NULL
}, /* lw_na_miss */
2684 { CODE_FOR_insn_maxb_u
, NULL
}, /* maxb_u */
2685 { CODE_FOR_insn_maxh
, NULL
}, /* maxh */
2686 { CODE_FOR_insn_maxib_u
, NULL
}, /* maxib_u */
2687 { CODE_FOR_insn_maxih
, NULL
}, /* maxih */
2688 { CODE_FOR_memory_barrier
, NULL
}, /* mf */
2689 { CODE_FOR_insn_mfspr
, NULL
}, /* mfspr */
2690 { CODE_FOR_insn_minb_u
, NULL
}, /* minb_u */
2691 { CODE_FOR_insn_minh
, NULL
}, /* minh */
2692 { CODE_FOR_insn_minib_u
, NULL
}, /* minib_u */
2693 { CODE_FOR_insn_minih
, NULL
}, /* minih */
2694 { CODE_FOR_insn_mm
, NULL
}, /* mm */
2695 { CODE_FOR_insn_mnz
, NULL
}, /* mnz */
2696 { CODE_FOR_insn_mnzb
, NULL
}, /* mnzb */
2697 { CODE_FOR_insn_mnzh
, NULL
}, /* mnzh */
2698 { CODE_FOR_movsi
, NULL
}, /* move */
2699 { CODE_FOR_insn_movelis
, NULL
}, /* movelis */
2700 { CODE_FOR_insn_mtspr
, NULL
}, /* mtspr */
2701 { CODE_FOR_insn_mulhh_ss
, NULL
}, /* mulhh_ss */
2702 { CODE_FOR_insn_mulhh_su
, NULL
}, /* mulhh_su */
2703 { CODE_FOR_insn_mulhh_uu
, NULL
}, /* mulhh_uu */
2704 { CODE_FOR_insn_mulhha_ss
, NULL
}, /* mulhha_ss */
2705 { CODE_FOR_insn_mulhha_su
, NULL
}, /* mulhha_su */
2706 { CODE_FOR_insn_mulhha_uu
, NULL
}, /* mulhha_uu */
2707 { CODE_FOR_insn_mulhhsa_uu
, NULL
}, /* mulhhsa_uu */
2708 { CODE_FOR_insn_mulhl_ss
, NULL
}, /* mulhl_ss */
2709 { CODE_FOR_insn_mulhl_su
, NULL
}, /* mulhl_su */
2710 { CODE_FOR_insn_mulhl_us
, NULL
}, /* mulhl_us */
2711 { CODE_FOR_insn_mulhl_uu
, NULL
}, /* mulhl_uu */
2712 { CODE_FOR_insn_mulhla_ss
, NULL
}, /* mulhla_ss */
2713 { CODE_FOR_insn_mulhla_su
, NULL
}, /* mulhla_su */
2714 { CODE_FOR_insn_mulhla_us
, NULL
}, /* mulhla_us */
2715 { CODE_FOR_insn_mulhla_uu
, NULL
}, /* mulhla_uu */
2716 { CODE_FOR_insn_mulhlsa_uu
, NULL
}, /* mulhlsa_uu */
2717 { CODE_FOR_insn_mulll_ss
, NULL
}, /* mulll_ss */
2718 { CODE_FOR_insn_mulll_su
, NULL
}, /* mulll_su */
2719 { CODE_FOR_insn_mulll_uu
, NULL
}, /* mulll_uu */
2720 { CODE_FOR_insn_mullla_ss
, NULL
}, /* mullla_ss */
2721 { CODE_FOR_insn_mullla_su
, NULL
}, /* mullla_su */
2722 { CODE_FOR_insn_mullla_uu
, NULL
}, /* mullla_uu */
2723 { CODE_FOR_insn_mulllsa_uu
, NULL
}, /* mulllsa_uu */
2724 { CODE_FOR_insn_mvnz
, NULL
}, /* mvnz */
2725 { CODE_FOR_insn_mvz
, NULL
}, /* mvz */
2726 { CODE_FOR_insn_mz
, NULL
}, /* mz */
2727 { CODE_FOR_insn_mzb
, NULL
}, /* mzb */
2728 { CODE_FOR_insn_mzh
, NULL
}, /* mzh */
2729 { CODE_FOR_insn_nap
, NULL
}, /* nap */
2730 { CODE_FOR_nop
, NULL
}, /* nop */
2731 { CODE_FOR_insn_nor
, NULL
}, /* nor */
2732 { CODE_FOR_iorsi3
, NULL
}, /* or */
2733 { CODE_FOR_insn_packbs_u
, NULL
}, /* packbs_u */
2734 { CODE_FOR_insn_packhb
, NULL
}, /* packhb */
2735 { CODE_FOR_insn_packhs
, NULL
}, /* packhs */
2736 { CODE_FOR_insn_packlb
, NULL
}, /* packlb */
2737 { CODE_FOR_popcountsi2
, NULL
}, /* pcnt */
2738 { CODE_FOR_insn_prefetch
, NULL
}, /* prefetch */
2739 { CODE_FOR_insn_prefetch_L1
, NULL
}, /* prefetch_L1 */
2740 { CODE_FOR_rotlsi3
, NULL
}, /* rl */
2741 { CODE_FOR_insn_s1a
, NULL
}, /* s1a */
2742 { CODE_FOR_insn_s2a
, NULL
}, /* s2a */
2743 { CODE_FOR_insn_s3a
, NULL
}, /* s3a */
2744 { CODE_FOR_insn_sadab_u
, NULL
}, /* sadab_u */
2745 { CODE_FOR_insn_sadah
, NULL
}, /* sadah */
2746 { CODE_FOR_insn_sadah_u
, NULL
}, /* sadah_u */
2747 { CODE_FOR_insn_sadb_u
, NULL
}, /* sadb_u */
2748 { CODE_FOR_insn_sadh
, NULL
}, /* sadh */
2749 { CODE_FOR_insn_sadh_u
, NULL
}, /* sadh_u */
2750 { CODE_FOR_insn_sb
, NULL
}, /* sb */
2751 { CODE_FOR_insn_seq
, NULL
}, /* seq */
2752 { CODE_FOR_insn_seqb
, NULL
}, /* seqb */
2753 { CODE_FOR_insn_seqh
, NULL
}, /* seqh */
2754 { CODE_FOR_insn_seqib
, NULL
}, /* seqib */
2755 { CODE_FOR_insn_seqih
, NULL
}, /* seqih */
2756 { CODE_FOR_insn_sh
, NULL
}, /* sh */
2757 { CODE_FOR_ashlsi3
, NULL
}, /* shl */
2758 { CODE_FOR_insn_shlb
, NULL
}, /* shlb */
2759 { CODE_FOR_insn_shlh
, NULL
}, /* shlh */
2760 { CODE_FOR_insn_shlb
, NULL
}, /* shlib */
2761 { CODE_FOR_insn_shlh
, NULL
}, /* shlih */
2762 { CODE_FOR_lshrsi3
, NULL
}, /* shr */
2763 { CODE_FOR_insn_shrb
, NULL
}, /* shrb */
2764 { CODE_FOR_insn_shrh
, NULL
}, /* shrh */
2765 { CODE_FOR_insn_shrb
, NULL
}, /* shrib */
2766 { CODE_FOR_insn_shrh
, NULL
}, /* shrih */
2767 { CODE_FOR_insn_slt
, NULL
}, /* slt */
2768 { CODE_FOR_insn_slt_u
, NULL
}, /* slt_u */
2769 { CODE_FOR_insn_sltb
, NULL
}, /* sltb */
2770 { CODE_FOR_insn_sltb_u
, NULL
}, /* sltb_u */
2771 { CODE_FOR_insn_slte
, NULL
}, /* slte */
2772 { CODE_FOR_insn_slte_u
, NULL
}, /* slte_u */
2773 { CODE_FOR_insn_slteb
, NULL
}, /* slteb */
2774 { CODE_FOR_insn_slteb_u
, NULL
}, /* slteb_u */
2775 { CODE_FOR_insn_slteh
, NULL
}, /* slteh */
2776 { CODE_FOR_insn_slteh_u
, NULL
}, /* slteh_u */
2777 { CODE_FOR_insn_slth
, NULL
}, /* slth */
2778 { CODE_FOR_insn_slth_u
, NULL
}, /* slth_u */
2779 { CODE_FOR_insn_sltib
, NULL
}, /* sltib */
2780 { CODE_FOR_insn_sltib_u
, NULL
}, /* sltib_u */
2781 { CODE_FOR_insn_sltih
, NULL
}, /* sltih */
2782 { CODE_FOR_insn_sltih_u
, NULL
}, /* sltih_u */
2783 { CODE_FOR_insn_sne
, NULL
}, /* sne */
2784 { CODE_FOR_insn_sneb
, NULL
}, /* sneb */
2785 { CODE_FOR_insn_sneh
, NULL
}, /* sneh */
2786 { CODE_FOR_ashrsi3
, NULL
}, /* sra */
2787 { CODE_FOR_insn_srab
, NULL
}, /* srab */
2788 { CODE_FOR_insn_srah
, NULL
}, /* srah */
2789 { CODE_FOR_insn_srab
, NULL
}, /* sraib */
2790 { CODE_FOR_insn_srah
, NULL
}, /* sraih */
2791 { CODE_FOR_subsi3
, NULL
}, /* sub */
2792 { CODE_FOR_insn_subb
, NULL
}, /* subb */
2793 { CODE_FOR_insn_subbs_u
, NULL
}, /* subbs_u */
2794 { CODE_FOR_insn_subh
, NULL
}, /* subh */
2795 { CODE_FOR_insn_subhs
, NULL
}, /* subhs */
2796 { CODE_FOR_sssubsi3
, NULL
}, /* subs */
2797 { CODE_FOR_insn_sw
, NULL
}, /* sw */
2798 { CODE_FOR_insn_tblidxb0
, NULL
}, /* tblidxb0 */
2799 { CODE_FOR_insn_tblidxb1
, NULL
}, /* tblidxb1 */
2800 { CODE_FOR_insn_tblidxb2
, NULL
}, /* tblidxb2 */
2801 { CODE_FOR_insn_tblidxb3
, NULL
}, /* tblidxb3 */
2802 { CODE_FOR_insn_tns
, NULL
}, /* tns */
2803 { CODE_FOR_insn_wh64
, NULL
}, /* wh64 */
2804 { CODE_FOR_xorsi3
, NULL
}, /* xor */
2805 { CODE_FOR_tilepro_network_barrier
, NULL
}, /* network_barrier */
2806 { CODE_FOR_tilepro_idn0_receive
, NULL
}, /* idn0_receive */
2807 { CODE_FOR_tilepro_idn1_receive
, NULL
}, /* idn1_receive */
2808 { CODE_FOR_tilepro_idn_send
, NULL
}, /* idn_send */
2809 { CODE_FOR_tilepro_sn_receive
, NULL
}, /* sn_receive */
2810 { CODE_FOR_tilepro_sn_send
, NULL
}, /* sn_send */
2811 { CODE_FOR_tilepro_udn0_receive
, NULL
}, /* udn0_receive */
2812 { CODE_FOR_tilepro_udn1_receive
, NULL
}, /* udn1_receive */
2813 { CODE_FOR_tilepro_udn2_receive
, NULL
}, /* udn2_receive */
2814 { CODE_FOR_tilepro_udn3_receive
, NULL
}, /* udn3_receive */
2815 { CODE_FOR_tilepro_udn_send
, NULL
}, /* udn_send */
2819 struct tilepro_builtin_def
2822 enum tilepro_builtin code
;
2824 /* The first character is the return type. Subsequent characters
2825 are the argument types. See char_to_type. */
2830 static const struct tilepro_builtin_def tilepro_builtins
[] = {
2831 { "__insn_add", TILEPRO_INSN_ADD
, true, "lll" },
2832 { "__insn_addb", TILEPRO_INSN_ADDB
, true, "lll" },
2833 { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U
, false, "lll" },
2834 { "__insn_addh", TILEPRO_INSN_ADDH
, true, "lll" },
2835 { "__insn_addhs", TILEPRO_INSN_ADDHS
, false, "lll" },
2836 { "__insn_addi", TILEPRO_INSN_ADD
, true, "lll" },
2837 { "__insn_addib", TILEPRO_INSN_ADDIB
, true, "lll" },
2838 { "__insn_addih", TILEPRO_INSN_ADDIH
, true, "lll" },
2839 { "__insn_addli", TILEPRO_INSN_ADD
, true, "lll" },
2840 { "__insn_addlis", TILEPRO_INSN_ADDLIS
, false, "lll" },
2841 { "__insn_adds", TILEPRO_INSN_ADDS
, false, "lll" },
2842 { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U
, true, "lll" },
2843 { "__insn_adiffh", TILEPRO_INSN_ADIFFH
, true, "lll" },
2844 { "__insn_and", TILEPRO_INSN_AND
, true, "lll" },
2845 { "__insn_andi", TILEPRO_INSN_AND
, true, "lll" },
2846 { "__insn_auli", TILEPRO_INSN_AULI
, true, "lll" },
2847 { "__insn_avgb_u", TILEPRO_INSN_AVGB_U
, true, "lll" },
2848 { "__insn_avgh", TILEPRO_INSN_AVGH
, true, "lll" },
2849 { "__insn_bitx", TILEPRO_INSN_BITX
, true, "ll" },
2850 { "__insn_bytex", TILEPRO_INSN_BYTEX
, true, "ll" },
2851 { "__insn_clz", TILEPRO_INSN_CLZ
, true, "ll" },
2852 { "__insn_crc32_32", TILEPRO_INSN_CRC32_32
, true, "lll" },
2853 { "__insn_crc32_8", TILEPRO_INSN_CRC32_8
, true, "lll" },
2854 { "__insn_ctz", TILEPRO_INSN_CTZ
, true, "ll" },
2855 { "__insn_drain", TILEPRO_INSN_DRAIN
, false, "v" },
2856 { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR
, false, "vl" },
2857 { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN
, true, "lllk" },
2858 { "__insn_finv", TILEPRO_INSN_FINV
, false, "vk" },
2859 { "__insn_flush", TILEPRO_INSN_FLUSH
, false, "vk" },
2860 { "__insn_fnop", TILEPRO_INSN_FNOP
, false, "v" },
2861 { "__insn_icoh", TILEPRO_INSN_ICOH
, false, "vk" },
2862 { "__insn_ill", TILEPRO_INSN_ILL
, false, "v" },
2863 { "__insn_info", TILEPRO_INSN_INFO
, false, "vl" },
2864 { "__insn_infol", TILEPRO_INSN_INFOL
, false, "vl" },
2865 { "__insn_inthb", TILEPRO_INSN_INTHB
, true, "lll" },
2866 { "__insn_inthh", TILEPRO_INSN_INTHH
, true, "lll" },
2867 { "__insn_intlb", TILEPRO_INSN_INTLB
, true, "lll" },
2868 { "__insn_intlh", TILEPRO_INSN_INTLH
, true, "lll" },
2869 { "__insn_inv", TILEPRO_INSN_INV
, false, "vp" },
2870 { "__insn_lb", TILEPRO_INSN_LB
, false, "lk" },
2871 { "__insn_lb_u", TILEPRO_INSN_LB_U
, false, "lk" },
2872 { "__insn_lh", TILEPRO_INSN_LH
, false, "lk" },
2873 { "__insn_lh_u", TILEPRO_INSN_LH_U
, false, "lk" },
2874 { "__insn_lnk", TILEPRO_INSN_LNK
, true, "l" },
2875 { "__insn_lw", TILEPRO_INSN_LW
, false, "lk" },
2876 { "__insn_lw_na", TILEPRO_INSN_LW_NA
, false, "lk" },
2877 { "__insn_lb_L2", TILEPRO_INSN_LB_L2
, false, "lk" },
2878 { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2
, false, "lk" },
2879 { "__insn_lh_L2", TILEPRO_INSN_LH_L2
, false, "lk" },
2880 { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2
, false, "lk" },
2881 { "__insn_lw_L2", TILEPRO_INSN_LW_L2
, false, "lk" },
2882 { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2
, false, "lk" },
2883 { "__insn_lb_miss", TILEPRO_INSN_LB_MISS
, false, "lk" },
2884 { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS
, false, "lk" },
2885 { "__insn_lh_miss", TILEPRO_INSN_LH_MISS
, false, "lk" },
2886 { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS
, false, "lk" },
2887 { "__insn_lw_miss", TILEPRO_INSN_LW_MISS
, false, "lk" },
2888 { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS
, false, "lk" },
2889 { "__insn_maxb_u", TILEPRO_INSN_MAXB_U
, true, "lll" },
2890 { "__insn_maxh", TILEPRO_INSN_MAXH
, true, "lll" },
2891 { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U
, true, "lll" },
2892 { "__insn_maxih", TILEPRO_INSN_MAXIH
, true, "lll" },
2893 { "__insn_mf", TILEPRO_INSN_MF
, false, "v" },
2894 { "__insn_mfspr", TILEPRO_INSN_MFSPR
, false, "ll" },
2895 { "__insn_minb_u", TILEPRO_INSN_MINB_U
, true, "lll" },
2896 { "__insn_minh", TILEPRO_INSN_MINH
, true, "lll" },
2897 { "__insn_minib_u", TILEPRO_INSN_MINIB_U
, true, "lll" },
2898 { "__insn_minih", TILEPRO_INSN_MINIH
, true, "lll" },
2899 { "__insn_mm", TILEPRO_INSN_MM
, true, "lllll" },
2900 { "__insn_mnz", TILEPRO_INSN_MNZ
, true, "lll" },
2901 { "__insn_mnzb", TILEPRO_INSN_MNZB
, true, "lll" },
2902 { "__insn_mnzh", TILEPRO_INSN_MNZH
, true, "lll" },
2903 { "__insn_move", TILEPRO_INSN_MOVE
, true, "ll" },
2904 { "__insn_movei", TILEPRO_INSN_MOVE
, true, "ll" },
2905 { "__insn_moveli", TILEPRO_INSN_MOVE
, true, "ll" },
2906 { "__insn_movelis", TILEPRO_INSN_MOVELIS
, false, "ll" },
2907 { "__insn_mtspr", TILEPRO_INSN_MTSPR
, false, "vll" },
2908 { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS
, true, "lll" },
2909 { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU
, true, "lll" },
2910 { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU
, true, "lll" },
2911 { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS
, true, "llll" },
2912 { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU
, true, "llll" },
2913 { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU
, true, "llll" },
2914 { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU
, true, "llll" },
2915 { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS
, true, "lll" },
2916 { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU
, true, "lll" },
2917 { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US
, true, "lll" },
2918 { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU
, true, "lll" },
2919 { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS
, true, "llll" },
2920 { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU
, true, "llll" },
2921 { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US
, true, "llll" },
2922 { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU
, true, "llll" },
2923 { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU
, true, "llll" },
2924 { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS
, true, "lll" },
2925 { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU
, true, "lll" },
2926 { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU
, true, "lll" },
2927 { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS
, true, "llll" },
2928 { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU
, true, "llll" },
2929 { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU
, true, "llll" },
2930 { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU
, true, "llll" },
2931 { "__insn_mvnz", TILEPRO_INSN_MVNZ
, true, "llll" },
2932 { "__insn_mvz", TILEPRO_INSN_MVZ
, true, "llll" },
2933 { "__insn_mz", TILEPRO_INSN_MZ
, true, "lll" },
2934 { "__insn_mzb", TILEPRO_INSN_MZB
, true, "lll" },
2935 { "__insn_mzh", TILEPRO_INSN_MZH
, true, "lll" },
2936 { "__insn_nap", TILEPRO_INSN_NAP
, false, "v" },
2937 { "__insn_nop", TILEPRO_INSN_NOP
, true, "v" },
2938 { "__insn_nor", TILEPRO_INSN_NOR
, true, "lll" },
2939 { "__insn_or", TILEPRO_INSN_OR
, true, "lll" },
2940 { "__insn_ori", TILEPRO_INSN_OR
, true, "lll" },
2941 { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U
, false, "lll" },
2942 { "__insn_packhb", TILEPRO_INSN_PACKHB
, true, "lll" },
2943 { "__insn_packhs", TILEPRO_INSN_PACKHS
, false, "lll" },
2944 { "__insn_packlb", TILEPRO_INSN_PACKLB
, true, "lll" },
2945 { "__insn_pcnt", TILEPRO_INSN_PCNT
, true, "ll" },
2946 { "__insn_prefetch", TILEPRO_INSN_PREFETCH
, false, "vk" },
2947 { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1
, false, "vk" },
2948 { "__insn_rl", TILEPRO_INSN_RL
, true, "lll" },
2949 { "__insn_rli", TILEPRO_INSN_RL
, true, "lll" },
2950 { "__insn_s1a", TILEPRO_INSN_S1A
, true, "lll" },
2951 { "__insn_s2a", TILEPRO_INSN_S2A
, true, "lll" },
2952 { "__insn_s3a", TILEPRO_INSN_S3A
, true, "lll" },
2953 { "__insn_sadab_u", TILEPRO_INSN_SADAB_U
, true, "llll" },
2954 { "__insn_sadah", TILEPRO_INSN_SADAH
, true, "llll" },
2955 { "__insn_sadah_u", TILEPRO_INSN_SADAH_U
, true, "llll" },
2956 { "__insn_sadb_u", TILEPRO_INSN_SADB_U
, true, "lll" },
2957 { "__insn_sadh", TILEPRO_INSN_SADH
, true, "lll" },
2958 { "__insn_sadh_u", TILEPRO_INSN_SADH_U
, true, "lll" },
2959 { "__insn_sb", TILEPRO_INSN_SB
, false, "vpl" },
2960 { "__insn_seq", TILEPRO_INSN_SEQ
, true, "lll" },
2961 { "__insn_seqb", TILEPRO_INSN_SEQB
, true, "lll" },
2962 { "__insn_seqh", TILEPRO_INSN_SEQH
, true, "lll" },
2963 { "__insn_seqi", TILEPRO_INSN_SEQ
, true, "lll" },
2964 { "__insn_seqib", TILEPRO_INSN_SEQIB
, true, "lll" },
2965 { "__insn_seqih", TILEPRO_INSN_SEQIH
, true, "lll" },
2966 { "__insn_sh", TILEPRO_INSN_SH
, false, "vpl" },
2967 { "__insn_shl", TILEPRO_INSN_SHL
, true, "lll" },
2968 { "__insn_shlb", TILEPRO_INSN_SHLB
, true, "lll" },
2969 { "__insn_shlh", TILEPRO_INSN_SHLH
, true, "lll" },
2970 { "__insn_shli", TILEPRO_INSN_SHL
, true, "lll" },
2971 { "__insn_shlib", TILEPRO_INSN_SHLIB
, true, "lll" },
2972 { "__insn_shlih", TILEPRO_INSN_SHLIH
, true, "lll" },
2973 { "__insn_shr", TILEPRO_INSN_SHR
, true, "lll" },
2974 { "__insn_shrb", TILEPRO_INSN_SHRB
, true, "lll" },
2975 { "__insn_shrh", TILEPRO_INSN_SHRH
, true, "lll" },
2976 { "__insn_shri", TILEPRO_INSN_SHR
, true, "lll" },
2977 { "__insn_shrib", TILEPRO_INSN_SHRIB
, true, "lll" },
2978 { "__insn_shrih", TILEPRO_INSN_SHRIH
, true, "lll" },
2979 { "__insn_slt", TILEPRO_INSN_SLT
, true, "lll" },
2980 { "__insn_slt_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2981 { "__insn_sltb", TILEPRO_INSN_SLTB
, true, "lll" },
2982 { "__insn_sltb_u", TILEPRO_INSN_SLTB_U
, true, "lll" },
2983 { "__insn_slte", TILEPRO_INSN_SLTE
, true, "lll" },
2984 { "__insn_slte_u", TILEPRO_INSN_SLTE_U
, true, "lll" },
2985 { "__insn_slteb", TILEPRO_INSN_SLTEB
, true, "lll" },
2986 { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U
, true, "lll" },
2987 { "__insn_slteh", TILEPRO_INSN_SLTEH
, true, "lll" },
2988 { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U
, true, "lll" },
2989 { "__insn_slth", TILEPRO_INSN_SLTH
, true, "lll" },
2990 { "__insn_slth_u", TILEPRO_INSN_SLTH_U
, true, "lll" },
2991 { "__insn_slti", TILEPRO_INSN_SLT
, true, "lll" },
2992 { "__insn_slti_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2993 { "__insn_sltib", TILEPRO_INSN_SLTIB
, true, "lll" },
2994 { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U
, true, "lll" },
2995 { "__insn_sltih", TILEPRO_INSN_SLTIH
, true, "lll" },
2996 { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U
, true, "lll" },
2997 { "__insn_sne", TILEPRO_INSN_SNE
, true, "lll" },
2998 { "__insn_sneb", TILEPRO_INSN_SNEB
, true, "lll" },
2999 { "__insn_sneh", TILEPRO_INSN_SNEH
, true, "lll" },
3000 { "__insn_sra", TILEPRO_INSN_SRA
, true, "lll" },
3001 { "__insn_srab", TILEPRO_INSN_SRAB
, true, "lll" },
3002 { "__insn_srah", TILEPRO_INSN_SRAH
, true, "lll" },
3003 { "__insn_srai", TILEPRO_INSN_SRA
, true, "lll" },
3004 { "__insn_sraib", TILEPRO_INSN_SRAIB
, true, "lll" },
3005 { "__insn_sraih", TILEPRO_INSN_SRAIH
, true, "lll" },
3006 { "__insn_sub", TILEPRO_INSN_SUB
, true, "lll" },
3007 { "__insn_subb", TILEPRO_INSN_SUBB
, true, "lll" },
3008 { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U
, false, "lll" },
3009 { "__insn_subh", TILEPRO_INSN_SUBH
, true, "lll" },
3010 { "__insn_subhs", TILEPRO_INSN_SUBHS
, false, "lll" },
3011 { "__insn_subs", TILEPRO_INSN_SUBS
, false, "lll" },
3012 { "__insn_sw", TILEPRO_INSN_SW
, false, "vpl" },
3013 { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0
, true, "lll" },
3014 { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1
, true, "lll" },
3015 { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2
, true, "lll" },
3016 { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3
, true, "lll" },
3017 { "__insn_tns", TILEPRO_INSN_TNS
, false, "lp" },
3018 { "__insn_wh64", TILEPRO_INSN_WH64
, false, "vp" },
3019 { "__insn_xor", TILEPRO_INSN_XOR
, true, "lll" },
3020 { "__insn_xori", TILEPRO_INSN_XOR
, true, "lll" },
3021 { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER
, false, "v" },
3022 { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE
, false, "l" },
3023 { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE
, false, "l" },
3024 { "__tile_idn_send", TILEPRO_IDN_SEND
, false, "vl" },
3025 { "__tile_sn_receive", TILEPRO_SN_RECEIVE
, false, "l" },
3026 { "__tile_sn_send", TILEPRO_SN_SEND
, false, "vl" },
3027 { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE
, false, "l" },
3028 { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE
, false, "l" },
3029 { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE
, false, "l" },
3030 { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE
, false, "l" },
3031 { "__tile_udn_send", TILEPRO_UDN_SEND
, false, "vl" },
3035 /* Convert a character in a builtin type string to a tree type. */
3037 char_to_type (char c
)
3039 static tree volatile_ptr_type_node
= NULL
;
3040 static tree volatile_const_ptr_type_node
= NULL
;
3042 if (volatile_ptr_type_node
== NULL
)
3044 volatile_ptr_type_node
=
3045 build_pointer_type (build_qualified_type (void_type_node
,
3046 TYPE_QUAL_VOLATILE
));
3047 volatile_const_ptr_type_node
=
3048 build_pointer_type (build_qualified_type (void_type_node
,
3050 | TYPE_QUAL_VOLATILE
));
3056 return void_type_node
;
3058 return long_unsigned_type_node
;
3060 return volatile_ptr_type_node
;
3062 return volatile_const_ptr_type_node
;
3069 /* Implement TARGET_INIT_BUILTINS. */
3071 tilepro_init_builtins (void)
3075 for (i
= 0; i
< ARRAY_SIZE (tilepro_builtins
); i
++)
3077 const struct tilepro_builtin_def
*p
= &tilepro_builtins
[i
];
3078 tree ftype
, ret_type
, arg_type_list
= void_list_node
;
3082 for (j
= strlen (p
->type
) - 1; j
> 0; j
--)
3085 tree_cons (NULL_TREE
, char_to_type (p
->type
[j
]), arg_type_list
);
3088 ret_type
= char_to_type (p
->type
[0]);
3090 ftype
= build_function_type (ret_type
, arg_type_list
);
3092 decl
= add_builtin_function (p
->name
, ftype
, p
->code
, BUILT_IN_MD
,
3096 TREE_READONLY (decl
) = 1;
3097 TREE_NOTHROW (decl
) = 1;
3099 if (tilepro_builtin_info
[p
->code
].fndecl
== NULL
)
3100 tilepro_builtin_info
[p
->code
].fndecl
= decl
;
3105 /* Implement TARGET_EXPAND_BUILTIN. */
3107 tilepro_expand_builtin (tree exp
,
3109 rtx subtarget ATTRIBUTE_UNUSED
,
3110 machine_mode mode ATTRIBUTE_UNUSED
,
3111 int ignore ATTRIBUTE_UNUSED
)
3113 #define MAX_BUILTIN_ARGS 4
3115 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
3116 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3118 call_expr_arg_iterator iter
;
3119 enum insn_code icode
;
3120 rtx op
[MAX_BUILTIN_ARGS
+ 1], pat
;
3125 if (fcode
>= TILEPRO_BUILTIN_max
)
3126 internal_error ("bad builtin fcode");
3127 icode
= tilepro_builtin_info
[fcode
].icode
;
3129 internal_error ("bad builtin icode");
3131 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
3134 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3136 const struct insn_operand_data
*insn_op
;
3138 if (arg
== error_mark_node
)
3140 if (opnum
> MAX_BUILTIN_ARGS
)
3143 insn_op
= &insn_data
[icode
].operand
[opnum
];
3145 op
[opnum
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, EXPAND_NORMAL
);
3147 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3148 op
[opnum
] = copy_to_mode_reg (insn_op
->mode
, op
[opnum
]);
3150 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3152 /* We still failed to meet the predicate even after moving
3153 into a register. Assume we needed an immediate. */
3154 error_at (EXPR_LOCATION (exp
),
3155 "operand must be an immediate of the right size");
3164 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3166 || GET_MODE (target
) != tmode
3167 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3168 target
= gen_reg_rtx (tmode
);
3172 fn
= GEN_FCN (icode
);
3176 pat
= fn (NULL_RTX
);
3182 pat
= fn (op
[0], op
[1]);
3185 pat
= fn (op
[0], op
[1], op
[2]);
3188 pat
= fn (op
[0], op
[1], op
[2], op
[3]);
3191 pat
= fn (op
[0], op
[1], op
[2], op
[3], op
[4]);
3199 /* If we are generating a prefetch, tell the scheduler not to move
3201 if (GET_CODE (pat
) == PREFETCH
)
3202 PREFETCH_SCHEDULE_BARRIER_P (pat
) = true;
3213 /* Implement TARGET_BUILTIN_DECL. */
3215 tilepro_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
3217 if (code
>= TILEPRO_BUILTIN_max
)
3218 return error_mark_node
;
3220 return tilepro_builtin_info
[code
].fndecl
;
3227 /* Return whether REGNO needs to be saved in the stack frame. */
3229 need_to_save_reg (unsigned int regno
)
3231 if (!fixed_regs
[regno
] && !call_used_regs
[regno
]
3232 && df_regs_ever_live_p (regno
))
3236 && (regno
== PIC_OFFSET_TABLE_REGNUM
3237 || regno
== TILEPRO_PIC_TEXT_LABEL_REGNUM
)
3238 && (crtl
->uses_pic_offset_table
|| crtl
->saves_all_registers
))
3241 if (crtl
->calls_eh_return
)
3244 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; i
++)
3246 if (regno
== EH_RETURN_DATA_REGNO (i
))
3255 /* Return the size of the register savev area. This function is only
3256 correct starting with local register allocation */
3258 tilepro_saved_regs_size (void)
3260 int reg_save_size
= 0;
3262 int offset_to_frame
;
3265 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
3266 if (need_to_save_reg (regno
))
3267 reg_save_size
+= UNITS_PER_WORD
;
3269 /* Pad out the register save area if necessary to make
3270 frame_pointer_rtx be as aligned as the stack pointer. */
3271 offset_to_frame
= crtl
->args
.pretend_args_size
+ reg_save_size
;
3272 align_mask
= (STACK_BOUNDARY
/ BITS_PER_UNIT
) - 1;
3273 reg_save_size
+= (-offset_to_frame
) & align_mask
;
3275 return reg_save_size
;
3279 /* Round up frame size SIZE. */
3281 round_frame_size (int size
)
3283 return ((size
+ STACK_BOUNDARY
/ BITS_PER_UNIT
- 1)
3284 & -STACK_BOUNDARY
/ BITS_PER_UNIT
);
3288 /* Emit a store in the stack frame to save REGNO at address ADDR, and
3289 emit the corresponding REG_CFA_OFFSET note described by CFA and
3290 CFA_OFFSET. Return the emitted insn. */
3292 frame_emit_store (int regno
, int regno_note
, rtx addr
, rtx cfa
,
3295 rtx reg
= gen_rtx_REG (Pmode
, regno
);
3296 rtx mem
= gen_frame_mem (Pmode
, addr
);
3297 rtx mov
= gen_movsi (mem
, reg
);
3299 /* Describe what just happened in a way that dwarf understands. We
3300 use temporary registers to hold the address to make scheduling
3301 easier, and use the REG_CFA_OFFSET to describe the address as an
3302 offset from the CFA. */
3303 rtx reg_note
= gen_rtx_REG (Pmode
, regno_note
);
3304 rtx cfa_relative_addr
= gen_rtx_PLUS (Pmode
, cfa
, gen_int_si (cfa_offset
));
3305 rtx cfa_relative_mem
= gen_frame_mem (Pmode
, cfa_relative_addr
);
3306 rtx real
= gen_rtx_SET (cfa_relative_mem
, reg_note
);
3307 add_reg_note (mov
, REG_CFA_OFFSET
, real
);
3309 return emit_insn (mov
);
3313 /* Emit a load in the stack frame to load REGNO from address ADDR.
3314 Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3315 non-null. Return the emitted insn. */
3317 frame_emit_load (int regno
, rtx addr
, rtx
*cfa_restores
)
3319 rtx reg
= gen_rtx_REG (Pmode
, regno
);
3320 rtx mem
= gen_frame_mem (Pmode
, addr
);
3322 *cfa_restores
= alloc_reg_note (REG_CFA_RESTORE
, reg
, *cfa_restores
);
3323 return emit_insn (gen_movsi (reg
, mem
));
3327 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3328 including sequences. */
3330 set_frame_related_p (void)
3332 rtx_insn
*seq
= get_insns ();
3343 while (insn
!= NULL_RTX
)
3345 RTX_FRAME_RELATED_P (insn
) = 1;
3346 insn
= NEXT_INSN (insn
);
3348 seq
= emit_insn (seq
);
3352 seq
= emit_insn (seq
);
3353 RTX_FRAME_RELATED_P (seq
) = 1;
3359 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3361 /* This emits code for 'sp += offset'.
3363 The ABI only allows us to modify 'sp' in a single 'addi' or
3364 'addli', so the backtracer understands it. Larger amounts cannot
3365 use those instructions, so are added by placing the offset into a
3366 large register and using 'add'.
3368 This happens after reload, so we need to expand it ourselves. */
3370 emit_sp_adjust (int offset
, int *next_scratch_regno
, bool frame_related
,
3374 rtx imm_rtx
= gen_int_si (offset
);
3377 if (satisfies_constraint_J (imm_rtx
))
3379 /* We can add this using a single addi or addli. */
3384 rtx tmp
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3385 tilepro_expand_set_const32 (tmp
, imm_rtx
);
3389 /* Actually adjust the stack pointer. */
3390 insn
= emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3392 REG_NOTES (insn
) = reg_notes
;
3394 /* Describe what just happened in a way that dwarf understands. */
3397 rtx real
= gen_rtx_SET (stack_pointer_rtx
,
3398 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3400 RTX_FRAME_RELATED_P (insn
) = 1;
3401 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, real
);
3408 /* Return whether the current function is leaf. This takes into
3409 account whether the function calls tls_get_addr. */
3411 tilepro_current_function_is_leaf (void)
3413 return crtl
->is_leaf
&& !cfun
->machine
->calls_tls_get_addr
;
3417 /* Return the frame size. */
3419 compute_total_frame_size (void)
3421 int total_size
= (get_frame_size () + tilepro_saved_regs_size ()
3422 + crtl
->outgoing_args_size
3423 + crtl
->args
.pretend_args_size
);
3425 if (!tilepro_current_function_is_leaf () || cfun
->calls_alloca
)
3427 /* Make room for save area in callee. */
3428 total_size
+= STACK_POINTER_OFFSET
;
3431 return round_frame_size (total_size
);
3435 /* Return nonzero if this function is known to have a null epilogue.
3436 This allows the optimizer to omit jumps to jumps if no stack was
3439 tilepro_can_use_return_insn_p (void)
3441 return (reload_completed
3442 && cfun
->static_chain_decl
== 0
3443 && compute_total_frame_size () == 0
3444 && tilepro_current_function_is_leaf ()
3445 && !crtl
->profile
&& !df_regs_ever_live_p (TILEPRO_LINK_REGNUM
));
3449 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'. If there
3450 is a frame pointer, it computes the value relative to
3451 that. Otherwise it uses the stack pointer. */
3453 compute_frame_addr (int offset_from_fp
, int *next_scratch_regno
)
3455 rtx base_reg_rtx
, tmp_reg_rtx
, offset_rtx
;
3456 int offset_from_base
;
3458 if (frame_pointer_needed
)
3460 base_reg_rtx
= hard_frame_pointer_rtx
;
3461 offset_from_base
= offset_from_fp
;
3465 int offset_from_sp
= compute_total_frame_size () + offset_from_fp
;
3466 base_reg_rtx
= stack_pointer_rtx
;
3467 offset_from_base
= offset_from_sp
;
3470 if (offset_from_base
== 0)
3471 return base_reg_rtx
;
3473 /* Compute the new value of the stack pointer. */
3474 tmp_reg_rtx
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3475 offset_rtx
= gen_int_si (offset_from_base
);
3477 if (!tilepro_expand_addsi (tmp_reg_rtx
, base_reg_rtx
, offset_rtx
))
3479 emit_insn (gen_rtx_SET (tmp_reg_rtx
,
3480 gen_rtx_PLUS (Pmode
, base_reg_rtx
,
3488 /* The stack frame looks like this:
3493 AP -> +-------------+
3497 HFP -> +-------------+
3499 | reg save | crtl->args.pretend_args_size bytes
3502 | saved regs | tilepro_saved_regs_size() bytes
3503 FP -> +-------------+
3505 | vars | get_frame_size() bytes
3509 | stack args | crtl->outgoing_args_size bytes
3511 | HFP | 4 bytes (only here if nonleaf / alloca)
3513 | callee lr | 4 bytes (only here if nonleaf / alloca)
3515 SP -> +-------------+
3519 For functions with a frame larger than 32767 bytes, or which use
3520 alloca (), r52 is used as a frame pointer. Otherwise there is no
3523 FP is saved at SP+4 before calling a subroutine so the
3524 callee can chain. */
3526 tilepro_expand_prologue (void)
3528 #define ROUND_ROBIN_SIZE 4
3529 /* We round-robin through four scratch registers to hold temporary
3530 addresses for saving registers, to make instruction scheduling
3532 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3533 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3536 unsigned int which_scratch
;
3537 int offset
, start_offset
, regno
;
3539 /* A register that holds a copy of the incoming fp. */
3540 int fp_copy_regno
= -1;
3542 /* A register that holds a copy of the incoming sp. */
3543 int sp_copy_regno
= -1;
3545 /* Next scratch register number to hand out (postdecrementing). */
3546 int next_scratch_regno
= 29;
3548 int total_size
= compute_total_frame_size ();
3550 if (flag_stack_usage_info
)
3551 current_function_static_stack_size
= total_size
;
3553 /* Save lr first in its special location because code after this
3554 might use the link register as a scratch register. */
3555 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
) || crtl
->calls_eh_return
)
3556 FRP (frame_emit_store (TILEPRO_LINK_REGNUM
, TILEPRO_LINK_REGNUM
,
3557 stack_pointer_rtx
, stack_pointer_rtx
, 0));
3559 if (total_size
== 0)
3561 /* Load the PIC register if needed. */
3562 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3563 load_pic_register (false);
3568 cfa
= stack_pointer_rtx
;
3570 if (frame_pointer_needed
)
3572 fp_copy_regno
= next_scratch_regno
--;
3574 /* Copy the old frame pointer aside so we can save it later. */
3575 insn
= FRP (emit_move_insn (gen_rtx_REG (word_mode
, fp_copy_regno
),
3576 hard_frame_pointer_rtx
));
3577 add_reg_note (insn
, REG_CFA_REGISTER
, NULL_RTX
);
3579 /* Set up the frame pointer. */
3580 insn
= FRP (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
3581 add_reg_note (insn
, REG_CFA_DEF_CFA
, hard_frame_pointer_rtx
);
3582 cfa
= hard_frame_pointer_rtx
;
3583 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
;
3585 /* fp holds a copy of the incoming sp, in case we need to store
3587 sp_copy_regno
= HARD_FRAME_POINTER_REGNUM
;
3589 else if (!tilepro_current_function_is_leaf ())
3591 /* Copy the old stack pointer aside so we can save it later. */
3592 sp_copy_regno
= next_scratch_regno
--;
3593 emit_move_insn (gen_rtx_REG (Pmode
, sp_copy_regno
),
3597 if (tilepro_current_function_is_leaf ())
3599 /* No need to store chain pointer to caller's frame. */
3600 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3601 !frame_pointer_needed
, NULL_RTX
);
3605 /* Save the frame pointer (incoming sp value) to support
3606 backtracing. First we need to create an rtx with the store
3608 rtx chain_addr
= gen_rtx_REG (Pmode
, next_scratch_regno
--);
3609 rtx size_rtx
= gen_int_si (-(total_size
- UNITS_PER_WORD
));
3611 if (add_operand (size_rtx
, Pmode
))
3613 /* Expose more parallelism by computing this value from the
3614 original stack pointer, not the one after we have pushed
3616 rtx p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, size_rtx
);
3617 emit_insn (gen_rtx_SET (chain_addr
, p
));
3618 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3619 !frame_pointer_needed
, NULL_RTX
);
3623 /* The stack frame is large, so just store the incoming sp
3624 value at *(new_sp + UNITS_PER_WORD). */
3626 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3627 !frame_pointer_needed
, NULL_RTX
);
3628 p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3629 GEN_INT (UNITS_PER_WORD
));
3630 emit_insn (gen_rtx_SET (chain_addr
, p
));
3633 /* Save our frame pointer for backtrace chaining. */
3634 emit_insn (gen_movsi (gen_frame_mem (SImode
, chain_addr
),
3635 gen_rtx_REG (SImode
, sp_copy_regno
)));
3638 /* Compute where to start storing registers we need to save. */
3639 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3640 offset
= start_offset
;
3642 /* Store all registers that need saving. */
3644 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3645 if (need_to_save_reg (regno
))
3647 rtx r
= reg_save_addr
[which_scratch
];
3649 int cfa_offset
= frame_pointer_needed
? offset
: total_size
+ offset
;
3653 rtx p
= compute_frame_addr (offset
, &next_scratch_regno
);
3654 r
= gen_rtx_REG (word_mode
, next_scratch_regno
--);
3655 reg_save_addr
[which_scratch
] = r
;
3657 emit_insn (gen_rtx_SET (r
, p
));
3661 /* Advance to the next stack slot to store this register. */
3662 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3663 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3664 emit_insn (gen_rtx_SET (r
, p
));
3667 /* Save this register to the stack (but use the old fp value
3668 we copied aside if appropriate). */
3669 from_regno
= (fp_copy_regno
>= 0
3671 HARD_FRAME_POINTER_REGNUM
) ? fp_copy_regno
: regno
;
3672 FRP (frame_emit_store (from_regno
, regno
, r
, cfa
, cfa_offset
));
3674 offset
-= UNITS_PER_WORD
;
3675 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3678 /* If profiling, force that to happen after the frame is set up. */
3680 emit_insn (gen_blockage ());
3682 /* Load the PIC register if needed. */
3683 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3684 load_pic_register (false);
3688 /* Implement the epilogue and sibcall_epilogue patterns. SIBCALL_P is
3689 true for a sibcall_epilogue pattern, and false for an epilogue
3692 tilepro_expand_epilogue (bool sibcall_p
)
3694 /* We round-robin through four scratch registers to hold temporary
3695 addresses for saving registers, to make instruction scheduling
3697 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3698 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3700 rtx_insn
*last_insn
, *insn
;
3701 unsigned int which_scratch
;
3702 int offset
, start_offset
, regno
;
3703 rtx cfa_restores
= NULL_RTX
;
3705 /* A register that holds a copy of the incoming fp. */
3706 int fp_copy_regno
= -1;
3708 /* Next scratch register number to hand out (postdecrementing). */
3709 int next_scratch_regno
= 29;
3711 int total_size
= compute_total_frame_size ();
3713 last_insn
= get_last_insn ();
3715 /* Load lr first since we are going to need it first. */
3717 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
))
3719 insn
= frame_emit_load (TILEPRO_LINK_REGNUM
,
3720 compute_frame_addr (0, &next_scratch_regno
),
3724 if (total_size
== 0)
3728 RTX_FRAME_RELATED_P (insn
) = 1;
3729 REG_NOTES (insn
) = cfa_restores
;
3734 /* Compute where to start restoring registers. */
3735 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3736 offset
= start_offset
;
3738 if (frame_pointer_needed
)
3739 fp_copy_regno
= next_scratch_regno
--;
3741 /* Restore all callee-saved registers. */
3743 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3744 if (need_to_save_reg (regno
))
3746 rtx r
= reg_save_addr
[which_scratch
];
3749 r
= compute_frame_addr (offset
, &next_scratch_regno
);
3750 reg_save_addr
[which_scratch
] = r
;
3754 /* Advance to the next stack slot to store this
3756 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3757 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3758 emit_insn (gen_rtx_SET (r
, p
));
3761 if (fp_copy_regno
>= 0 && regno
== HARD_FRAME_POINTER_REGNUM
)
3762 frame_emit_load (fp_copy_regno
, r
, NULL
);
3764 frame_emit_load (regno
, r
, &cfa_restores
);
3766 offset
-= UNITS_PER_WORD
;
3767 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3770 if (!tilepro_current_function_is_leaf ())
3772 alloc_reg_note (REG_CFA_RESTORE
, stack_pointer_rtx
, cfa_restores
);
3774 emit_insn (gen_blockage ());
3776 if (frame_pointer_needed
)
3778 /* Restore the old stack pointer by copying from the frame
3780 insn
= emit_insn (gen_sp_restore (stack_pointer_rtx
,
3781 hard_frame_pointer_rtx
));
3782 RTX_FRAME_RELATED_P (insn
) = 1;
3783 REG_NOTES (insn
) = cfa_restores
;
3784 add_reg_note (insn
, REG_CFA_DEF_CFA
, stack_pointer_rtx
);
3788 insn
= emit_sp_adjust (total_size
, &next_scratch_regno
, true,
3792 if (crtl
->calls_eh_return
)
3793 emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3794 EH_RETURN_STACKADJ_RTX
));
3796 /* Restore the old frame pointer. */
3797 if (frame_pointer_needed
)
3799 insn
= emit_move_insn (hard_frame_pointer_rtx
,
3800 gen_rtx_REG (Pmode
, fp_copy_regno
));
3801 add_reg_note (insn
, REG_CFA_RESTORE
, hard_frame_pointer_rtx
);
3804 /* Mark the pic registers as live outside of the function. */
3807 emit_use (cfun
->machine
->text_label_rtx
);
3808 emit_use (cfun
->machine
->got_rtx
);
3814 /* Emit the actual 'return' instruction. */
3815 emit_jump_insn (gen__return ());
3819 emit_use (gen_rtx_REG (Pmode
, TILEPRO_LINK_REGNUM
));
3822 /* Mark all insns we just emitted as frame-related. */
3823 for (; last_insn
!= NULL_RTX
; last_insn
= next_insn (last_insn
))
3824 RTX_FRAME_RELATED_P (last_insn
) = 1;
3827 #undef ROUND_ROBIN_SIZE
3830 /* Implement INITIAL_ELIMINATION_OFFSET. */
3832 tilepro_initial_elimination_offset (int from
, int to
)
3834 int total_size
= compute_total_frame_size ();
3836 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3838 return (total_size
- crtl
->args
.pretend_args_size
3839 - tilepro_saved_regs_size ());
3841 else if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3843 return -(crtl
->args
.pretend_args_size
+ tilepro_saved_regs_size ());
3845 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3847 return STACK_POINTER_OFFSET
+ total_size
;
3849 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3851 return STACK_POINTER_OFFSET
;
3858 /* Return an RTX indicating where the return address to the
3859 calling function can be found. */
3861 tilepro_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
3866 return get_hard_reg_initial_val (Pmode
, TILEPRO_LINK_REGNUM
);
3870 /* Implement EH_RETURN_HANDLER_RTX. */
3872 tilepro_eh_return_handler_rtx (void)
3874 /* The MEM needs to be volatile to prevent it from being
3876 rtx tmp
= gen_frame_mem (Pmode
, hard_frame_pointer_rtx
);
3877 MEM_VOLATILE_P (tmp
) = true;
3885 /* Implemnet TARGET_CONDITIONAL_REGISTER_USAGE. */
3887 tilepro_conditional_register_usage (void)
3889 global_regs
[TILEPRO_NETORDER_REGNUM
] = 1;
3890 /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used. It is a
3891 member of fixed_regs, and therefore must be member of
3892 call_used_regs, but it is not a member of call_really_used_regs[]
3893 because it is not clobbered by a call. */
3894 if (TILEPRO_PIC_TEXT_LABEL_REGNUM
!= INVALID_REGNUM
)
3896 fixed_regs
[TILEPRO_PIC_TEXT_LABEL_REGNUM
] = 1;
3897 call_used_regs
[TILEPRO_PIC_TEXT_LABEL_REGNUM
] = 1;
3899 if (PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
3901 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3902 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3907 /* Implement TARGET_FRAME_POINTER_REQUIRED. */
3909 tilepro_frame_pointer_required (void)
3911 return crtl
->calls_eh_return
|| cfun
->calls_alloca
;
3916 /* Scheduling and reorg */
3918 /* Return the length of INSN. LENGTH is the initial length computed
3919 by attributes in the machine-description file. This is where we
3920 account for bundles. */
3922 tilepro_adjust_insn_length (rtx_insn
*insn
, int length
)
3924 machine_mode mode
= GET_MODE (insn
);
3926 /* A non-termininating instruction in a bundle has length 0. */
3930 /* By default, there is not length adjustment. */
/* Implement TARGET_SCHED_ISSUE_RATE.  TILEPro bundles up to three
   instructions per cycle.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
3943 /* Return the rtx for the jump target. */
3945 get_jump_target (rtx branch
)
3947 if (CALL_P (branch
))
3950 call
= PATTERN (branch
);
3952 if (GET_CODE (call
) == PARALLEL
)
3953 call
= XVECEXP (call
, 0, 0);
3955 if (GET_CODE (call
) == SET
)
3956 call
= SET_SRC (call
);
3958 if (GET_CODE (call
) == CALL
)
3959 return XEXP (XEXP (call
, 0), 0);
3964 /* Implement TARGET_SCHED_ADJUST_COST. */
3966 tilepro_sched_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
,
3969 /* If we have a true dependence, INSN is a call, and DEP_INSN
3970 defines a register that is needed by the call (argument or stack
3971 pointer), set its latency to 0 so that it can be bundled with
3972 the call. Explicitly check for and exclude the case when
3973 DEP_INSN defines the target of the jump. */
3974 if (CALL_P (insn
) && REG_NOTE_KIND (link
) == REG_DEP_TRUE
)
3976 rtx target
= get_jump_target (insn
);
3977 if (!REG_P (target
) || !set_of (target
, dep_insn
))
3985 /* Skip over irrelevant NOTEs and such and look for the next insn we
3986 would consider bundling. */
3988 next_insn_to_bundle (rtx_insn
*r
, rtx_insn
*end
)
3990 for (; r
!= end
; r
= NEXT_INSN (r
))
3992 if (NONDEBUG_INSN_P (r
)
3993 && GET_CODE (PATTERN (r
)) != USE
3994 && GET_CODE (PATTERN (r
)) != CLOBBER
)
4002 /* Go through all insns, and use the information generated during
4003 scheduling to generate SEQUENCEs to represent bundles of
4004 instructions issued simultaneously. */
4006 tilepro_gen_bundles (void)
4009 FOR_EACH_BB_FN (bb
, cfun
)
4011 rtx_insn
*insn
, *next
;
4012 rtx_insn
*end
= NEXT_INSN (BB_END (bb
));
4014 for (insn
= next_insn_to_bundle (BB_HEAD (bb
), end
); insn
; insn
= next
)
4016 next
= next_insn_to_bundle (NEXT_INSN (insn
), end
);
4018 /* Never wrap {} around inline asm. */
4019 if (GET_CODE (PATTERN (insn
)) != ASM_INPUT
)
4021 if (next
== NULL_RTX
|| GET_MODE (next
) == TImode
4022 /* NOTE: The scheduler incorrectly believes a call
4023 insn can execute in the same cycle as the insn
4024 after the call. This is of course impossible.
4025 Really we need to fix the scheduler somehow, so
4026 the code after the call gets scheduled
4030 /* Mark current insn as the end of a bundle. */
4031 PUT_MODE (insn
, QImode
);
4035 /* Mark it as part of a bundle. */
4036 PUT_MODE (insn
, SImode
);
4044 /* Helper function for tilepro_fixup_pcrel_references. */
4046 replace_pc_relative_symbol_ref (rtx_insn
*insn
, rtx opnds
[4], bool first_insn_p
)
4048 rtx_insn
*new_insns
;
4056 emit_insn (gen_add_got16 (opnds
[0], tilepro_got_rtx (),
4058 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4065 emit_insn (gen_addhi_got32 (opnds
[0], tilepro_got_rtx (),
4070 emit_insn (gen_addlo_got32 (opnds
[0], opnds
[1], opnds
[2]));
4071 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4075 new_insns
= get_insns ();
4079 emit_insn_before (new_insns
, insn
);
4085 /* Returns whether INSN is a pc-relative addli insn. */
4087 match_addli_pcrel (rtx_insn
*insn
)
4089 rtx pattern
= PATTERN (insn
);
4092 if (GET_CODE (pattern
) != SET
)
4095 if (GET_CODE (SET_SRC (pattern
)) != LO_SUM
)
4098 if (GET_CODE (XEXP (SET_SRC (pattern
), 1)) != CONST
)
4101 unspec
= XEXP (XEXP (SET_SRC (pattern
), 1), 0);
4103 return (GET_CODE (unspec
) == UNSPEC
4104 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4108 /* Helper function for tilepro_fixup_pcrel_references. */
4110 replace_addli_pcrel (rtx_insn
*insn
)
4112 rtx pattern
= PATTERN (insn
);
4118 gcc_assert (GET_CODE (pattern
) == SET
);
4119 opnds
[0] = SET_DEST (pattern
);
4121 set_src
= SET_SRC (pattern
);
4122 gcc_assert (GET_CODE (set_src
) == LO_SUM
);
4123 gcc_assert (GET_CODE (XEXP (set_src
, 1)) == CONST
);
4124 opnds
[1] = XEXP (set_src
, 0);
4126 unspec
= XEXP (XEXP (set_src
, 1), 0);
4127 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4128 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4129 opnds
[2] = XVECEXP (unspec
, 0, 0);
4130 opnds
[3] = XVECEXP (unspec
, 0, 1);
4132 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4133 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4136 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4138 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4142 /* Returns whether INSN is a pc-relative auli insn. */
4144 match_auli_pcrel (rtx_insn
*insn
)
4146 rtx pattern
= PATTERN (insn
);
4150 if (GET_CODE (pattern
) != SET
)
4153 if (GET_CODE (SET_SRC (pattern
)) != PLUS
)
4156 high
= XEXP (SET_SRC (pattern
), 1);
4158 if (GET_CODE (high
) != HIGH
4159 || GET_CODE (XEXP (high
, 0)) != CONST
)
4162 unspec
= XEXP (XEXP (high
, 0), 0);
4164 return (GET_CODE (unspec
) == UNSPEC
4165 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4169 /* Helper function for tilepro_fixup_pcrel_references. */
4171 replace_auli_pcrel (rtx_insn
*insn
)
4173 rtx pattern
= PATTERN (insn
);
4180 gcc_assert (GET_CODE (pattern
) == SET
);
4181 opnds
[0] = SET_DEST (pattern
);
4183 set_src
= SET_SRC (pattern
);
4184 gcc_assert (GET_CODE (set_src
) == PLUS
);
4185 opnds
[1] = XEXP (set_src
, 0);
4187 high
= XEXP (set_src
, 1);
4188 gcc_assert (GET_CODE (high
) == HIGH
);
4189 gcc_assert (GET_CODE (XEXP (high
, 0)) == CONST
);
4191 unspec
= XEXP (XEXP (high
, 0), 0);
4192 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4193 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4194 opnds
[2] = XVECEXP (unspec
, 0, 0);
4195 opnds
[3] = XVECEXP (unspec
, 0, 1);
4197 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4198 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4201 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4203 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4207 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4208 going through the GOT when the symbol is local to the compilation
4209 unit. But such a symbol requires that the common text_label that
4210 we generate at the beginning of the function be in the same section
4211 as the reference to the SYMBOL_REF. This may not be true if we
4212 generate hot/cold sections. This function looks for such cases and
4213 replaces such references with the longer sequence going through the
4216 We expect one of the following two instruction sequences:
4217 addli tmp1, txt_label_reg, lo16(sym - txt_label)
4218 auli tmp2, tmp1, ha16(sym - txt_label)
4220 auli tmp1, txt_label_reg, ha16(sym - txt_label)
4221 addli tmp2, tmp1, lo16(sym - txt_label)
4223 If we're compiling -fpic, we replace the first instruction with
4224 nothing, and the second instruction with:
4226 addli tmp2, got_rtx, got(sym)
4229 If we're compiling -fPIC, we replace the first instruction with:
4231 auli tmp1, got_rtx, got_ha16(sym)
4233 and the second instruction with:
4235 addli tmp2, tmp1, got_lo16(sym)
4238 Note that we're careful to disturb the instruction sequence as
4239 little as possible, since it's very late in the compilation
4243 tilepro_fixup_pcrel_references (void)
4245 rtx_insn
*insn
, *next_insn
;
4246 bool same_section_as_entry
= true;
4248 for (insn
= get_insns (); insn
; insn
= next_insn
)
4250 next_insn
= NEXT_INSN (insn
);
4252 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_SWITCH_TEXT_SECTIONS
)
4254 same_section_as_entry
= !same_section_as_entry
;
4258 if (same_section_as_entry
)
4262 && GET_CODE (PATTERN (insn
)) != USE
4263 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
4266 if (match_addli_pcrel (insn
))
4267 replace_addli_pcrel (insn
);
4268 else if (match_auli_pcrel (insn
))
4269 replace_auli_pcrel (insn
);
4274 /* Ensure that no var tracking notes are emitted in the middle of a
4275 three-instruction bundle. */
4277 reorder_var_tracking_notes (void)
4280 FOR_EACH_BB_FN (bb
, cfun
)
4282 rtx_insn
*insn
, *next
;
4283 rtx_insn
*queue
= NULL
;
4284 bool in_bundle
= false;
4286 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4288 next
= NEXT_INSN (insn
);
4292 /* Emit queued up notes at the last instruction of a bundle. */
4293 if (GET_MODE (insn
) == QImode
)
4297 rtx_insn
*next_queue
= PREV_INSN (queue
);
4298 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4299 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4300 SET_NEXT_INSN (insn
) = queue
;
4301 SET_PREV_INSN (queue
) = insn
;
4306 else if (GET_MODE (insn
) == SImode
)
4309 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4313 rtx_insn
*prev
= PREV_INSN (insn
);
4314 SET_PREV_INSN (next
) = prev
;
4315 SET_NEXT_INSN (prev
) = next
;
4317 SET_PREV_INSN (insn
) = queue
;
4326 /* Perform machine dependent operations on the rtl chain INSNS. */
4328 tilepro_reorg (void)
4330 /* We are freeing block_for_insn in the toplev to keep compatibility
4331 with old MDEP_REORGS that are not CFG based. Recompute it
4333 compute_bb_for_insn ();
4335 if (flag_reorder_blocks_and_partition
)
4337 tilepro_fixup_pcrel_references ();
4340 if (flag_schedule_insns_after_reload
)
4344 timevar_push (TV_SCHED2
);
4346 timevar_pop (TV_SCHED2
);
4348 /* Examine the schedule to group into bundles. */
4349 tilepro_gen_bundles ();
4354 if (flag_var_tracking
)
4356 timevar_push (TV_VAR_TRACKING
);
4357 variable_tracking_main ();
4358 reorder_var_tracking_notes ();
4359 timevar_pop (TV_VAR_TRACKING
);
4362 df_finish_pass (false);
4369 /* Select a format to encode pointers in exception handling data.
4370 CODE is 0 for data, 1 for code labels, 2 for function pointers.
4371 GLOBAL is true if the symbol may be affected by dynamic
4374 tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED
, int global
)
4376 return (global
? DW_EH_PE_indirect
: 0) | DW_EH_PE_pcrel
| DW_EH_PE_sdata4
;
4380 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4382 tilepro_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
4383 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
4386 rtx this_rtx
, funexp
;
4389 /* Pretend to be a post-reload pass while generating rtl. */
4390 reload_completed
= 1;
4392 /* Mark the end of the (empty) prologue. */
4393 emit_note (NOTE_INSN_PROLOGUE_END
);
4395 /* Find the "this" pointer. If the function returns a structure,
4396 the structure return pointer is in $1. */
4397 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
4398 this_rtx
= gen_rtx_REG (Pmode
, 1);
4400 this_rtx
= gen_rtx_REG (Pmode
, 0);
4402 /* Add DELTA to THIS_RTX. */
4403 emit_insn (gen_addsi3 (this_rtx
, this_rtx
, GEN_INT (delta
)));
4405 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4410 tmp
= gen_rtx_REG (Pmode
, 29);
4411 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
4413 emit_insn (gen_addsi3 (tmp
, tmp
, GEN_INT (vcall_offset
)));
4415 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
4417 emit_insn (gen_addsi3 (this_rtx
, this_rtx
, tmp
));
4420 /* Generate a tail call to the target function. */
4421 if (!TREE_USED (function
))
4423 assemble_external (function
);
4424 TREE_USED (function
) = 1;
4426 funexp
= XEXP (DECL_RTL (function
), 0);
4427 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
4428 insn
= emit_call_insn (gen_sibcall (funexp
, const0_rtx
));
4429 SIBLING_CALL_P (insn
) = 1;
4431 /* Run just enough of rest_of_compilation to get the insns emitted.
4432 There's not really enough bulk here to make other passes such as
4433 instruction scheduling worth while. Note that use_thunk calls
4434 assemble_start_function and assemble_end_function.
4436 We don't currently bundle, but the instruciton sequence is all
4437 serial except for the tail call, so we're only wasting one cycle.
4439 insn
= get_insns ();
4440 shorten_branches (insn
);
4441 final_start_function (insn
, file
, 1);
4442 final (insn
, file
, 1);
4443 final_end_function ();
4445 /* Stop pretending to be a post-reload pass. */
4446 reload_completed
= 0;
4450 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
4452 tilepro_asm_trampoline_template (FILE *file
)
4454 fprintf (file
, "\tlnk r10\n");
4455 fprintf (file
, "\taddi r10, r10, 32\n");
4456 fprintf (file
, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode
));
4457 fprintf (file
, "\tlw r10, r10\n");
4458 fprintf (file
, "\tjr r11\n");
4459 fprintf (file
, "\t.word 0 # <function address>\n");
4460 fprintf (file
, "\t.word 0 # <static chain value>\n");
4464 /* Implement TARGET_TRAMPOLINE_INIT. */
4466 tilepro_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4470 rtx begin_addr
, end_addr
;
4471 int ptr_mode_size
= GET_MODE_SIZE (ptr_mode
);
4473 fnaddr
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
4474 chaddr
= copy_to_reg (static_chain
);
4476 emit_block_move (m_tramp
, assemble_trampoline_template (),
4477 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
4479 mem
= adjust_address (m_tramp
, ptr_mode
,
4480 TRAMPOLINE_SIZE
- 2 * ptr_mode_size
);
4481 emit_move_insn (mem
, fnaddr
);
4482 mem
= adjust_address (m_tramp
, ptr_mode
,
4483 TRAMPOLINE_SIZE
- ptr_mode_size
);
4484 emit_move_insn (mem
, chaddr
);
4486 /* Get pointers to the beginning and end of the code block. */
4487 begin_addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
4488 end_addr
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0),
4491 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__clear_cache"),
4492 LCT_NORMAL
, VOIDmode
, 2, begin_addr
, Pmode
,
4497 /* Implement TARGET_PRINT_OPERAND. */
4499 tilepro_print_operand (FILE *file
, rtx x
, int code
)
4504 /* Print the compare operator opcode for conditional moves. */
4505 switch (GET_CODE (x
))
4514 output_operand_lossage ("invalid %%c operand");
4519 /* Print the compare operator opcode for conditional moves. */
4520 switch (GET_CODE (x
))
4529 output_operand_lossage ("invalid %%C operand");
4535 /* Print the high 16 bits of a 32-bit constant. */
4537 if (CONST_INT_P (x
))
4539 else if (GET_CODE (x
) == CONST_DOUBLE
)
4540 i
= CONST_DOUBLE_LOW (x
);
4543 output_operand_lossage ("invalid %%h operand");
4546 i
= trunc_int_for_mode (i
>> 16, HImode
);
4547 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4554 const char *opstr
= NULL
;
4556 if (GET_CODE (x
) == CONST
4557 && GET_CODE (XEXP (x
, 0)) == UNSPEC
)
4559 addr
= XVECEXP (XEXP (x
, 0), 0, 0);
4560 switch (XINT (XEXP (x
, 0), 1))
4562 case UNSPEC_GOT32_SYM
:
4565 case UNSPEC_PCREL_SYM
:
4570 opstr
= "tls_gd_ha16";
4573 opstr
= "tls_ie_ha16";
4576 opstr
= "tls_le_ha16";
4579 output_operand_lossage ("invalid %%H operand");
4588 fputs (opstr
, file
);
4590 output_addr_const (file
, addr
);
4594 rtx addr2
= XVECEXP (XEXP (x
, 0), 0, 1);
4595 fputs (" - " , file
);
4596 output_addr_const (file
, addr2
);
4604 /* Print an auto-inc memory operand. */
4607 output_operand_lossage ("invalid %%I operand");
4611 output_memory_reference_mode
= GET_MODE (x
);
4612 output_memory_autoinc_first
= true;
4613 output_address (XEXP (x
, 0));
4614 output_memory_reference_mode
= VOIDmode
;
4618 /* Print an auto-inc memory operand. */
4621 output_operand_lossage ("invalid %%i operand");
4625 output_memory_reference_mode
= GET_MODE (x
);
4626 output_memory_autoinc_first
= false;
4627 output_address (XEXP (x
, 0));
4628 output_memory_reference_mode
= VOIDmode
;
4633 /* Print the low 8 bits of a constant. */
4635 if (CONST_INT_P (x
))
4637 else if (GET_CODE (x
) == CONST_DOUBLE
)
4638 i
= CONST_DOUBLE_LOW (x
);
4639 else if (GET_CODE (x
) == CONST_VECTOR
4640 && CONST_INT_P (CONST_VECTOR_ELT (x
, 0)))
4641 i
= INTVAL (CONST_VECTOR_ELT (x
, 0));
4644 output_operand_lossage ("invalid %%j operand");
4647 i
= trunc_int_for_mode (i
, QImode
);
4648 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4655 const char *opstr
= NULL
;
4657 if (GET_CODE (x
) == CONST
4658 && GET_CODE (XEXP (x
, 0)) == UNSPEC
)
4660 addr
= XVECEXP (XEXP (x
, 0), 0, 0);
4661 switch (XINT (XEXP (x
, 0), 1))
4663 case UNSPEC_GOT16_SYM
:
4666 case UNSPEC_GOT32_SYM
:
4669 case UNSPEC_PCREL_SYM
:
4674 opstr
= "tls_gd_lo16";
4677 opstr
= "tls_ie_lo16";
4680 opstr
= "tls_le_lo16";
4683 output_operand_lossage ("invalid %%L operand");
4692 fputs (opstr
, file
);
4694 output_addr_const (file
, addr
);
4698 rtx addr2
= XVECEXP (XEXP (x
, 0), 0, 1);
4699 fputs (" - " , file
);
4700 output_addr_const (file
, addr2
);
4708 if (GET_CODE (x
) == SYMBOL_REF
)
4710 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (x
))
4711 fprintf (file
, "plt(");
4712 output_addr_const (file
, x
);
4713 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (x
))
4714 fprintf (file
, ")");
4717 output_addr_const (file
, x
);
4722 /* Print a 32-bit constant plus one. */
4724 if (!CONST_INT_P (x
))
4726 output_operand_lossage ("invalid %%P operand");
4729 i
= trunc_int_for_mode (INTVAL (x
) + 1, SImode
);
4730 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4736 /* Print an mm-style bit range. */
4737 int first_bit
, last_bit
;
4739 if (!CONST_INT_P (x
)
4740 || !tilepro_bitfield_operand_p (INTVAL (x
), &first_bit
,
4743 output_operand_lossage ("invalid %%M operand");
4747 fprintf (file
, "%d, %d", first_bit
, last_bit
);
4753 const char *reg
= NULL
;
4755 /* Print a network register. */
4756 if (!CONST_INT_P (x
))
4758 output_operand_lossage ("invalid %%N operand");
4764 case TILEPRO_NETREG_IDN0
: reg
= "idn0"; break;
4765 case TILEPRO_NETREG_IDN1
: reg
= "idn1"; break;
4766 case TILEPRO_NETREG_SN
: reg
= "sn"; break;
4767 case TILEPRO_NETREG_UDN0
: reg
= "udn0"; break;
4768 case TILEPRO_NETREG_UDN1
: reg
= "udn1"; break;
4769 case TILEPRO_NETREG_UDN2
: reg
= "udn2"; break;
4770 case TILEPRO_NETREG_UDN3
: reg
= "udn3"; break;
4771 default: gcc_unreachable ();
4774 fprintf (file
, reg
);
4780 /* Log base 2 of a power of two. */
4784 if (!CONST_INT_P (x
))
4786 output_operand_lossage ("invalid %%t operand");
4789 n
= trunc_int_for_mode (INTVAL (x
), SImode
);
4793 output_operand_lossage ("invalid %%t operand '"
4794 HOST_WIDE_INT_PRINT_DEC
"'", n
);
4798 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4804 /* In this case we need a register. Use 'zero' if the
4805 operand is const0_rtx. */
4807 || (GET_MODE (x
) != VOIDmode
&& x
== CONST0_RTX (GET_MODE (x
))))
4809 fputs ("zero", file
);
4812 else if (!REG_P (x
))
4814 output_operand_lossage ("invalid %%r operand");
4822 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
4827 output_memory_reference_mode
= VOIDmode
;
4828 output_address (XEXP (x
, 0));
4833 output_addr_const (file
, x
);
4840 output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
4845 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
4847 tilepro_print_operand_address (FILE *file
, rtx addr
)
4849 if (GET_CODE (addr
) == POST_DEC
4850 || GET_CODE (addr
) == POST_INC
)
4852 int offset
= GET_MODE_SIZE (output_memory_reference_mode
);
4854 gcc_assert (output_memory_reference_mode
!= VOIDmode
);
4856 if (output_memory_autoinc_first
)
4857 fprintf (file
, "%s", reg_names
[REGNO (XEXP (addr
, 0))]);
4859 fprintf (file
, "%d",
4860 GET_CODE (addr
) == POST_DEC
? -offset
: offset
);
4862 else if (GET_CODE (addr
) == POST_MODIFY
)
4864 gcc_assert (output_memory_reference_mode
!= VOIDmode
);
4866 gcc_assert (GET_CODE (XEXP (addr
, 1)) == PLUS
);
4868 if (output_memory_autoinc_first
)
4869 fprintf (file
, "%s", reg_names
[REGNO (XEXP (addr
, 0))]);
4871 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
4872 INTVAL (XEXP (XEXP (addr
, 1), 1)));
4875 tilepro_print_operand (file
, addr
, 'r');
4879 /* Machine mode of current insn, for determining curly brace
4881 static machine_mode insn_mode
;
4884 /* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
4886 tilepro_final_prescan_insn (rtx_insn
*insn
)
4888 /* Record this for tilepro_asm_output_opcode to examine. */
4889 insn_mode
= GET_MODE (insn
);
4893 /* While emitting asm, are we currently inside '{' for a bundle? */
4894 static bool tilepro_in_bundle
= false;
4896 /* Implement ASM_OUTPUT_OPCODE. Prepend/append curly braces as
4897 appropriate given the bundling information recorded by
4898 tilepro_gen_bundles. */
4900 tilepro_asm_output_opcode (FILE *stream
, const char *code
)
4902 bool pseudo
= !strcmp (code
, "pseudo");
4904 if (!tilepro_in_bundle
&& insn_mode
== SImode
)
4906 /* Start a new bundle. */
4907 fprintf (stream
, "{\n\t");
4908 tilepro_in_bundle
= true;
4911 if (tilepro_in_bundle
&& insn_mode
== QImode
)
4913 /* Close an existing bundle. */
4914 static char buf
[100];
4916 gcc_assert (strlen (code
) + 3 + 1 < sizeof (buf
));
4918 strcpy (buf
, pseudo
? "" : code
);
4919 strcat (buf
, "\n\t}");
4920 tilepro_in_bundle
= false;
4926 return pseudo
? "" : code
;
4931 /* Output assembler code to FILE to increment profiler label # LABELNO
4932 for profiling a function entry. */
4934 tilepro_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
4936 if (tilepro_in_bundle
)
4938 fprintf (file
, "\t}\n");
4947 "\t}\n", MCOUNT_NAME
);
4955 "\t}\n", MCOUNT_NAME
);
4958 tilepro_in_bundle
= false;
4962 /* Implement TARGET_ASM_FILE_END. */
4964 tilepro_file_end (void)
4966 if (NEED_INDICATE_EXEC_STACK
)
4967 file_end_indicate_exec_stack ();
4971 #undef TARGET_HAVE_TLS
4972 #define TARGET_HAVE_TLS HAVE_AS_TLS
4974 #undef TARGET_OPTION_OVERRIDE
4975 #define TARGET_OPTION_OVERRIDE tilepro_option_override
4977 #undef TARGET_SCALAR_MODE_SUPPORTED_P
4978 #define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p
4980 #undef TARGET_VECTOR_MODE_SUPPORTED_P
4981 #define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p
4983 #undef TARGET_CANNOT_FORCE_CONST_MEM
4984 #define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem
4986 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
4987 #define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall
4989 #undef TARGET_PASS_BY_REFERENCE
4990 #define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference
4992 #undef TARGET_RETURN_IN_MEMORY
4993 #define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory
4995 #undef TARGET_FUNCTION_ARG_BOUNDARY
4996 #define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary
4998 #undef TARGET_FUNCTION_ARG
4999 #define TARGET_FUNCTION_ARG tilepro_function_arg
5001 #undef TARGET_FUNCTION_ARG_ADVANCE
5002 #define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance
5004 #undef TARGET_FUNCTION_VALUE
5005 #define TARGET_FUNCTION_VALUE tilepro_function_value
5007 #undef TARGET_LIBCALL_VALUE
5008 #define TARGET_LIBCALL_VALUE tilepro_libcall_value
5010 #undef TARGET_FUNCTION_VALUE_REGNO_P
5011 #define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p
5013 #undef TARGET_PROMOTE_FUNCTION_MODE
5014 #define TARGET_PROMOTE_FUNCTION_MODE \
5015 default_promote_function_mode_always_promote
5017 #undef TARGET_PROMOTE_PROTOTYPES
5018 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
5020 #undef TARGET_BUILD_BUILTIN_VA_LIST
5021 #define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list
5023 #undef TARGET_EXPAND_BUILTIN_VA_START
5024 #define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start
5026 #undef TARGET_SETUP_INCOMING_VARARGS
5027 #define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs
5029 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
5030 #define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr
5032 #undef TARGET_RTX_COSTS
5033 #define TARGET_RTX_COSTS tilepro_rtx_costs
5035 /* Limit to what we can reach in one addli. */
5036 #undef TARGET_MIN_ANCHOR_OFFSET
5037 #define TARGET_MIN_ANCHOR_OFFSET -32768
5038 #undef TARGET_MAX_ANCHOR_OFFSET
5039 #define TARGET_MAX_ANCHOR_OFFSET 32767
5041 #undef TARGET_LEGITIMATE_CONSTANT_P
5042 #define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p
5044 #undef TARGET_LEGITIMATE_ADDRESS_P
5045 #define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p
5047 #undef TARGET_LEGITIMIZE_ADDRESS
5048 #define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address
5050 #undef TARGET_DELEGITIMIZE_ADDRESS
5051 #define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address
5053 #undef TARGET_INIT_BUILTINS
5054 #define TARGET_INIT_BUILTINS tilepro_init_builtins
5056 #undef TARGET_BUILTIN_DECL
5057 #define TARGET_BUILTIN_DECL tilepro_builtin_decl
5059 #undef TARGET_EXPAND_BUILTIN
5060 #define TARGET_EXPAND_BUILTIN tilepro_expand_builtin
5062 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5063 #define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage
5065 #undef TARGET_FRAME_POINTER_REQUIRED
5066 #define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required
5068 #undef TARGET_DELAY_SCHED2
5069 #define TARGET_DELAY_SCHED2 true
5071 #undef TARGET_DELAY_VARTRACK
5072 #define TARGET_DELAY_VARTRACK true
5074 #undef TARGET_SCHED_ISSUE_RATE
5075 #define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate
5077 #undef TARGET_SCHED_ADJUST_COST
5078 #define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost
5080 #undef TARGET_MACHINE_DEPENDENT_REORG
5081 #define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg
5083 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5084 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5085 hook_bool_const_tree_hwi_hwi_const_tree_true
5087 #undef TARGET_ASM_OUTPUT_MI_THUNK
5088 #define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk
5090 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5091 #define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template
5093 #undef TARGET_TRAMPOLINE_INIT
5094 #define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init
5096 #undef TARGET_PRINT_OPERAND
5097 #define TARGET_PRINT_OPERAND tilepro_print_operand
5099 #undef TARGET_PRINT_OPERAND_ADDRESS
5100 #define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address
5102 #undef TARGET_ASM_FILE_END
5103 #define TARGET_ASM_FILE_END tilepro_file_end
5105 #undef TARGET_CAN_USE_DOLOOP_P
5106 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
5108 struct gcc_target targetm
= TARGET_INITIALIZER
;
5110 #include "gt-tilepro.h"