/* Subroutines used for code generation on the Tilera TILEPro.
   Copyright (C) 2011-2022 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#define IN_TARGET_CODE 1

#include "coretypes.h"
#include "stringpool.h"
#include "diagnostic.h"
#include "insn-attr.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tilepro-builtins.h"
#include "tilepro-multiply.h"

/* This file should be included last.  */
#include "target-def.h"
/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
/* Implement TARGET_OPTION_OVERRIDE.  */
tilepro_option_override (void)
  /* When modulo scheduling is enabled, we still rely on the regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;

/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
tilepro_scalar_mode_supported_p (scalar_mode mode)

/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
tile_vector_mode_supported_p (machine_mode mode)
  return mode == V4QImode || mode == V2HImode;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
				rtx x ATTRIBUTE_UNUSED)

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
tilepro_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
	  && TYPE_SIZE (arg.type)
	  && TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST);

/* Implement TARGET_RETURN_IN_MEMORY.  */
tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
  return !IN_RANGE (int_size_in_bytes (type),
		    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
tilepro_function_arg_boundary (machine_mode mode, const_tree type)
  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
/* Implement TARGET_FUNCTION_ARG.  */
tilepro_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = arg.promoted_size_in_bytes ();
  bool doubleword_aligned_p;

  if (cum >= TILEPRO_NUM_ARG_REGS)

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (arg.mode, arg.type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* The ABI does not allow parameters to be passed partially in
     registers and partially on the stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEPRO_NUM_ARG_REGS)

  return gen_rtx_REG (arg.mode, cum);
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
tilepro_function_arg_advance (cumulative_args_t cum_v,
			      const function_arg_info &arg)
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = arg.promoted_size_in_bytes ();
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (arg.mode, arg.type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEPRO_NUM_ARG_REGS
      && *cum + word_size > TILEPRO_NUM_ARG_REGS)
    *cum = TILEPRO_NUM_ARG_REGS;
/* Implement TARGET_FUNCTION_VALUE.  */
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,

  return gen_rtx_REG (mode, 0);

/* Implement TARGET_LIBCALL_VALUE.  */
tilepro_libcall_value (machine_mode mode,
		       const_rtx fun ATTRIBUTE_UNUSED)
  return gen_rtx_REG (mode, 0);
/* Implement FUNCTION_VALUE_REGNO_P.  */
tilepro_function_value_regno_p (const unsigned int regno)
  return regno < TILEPRO_NUM_RETURN_REGS;
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
tilepro_build_builtin_va_list (void)
  tree f_args, f_skip, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
			  get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it too.  It is an
     internal type so hide the warnings from the user.  */

  layout_type (record);

  /* The correct type is an array type of one element.  */
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
				   (crtl->args.info - TILEPRO_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
tilepro_setup_incoming_varargs (cumulative_args_t cum,
				const function_arg_info &arg,
				int *pretend_args, int no_rtl)
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum), arg);
  first_reg = local_cum;

  if (local_cum < TILEPRO_NUM_ARG_REGS)
      *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);

	  alias_set_type set = get_varargs_alias_set ();
	    gen_rtx_MEM (BLKmode, plus_constant (Pmode,
						 virtual_incoming_args_rtx,
						 -STACK_POINTER_OFFSET -
						 (TILEPRO_NUM_ARG_REGS -
	  MEM_NOTRAP_P (tmp) = 1;
	  set_mem_alias_set (tmp, set);
	  move_block_from_reg (first_reg, tmp,
			       TILEPRO_NUM_ARG_REGS - first_reg);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

     paddedsize = (sizeof(TYPE) + 3) & -4;
     if ((VALIST.__args + paddedsize > VALIST.__skip)
	 & (VALIST.__args <= VALIST.__skip))
       addr = VALIST.__skip + STACK_POINTER_OFFSET;
     else
       addr = VALIST.__args;
     VALIST.__args = addr + paddedsize;
     ret = *(TYPE *)addr;  */
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			      gimple_seq *post_p ATTRIBUTE_UNUSED)
  HOST_WIDE_INT size, rsize;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* if an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_va_arg_by_reference (type);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
      /* Assert the only case we generate code for: when
	 stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
/* Implement TARGET_RTX_COSTS.  */
tilepro_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
		   int *total, bool speed)
  int code = GET_CODE (x);

      /* If this is an 8-bit constant, return zero since it can be
	 used nearly anywhere with no cost.  If it is a valid operand
	 for an ADD or AND, likewise return 0 if we know it will be
	 used in that context.  Otherwise, return 2 since it might be
	 used there later.  All other constants take at least two
	 insns.  */
      if (satisfies_constraint_I (x))
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
	  /* Slightly penalize large constants even though we can add
	     them in one instruction, because it forces the use of
	     2-wide bundling mode.  */
      else if (move_operand (x, SImode))
	  /* We can materialize in one move.  */
	  *total = COSTS_N_INSNS (1);
	  /* We can materialize in two moves.  */
	  *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (4);

      /* If outer-code was a sign or zero extension, a cost of
	 COSTS_N_INSNS (1) was already added in, so account for
	 that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);

      /* Convey that s[123]a are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0), mode,
			      (enum rtx_code) outer_code, opno, speed)
		    + rtx_cost (XEXP (x, 1), mode,
				(enum rtx_code) outer_code, opno, speed)
		    + COSTS_N_INSNS (1));

      *total = COSTS_N_INSNS (2);

      if (outer_code == MULT)
	*total = COSTS_N_INSNS (1);

      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);

    case UNSPEC_VOLATILE:
	int num = XINT (x, 1);

	if (num <= TILEPRO_LAST_LATENCY_1_INSN)
	  *total = COSTS_N_INSNS (1);
	else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
	  *total = COSTS_N_INSNS (2);
	else if (num > TILEPRO_LAST_LATENCY_INSN)
	    if (outer_code == PLUS)
	      *total = COSTS_N_INSNS (1);

	  case UNSPEC_BLOCKAGE:
	  case UNSPEC_NETWORK_BARRIER:
	  case UNSPEC_LNK_AND_LABEL:
	  case UNSPEC_NETWORK_RECEIVE:
	  case UNSPEC_NETWORK_SEND:
	  case UNSPEC_TLS_GD_ADD:
	    *total = COSTS_N_INSNS (1);

	  case UNSPEC_TLS_IE_LOAD:
	    *total = COSTS_N_INSNS (2);

	    *total = COSTS_N_INSNS (3);

	    *total = COSTS_N_INSNS (4);

	  case UNSPEC_LATENCY_L2:
	    *total = COSTS_N_INSNS (8);

	  case UNSPEC_TLS_GD_CALL:
	    *total = COSTS_N_INSNS (30);

	  case UNSPEC_LATENCY_MISS:
	    *total = COSTS_N_INSNS (80);

      *total = COSTS_N_INSNS (1);
/* Returns an SImode integer rtx with value VAL.  */
gen_int_si (HOST_WIDE_INT val)
  return gen_int_mode (val, SImode);

/* Create a temporary variable to hold a partial result, to enable
   future CSE.  */
create_temp_reg_if_possible (machine_mode mode, rtx default_reg)
  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilepro_init_machine_status (void)
  return ggc_cleared_alloc<machine_function> ();

/* Do anything needed before RTL is emitted for each function.  */
tilepro_init_expanders (void)
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilepro_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
	gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
/* Return true if X contains a thread-local symbol.  */
tilepro_tls_referenced_p (rtx x)
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))

  /* That's all we handle in tilepro_legitimize_tls_address for
     now.  */
/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
tilepro_pic_address_needs_scratch (rtx x)
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
  switch (GET_CODE (x))
      return !tilepro_tls_referenced_p (x);

/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
tilepro_legitimate_pic_operand_p (rtx x)
  if (tilepro_pic_address_needs_scratch (x))

  if (tilepro_tls_referenced_p (x))
/* Return true if the rtx X can be used as an address operand.  */
tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,
  if (GET_CODE (x) == SUBREG)

  switch (GET_CODE (x))
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_CODE (XEXP (x, 1)) != PLUS)

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))

  /* Check if x is a valid reg.  */
  return REGNO_OK_FOR_BASE_P (REGNO (x));
/* Return the rtx containing SYMBOL_REF to the text label.  */
tilepro_text_label_symbol (void)
  return cfun->machine->text_label_symbol;

/* Return the register storing the value of the text label.  */
tilepro_text_label_rtx (void)
  return cfun->machine->text_label_rtx;

/* Return the register storing the value of the global offset
   table.  */
tilepro_got_rtx (void)
  return cfun->machine->got_rtx;

/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
tilepro_got_symbol (void)
  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
/* Return a reference to the got to be used by tls references.  */
tilepro_tls_got (void)
      crtl->uses_pic_offset_table = 1;
      return tilepro_got_rtx ();

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilepro_got_symbol ());
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
tilepro_legitimize_tls_address (rtx addr)
  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
	  rtx r0, temp1, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_gd_addhi (temp1, got, addr));
	  emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
	  emit_move_insn (r0, temp2);
	  emit_insn (gen_tls_gd_call (addr));
	  emit_move_insn (temp3, r0);
	  last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_INITIAL_EXEC:
	  rtx temp1, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_ie_addhi (temp1, got, addr));
	  emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
	  emit_insn (gen_tls_ie_load (temp3, temp2, addr));
						       THREAD_POINTER_REGNUM),
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_LOCAL_EXEC:
	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);

	  emit_insn (gen_tls_le_addhi (temp1,
						       THREAD_POINTER_REGNUM),
	  last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

  else if (GET_CODE (addr) == CONST)
      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
tilepro_legitimize_pic_address (rtx orig,
				machine_mode mode ATTRIBUTE_UNUSED,
  if (GET_CODE (orig) == SYMBOL_REF)
      rtx address, pic_ref;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      if (SYMBOL_REF_LOCAL_P (orig))
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  rtx text_label_symbol = tilepro_text_label_symbol ();
	  rtx text_label_rtx = tilepro_text_label_rtx ();

	  emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
	  emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilepro_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  emit_move_insn (reg, address);

	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);

	      emit_insn (gen_add_got16 (temp_reg,
					tilepro_got_rtx (), orig));

	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	      emit_insn (gen_addhi_got32 (temp_reg2,
					  tilepro_got_rtx (), orig));
	      emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));

	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following put a REG_EQUAL note on this insn, so that
	     it can be optimized by loop.  But it causes the label to
	     be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */

  else if (GET_CODE (orig) == CONST)
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
	tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);
	    /* If we reach here, then something is seriously
	       wrong.  */
      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));

  else if (GET_CODE (orig) == LABEL_REF)
      rtx address, temp_reg;
      rtx text_label_symbol;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      /* If not during reload, allocate another temp reg here for
	 loading in the address, so that these instructions can be
	 optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
	 don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      emit_move_insn (reg, address);
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
      return tilepro_legitimize_tls_address (x);

    return tilepro_legitimize_pic_address (x, mode, 0);

/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
tilepro_delegitimize_address (rtx x)
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
      switch (XINT (XEXP (x, 0), 1))
	case UNSPEC_PCREL_SYM:
	case UNSPEC_GOT16_SYM:
	case UNSPEC_GOT32_SYM:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
/* Emit code to load the PIC register.  */
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilepro_got_symbol ();
  rtx text_label_symbol = tilepro_text_label_symbol ();
  rtx text_label_rtx = tilepro_text_label_rtx ();

  emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
			      text_label_rtx, got_symbol, text_label_symbol));

  emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
			     got_symbol, text_label_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilepro_got_rtx ());
/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode SImode.  NUM is first
   truncated to fit in MODE.  */
tilepro_simd_int (rtx num, machine_mode mode)
  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

      n = 0x01010101 * (n & 0x000000FF);
      n = 0x00010001 * (n & 0x0000FFFF);

  return gen_int_si (n);
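
/* For example, replicating a QImode constant 0xA5 computes
   0x01010101 * 0xA5 == 0xA5A5A5A5, and replicating an HImode constant
   0x1234 computes 0x00010001 * 0x1234 == 0x12341234, i.e. the value
   broadcast into every lane of the SImode result.  */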
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant,
   or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
   to split and "num" is its length.  lo_half and hi_half are output
   arrays that parallel "operands".  */
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle it.  */
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
/* Returns true iff val can be moved into a register in one
   instruction.  And if it can, it emits the code to move the
   constant.

   If three_wide_only is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
expand_set_cint32_one_inst (rtx dest_reg,
			    HOST_WIDE_INT val, bool three_wide_only)
  val = trunc_int_for_mode (val, SImode);

  if (val == trunc_int_for_mode (val, QImode))
      emit_move_insn (dest_reg, GEN_INT (val));
  else if (!three_wide_only)
      rtx imm_op = GEN_INT (val);

      if (satisfies_constraint_J (imm_op)
	  || satisfies_constraint_K (imm_op)
	  || satisfies_constraint_N (imm_op)
	  || satisfies_constraint_P (imm_op))
	  emit_move_insn (dest_reg, imm_op);
/* Implement SImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
  if (count == 0)
    return x;
  return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
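
/* For example, rotate_right (0x80000001, 4) == 0x18000000: the low
   nibble 0x1 wraps around to the top of the 32-bit value while the
   remaining bits shift down by four.  */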
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
  for (i = 0; i < 32; i++)
      unsigned HOST_WIDE_INT x = rotate_right (n, i);

      /* See if x is a power of two minus one, i.e. only consecutive 1
	 bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
	  if (first_bit != NULL)
	    *first_bit = i;
	  if (last_bit != NULL)
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
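
/* Worked examples: 0x000FF000 has one run of ones in bits 12..19, so
   rotating right by 12 yields 0xFF and the function reports
   first_bit == 12, last_bit == 19.  0xF000000F also qualifies because
   its run wraps around (first_bit == 28, last_bit == 3), whereas
   0x00F00F00 has two separate runs and no rotation turns it into a
   power of two minus one.  */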
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
expand_set_cint32 (rtx dest_reg, rtx src_val)
  int leading_zeroes, trailing_zeroes;

  int three_wide_only;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))

  /* Create a temporary variable to hold a partial result, to enable
     future CSE.  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  trailing_zeroes = exact_log2 (val & -val);

  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,
	  /* 0xFFFFA500 becomes:
	     movei temp, 0xFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (SImode, temp,
					  GEN_INT (trailing_zeroes)));

      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,
	  /* 0x7FFFFFFF becomes:
	     movei temp, -2
	     shri dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (SImode, temp,
					    GEN_INT (leading_zeroes)));

      /* Try rotating a one-instruction immediate, since rotate is
	 cheap.  */
      for (count = 1; count < 32; count++)
	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint32_one_inst (temp, r, three_wide_only))
	      /* 0xFFA5FFFF becomes:
		 movei temp, 0xFFFFFFA5
		 rli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));

  if (lower == trunc_int_for_mode (lower, QImode))
      /* We failed to use two 3-wide instructions, but the low 16
	 bits are a small number so just use a 2-wide + 3-wide
	 auli + addi pair rather than anything more exotic.

	 0x12340056 becomes:
	 auli temp, zero, 0x1234
	 addi dest, temp, 0x56  */

  /* Fallback case: use an auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
/* Load OP1, a 32-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander
   guarantees this.  */
tilepro_expand_set_const32 (rtx op0, rtx op1)
  machine_mode mode = GET_MODE (op0);

  if (CONST_INT_P (op1))
      /* TODO: I don't know if we want to split large constants now,
	 or wait until later (with a define_split).

	 Does splitting early help CSE?  Does it harm other
	 optimizations that might fold loads?  */
      expand_set_cint32 (op0, op1);

      temp = create_temp_reg_if_possible (mode, op0);

      /* A symbol, emit in the traditional way.  */
      emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
      emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
/* Expand a move instruction.  Return true if all work is done.  */
tilepro_expand_mov (machine_mode mode, rtx *operands)
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))

      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
      operands[1] = tilepro_legitimize_tls_address (operands[1]);

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
      if (tilepro_pic_address_needs_scratch (operands[1]))
	operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
	  operands[1] = tilepro_legitimize_pic_address (operands[1],
							(reload_in_progress ?

  /* Fixup for UNSPEC addresses.  */
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)
	  emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
				     XVECEXP (unspec, 0, 0),
				     XVECEXP (unspec, 0, 1)));
      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
	  emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
				      XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
	  emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
	  emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
	  emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)
      tilepro_expand_set_const32 (operands[0], operands[1]);
/* Expand the "insv" pattern.  */
tilepro_expand_insv (rtx operands[4])
  rtx first_rtx = operands[2];
  HOST_WIDE_INT first = INTVAL (first_rtx);
  HOST_WIDE_INT width = INTVAL (operands[1]);
  rtx v = operands[3];

  /* Shift the inserted bits into position.  */
  if (CONST_INT_P (v))
      /* Shift the constant into mm position.  */
      v = gen_int_si (INTVAL (v) << first);

      /* Shift over the value to be inserted.  */
      rtx tmp = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp, v, first_rtx));

  /* Insert the shifted bits using an 'mm' insn.  */
  emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
			  GEN_INT (first + width - 1)));
/* Expand unaligned loads.  */
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset, bool sign)
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

	  rtx tmp = gen_reg_rtx (mode);

	  /* Do a signed load of the second byte then shift and OR it
	     in.  */
	  emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
				  gen_lowpart (SImode, hi), GEN_INT (8)));
	  emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
				 gen_lowpart (SImode, lo),
				 gen_lowpart (SImode, tmp)));

	  /* Do two unsigned loads and use intlb to interleave
	     them.  */
	  emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
				     gen_lowpart (SImode, hi),
				     gen_lowpart (SImode, lo)));

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;

      wide_result = gen_reg_rtx (mode);

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, hi), addr_lo));

    extract_bit_field (gen_lowpart (SImode, wide_result),
		       bitsize, bit_offset % BITS_PER_UNIT,
		       !sign, gen_lowpart (SImode, dest_reg),
		       SImode, SImode, false, NULL);

  if (extracted != dest_reg)
    emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
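
/* To illustrate the general path above with a 32-bit load from the
   unaligned address 0x1002: addr_lo is 0x1002 and addr_hi is 0x1005,
   so the two AND-masked loads fetch the aligned words at 0x1000 and
   0x1004, and dword_align then combines them according to the low
   bits of addr_lo.  If the address happens to be aligned, both masked
   loads hit the same word, so no access beyond the object (and hence
   no page-crossing fault) is introduced.  */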
/* Expand unaligned stores.  */
tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
				HOST_WIDE_INT bit_offset)
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_amt;

  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
      mem_addr = adjust_address (mem, QImode, byte_offset + i);

	  store_val = expand_simple_binop (SImode, LSHIFTRT,
					   gen_lowpart (SImode, src),
					   GEN_INT (shift_amt), NULL, 1,
	  store_val = gen_lowpart (QImode, store_val);

	  store_val = gen_lowpart (QImode, src);

      emit_move_insn (mem_addr, store_val);
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load
   it.  */
tilepro_expand_movmisalign (machine_mode mode, rtx *operands)
  if (MEM_P (operands[1]))
      if (register_operand (operands[0], mode))

	  tmp = gen_reg_rtx (mode);

      tilepro_expand_unaligned_load (tmp, operands[1],
				     GET_MODE_BITSIZE (mode), 0, true);

      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);
  else if (MEM_P (operands[0]))
      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      tilepro_expand_unaligned_store (operands[0], operands[1],
				      GET_MODE_BITSIZE (mode), 0);
/* Implement the addsi3 pattern.  */
tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)
  /* Skip anything that only takes one instruction.  */
  if (add_operand (op2, SImode))

  /* We can only optimize ints here (it should be impossible to get
     here with any other type, but it is harmless to check).  */
  if (!CONST_INT_P (op2))

  temp = create_temp_reg_if_possible (SImode, op0);

  high = (n + (n & 0x8000)) & ~0xffff;

  emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
  emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
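
/* Worked example of the split above: for n = 0x12348765 the rounding
   step gives high = (0x12348765 + 0x8000) & ~0xffff == 0x12350000 and
   n - high == -0x789b, which fits in a signed 16-bit immediate, so
   the addition becomes two add-immediate instructions (auli-style for
   the high half, addli-style for the low half).  */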
/* Implement the allocate_stack pattern (alloca).  */
tilepro_allocate_stack (rtx op0, rtx op1)
  /* Technically the correct way to initialize chain_loc is with
     gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
     sets the alias_set to that of a frame reference.  Some of our
     tests rely on some unsafe assumption about when the chaining
     update is done; we need to be conservative about reordering the
     chaining instructions.  */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
			     *entry)
  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];

/* Returns the length of the 'op' array.  */
tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of the
     last valid opcode plus one.  */
  for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)

  /* An empty array is not allowed.  */
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  */
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
						 tilepro_multiply_insn_seq
  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)
	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not
	     happen.  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 32);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (SImode, shift_count)));
	  /* Handle a normal two-operand instruction, such as add or
	     s1a.  */

	  /* Make sure we are referring to a previously computed
	     subexpression.  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
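
/* As an illustration of how such a sequence plays out (the actual
   entries come from tilepro_multiply_insn_seq_table): a multiply by
   10 can be expressed as t = (x << 2) + x followed by result = t << 1,
   i.e. one two-operand shift-and-add entry whose lhs/rhs both index
   subexprs[1] (the source), then one CODE_FOR_ashlsi3 entry whose rhs
   is the immediate shift count 1.  */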
/* bsearch helper function.  */
tilepro_compare_multipliers (const void *key, const void *t)
  return *(const int *) key -
    ((const struct tilepro_multiply_insn_seq *) t)->multiplier;

/* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
   none exists.  */
static const struct tilepro_multiply_insn_seq *
tilepro_find_multiply_insn_seq_for_constant (int multiplier)
  return ((const struct tilepro_multiply_insn_seq *)
	  bsearch (&multiplier, tilepro_multiply_insn_seq_table,
		   tilepro_multiply_insn_seq_table_size,
		   sizeof tilepro_multiply_insn_seq_table[0],
		   tilepro_compare_multipliers));
/* Try to expand a constant multiply in SImode by looking it up in a
   precompiled table.  OP0 is the result operand, OP1 is the source
   operand, and MULTIPLIER is the value of the constant.  Return true
   if it succeeds.  */
tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
  /* See if we have precomputed an efficient way to multiply by this
     constant.  */
  const struct tilepro_multiply_insn_seq *seq =
    tilepro_find_multiply_insn_seq_for_constant (multiplier);
      tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);

/* Expand the mulsi pattern.  */
tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
  if (CONST_INT_P (op2))
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
      return tilepro_expand_const_mulsi (op0, op1, n);
/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  */
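/* The expansion below builds the high 32 bits of the 64-bit product
   from 16x16-bit partial products.  Writing a = aH*2^16 + aL and
   b = bH*2^16 + bL, the full product is
   aL*bL + 2^16*(aH*bL + aL*bH) + 2^32*aH*bH.  The code sums the low
   word (tmp2 plus the two shifted cross terms), recovers the carries
   out of that sum with the LTU comparisons (tmp7, tmp8), and then
   adds those carries, the high halves of the cross terms and aH*bH to
   form the high word.  */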
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));

      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));

  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));

      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
/* Implement smulsi3_highpart.  */
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
  tilepro_expand_high_multiply (op0, op1, op2, true);

/* Implement umulsi3_highpart.  */
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
  tilepro_expand_high_multiply (op0, op1, op2, false);
/* Compare and branches  */

/* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
  rtx operands[2], lo_half[2], hi_half[2];
  rtx tmp, tmp0, tmp1, tmp2;

  /* Reduce the number of cases we need to handle by reversing the
     operands.  */
      /* We handle these compares directly.  */
      /* Reverse the operands.  */
      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  split_di (operands, 2, lo_half, hi_half);

  if (!reg_or_0_operand (lo_half[0], SImode))
    lo_half[0] = force_reg (SImode, lo_half[0]);

  if (!reg_or_0_operand (hi_half[0], SImode))
    hi_half[0] = force_reg (SImode, hi_half[0]);

  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
    lo_half[1] = force_reg (SImode, lo_half[1]);

  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
    hi_half[1] = force_reg (SImode, hi_half[1]);

  tmp0 = gen_reg_rtx (SImode);
  tmp1 = gen_reg_rtx (SImode);
  tmp2 = gen_reg_rtx (SImode);

      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_andsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      if (operands[1] == const0_rtx)
	  emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));

	  emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
	  emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Return the final comparison, or NULL if we can't
   work.  */
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			     machine_mode cmp_mode)
  if (cmp_mode == DImode)
      return tilepro_emit_setcc_internal_di (res, code, op0, op1);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
      /* We have these compares.  */
      /* We do not have these compares, so we reverse the
	 operands.  */
      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  if (!reg_or_0_operand (op0, SImode))
    op0 = force_reg (SImode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
    op1 = force_reg (SImode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (res, gen_rtx_fmt_ee (code, SImode, op0, op1)));
/* Implement cstore patterns.  */
tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)
    tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
				 operands[2], operands[3], cmp_mode);

/* Return whether CODE is a signed comparison.  */
signed_compare_p (enum rtx_code code)
  return (code == EQ || code == NE || code == LT || code == LE
	  || code == GT || code == GE);
/* Generate the comparison for an SImode conditional branch.  */
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		      machine_mode cmp_mode, bool eq_ne_only)
  enum rtx_code branch_code;

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))
      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
      /* We have these compares.  */
      /* These must be reversed (except NE, but let's
	 canonicalize).  */
      code = reverse_condition (code);

  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     for now).  */
	  if (!(symbolic_operand (op0, VOIDmode)
		|| (REG_P (op0) && REG_POINTER (op0))))
	      /* To compare against MIN_INT, we add MIN_INT and check
		 for zero.  */
	      if (n != -2147483647 - 1)

	      op0 = force_reg (SImode, op0);
	      temp = gen_reg_rtx (SImode);
	      emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);
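
	      /* For example, an EQ/NE compare of op0 against 1000 (too
		 large for an 8-bit seqi immediate) becomes
		 temp = op0 + (-1000) followed by a compare of temp
		 against zero, which needs no constant register.  The
		 MIN_INT check above is required because negating
		 -2147483648 is not representable in SImode.  */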
	      /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
		 etc.  */
	      int first = exact_log2 (code == LTU ? n : n + 1);

	      op0 = force_reg (SImode, op0);
	      temp = gen_reg_rtx (SImode);
	      emit_move_insn (temp,
			      gen_rtx_LSHIFTRT (SImode, op0,
						gen_int_si (first)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
/* Generate the comparison for a conditional branch.  */
tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
    tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
  rtx branch_rtx = gen_rtx_SET (pc_rtx,
				gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,

  emit_jump_insn (branch_rtx);

/* Implement the movsicc pattern.  */
tilepro_emit_conditional_move (rtx cmp)
    tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
			  GET_MODE (XEXP (cmp, 0)), true);
/* Return true if INSN is annotated with a REG_BR_PROB note that
   indicates it's a branch that's predicted taken.  */
cbranch_predicted_p (rtx_insn *insn)
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

      return profile_probability::from_reg_br_prob_note (XINT (x, 0))
	     >= profile_probability::even ();
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
tilepro_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
					   int regop, bool netreg_p,
					   bool reverse_predicted)
  static char buf[64];
  sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
	   (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
	   netreg_p ? 'N' : 'r', regop);
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilepro_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
				    const char *opcode,
				    const char *rev_opcode,
				    int regop, bool netreg_p)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
	tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
						   netreg_p, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
					       netreg_p, true);
  output_asm_insn (branch_if_false, operands);

  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
  return "";
}
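
/* Illustrative sketch (hypothetical register numbers and label names,
   not taken from this file): when the target label is beyond the
   +/-512K byte range checked above, a branch such as "bnz r3, .L7" is
   emitted by the fallback path roughly as

	bz	r3, .Lnot_taken
	j	.L7
   .Lnot_taken:

   i.e. the reversed condition branches around an unconditional jump
   that can reach any label.  */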
/* Output assembly code for a conditional branch instruction.  */
const char *
tilepro_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
{
  enum rtx_code code = GET_CODE (operands[1]);
  const char *opcode;
  const char *rev_opcode;

  if (reversed)
    code = reverse_condition (code);

  switch (code)
    {
      /* ...  */
      rev_opcode = "blez";
      /* ...  */
      rev_opcode = "bgez";
      /* ...  */
    }

  return
    tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
					2, false);
}
/* Implement the tablejump pattern.  */
void
tilepro_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx table = gen_rtx_LABEL_REF (Pmode, op1);
      rtx temp = gen_reg_rtx (Pmode);
      rtx text_label_symbol = tilepro_text_label_symbol ();
      rtx text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp, text_label_rtx,
				  table, text_label_symbol));
      emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
      emit_move_insn (temp,
		      gen_rtx_PLUS (Pmode,
				    convert_to_mode (Pmode, op0, false),
				    temp));
      op0 = temp;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}
/* Expand a builtin vector binary op, by calling gen function GEN with
   operands in the proper modes.  DEST is converted to DEST_MODE, and
   src0 and src1 (if DO_SRC1 is true) are converted to SRC_MODE.  */
void
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
				     machine_mode dest_mode,
				     rtx dest,
				     machine_mode src_mode,
				     rtx src0, rtx src1, bool do_src1)
{
  dest = gen_lowpart (dest_mode, dest);

  if (src0 == const0_rtx)
    src0 = CONST0_RTX (src_mode);
  else
    src0 = gen_lowpart (src_mode, src0);

  if (do_src1)
    {
      if (src1 == const0_rtx)
	src1 = CONST0_RTX (src_mode);
      else
	src1 = gen_lowpart (src_mode, src1);
    }

  emit_insn ((*gen) (dest, src0, src1));
}
struct tile_builtin_info
{
  enum insn_code icode;
  tree fndecl;
};

static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
2603 { CODE_FOR_addsi3
, NULL
}, /* add */
2604 { CODE_FOR_insn_addb
, NULL
}, /* addb */
2605 { CODE_FOR_insn_addbs_u
, NULL
}, /* addbs_u */
2606 { CODE_FOR_insn_addh
, NULL
}, /* addh */
2607 { CODE_FOR_insn_addhs
, NULL
}, /* addhs */
2608 { CODE_FOR_insn_addib
, NULL
}, /* addib */
2609 { CODE_FOR_insn_addih
, NULL
}, /* addih */
2610 { CODE_FOR_insn_addlis
, NULL
}, /* addlis */
2611 { CODE_FOR_ssaddsi3
, NULL
}, /* adds */
2612 { CODE_FOR_insn_adiffb_u
, NULL
}, /* adiffb_u */
2613 { CODE_FOR_insn_adiffh
, NULL
}, /* adiffh */
2614 { CODE_FOR_andsi3
, NULL
}, /* and */
2615 { CODE_FOR_insn_auli
, NULL
}, /* auli */
2616 { CODE_FOR_insn_avgb_u
, NULL
}, /* avgb_u */
2617 { CODE_FOR_insn_avgh
, NULL
}, /* avgh */
2618 { CODE_FOR_insn_bitx
, NULL
}, /* bitx */
2619 { CODE_FOR_bswapsi2
, NULL
}, /* bytex */
2620 { CODE_FOR_clzsi2
, NULL
}, /* clz */
2621 { CODE_FOR_insn_crc32_32
, NULL
}, /* crc32_32 */
2622 { CODE_FOR_insn_crc32_8
, NULL
}, /* crc32_8 */
2623 { CODE_FOR_ctzsi2
, NULL
}, /* ctz */
2624 { CODE_FOR_insn_drain
, NULL
}, /* drain */
2625 { CODE_FOR_insn_dtlbpr
, NULL
}, /* dtlbpr */
2626 { CODE_FOR_insn_dword_align
, NULL
}, /* dword_align */
2627 { CODE_FOR_insn_finv
, NULL
}, /* finv */
2628 { CODE_FOR_insn_flush
, NULL
}, /* flush */
2629 { CODE_FOR_insn_fnop
, NULL
}, /* fnop */
2630 { CODE_FOR_insn_icoh
, NULL
}, /* icoh */
2631 { CODE_FOR_insn_ill
, NULL
}, /* ill */
2632 { CODE_FOR_insn_info
, NULL
}, /* info */
2633 { CODE_FOR_insn_infol
, NULL
}, /* infol */
2634 { CODE_FOR_insn_inthb
, NULL
}, /* inthb */
2635 { CODE_FOR_insn_inthh
, NULL
}, /* inthh */
2636 { CODE_FOR_insn_intlb
, NULL
}, /* intlb */
2637 { CODE_FOR_insn_intlh
, NULL
}, /* intlh */
2638 { CODE_FOR_insn_inv
, NULL
}, /* inv */
2639 { CODE_FOR_insn_lb
, NULL
}, /* lb */
2640 { CODE_FOR_insn_lb_u
, NULL
}, /* lb_u */
2641 { CODE_FOR_insn_lh
, NULL
}, /* lh */
2642 { CODE_FOR_insn_lh_u
, NULL
}, /* lh_u */
2643 { CODE_FOR_insn_lnk
, NULL
}, /* lnk */
2644 { CODE_FOR_insn_lw
, NULL
}, /* lw */
2645 { CODE_FOR_insn_lw_na
, NULL
}, /* lw_na */
2646 { CODE_FOR_insn_lb_L2
, NULL
}, /* lb_L2 */
2647 { CODE_FOR_insn_lb_u_L2
, NULL
}, /* lb_u_L2 */
2648 { CODE_FOR_insn_lh_L2
, NULL
}, /* lh_L2 */
2649 { CODE_FOR_insn_lh_u_L2
, NULL
}, /* lh_u_L2 */
2650 { CODE_FOR_insn_lw_L2
, NULL
}, /* lw_L2 */
2651 { CODE_FOR_insn_lw_na_L2
, NULL
}, /* lw_na_L2 */
2652 { CODE_FOR_insn_lb_miss
, NULL
}, /* lb_miss */
2653 { CODE_FOR_insn_lb_u_miss
, NULL
}, /* lb_u_miss */
2654 { CODE_FOR_insn_lh_miss
, NULL
}, /* lh_miss */
2655 { CODE_FOR_insn_lh_u_miss
, NULL
}, /* lh_u_miss */
2656 { CODE_FOR_insn_lw_miss
, NULL
}, /* lw_miss */
2657 { CODE_FOR_insn_lw_na_miss
, NULL
}, /* lw_na_miss */
2658 { CODE_FOR_insn_maxb_u
, NULL
}, /* maxb_u */
2659 { CODE_FOR_insn_maxh
, NULL
}, /* maxh */
2660 { CODE_FOR_insn_maxib_u
, NULL
}, /* maxib_u */
2661 { CODE_FOR_insn_maxih
, NULL
}, /* maxih */
2662 { CODE_FOR_memory_barrier
, NULL
}, /* mf */
2663 { CODE_FOR_insn_mfspr
, NULL
}, /* mfspr */
2664 { CODE_FOR_insn_minb_u
, NULL
}, /* minb_u */
2665 { CODE_FOR_insn_minh
, NULL
}, /* minh */
2666 { CODE_FOR_insn_minib_u
, NULL
}, /* minib_u */
2667 { CODE_FOR_insn_minih
, NULL
}, /* minih */
2668 { CODE_FOR_insn_mm
, NULL
}, /* mm */
2669 { CODE_FOR_insn_mnz
, NULL
}, /* mnz */
2670 { CODE_FOR_insn_mnzb
, NULL
}, /* mnzb */
2671 { CODE_FOR_insn_mnzh
, NULL
}, /* mnzh */
2672 { CODE_FOR_movsi
, NULL
}, /* move */
2673 { CODE_FOR_insn_movelis
, NULL
}, /* movelis */
2674 { CODE_FOR_insn_mtspr
, NULL
}, /* mtspr */
2675 { CODE_FOR_insn_mulhh_ss
, NULL
}, /* mulhh_ss */
2676 { CODE_FOR_insn_mulhh_su
, NULL
}, /* mulhh_su */
2677 { CODE_FOR_insn_mulhh_uu
, NULL
}, /* mulhh_uu */
2678 { CODE_FOR_insn_mulhha_ss
, NULL
}, /* mulhha_ss */
2679 { CODE_FOR_insn_mulhha_su
, NULL
}, /* mulhha_su */
2680 { CODE_FOR_insn_mulhha_uu
, NULL
}, /* mulhha_uu */
2681 { CODE_FOR_insn_mulhhsa_uu
, NULL
}, /* mulhhsa_uu */
2682 { CODE_FOR_insn_mulhl_ss
, NULL
}, /* mulhl_ss */
2683 { CODE_FOR_insn_mulhl_su
, NULL
}, /* mulhl_su */
2684 { CODE_FOR_insn_mulhl_us
, NULL
}, /* mulhl_us */
2685 { CODE_FOR_insn_mulhl_uu
, NULL
}, /* mulhl_uu */
2686 { CODE_FOR_insn_mulhla_ss
, NULL
}, /* mulhla_ss */
2687 { CODE_FOR_insn_mulhla_su
, NULL
}, /* mulhla_su */
2688 { CODE_FOR_insn_mulhla_us
, NULL
}, /* mulhla_us */
2689 { CODE_FOR_insn_mulhla_uu
, NULL
}, /* mulhla_uu */
2690 { CODE_FOR_insn_mulhlsa_uu
, NULL
}, /* mulhlsa_uu */
2691 { CODE_FOR_insn_mulll_ss
, NULL
}, /* mulll_ss */
2692 { CODE_FOR_insn_mulll_su
, NULL
}, /* mulll_su */
2693 { CODE_FOR_insn_mulll_uu
, NULL
}, /* mulll_uu */
2694 { CODE_FOR_insn_mullla_ss
, NULL
}, /* mullla_ss */
2695 { CODE_FOR_insn_mullla_su
, NULL
}, /* mullla_su */
2696 { CODE_FOR_insn_mullla_uu
, NULL
}, /* mullla_uu */
2697 { CODE_FOR_insn_mulllsa_uu
, NULL
}, /* mulllsa_uu */
2698 { CODE_FOR_insn_mvnz
, NULL
}, /* mvnz */
2699 { CODE_FOR_insn_mvz
, NULL
}, /* mvz */
2700 { CODE_FOR_insn_mz
, NULL
}, /* mz */
2701 { CODE_FOR_insn_mzb
, NULL
}, /* mzb */
2702 { CODE_FOR_insn_mzh
, NULL
}, /* mzh */
2703 { CODE_FOR_insn_nap
, NULL
}, /* nap */
2704 { CODE_FOR_nop
, NULL
}, /* nop */
2705 { CODE_FOR_insn_nor
, NULL
}, /* nor */
2706 { CODE_FOR_iorsi3
, NULL
}, /* or */
2707 { CODE_FOR_insn_packbs_u
, NULL
}, /* packbs_u */
2708 { CODE_FOR_insn_packhb
, NULL
}, /* packhb */
2709 { CODE_FOR_insn_packhs
, NULL
}, /* packhs */
2710 { CODE_FOR_insn_packlb
, NULL
}, /* packlb */
2711 { CODE_FOR_popcountsi2
, NULL
}, /* pcnt */
2712 { CODE_FOR_insn_prefetch
, NULL
}, /* prefetch */
2713 { CODE_FOR_insn_prefetch_L1
, NULL
}, /* prefetch_L1 */
2714 { CODE_FOR_rotlsi3
, NULL
}, /* rl */
2715 { CODE_FOR_insn_s1a
, NULL
}, /* s1a */
2716 { CODE_FOR_insn_s2a
, NULL
}, /* s2a */
2717 { CODE_FOR_insn_s3a
, NULL
}, /* s3a */
2718 { CODE_FOR_insn_sadab_u
, NULL
}, /* sadab_u */
2719 { CODE_FOR_insn_sadah
, NULL
}, /* sadah */
2720 { CODE_FOR_insn_sadah_u
, NULL
}, /* sadah_u */
2721 { CODE_FOR_insn_sadb_u
, NULL
}, /* sadb_u */
2722 { CODE_FOR_insn_sadh
, NULL
}, /* sadh */
2723 { CODE_FOR_insn_sadh_u
, NULL
}, /* sadh_u */
2724 { CODE_FOR_insn_sb
, NULL
}, /* sb */
2725 { CODE_FOR_insn_seq
, NULL
}, /* seq */
2726 { CODE_FOR_insn_seqb
, NULL
}, /* seqb */
2727 { CODE_FOR_insn_seqh
, NULL
}, /* seqh */
2728 { CODE_FOR_insn_seqib
, NULL
}, /* seqib */
2729 { CODE_FOR_insn_seqih
, NULL
}, /* seqih */
2730 { CODE_FOR_insn_sh
, NULL
}, /* sh */
2731 { CODE_FOR_ashlsi3
, NULL
}, /* shl */
2732 { CODE_FOR_insn_shlb
, NULL
}, /* shlb */
2733 { CODE_FOR_insn_shlh
, NULL
}, /* shlh */
2734 { CODE_FOR_insn_shlb
, NULL
}, /* shlib */
2735 { CODE_FOR_insn_shlh
, NULL
}, /* shlih */
2736 { CODE_FOR_lshrsi3
, NULL
}, /* shr */
2737 { CODE_FOR_insn_shrb
, NULL
}, /* shrb */
2738 { CODE_FOR_insn_shrh
, NULL
}, /* shrh */
2739 { CODE_FOR_insn_shrb
, NULL
}, /* shrib */
2740 { CODE_FOR_insn_shrh
, NULL
}, /* shrih */
2741 { CODE_FOR_insn_slt
, NULL
}, /* slt */
2742 { CODE_FOR_insn_slt_u
, NULL
}, /* slt_u */
2743 { CODE_FOR_insn_sltb
, NULL
}, /* sltb */
2744 { CODE_FOR_insn_sltb_u
, NULL
}, /* sltb_u */
2745 { CODE_FOR_insn_slte
, NULL
}, /* slte */
2746 { CODE_FOR_insn_slte_u
, NULL
}, /* slte_u */
2747 { CODE_FOR_insn_slteb
, NULL
}, /* slteb */
2748 { CODE_FOR_insn_slteb_u
, NULL
}, /* slteb_u */
2749 { CODE_FOR_insn_slteh
, NULL
}, /* slteh */
2750 { CODE_FOR_insn_slteh_u
, NULL
}, /* slteh_u */
2751 { CODE_FOR_insn_slth
, NULL
}, /* slth */
2752 { CODE_FOR_insn_slth_u
, NULL
}, /* slth_u */
2753 { CODE_FOR_insn_sltib
, NULL
}, /* sltib */
2754 { CODE_FOR_insn_sltib_u
, NULL
}, /* sltib_u */
2755 { CODE_FOR_insn_sltih
, NULL
}, /* sltih */
2756 { CODE_FOR_insn_sltih_u
, NULL
}, /* sltih_u */
2757 { CODE_FOR_insn_sne
, NULL
}, /* sne */
2758 { CODE_FOR_insn_sneb
, NULL
}, /* sneb */
2759 { CODE_FOR_insn_sneh
, NULL
}, /* sneh */
2760 { CODE_FOR_ashrsi3
, NULL
}, /* sra */
2761 { CODE_FOR_insn_srab
, NULL
}, /* srab */
2762 { CODE_FOR_insn_srah
, NULL
}, /* srah */
2763 { CODE_FOR_insn_srab
, NULL
}, /* sraib */
2764 { CODE_FOR_insn_srah
, NULL
}, /* sraih */
2765 { CODE_FOR_subsi3
, NULL
}, /* sub */
2766 { CODE_FOR_insn_subb
, NULL
}, /* subb */
2767 { CODE_FOR_insn_subbs_u
, NULL
}, /* subbs_u */
2768 { CODE_FOR_insn_subh
, NULL
}, /* subh */
2769 { CODE_FOR_insn_subhs
, NULL
}, /* subhs */
2770 { CODE_FOR_sssubsi3
, NULL
}, /* subs */
2771 { CODE_FOR_insn_sw
, NULL
}, /* sw */
2772 { CODE_FOR_insn_tblidxb0
, NULL
}, /* tblidxb0 */
2773 { CODE_FOR_insn_tblidxb1
, NULL
}, /* tblidxb1 */
2774 { CODE_FOR_insn_tblidxb2
, NULL
}, /* tblidxb2 */
2775 { CODE_FOR_insn_tblidxb3
, NULL
}, /* tblidxb3 */
2776 { CODE_FOR_insn_tns
, NULL
}, /* tns */
2777 { CODE_FOR_insn_wh64
, NULL
}, /* wh64 */
2778 { CODE_FOR_xorsi3
, NULL
}, /* xor */
2779 { CODE_FOR_tilepro_network_barrier
, NULL
}, /* network_barrier */
2780 { CODE_FOR_tilepro_idn0_receive
, NULL
}, /* idn0_receive */
2781 { CODE_FOR_tilepro_idn1_receive
, NULL
}, /* idn1_receive */
2782 { CODE_FOR_tilepro_idn_send
, NULL
}, /* idn_send */
2783 { CODE_FOR_tilepro_sn_receive
, NULL
}, /* sn_receive */
2784 { CODE_FOR_tilepro_sn_send
, NULL
}, /* sn_send */
2785 { CODE_FOR_tilepro_udn0_receive
, NULL
}, /* udn0_receive */
2786 { CODE_FOR_tilepro_udn1_receive
, NULL
}, /* udn1_receive */
2787 { CODE_FOR_tilepro_udn2_receive
, NULL
}, /* udn2_receive */
2788 { CODE_FOR_tilepro_udn3_receive
, NULL
}, /* udn3_receive */
2789 { CODE_FOR_tilepro_udn_send
, NULL
}, /* udn_send */
struct tilepro_builtin_def
{
  const char *name;
  enum tilepro_builtin code;
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};


static const struct tilepro_builtin_def tilepro_builtins[] = {
2805 { "__insn_add", TILEPRO_INSN_ADD
, true, "lll" },
2806 { "__insn_addb", TILEPRO_INSN_ADDB
, true, "lll" },
2807 { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U
, false, "lll" },
2808 { "__insn_addh", TILEPRO_INSN_ADDH
, true, "lll" },
2809 { "__insn_addhs", TILEPRO_INSN_ADDHS
, false, "lll" },
2810 { "__insn_addi", TILEPRO_INSN_ADD
, true, "lll" },
2811 { "__insn_addib", TILEPRO_INSN_ADDIB
, true, "lll" },
2812 { "__insn_addih", TILEPRO_INSN_ADDIH
, true, "lll" },
2813 { "__insn_addli", TILEPRO_INSN_ADD
, true, "lll" },
2814 { "__insn_addlis", TILEPRO_INSN_ADDLIS
, false, "lll" },
2815 { "__insn_adds", TILEPRO_INSN_ADDS
, false, "lll" },
2816 { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U
, true, "lll" },
2817 { "__insn_adiffh", TILEPRO_INSN_ADIFFH
, true, "lll" },
2818 { "__insn_and", TILEPRO_INSN_AND
, true, "lll" },
2819 { "__insn_andi", TILEPRO_INSN_AND
, true, "lll" },
2820 { "__insn_auli", TILEPRO_INSN_AULI
, true, "lll" },
2821 { "__insn_avgb_u", TILEPRO_INSN_AVGB_U
, true, "lll" },
2822 { "__insn_avgh", TILEPRO_INSN_AVGH
, true, "lll" },
2823 { "__insn_bitx", TILEPRO_INSN_BITX
, true, "ll" },
2824 { "__insn_bytex", TILEPRO_INSN_BYTEX
, true, "ll" },
2825 { "__insn_clz", TILEPRO_INSN_CLZ
, true, "ll" },
2826 { "__insn_crc32_32", TILEPRO_INSN_CRC32_32
, true, "lll" },
2827 { "__insn_crc32_8", TILEPRO_INSN_CRC32_8
, true, "lll" },
2828 { "__insn_ctz", TILEPRO_INSN_CTZ
, true, "ll" },
2829 { "__insn_drain", TILEPRO_INSN_DRAIN
, false, "v" },
2830 { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR
, false, "vl" },
2831 { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN
, true, "lllk" },
2832 { "__insn_finv", TILEPRO_INSN_FINV
, false, "vk" },
2833 { "__insn_flush", TILEPRO_INSN_FLUSH
, false, "vk" },
2834 { "__insn_fnop", TILEPRO_INSN_FNOP
, false, "v" },
2835 { "__insn_icoh", TILEPRO_INSN_ICOH
, false, "vk" },
2836 { "__insn_ill", TILEPRO_INSN_ILL
, false, "v" },
2837 { "__insn_info", TILEPRO_INSN_INFO
, false, "vl" },
2838 { "__insn_infol", TILEPRO_INSN_INFOL
, false, "vl" },
2839 { "__insn_inthb", TILEPRO_INSN_INTHB
, true, "lll" },
2840 { "__insn_inthh", TILEPRO_INSN_INTHH
, true, "lll" },
2841 { "__insn_intlb", TILEPRO_INSN_INTLB
, true, "lll" },
2842 { "__insn_intlh", TILEPRO_INSN_INTLH
, true, "lll" },
2843 { "__insn_inv", TILEPRO_INSN_INV
, false, "vp" },
2844 { "__insn_lb", TILEPRO_INSN_LB
, false, "lk" },
2845 { "__insn_lb_u", TILEPRO_INSN_LB_U
, false, "lk" },
2846 { "__insn_lh", TILEPRO_INSN_LH
, false, "lk" },
2847 { "__insn_lh_u", TILEPRO_INSN_LH_U
, false, "lk" },
2848 { "__insn_lnk", TILEPRO_INSN_LNK
, true, "l" },
2849 { "__insn_lw", TILEPRO_INSN_LW
, false, "lk" },
2850 { "__insn_lw_na", TILEPRO_INSN_LW_NA
, false, "lk" },
2851 { "__insn_lb_L2", TILEPRO_INSN_LB_L2
, false, "lk" },
2852 { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2
, false, "lk" },
2853 { "__insn_lh_L2", TILEPRO_INSN_LH_L2
, false, "lk" },
2854 { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2
, false, "lk" },
2855 { "__insn_lw_L2", TILEPRO_INSN_LW_L2
, false, "lk" },
2856 { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2
, false, "lk" },
2857 { "__insn_lb_miss", TILEPRO_INSN_LB_MISS
, false, "lk" },
2858 { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS
, false, "lk" },
2859 { "__insn_lh_miss", TILEPRO_INSN_LH_MISS
, false, "lk" },
2860 { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS
, false, "lk" },
2861 { "__insn_lw_miss", TILEPRO_INSN_LW_MISS
, false, "lk" },
2862 { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS
, false, "lk" },
2863 { "__insn_maxb_u", TILEPRO_INSN_MAXB_U
, true, "lll" },
2864 { "__insn_maxh", TILEPRO_INSN_MAXH
, true, "lll" },
2865 { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U
, true, "lll" },
2866 { "__insn_maxih", TILEPRO_INSN_MAXIH
, true, "lll" },
2867 { "__insn_mf", TILEPRO_INSN_MF
, false, "v" },
2868 { "__insn_mfspr", TILEPRO_INSN_MFSPR
, false, "ll" },
2869 { "__insn_minb_u", TILEPRO_INSN_MINB_U
, true, "lll" },
2870 { "__insn_minh", TILEPRO_INSN_MINH
, true, "lll" },
2871 { "__insn_minib_u", TILEPRO_INSN_MINIB_U
, true, "lll" },
2872 { "__insn_minih", TILEPRO_INSN_MINIH
, true, "lll" },
2873 { "__insn_mm", TILEPRO_INSN_MM
, true, "lllll" },
2874 { "__insn_mnz", TILEPRO_INSN_MNZ
, true, "lll" },
2875 { "__insn_mnzb", TILEPRO_INSN_MNZB
, true, "lll" },
2876 { "__insn_mnzh", TILEPRO_INSN_MNZH
, true, "lll" },
2877 { "__insn_move", TILEPRO_INSN_MOVE
, true, "ll" },
2878 { "__insn_movei", TILEPRO_INSN_MOVE
, true, "ll" },
2879 { "__insn_moveli", TILEPRO_INSN_MOVE
, true, "ll" },
2880 { "__insn_movelis", TILEPRO_INSN_MOVELIS
, false, "ll" },
2881 { "__insn_mtspr", TILEPRO_INSN_MTSPR
, false, "vll" },
2882 { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS
, true, "lll" },
2883 { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU
, true, "lll" },
2884 { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU
, true, "lll" },
2885 { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS
, true, "llll" },
2886 { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU
, true, "llll" },
2887 { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU
, true, "llll" },
2888 { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU
, true, "llll" },
2889 { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS
, true, "lll" },
2890 { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU
, true, "lll" },
2891 { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US
, true, "lll" },
2892 { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU
, true, "lll" },
2893 { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS
, true, "llll" },
2894 { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU
, true, "llll" },
2895 { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US
, true, "llll" },
2896 { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU
, true, "llll" },
2897 { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU
, true, "llll" },
2898 { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS
, true, "lll" },
2899 { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU
, true, "lll" },
2900 { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU
, true, "lll" },
2901 { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS
, true, "llll" },
2902 { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU
, true, "llll" },
2903 { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU
, true, "llll" },
2904 { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU
, true, "llll" },
2905 { "__insn_mvnz", TILEPRO_INSN_MVNZ
, true, "llll" },
2906 { "__insn_mvz", TILEPRO_INSN_MVZ
, true, "llll" },
2907 { "__insn_mz", TILEPRO_INSN_MZ
, true, "lll" },
2908 { "__insn_mzb", TILEPRO_INSN_MZB
, true, "lll" },
2909 { "__insn_mzh", TILEPRO_INSN_MZH
, true, "lll" },
2910 { "__insn_nap", TILEPRO_INSN_NAP
, false, "v" },
2911 { "__insn_nop", TILEPRO_INSN_NOP
, true, "v" },
2912 { "__insn_nor", TILEPRO_INSN_NOR
, true, "lll" },
2913 { "__insn_or", TILEPRO_INSN_OR
, true, "lll" },
2914 { "__insn_ori", TILEPRO_INSN_OR
, true, "lll" },
2915 { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U
, false, "lll" },
2916 { "__insn_packhb", TILEPRO_INSN_PACKHB
, true, "lll" },
2917 { "__insn_packhs", TILEPRO_INSN_PACKHS
, false, "lll" },
2918 { "__insn_packlb", TILEPRO_INSN_PACKLB
, true, "lll" },
2919 { "__insn_pcnt", TILEPRO_INSN_PCNT
, true, "ll" },
2920 { "__insn_prefetch", TILEPRO_INSN_PREFETCH
, false, "vk" },
2921 { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1
, false, "vk" },
2922 { "__insn_rl", TILEPRO_INSN_RL
, true, "lll" },
2923 { "__insn_rli", TILEPRO_INSN_RL
, true, "lll" },
2924 { "__insn_s1a", TILEPRO_INSN_S1A
, true, "lll" },
2925 { "__insn_s2a", TILEPRO_INSN_S2A
, true, "lll" },
2926 { "__insn_s3a", TILEPRO_INSN_S3A
, true, "lll" },
2927 { "__insn_sadab_u", TILEPRO_INSN_SADAB_U
, true, "llll" },
2928 { "__insn_sadah", TILEPRO_INSN_SADAH
, true, "llll" },
2929 { "__insn_sadah_u", TILEPRO_INSN_SADAH_U
, true, "llll" },
2930 { "__insn_sadb_u", TILEPRO_INSN_SADB_U
, true, "lll" },
2931 { "__insn_sadh", TILEPRO_INSN_SADH
, true, "lll" },
2932 { "__insn_sadh_u", TILEPRO_INSN_SADH_U
, true, "lll" },
2933 { "__insn_sb", TILEPRO_INSN_SB
, false, "vpl" },
2934 { "__insn_seq", TILEPRO_INSN_SEQ
, true, "lll" },
2935 { "__insn_seqb", TILEPRO_INSN_SEQB
, true, "lll" },
2936 { "__insn_seqh", TILEPRO_INSN_SEQH
, true, "lll" },
2937 { "__insn_seqi", TILEPRO_INSN_SEQ
, true, "lll" },
2938 { "__insn_seqib", TILEPRO_INSN_SEQIB
, true, "lll" },
2939 { "__insn_seqih", TILEPRO_INSN_SEQIH
, true, "lll" },
2940 { "__insn_sh", TILEPRO_INSN_SH
, false, "vpl" },
2941 { "__insn_shl", TILEPRO_INSN_SHL
, true, "lll" },
2942 { "__insn_shlb", TILEPRO_INSN_SHLB
, true, "lll" },
2943 { "__insn_shlh", TILEPRO_INSN_SHLH
, true, "lll" },
2944 { "__insn_shli", TILEPRO_INSN_SHL
, true, "lll" },
2945 { "__insn_shlib", TILEPRO_INSN_SHLIB
, true, "lll" },
2946 { "__insn_shlih", TILEPRO_INSN_SHLIH
, true, "lll" },
2947 { "__insn_shr", TILEPRO_INSN_SHR
, true, "lll" },
2948 { "__insn_shrb", TILEPRO_INSN_SHRB
, true, "lll" },
2949 { "__insn_shrh", TILEPRO_INSN_SHRH
, true, "lll" },
2950 { "__insn_shri", TILEPRO_INSN_SHR
, true, "lll" },
2951 { "__insn_shrib", TILEPRO_INSN_SHRIB
, true, "lll" },
2952 { "__insn_shrih", TILEPRO_INSN_SHRIH
, true, "lll" },
2953 { "__insn_slt", TILEPRO_INSN_SLT
, true, "lll" },
2954 { "__insn_slt_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2955 { "__insn_sltb", TILEPRO_INSN_SLTB
, true, "lll" },
2956 { "__insn_sltb_u", TILEPRO_INSN_SLTB_U
, true, "lll" },
2957 { "__insn_slte", TILEPRO_INSN_SLTE
, true, "lll" },
2958 { "__insn_slte_u", TILEPRO_INSN_SLTE_U
, true, "lll" },
2959 { "__insn_slteb", TILEPRO_INSN_SLTEB
, true, "lll" },
2960 { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U
, true, "lll" },
2961 { "__insn_slteh", TILEPRO_INSN_SLTEH
, true, "lll" },
2962 { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U
, true, "lll" },
2963 { "__insn_slth", TILEPRO_INSN_SLTH
, true, "lll" },
2964 { "__insn_slth_u", TILEPRO_INSN_SLTH_U
, true, "lll" },
2965 { "__insn_slti", TILEPRO_INSN_SLT
, true, "lll" },
2966 { "__insn_slti_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2967 { "__insn_sltib", TILEPRO_INSN_SLTIB
, true, "lll" },
2968 { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U
, true, "lll" },
2969 { "__insn_sltih", TILEPRO_INSN_SLTIH
, true, "lll" },
2970 { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U
, true, "lll" },
2971 { "__insn_sne", TILEPRO_INSN_SNE
, true, "lll" },
2972 { "__insn_sneb", TILEPRO_INSN_SNEB
, true, "lll" },
2973 { "__insn_sneh", TILEPRO_INSN_SNEH
, true, "lll" },
2974 { "__insn_sra", TILEPRO_INSN_SRA
, true, "lll" },
2975 { "__insn_srab", TILEPRO_INSN_SRAB
, true, "lll" },
2976 { "__insn_srah", TILEPRO_INSN_SRAH
, true, "lll" },
2977 { "__insn_srai", TILEPRO_INSN_SRA
, true, "lll" },
2978 { "__insn_sraib", TILEPRO_INSN_SRAIB
, true, "lll" },
2979 { "__insn_sraih", TILEPRO_INSN_SRAIH
, true, "lll" },
2980 { "__insn_sub", TILEPRO_INSN_SUB
, true, "lll" },
2981 { "__insn_subb", TILEPRO_INSN_SUBB
, true, "lll" },
2982 { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U
, false, "lll" },
2983 { "__insn_subh", TILEPRO_INSN_SUBH
, true, "lll" },
2984 { "__insn_subhs", TILEPRO_INSN_SUBHS
, false, "lll" },
2985 { "__insn_subs", TILEPRO_INSN_SUBS
, false, "lll" },
2986 { "__insn_sw", TILEPRO_INSN_SW
, false, "vpl" },
2987 { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0
, true, "lll" },
2988 { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1
, true, "lll" },
2989 { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2
, true, "lll" },
2990 { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3
, true, "lll" },
2991 { "__insn_tns", TILEPRO_INSN_TNS
, false, "lp" },
2992 { "__insn_wh64", TILEPRO_INSN_WH64
, false, "vp" },
2993 { "__insn_xor", TILEPRO_INSN_XOR
, true, "lll" },
2994 { "__insn_xori", TILEPRO_INSN_XOR
, true, "lll" },
2995 { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER
, false, "v" },
2996 { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE
, false, "l" },
2997 { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE
, false, "l" },
2998 { "__tile_idn_send", TILEPRO_IDN_SEND
, false, "vl" },
2999 { "__tile_sn_receive", TILEPRO_SN_RECEIVE
, false, "l" },
3000 { "__tile_sn_send", TILEPRO_SN_SEND
, false, "vl" },
3001 { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE
, false, "l" },
3002 { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE
, false, "l" },
3003 { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE
, false, "l" },
3004 { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE
, false, "l" },
3005 { "__tile_udn_send", TILEPRO_UDN_SEND
, false, "vl" },
/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_CONST
						  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'l':
      return long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}
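
/* Worked example of the encoding used in the table above: the type
   string "vpl" used by __insn_sb decodes to a builtin of type
   void (volatile void *, long unsigned), and "lk" used by the load
   builtins decodes to long unsigned (const volatile void *).  */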
/* Implement TARGET_INIT_BUILTINS.  */
static void
tilepro_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
    {
      const struct tilepro_builtin_def *p = &tilepro_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

      for (j = strlen (p->type) - 1; j > 0; j--)
	{
	  arg_type_list =
	    tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
	}

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
				   NULL, NULL);

      if (p->is_const)
	TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      if (tilepro_builtin_info[p->code].fndecl == NULL)
	tilepro_builtin_info[p->code].fndecl = decl;
    }
}
/* Implement TARGET_EXPAND_BUILTIN.  */
static rtx
tilepro_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEPRO_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilepro_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
	return NULL_RTX;
      if (opnum > MAX_BUILTIN_ARGS)
	return NULL_RTX;

      insn_op = &insn_data[icode].operand[opnum];

      op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	{
	  /* We still failed to meet the predicate even after moving
	     into a register.  Assume we needed an immediate.  */
	  error_at (EXPR_LOCATION (exp),
		    "operand must be an immediate of the right size");
	  return const0_rtx;
	}

      opnum++;
    }

  if (nonvoid)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[0] = target;
    }

  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;

  /* If we are generating a prefetch, tell the scheduler not to move
     it around.  */
  if (GET_CODE (pat) == PREFETCH)
    PREFETCH_SCHEDULE_BARRIER_P (pat) = true;

  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEPRO_BUILTIN_max)
    return error_mark_node;

  return tilepro_builtin_info[code].fndecl;
}
/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!call_used_or_fixed_reg_p (regno)
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
	  || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
	{
	  if (regno == EH_RETURN_DATA_REGNO (i))
	    return true;
	}
    }

  return false;
}
/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilepro_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}
/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
	  & -STACK_BOUNDARY / BITS_PER_UNIT);
}
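
/* Worked example (assuming STACK_BOUNDARY is 64 bits, i.e. an 8-byte
   boundary): round_frame_size (20) yields 24 and round_frame_size (24)
   stays 24, since the expression adds 7 and then masks with -8.  */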
/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx_insn *
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
		  int cfa_offset)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  rtx mov = gen_movsi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (Pmode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
  rtx real = gen_rtx_SET (cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}
/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx_insn *
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movsi (reg, mem));
}
/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  */
static rtx_insn *
set_frame_related_p (void)
{
  rtx_insn *seq = get_insns ();
  rtx_insn *insn;

  end_sequence ();

  if (!seq)
    return NULL;

  if (INSN_P (seq))
    {
      insn = seq;
      while (insn != NULL_RTX)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = NEXT_INSN (insn);
	}
      seq = emit_insn (seq);
    }
  else
    {
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
    }
  return seq;
}

#define FRP(exp)  (start_sequence (), exp, set_frame_related_p ())
/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it.  Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.  */
static rtx_insn *
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
		rtx reg_notes)
{
  rtx to_add = gen_int_si (offset);
  rtx imm_rtx = gen_int_si (offset);
  rtx_insn *insn;

  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single addi or addli.  */
      to_add = imm_rtx;
    }
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilepro_expand_set_const32 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
				   to_add));
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}
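
/* Sketch of the two resulting sequences (hypothetical offsets and
   scratch register, not taken from any particular function): a small
   adjustment such as 'sp += -88' satisfies constraint J and becomes a
   single

	addi	sp, sp, -88

   while a large one such as 'sp += -40000' is materialized into a
   scratch register first, e.g.

	moveli	r29, lo16(-40000)
	auli	r29, r29, ha16(-40000)
	add	sp, sp, r29

   with the register chosen by decrementing *next_scratch_regno.  */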
/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilepro_current_function_is_leaf (void)
{
  return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
}
/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilepro_saved_regs_size ()
		    + crtl->outgoing_args_size
		    + crtl->args.pretend_args_size);

  if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
bool
tilepro_can_use_return_insn_p (void)
{
  return (reload_completed
	  && cfun->static_chain_decl == 0
	  && compute_total_frame_size () == 0
	  && tilepro_current_function_is_leaf ()
	  && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
}
/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that.  Otherwise it uses the stack pointer.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      base_reg_rtx = stack_pointer_rtx;
      offset_from_base = offset_from_sp;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the new value of the stack pointer.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = gen_int_si (offset_from_base);

  if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
    {
      emit_insn (gen_rtx_SET (tmp_reg_rtx,
			      gen_rtx_PLUS (Pmode, base_reg_rtx,
					    offset_rtx)));
    }

  return tmp_reg_rtx;
}
/* The stack frame looks like this:

   AP ->  +-------------+
          |     ...     |
   HFP -> +-------------+
          |  reg save   | crtl->args.pretend_args_size bytes
          +-------------+
          | saved regs  | tilepro_saved_regs_size() bytes
   FP ->  +-------------+
          |    vars     | get_frame_size() bytes
          +-------------+
          | stack args  | crtl->outgoing_args_size bytes
          +-------------+
          |     HFP     | 4 bytes (only here if nonleaf / alloca)
          +-------------+
          |  callee lr  | 4 bytes (only here if nonleaf / alloca)
   SP ->  +-------------+

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
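
/* Worked example of the layout above (hypothetical nonleaf function):
   with 16 bytes of pretend args, two saved registers (8 bytes), 24
   bytes of local variables and no outgoing stack args,
   compute_total_frame_size () is
   round_frame_size (24 + 8 + 0 + 16 + STACK_POINTER_OFFSET).  */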
void
tilepro_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx_insn *insn;
  rtx cfa;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
    {
      FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
			     stack_pointer_rtx, stack_pointer_rtx, 0));
      emit_insn (gen_blockage ());
    }

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
	load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
				  hard_frame_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
	 it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilepro_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
		      stack_pointer_rtx);
    }

  if (tilepro_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
		      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
	 backtracing.  First we need to create an rtx with the store
	 address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
	{
	  /* Expose more parallelism by computing this value from the
	     original stack pointer, not the one after we have pushed
	     the frame.  */
	  rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
	  emit_insn (gen_rtx_SET (chain_addr, p));
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	}
      else
	{
	  /* The stack frame is large, so just store the incoming sp
	     value at *(new_sp + UNITS_PER_WORD).  */
	  rtx p;
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	  p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			    GEN_INT (UNITS_PER_WORD));
	  emit_insn (gen_rtx_SET (chain_addr, p));
	}

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movsi (gen_frame_mem (SImode, chain_addr),
			    gen_rtx_REG (SImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	int from_regno;
	int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

	if (r == NULL_RTX)
	  {
	    rtx p = compute_frame_addr (offset, &next_scratch_regno);
	    r = gen_rtx_REG (word_mode, next_scratch_regno--);
	    reg_save_addr[which_scratch] = r;

	    emit_insn (gen_rtx_SET (r, p));
	  }
	else
	  {
	    /* Advance to the next stack slot to store this register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (r, p));
	  }

	/* Save this register to the stack (but use the old fp value
	   we copied aside if appropriate).  */
	from_regno = (fp_copy_regno >= 0
		      && regno == HARD_FRAME_POINTER_REGNUM)
	  ? fp_copy_regno : regno;
	FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  */
void
tilepro_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx_insn *last_insn, *insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEPRO_LINK_REGNUM,
			      compute_frame_addr (0, &next_scratch_regno),
			      &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  REG_NOTES (insn) = cfa_restores;
	}
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	if (r == NULL_RTX)
	  {
	    r = compute_frame_addr (offset, &next_scratch_regno);
	    reg_save_addr[which_scratch] = r;
	  }
	else
	  {
	    /* Advance to the next stack slot to store this
	       register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (r, p));
	  }

	if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
	  frame_emit_load (fp_copy_regno, r, NULL);
	else
	  frame_emit_load (regno, r, &cfa_restores);

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilepro_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
	 pointer.  */
      insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
					hard_frame_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
			     cfa_restores);
    }

  if (crtl->calls_eh_return)
    emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
			      EH_RETURN_STACKADJ_RTX));

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx,
			     gen_rtx_REG (Pmode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      /* Emit the actual 'return' instruction.  */
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE
/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
tilepro_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
	      - tilepro_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}
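
/* Worked example (hypothetical frame): with total_size == 64,
   pretend_args_size == 0 and 8 bytes of saved registers, the
   FRAME_POINTER -> STACK_POINTER elimination offset is 64 - 0 - 8 = 56,
   and the ARG_POINTER -> STACK_POINTER offset is
   STACK_POINTER_OFFSET + 64.  */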
/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
}
/* Implement EH_RETURN_HANDLER_RTX.  */
rtx
tilepro_eh_return_handler_rtx (void)
{
  /* The MEM needs to be volatile to prevent it from being
     deleted.  */
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
static bool
tilepro_frame_pointer_required (void)
{
  return crtl->calls_eh_return || cfun->calls_alloca;
}
3884 /* Scheduling and reorg */
3886 /* Return the length of INSN. LENGTH is the initial length computed
3887 by attributes in the machine-description file. This is where we
3888 account for bundles. */
3890 tilepro_adjust_insn_length (rtx_insn
*insn
, int length
)
3892 machine_mode mode
= GET_MODE (insn
);
3894 /* A non-termininating instruction in a bundle has length 0. */
3898 /* By default, there is not length adjustment. */
3903 /* Implement TARGET_SCHED_ISSUE_RATE. */
3905 tilepro_issue_rate (void)
3911 /* Return the rtx for the jump target. */
3913 get_jump_target (rtx branch
)
3915 if (CALL_P (branch
))
3918 call
= PATTERN (branch
);
3920 if (GET_CODE (call
) == PARALLEL
)
3921 call
= XVECEXP (call
, 0, 0);
3923 if (GET_CODE (call
) == SET
)
3924 call
= SET_SRC (call
);
3926 if (GET_CODE (call
) == CALL
)
3927 return XEXP (XEXP (call
, 0), 0);
3932 /* Implement TARGET_SCHED_ADJUST_COST. */
3934 tilepro_sched_adjust_cost (rtx_insn
*insn
, int dep_type
, rtx_insn
*dep_insn
,
3935 int cost
, unsigned int)
3937 /* If we have a true dependence, INSN is a call, and DEP_INSN
3938 defines a register that is needed by the call (argument or stack
3939 pointer), set its latency to 0 so that it can be bundled with
3940 the call. Explicitly check for and exclude the case when
3941 DEP_INSN defines the target of the jump. */
3942 if (CALL_P (insn
) && dep_type
== REG_DEP_TRUE
)
3944 rtx target
= get_jump_target (insn
);
3945 if (!REG_P (target
) || !set_of (target
, dep_insn
))
3953 /* Skip over irrelevant NOTEs and such and look for the next insn we
3954 would consider bundling. */
3956 next_insn_to_bundle (rtx_insn
*r
, rtx_insn
*end
)
3958 for (; r
!= end
; r
= NEXT_INSN (r
))
3960 if (NONDEBUG_INSN_P (r
)
3961 && GET_CODE (PATTERN (r
)) != USE
3962 && GET_CODE (PATTERN (r
)) != CLOBBER
)
3970 /* Go through all insns, and use the information generated during
3971 scheduling to generate SEQUENCEs to represent bundles of
3972 instructions issued simultaneously. */
3974 tilepro_gen_bundles (void)
3977 FOR_EACH_BB_FN (bb
, cfun
)
3979 rtx_insn
*insn
, *next
;
3980 rtx_insn
*end
= NEXT_INSN (BB_END (bb
));
3982 for (insn
= next_insn_to_bundle (BB_HEAD (bb
), end
); insn
; insn
= next
)
3984 next
= next_insn_to_bundle (NEXT_INSN (insn
), end
);
3986 /* Never wrap {} around inline asm. */
3987 if (GET_CODE (PATTERN (insn
)) != ASM_INPUT
)
3989 if (next
== NULL_RTX
|| GET_MODE (next
) == TImode
3990 /* NOTE: The scheduler incorrectly believes a call
3991 insn can execute in the same cycle as the insn
3992 after the call. This is of course impossible.
3993 Really we need to fix the scheduler somehow, so
3994 the code after the call gets scheduled
3998 /* Mark current insn as the end of a bundle. */
3999 PUT_MODE (insn
, QImode
);
4003 /* Mark it as part of a bundle. */
4004 PUT_MODE (insn
, SImode
);
4012 /* Helper function for tilepro_fixup_pcrel_references. */
4014 replace_pc_relative_symbol_ref (rtx_insn
*insn
, rtx opnds
[4], bool first_insn_p
)
4016 rtx_insn
*new_insns
;
4024 emit_insn (gen_add_got16 (opnds
[0], tilepro_got_rtx (),
4026 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4033 emit_insn (gen_addhi_got32 (opnds
[0], tilepro_got_rtx (),
4038 emit_insn (gen_addlo_got32 (opnds
[0], opnds
[1], opnds
[2]));
4039 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4043 new_insns
= get_insns ();
4047 emit_insn_before (new_insns
, insn
);
4053 /* Returns whether INSN is a pc-relative addli insn. */
4055 match_addli_pcrel (rtx_insn
*insn
)
4057 rtx pattern
= PATTERN (insn
);
4060 if (GET_CODE (pattern
) != SET
)
4063 if (GET_CODE (SET_SRC (pattern
)) != LO_SUM
)
4066 if (GET_CODE (XEXP (SET_SRC (pattern
), 1)) != CONST
)
4069 unspec
= XEXP (XEXP (SET_SRC (pattern
), 1), 0);
4071 return (GET_CODE (unspec
) == UNSPEC
4072 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4076 /* Helper function for tilepro_fixup_pcrel_references. */
4078 replace_addli_pcrel (rtx_insn
*insn
)
4080 rtx pattern
= PATTERN (insn
);
4086 gcc_assert (GET_CODE (pattern
) == SET
);
4087 opnds
[0] = SET_DEST (pattern
);
4089 set_src
= SET_SRC (pattern
);
4090 gcc_assert (GET_CODE (set_src
) == LO_SUM
);
4091 gcc_assert (GET_CODE (XEXP (set_src
, 1)) == CONST
);
4092 opnds
[1] = XEXP (set_src
, 0);
4094 unspec
= XEXP (XEXP (set_src
, 1), 0);
4095 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4096 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4097 opnds
[2] = XVECEXP (unspec
, 0, 0);
4098 opnds
[3] = XVECEXP (unspec
, 0, 1);
4100 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4101 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4104 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4106 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4110 /* Returns whether INSN is a pc-relative auli insn. */
4112 match_auli_pcrel (rtx_insn
*insn
)
4114 rtx pattern
= PATTERN (insn
);
4118 if (GET_CODE (pattern
) != SET
)
4121 if (GET_CODE (SET_SRC (pattern
)) != PLUS
)
4124 high
= XEXP (SET_SRC (pattern
), 1);
4126 if (GET_CODE (high
) != HIGH
4127 || GET_CODE (XEXP (high
, 0)) != CONST
)
4130 unspec
= XEXP (XEXP (high
, 0), 0);
4132 return (GET_CODE (unspec
) == UNSPEC
4133 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4137 /* Helper function for tilepro_fixup_pcrel_references. */
4139 replace_auli_pcrel (rtx_insn
*insn
)
4141 rtx pattern
= PATTERN (insn
);
4148 gcc_assert (GET_CODE (pattern
) == SET
);
4149 opnds
[0] = SET_DEST (pattern
);
4151 set_src
= SET_SRC (pattern
);
4152 gcc_assert (GET_CODE (set_src
) == PLUS
);
4153 opnds
[1] = XEXP (set_src
, 0);
4155 high
= XEXP (set_src
, 1);
4156 gcc_assert (GET_CODE (high
) == HIGH
);
4157 gcc_assert (GET_CODE (XEXP (high
, 0)) == CONST
);
4159 unspec
= XEXP (XEXP (high
, 0), 0);
4160 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4161 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4162 opnds
[2] = XVECEXP (unspec
, 0, 0);
4163 opnds
[3] = XVECEXP (unspec
, 0, 1);
4165 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4166 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4169 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4171 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4175 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4176 going through the GOT when the symbol is local to the compilation
4177 unit. But such a symbol requires that the common text_label that
4178 we generate at the beginning of the function be in the same section
4179 as the reference to the SYMBOL_REF. This may not be true if we
4180 generate hot/cold sections. This function looks for such cases and
4181 replaces such references with the longer sequence going through the
4184 We expect one of the following two instruction sequences:
4185 addli tmp1, txt_label_reg, lo16(sym - txt_label)
4186 auli tmp2, tmp1, ha16(sym - txt_label)
4188 auli tmp1, txt_label_reg, ha16(sym - txt_label)
4189 addli tmp2, tmp1, lo16(sym - txt_label)
4191 If we're compiling -fpic, we replace the first instruction with
4192 nothing, and the second instruction with:
4194 addli tmp2, got_rtx, got(sym)
4197 If we're compiling -fPIC, we replace the first instruction with:
4199 auli tmp1, got_rtx, got_ha16(sym)
4201 and the second instruction with:
4203 addli tmp2, tmp1, got_lo16(sym)
4206 Note that we're careful to disturb the instruction sequence as
4207 little as possible, since it's very late in the compilation
4211 tilepro_fixup_pcrel_references (void)
4213 rtx_insn
*insn
, *next_insn
;
4214 bool same_section_as_entry
= true;
4216 for (insn
= get_insns (); insn
; insn
= next_insn
)
4218 next_insn
= NEXT_INSN (insn
);
4220 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_SWITCH_TEXT_SECTIONS
)
4222 same_section_as_entry
= !same_section_as_entry
;
4226 if (same_section_as_entry
)
4230 && GET_CODE (PATTERN (insn
)) != USE
4231 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
4234 if (match_addli_pcrel (insn
))
4235 replace_addli_pcrel (insn
);
4236 else if (match_auli_pcrel (insn
))
4237 replace_auli_pcrel (insn
);
4242 /* Ensure that no var tracking notes are emitted in the middle of a
4243 three-instruction bundle. */
4245 reorder_var_tracking_notes (void)
4248 FOR_EACH_BB_FN (bb
, cfun
)
4250 rtx_insn
*insn
, *next
;
4251 rtx_insn
*queue
= NULL
;
4252 bool in_bundle
= false;
4254 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4256 next
= NEXT_INSN (insn
);
4260 /* Emit queued up notes at the last instruction of a bundle. */
4261 if (GET_MODE (insn
) == QImode
)
4265 rtx_insn
*next_queue
= PREV_INSN (queue
);
4266 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4267 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4268 SET_NEXT_INSN (insn
) = queue
;
4269 SET_PREV_INSN (queue
) = insn
;
4274 else if (GET_MODE (insn
) == SImode
)
4277 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4281 rtx_insn
*prev
= PREV_INSN (insn
);
4282 SET_PREV_INSN (next
) = prev
;
4283 SET_NEXT_INSN (prev
) = next
;
4285 SET_PREV_INSN (insn
) = queue
;
4294 /* Perform machine dependent operations on the rtl chain INSNS. */
4296 tilepro_reorg (void)
4298 /* We are freeing block_for_insn in the toplev to keep compatibility
4299 with old MDEP_REORGS that are not CFG based. Recompute it
4301 compute_bb_for_insn ();
4303 if (flag_reorder_blocks_and_partition
)
4305 tilepro_fixup_pcrel_references ();
4308 if (flag_schedule_insns_after_reload
)
4312 timevar_push (TV_SCHED2
);
4314 timevar_pop (TV_SCHED2
);
4316 /* Examine the schedule to group into bundles. */
4317 tilepro_gen_bundles ();
4322 if (flag_var_tracking
)
4324 timevar_push (TV_VAR_TRACKING
);
4325 variable_tracking_main ();
4326 reorder_var_tracking_notes ();
4327 timevar_pop (TV_VAR_TRACKING
);
4330 df_finish_pass (false);
/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
}
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			     tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worthwhile.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  shorten_branches (insn);
  assemble_start_function (thunk_fndecl, fnname);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();
  assemble_end_function (thunk_fndecl, fnname);

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
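/* Roughly, the emitted thunk adds DELTA to the incoming `this' pointer
   (r0, or r1 for aggregate returns), optionally applies the vtable call
   offset through the scratch register r29, and then tail-jumps to
   FUNCTION.  */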
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilepro_asm_trampoline_template (FILE *file)
{
  fprintf (file, "\tlnk r10\n");
  fprintf (file, "\taddi r10, r10, 32\n");
  fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
  fprintf (file, "\tlw r10, r10\n");
  fprintf (file, "\tjr r11\n");
  fprintf (file, "\t.word 0 # <function address>\n");
  fprintf (file, "\t.word 0 # <static chain value>\n");
}
/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
			TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
			TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
					      TRAMPOLINE_SIZE));

  maybe_emit_call_builtin___clear_cache (begin_addr, end_addr);
}
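/* The two emit_move_insn calls above fill in the two trailing ".word 0"
   slots of the trampoline template: the word at
   TRAMPOLINE_SIZE - 2 * ptr_mode_size receives the target function's
   address and the last word receives the static chain value, which the
   template code loads into r11 (the jump target) and r10 respectively.  */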
/* Implement TARGET_PRINT_OPERAND.  */
static void
tilepro_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("z", file);
	  break;
	case NE:
	  fputs ("nz", file);
	  break;
	default:
	  output_operand_lossage ("invalid %%c operand");
	}
      return;

    case 'C':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("nz", file);
	  break;
	case NE:
	  fputs ("z", file);
	  break;
	default:
	  output_operand_lossage ("invalid %%C operand");
	}
      return;

    case 'h':
      {
	/* Print the high 16 bits of a 32-bit constant.  */
	HOST_WIDE_INT i;

	if (CONST_INT_P (x))
	  i = INTVAL (x);
	else if (GET_CODE (x) == CONST_DOUBLE)
	  i = CONST_DOUBLE_LOW (x);
	else
	  {
	    output_operand_lossage ("invalid %%h operand");
	    return;
	  }

	i = trunc_int_for_mode (i >> 16, HImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'H':
      {
	rtx addr = NULL;
	const char *opstr = NULL;
	bool pcrel = false;

	if (GET_CODE (x) == CONST
	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
	  {
	    addr = XVECEXP (XEXP (x, 0), 0, 0);
	    switch (XINT (XEXP (x, 0), 1))
	      {
	      case UNSPEC_GOT32_SYM:
		opstr = "got_ha16";
		break;
	      case UNSPEC_PCREL_SYM:
		opstr = "ha16";
		pcrel = true;
		break;
	      case UNSPEC_TLS_GD:
		opstr = "tls_gd_ha16";
		break;
	      case UNSPEC_TLS_IE:
		opstr = "tls_ie_ha16";
		break;
	      case UNSPEC_TLS_LE:
		opstr = "tls_le_ha16";
		break;
	      default:
		output_operand_lossage ("invalid %%H operand");
	      }
	  }
	else
	  {
	    addr = x;
	    opstr = "ha16";
	  }

	fputs (opstr, file);
	fputc ('(', file);
	output_addr_const (file, addr);

	if (pcrel)
	  {
	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
	    fputs (" - " , file);
	    output_addr_const (file, addr2);
	  }

	fputc (')', file);
	return;
      }

    case 'I':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
	{
	  output_operand_lossage ("invalid %%I operand");
	  return;
	}

      output_memory_autoinc_first = true;
      output_address (GET_MODE (x), XEXP (x, 0));
      return;

    case 'i':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
	{
	  output_operand_lossage ("invalid %%i operand");
	  return;
	}

      output_memory_autoinc_first = false;
      output_address (GET_MODE (x), XEXP (x, 0));
      return;

    case 'j':
      {
	/* Print the low 8 bits of a constant.  */
	HOST_WIDE_INT i;

	if (CONST_INT_P (x))
	  i = INTVAL (x);
	else if (GET_CODE (x) == CONST_DOUBLE)
	  i = CONST_DOUBLE_LOW (x);
	else if (GET_CODE (x) == CONST_VECTOR
		 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
	  i = INTVAL (CONST_VECTOR_ELT (x, 0));
	else
	  {
	    output_operand_lossage ("invalid %%j operand");
	    return;
	  }

	i = trunc_int_for_mode (i, QImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'L':
      {
	rtx addr = NULL;
	const char *opstr = NULL;
	bool pcrel = false;

	if (GET_CODE (x) == CONST
	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
	  {
	    addr = XVECEXP (XEXP (x, 0), 0, 0);
	    switch (XINT (XEXP (x, 0), 1))
	      {
	      case UNSPEC_GOT16_SYM:
		opstr = "got";
		break;
	      case UNSPEC_GOT32_SYM:
		opstr = "got_lo16";
		break;
	      case UNSPEC_PCREL_SYM:
		opstr = "lo16";
		pcrel = true;
		break;
	      case UNSPEC_TLS_GD:
		opstr = "tls_gd_lo16";
		break;
	      case UNSPEC_TLS_IE:
		opstr = "tls_ie_lo16";
		break;
	      case UNSPEC_TLS_LE:
		opstr = "tls_le_lo16";
		break;
	      default:
		output_operand_lossage ("invalid %%L operand");
	      }
	  }
	else
	  {
	    addr = x;
	    opstr = "lo16";
	  }

	fputs (opstr, file);
	fputc ('(', file);
	output_addr_const (file, addr);

	if (pcrel)
	  {
	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
	    fputs (" - " , file);
	    output_addr_const (file, addr2);
	  }

	fputc (')', file);
	return;
      }

    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
	    fprintf (file, "plt(");
	  output_addr_const (file, x);
	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
	    fprintf (file, ")");
	}
      else
	output_addr_const (file, x);
      return;

    case 'P':
      {
	/* Print a 32-bit constant plus one.  */
	HOST_WIDE_INT i;

	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%P operand");
	    return;
	  }

	i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'M':
      {
	/* Print an mm-style bit range.  */
	int first_bit, last_bit;

	if (!CONST_INT_P (x)
	    || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
					    &last_bit))
	  {
	    output_operand_lossage ("invalid %%M operand");
	    return;
	  }

	fprintf (file, "%d, %d", first_bit, last_bit);
	return;
      }

    case 'N':
      {
	const char *reg = NULL;

	/* Print a network register.  */
	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%N operand");
	    return;
	  }

	switch (INTVAL (x))
	  {
	  case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
	  case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
	  case TILEPRO_NETREG_SN:   reg = "sn";   break;
	  case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
	  case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
	  case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
	  case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
	  default: gcc_unreachable ();
	  }

	fprintf (file, reg);
	return;
      }

    case 't':
      {
	/* Log base 2 of a power of two.  */
	HOST_WIDE_INT i;
	HOST_WIDE_INT n;

	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%t operand");
	    return;
	  }

	n = trunc_int_for_mode (INTVAL (x), SImode);
	i = exact_log2 (n);
	if (i < 0)
	  {
	    output_operand_lossage ("invalid %%t operand");
	    return;
	  }

	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'r':
      /* In this case we need a register.  Use 'zero' if the
	 operand is const0_rtx.  */
      if (x == const0_rtx
	  || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
	{
	  fputs ("zero", file);
	  return;
	}
      else if (!REG_P (x))
	{
	  output_operand_lossage ("invalid %%r operand");
	  return;
	}
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
	{
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;
	}
      else if (MEM_P (x))
	{
	  output_address (VOIDmode, XEXP (x, 0));
	  return;
	}
      else
	{
	  output_addr_const (file, x);
	  return;
	}
    }

  debug_rtx (x);
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
			  code, code);
}
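/* As an illustration, the %H and %L codes are typically used in the md
   templates to split a 32-bit symbolic constant into its ha16()/lo16()
   halves, so that address formation comes out as something like:

	auli	r3, zero, ha16(sym)
	addli	r3, r3, lo16(sym)

   (The exact templates live in tilepro.md.)  */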
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilepro_print_operand_address (FILE *file, machine_mode mode, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (mode);

      gcc_assert (mode != VOIDmode);

      if (output_memory_autoinc_first)
	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
	fprintf (file, "%d",
		 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilepro_print_operand (file, addr, 'r');
}
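/* For a post-increment memory operand the md template typically references
   the same MEM twice, e.g. something like "lwadd\t%0, %I1, %i1": %I prints
   the base register (output_memory_autoinc_first == true) and %i prints the
   signed increment, producing assembly such as "lwadd r11, r10, 4".  */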
/* Machine mode of current insn, for determining curly brace
   encapsulation.  */
static machine_mode insn_mode;


/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx_insn *insn)
{
  /* Record this for tilepro_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}


/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilepro_in_bundle = false;
/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilepro_gen_bundles.  */
const char *
tilepro_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilepro_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilepro_in_bundle = true;
    }

  if (tilepro_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilepro_in_bundle = false;

      return buf;
    }
  else
    return pseudo ? "" : code;
}
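/* The net effect on the assembly output is roughly:

	{
	<insn marked SImode>
	...
	<final insn of the bundle, marked QImode>
	}

   Insns whose mode was set to SImode by tilepro_gen_bundles open (or
   continue) a bundle, and the QImode insn closes it; a lone QImode insn
   outside a bundle is emitted without braces.  */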
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilepro_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
	       "\t{\n"
	       "\tmove\tr10, lr\n"
	       "\tjal\tplt(%s)\n"
	       "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
	       "\t{\n"
	       "\tmove\tr10, lr\n"
	       "\tjal\t%s\n"
	       "\t}\n", MCOUNT_NAME);
    }

  tilepro_in_bundle = false;
}
/* Implement TARGET_ASM_FILE_END.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}
#undef  TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilepro_option_override

#ifdef TARGET_THREAD_SSP_OFFSET
#undef  TARGET_STACK_PROTECT_GUARD
#define TARGET_STACK_PROTECT_GUARD hook_tree_void_null
#endif

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p

#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p

#undef  TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory

#undef  TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilepro_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilepro_function_value

#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilepro_libcall_value

#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list

#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs

#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilepro_rtx_costs

/* Limit to what we can reach in one addli.  */
#undef  TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p

#undef  TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address

#undef  TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilepro_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilepro_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required

#undef  TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef  TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg

#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilepro_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address

#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilepro_file_end

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef  TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilepro.h"