/* Subroutines used for code generation on the Tilera TILEPro.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "langhooks.h"
#include "insn-codes.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "sched-int.h"
#include "sel-sched.h"
#include "tm-constrs.h"
#include "target-def.h"
#include "fold-const.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tilepro-builtins.h"
#include "tilepro-multiply.h"
#include "diagnostic.h"
/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
/* Implement TARGET_OPTION_OVERRIDE.  */
tilepro_option_override (void)

  /* When modulo scheduling is enabled, we still rely on regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
tilepro_scalar_mode_supported_p (machine_mode mode)

/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
tile_vector_mode_supported_p (machine_mode mode)

  return mode == V4QImode || mode == V2HImode;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
tilepro_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
				rtx x ATTRIBUTE_UNUSED)

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			   machine_mode mode ATTRIBUTE_UNUSED,
			   const_tree type, bool named ATTRIBUTE_UNUSED)

  return (type && TYPE_SIZE (type)
	  && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
/* Implement TARGET_RETURN_IN_MEMORY.  */
tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)

  return !IN_RANGE (int_size_in_bytes (type),
		    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
tilepro_function_arg_boundary (machine_mode mode, const_tree type)

  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
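
/* Note on the clamping above: the boundary returned is the type's own
   alignment, never below PARM_BOUNDARY nor above STACK_BOUNDARY.
   Assuming the usual 32-bit TILEPro values (PARM_BOUNDARY == 32,
   STACK_BOUNDARY == 64), a char argument is still passed with 32-bit
   alignment, while an over-aligned type is clamped to 64 bits.  */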
/* Implement TARGET_FUNCTION_ARG.  */
tilepro_function_arg (cumulative_args_t cum_v,
		      const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  if (cum >= TILEPRO_NUM_ARG_REGS)

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* The ABI does not allow parameters to be passed partially in reg
     and partially in stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEPRO_NUM_ARG_REGS)

  return gen_rtx_REG (mode, cum);
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
tilepro_function_arg_advance (cumulative_args_t cum_v,
			      const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* If the current argument does not fit in the pretend_args space,
     ...  */
  if (*cum < TILEPRO_NUM_ARG_REGS
      && *cum + word_size > TILEPRO_NUM_ARG_REGS)
    *cum = TILEPRO_NUM_ARG_REGS;
/* Implement TARGET_FUNCTION_VALUE.  */
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)

  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,

  return gen_rtx_REG (mode, 0);
/* Implement TARGET_LIBCALL_VALUE.  */
tilepro_libcall_value (machine_mode mode,
		       const_rtx fun ATTRIBUTE_UNUSED)

  return gen_rtx_REG (mode, 0);

/* Implement FUNCTION_VALUE_REGNO_P.  */
tilepro_function_value_regno_p (const unsigned int regno)

  return regno < TILEPRO_NUM_RETURN_REGS;
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
tilepro_build_builtin_va_list (void)

  tree f_args, f_skip, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
			  get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it too.  It is an
     internal type so hide the warnings from the user.  */

  layout_type (record);

  /* The correct type is an array type of one element.  */
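
/* Rough shape of the va_list type built above (a sketch, not the
   exact tree representation; __args and __skip are the FIELD_DECLs
   created here, and the final type is the one-element array of this
   record that the comment above refers to):

     struct __va_list_tag
     {
       void *__args;   - next argument save location
       void *__skip;   - start of the stack-passed argument area
     };
*/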
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)

  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);

  build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
				   (crtl->args.info - TILEPRO_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
tilepro_setup_incoming_varargs (cumulative_args_t cum,
				tree type, int *pretend_args, int no_rtl)

  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),

  first_reg = local_cum;

  if (local_cum < TILEPRO_NUM_ARG_REGS)

  *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);

  alias_set_type set = get_varargs_alias_set ();

  gen_rtx_MEM (BLKmode, plus_constant (Pmode,
				       virtual_incoming_args_rtx,
				       -STACK_POINTER_OFFSET -
				       (TILEPRO_NUM_ARG_REGS -

  MEM_NOTRAP_P (tmp) = 1;
  set_mem_alias_set (tmp, set);
  move_block_from_reg (first_reg, tmp,
		       TILEPRO_NUM_ARG_REGS - first_reg);
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

   paddedsize = (sizeof(TYPE) + 3) & -4;
   if ((VALIST.__args + paddedsize > VALIST.__skip)
       & (VALIST.__args <= VALIST.__skip))
     addr = VALIST.__skip + STACK_POINTER_OFFSET;
   else
     addr = VALIST.__args;
   VALIST.__args = addr + paddedsize;
   ret = *(TYPE *)addr;  */
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			      gimple_seq *post_p ATTRIBUTE_UNUSED)

  HOST_WIDE_INT size, rsize;

  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);

  build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* If an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)

      /* Assert the only case we generate code for: when
	 stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
/* Implement TARGET_RTX_COSTS.  */
tilepro_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,

      /* If this is an 8-bit constant, return zero since it can be
	 used nearly anywhere with no cost.  If it is a valid operand
	 for an ADD or AND, likewise return 0 if we know it will be
	 used in that context.  Otherwise, return 2 since it might be
	 used there later.  All other constants take at least two
	 ...  */
      if (satisfies_constraint_I (x))

      else if (outer_code == PLUS && add_operand (x, VOIDmode))

	  /* Slightly penalize large constants even though we can add
	     them in one instruction, because it forces the use of
	     2-wide bundling mode.  */

      else if (move_operand (x, SImode))

	  /* We can materialize in one move.  */
	  *total = COSTS_N_INSNS (1);

	  /* We can materialize in two moves.  */
	  *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (4);

      /* If outer-code was a sign or zero extension, a cost of
	 COSTS_N_INSNS (1) was already added in, so account for
	 ...  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);

      /* Convey that s[123]a are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))

	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
			      (enum rtx_code) outer_code, opno, speed)
		    + rtx_cost (XEXP (x, 1),
				(enum rtx_code) outer_code, opno, speed)
		    + COSTS_N_INSNS (1));

      *total = COSTS_N_INSNS (2);

      if (outer_code == MULT)

      *total = COSTS_N_INSNS (1);

      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);

    case UNSPEC_VOLATILE:

	int num = XINT (x, 1);

	if (num <= TILEPRO_LAST_LATENCY_1_INSN)
	  *total = COSTS_N_INSNS (1);
	else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
	  *total = COSTS_N_INSNS (2);
	else if (num > TILEPRO_LAST_LATENCY_INSN)

	    if (outer_code == PLUS)

	    *total = COSTS_N_INSNS (1);

	    case UNSPEC_BLOCKAGE:
	    case UNSPEC_NETWORK_BARRIER:

	    case UNSPEC_LNK_AND_LABEL:

	    case UNSPEC_NETWORK_RECEIVE:
	    case UNSPEC_NETWORK_SEND:
	    case UNSPEC_TLS_GD_ADD:
	      *total = COSTS_N_INSNS (1);

	    case UNSPEC_TLS_IE_LOAD:
	      *total = COSTS_N_INSNS (2);

	      *total = COSTS_N_INSNS (3);

	      *total = COSTS_N_INSNS (4);

	    case UNSPEC_LATENCY_L2:
	      *total = COSTS_N_INSNS (8);

	    case UNSPEC_TLS_GD_CALL:
	      *total = COSTS_N_INSNS (30);

	    case UNSPEC_LATENCY_MISS:
	      *total = COSTS_N_INSNS (80);

      *total = COSTS_N_INSNS (1);
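
/* For reference, COSTS_N_INSNS (n) is GCC's standard "n instructions"
   cost unit, so the values above rank operations relative to a single
   ALU instruction: e.g. UNSPEC_LATENCY_L2 is costed as roughly 8
   instructions and UNSPEC_LATENCY_MISS as roughly 80.  */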
/* Returns an SImode integer rtx with value VAL.  */
gen_int_si (HOST_WIDE_INT val)

  return gen_int_mode (val, SImode);

/* Create a temporary variable to hold a partial result, to enable
   ...  */
create_temp_reg_if_possible (machine_mode mode, rtx default_reg)

  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
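
/* The helper above lets expanders build partial results into a fresh
   pseudo while pseudos may still be created, and otherwise (e.g.
   during reload) fall back to reusing DEFAULT_REG, typically the
   final destination, so the same expansion code works in both
   situations.  */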
/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilepro_init_machine_status (void)

  return ggc_cleared_alloc<machine_function> ();

/* Do anything needed before RTL is emitted for each function.  */
tilepro_init_expanders (void)

  /* Arrange to initialize and mark the machine per-function
     ...  */
  init_machine_status = tilepro_init_machine_status;

  if (cfun && cfun->machine && flag_pic)

      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
	gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
/* Return true if X contains a thread-local symbol.  */
tilepro_tls_referenced_p (rtx x)

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))

  /* That's all we handle in tilepro_legitimize_tls_address for
     now.  */

/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
tilepro_pic_address_needs_scratch (rtx x)
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))

/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
tilepro_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)

  switch (GET_CODE (x))

      return !tilepro_tls_referenced_p (x);

/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
tilepro_legitimate_pic_operand_p (rtx x)

  if (tilepro_pic_address_needs_scratch (x))

  if (tilepro_tls_referenced_p (x))
/* Return true if the rtx X can be used as an address operand.  */
tilepro_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,

  if (GET_CODE (x) == SUBREG)

  switch (GET_CODE (x))

      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_CODE (XEXP (x, 1)) != PLUS)

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))

  /* Check if x is a valid reg.  */

  return REGNO_OK_FOR_BASE_P (REGNO (x));
/* Return the rtx containing SYMBOL_REF to the text label.  */
tilepro_text_label_symbol (void)

  return cfun->machine->text_label_symbol;

/* Return the register storing the value of the text label.  */
tilepro_text_label_rtx (void)

  return cfun->machine->text_label_rtx;

/* Return the register storing the value of the global offset
   table.  */
tilepro_got_rtx (void)

  return cfun->machine->got_rtx;

/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
tilepro_got_symbol (void)

  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");

/* Return a reference to the got to be used by tls references.  */
tilepro_tls_got (void)

      crtl->uses_pic_offset_table = 1;
      return tilepro_got_rtx ();

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilepro_got_symbol ());
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
tilepro_legitimize_tls_address (rtx addr)

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))

      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:

	  rtx r0, temp1, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_gd_addhi (temp1, got, addr));
	  emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
	  emit_move_insn (r0, temp2);
	  emit_insn (gen_tls_gd_call (addr));
	  emit_move_insn (temp3, r0);
	  last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_INITIAL_EXEC:

	  rtx temp1, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_ie_addhi (temp1, got, addr));
	  emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
	  emit_insn (gen_tls_ie_load (temp3, temp2, addr));

	  THREAD_POINTER_REGNUM),

	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_LOCAL_EXEC:

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);

	  emit_insn (gen_tls_le_addhi (temp1,

	  THREAD_POINTER_REGNUM),

	  last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

  else if (GET_CODE (addr) == CONST)

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
tilepro_legitimize_pic_address (rtx orig,
				machine_mode mode ATTRIBUTE_UNUSED,

  if (GET_CODE (orig) == SYMBOL_REF)

      rtx address, pic_ref;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      if (SYMBOL_REF_LOCAL_P (orig))

	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  rtx text_label_symbol = tilepro_text_label_symbol ();
	  rtx text_label_rtx = tilepro_text_label_rtx ();

	  emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				      text_label_symbol));
	  emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				     text_label_symbol));

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilepro_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  emit_move_insn (reg, address);

	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);

	  emit_insn (gen_add_got16 (temp_reg,
				    tilepro_got_rtx (), orig));

	  rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	  emit_insn (gen_addhi_got32 (temp_reg2,
				      tilepro_got_rtx (), orig));
	  emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));

	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following put a REG_EQUAL note on this insn, so that
	     it can be optimized by loop.  But it causes the label to
	     be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */

  else if (GET_CODE (orig) == CONST)

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,

      tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
				      base == reg ? 0 : reg);

      if (CONST_INT_P (offset))

	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);

	  /* If we reach here, then something is seriously
	     wrong.  */

      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));

  else if (GET_CODE (orig) == LABEL_REF)

      rtx address, temp_reg;
      rtx text_label_symbol;

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      /* If not during reload, allocate another temp reg here for
	 loading in the address, so that these instructions can be
	 optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
	 don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      emit_move_insn (reg, address);
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,

  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
    return tilepro_legitimize_tls_address (x);

  return tilepro_legitimize_pic_address (x, mode, 0);

/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
tilepro_delegitimize_address (rtx x)

  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)

      switch (XINT (XEXP (x, 0), 1))

	case UNSPEC_PCREL_SYM:
	case UNSPEC_GOT16_SYM:
	case UNSPEC_GOT32_SYM:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
/* Emit code to load the PIC register.  */
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)

  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilepro_got_symbol ();
  rtx text_label_symbol = tilepro_text_label_symbol ();
  rtx text_label_rtx = tilepro_text_label_rtx ();

  emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
			      text_label_rtx, got_symbol, text_label_symbol));

  emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
			     got_symbol, text_label_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilepro_got_rtx ());
/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode SImode.  NUM is first
   truncated to fit in MODE.  */
tilepro_simd_int (rtx num, machine_mode mode)

  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

      n = 0x01010101 * (n & 0x000000FF);

      n = 0x00010001 * (n & 0x0000FFFF);

  return gen_int_si (n);
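
/* Worked example of the replication above: for a QImode constant
   0xA5, n & 0xFF == 0xA5 and 0x01010101 * 0xA5 == 0xA5A5A5A5; for an
   HImode constant 0x1234, 0x00010001 * 0x1234 == 0x12341234.  The
   byte or halfword is thus copied into every lane of the SImode
   result.  */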
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant,
   or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
   to split and "num" is its length.  lo_half and hi_half are output
   arrays that parallel "operands".  */
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])

      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
	 but we still have to handle it.  */

	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);

	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
/* Returns true iff val can be moved into a register in one
   instruction.  And if it can, it emits the code to move the
   ...

   If three_wide_only is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
expand_set_cint32_one_inst (rtx dest_reg,
			    HOST_WIDE_INT val, bool three_wide_only)

  val = trunc_int_for_mode (val, SImode);

  if (val == trunc_int_for_mode (val, QImode))

      emit_move_insn (dest_reg, GEN_INT (val));

  else if (!three_wide_only)

      rtx imm_op = GEN_INT (val);

      if (satisfies_constraint_J (imm_op)
	  || satisfies_constraint_K (imm_op)
	  || satisfies_constraint_N (imm_op)
	  || satisfies_constraint_P (imm_op))

	  emit_move_insn (dest_reg, imm_op);
/* Implement SImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)

  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;

  return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
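
/* Example: rotate_right (0x000000FF, 4) yields 0xF000000F -- the low
   four 1 bits wrap around into the top nibble.  (Note the expression
   shown above relies on 1 <= count <= 31; a count of 0 needs separate
   handling since it would shift by 32.)  */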
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)

  for (i = 0; i < 32; i++)

      unsigned HOST_WIDE_INT x = rotate_right (n, i);

      /* See if x is a power of two minus one, i.e. only consecutive 1
	 bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)

	  if (first_bit != NULL)

	  if (last_bit != NULL)
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;
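
/* The test above uses a standard bit trick: x has the form 2^k - 1
   (k consecutive 1 bits starting at bit 0) exactly when
   (x & (x + 1)) == 0.  For example, rotating 0x00F0 right by 4 gives
   x == 0x000F, and 0x000F & 0x0010 == 0, so the original value is
   recognized as a single contiguous bit field.  */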
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
expand_set_cint32 (rtx dest_reg, rtx src_val)

  int leading_zeroes, trailing_zeroes;

  int three_wide_only;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))

  /* Create a temporary variable to hold a partial result, to enable
     ...  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  trailing_zeroes = exact_log2 (val & -val);

  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)

      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,

	  /* 0xFFFFA500 becomes:
	     movei temp, 0xFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (SImode, temp,
					  GEN_INT (trailing_zeroes)));

      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,

	  /* 0x7FFFFFFF becomes:
	     ...
	     shri dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (SImode, temp,
					    GEN_INT (leading_zeroes)));

      /* Try rotating a one-instruction immediate, since rotate is
	 ...  */
      for (count = 1; count < 32; count++)

	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint32_one_inst (temp, r, three_wide_only))

	      /* 0xFFA5FFFF becomes:
		 movei temp, 0xFFFFFFA5
		 rli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));

  if (lower == trunc_int_for_mode (lower, QImode))

      /* We failed to use two 3-wide instructions, but the low 16
	 bits are a small number so just use a 2-wide + 3-wide
	 auli + addi pair rather than anything more exotic.

	 auli temp, zero, 0x1234
	 addi dest, temp, 0x56  */

  /* Fallback case: use a auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
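
/* Worked example of the auli + addli/addi fallback above: for
   val == 0x12345678, lower == 0x5678 and upper == 0x1234, so the code
   first materializes 0x1234 << 16 == 0x12340000 and then adds 0x5678
   to form the full constant.  When lower is negative (e.g.
   val == 0x1234A678 gives lower == -0x5988), upper is bumped to
   0x1235 so that (upper << 16) + lower still equals val.  */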
/* Load OP1, a 32-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander
   ...  */
tilepro_expand_set_const32 (rtx op0, rtx op1)

  machine_mode mode = GET_MODE (op0);

  if (CONST_INT_P (op1))

      /* TODO: I don't know if we want to split large constants now,
	 or wait until later (with a define_split).

	 Does splitting early help CSE?  Does it harm other
	 optimizations that might fold loads?  */
      expand_set_cint32 (op0, op1);

      temp = create_temp_reg_if_possible (mode, op0);

      /* A symbol, emit in the traditional way.  */
      emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
      emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
/* Expand a move instruction.  Return true if all work is done.  */
tilepro_expand_mov (machine_mode mode, rtx *operands)

  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))

      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))

      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))

      operands[1] = tilepro_legitimize_tls_address (operands[1]);

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))

      if (tilepro_pic_address_needs_scratch (operands[1]))
	operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))

	  operands[1] = tilepro_legitimize_pic_address (operands[1],
							(reload_in_progress ?

  /* Fixup for UNSPEC addresses.  */
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)

      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)

	  emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
				     XVECEXP (unspec, 0, 0),
				     XVECEXP (unspec, 0, 1)));

      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)

	  emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
				      XVECEXP (unspec, 0, 0)));

      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)

	  emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));

      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)

	  emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));

      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)

	  emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)

      tilepro_expand_set_const32 (operands[0], operands[1]);
/* Expand the "insv" pattern.  */
tilepro_expand_insv (rtx operands[4])

  rtx first_rtx = operands[2];
  HOST_WIDE_INT first = INTVAL (first_rtx);
  HOST_WIDE_INT width = INTVAL (operands[1]);
  rtx v = operands[3];

  /* Shift the inserted bits into position.  */
  if (CONST_INT_P (v))

      /* Shift the constant into mm position.  */
      v = gen_int_si (INTVAL (v) << first);

      /* Shift over the value to be inserted.  */
      rtx tmp = gen_reg_rtx (SImode);
      emit_insn (gen_ashlsi3 (tmp, v, first_rtx));

  /* Insert the shifted bits using an 'mm' insn.  */
  emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
			  GEN_INT (first + width - 1)));
/* Expand unaligned loads.  */
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset, bool sign)

  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)

      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

	  rtx tmp = gen_reg_rtx (mode);

	  /* Do a signed load of the second byte then shift and OR it
	     in.  */
	  emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
				  gen_lowpart (SImode, hi), GEN_INT (8)));
	  emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
				 gen_lowpart (SImode, lo),
				 gen_lowpart (SImode, tmp)));

	  /* Do two unsigned loads and use intlb to interleave
	     ...  */
	  emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
				     gen_lowpart (SImode, hi),
				     gen_lowpart (SImode, lo)));

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;

      wide_result = gen_reg_rtx (mode);

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, hi), addr_lo));

  extract_bit_field (gen_lowpart (SImode, wide_result),
		     bitsize, bit_offset % BITS_PER_UNIT,
		     !sign, gen_lowpart (SImode, dest_reg),

  if (extracted != dest_reg)
    emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
/* Expand unaligned stores.  */
tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
				HOST_WIDE_INT bit_offset)

  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_amt;

  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)

      mem_addr = adjust_address (mem, QImode, byte_offset + i);

	  store_val = expand_simple_binop (SImode, LSHIFTRT,
					   gen_lowpart (SImode, src),
					   GEN_INT (shift_amt), NULL, 1,

	  store_val = gen_lowpart (QImode, store_val);

	  store_val = gen_lowpart (QImode, src);

      emit_move_insn (mem_addr, store_val);
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load
   ...  */
tilepro_expand_movmisalign (machine_mode mode, rtx *operands)

  if (MEM_P (operands[1]))

      if (register_operand (operands[0], mode))

      tmp = gen_reg_rtx (mode);

      tilepro_expand_unaligned_load (tmp, operands[1],
				     GET_MODE_BITSIZE (mode), 0, true);

      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);

  else if (MEM_P (operands[0]))

      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      tilepro_expand_unaligned_store (operands[0], operands[1],
				      GET_MODE_BITSIZE (mode), 0);
/* Implement the addsi3 pattern.  */
tilepro_expand_addsi (rtx op0, rtx op1, rtx op2)

  /* Skip anything that only takes one instruction.  */
  if (add_operand (op2, SImode))

  /* We can only optimize ints here (it should be impossible to get
     here with any other type, but it is harmless to check).  */
  if (!CONST_INT_P (op2))

  temp = create_temp_reg_if_possible (SImode, op0);

  high = (n + (n & 0x8000)) & ~0xffff;

  emit_move_insn (temp, gen_rtx_PLUS (SImode, op1, gen_int_si (high)));
  emit_move_insn (op0, gen_rtx_PLUS (SImode, temp, gen_int_si (n - high)));
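
/* The high/low split above rounds N to the nearest multiple of
   0x10000: adding (n & 0x8000) before masking means the remainder
   n - high always fits in a signed 16-bit immediate.  For example,
   n == 0x12348765 gives high == 0x12350000 and n - high == -0x789b,
   so the addition is done as one add of the high part plus one
   16-bit immediate add.  */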
/* Implement the allocate_stack pattern (alloca).  */
tilepro_allocate_stack (rtx op0, rtx op1)

  /* Technically the correct way to initialize chain_loc is with
     gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
     sets the alias_set to that of a frame reference.  Some of our
     tests rely on some unsafe assumption about when the chaining
     update is done; we need to be conservative about reordering the
     chaining instructions.  */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
			     *entry)

  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];

/* Returns the length of the 'op' array.  */
tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)

  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of the
     last valid opcode plus one.  */
  for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)

  /* An empty array is not allowed.  */
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  */
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
						 tilepro_multiply_insn_seq

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)

      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)

	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not
	     ...  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 32);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (SImode, shift_count)));

	  /* Handle a normal two-operand instruction, such as add or
	     ...  */

	  /* Make sure we are referring to a previously computed
	     subexpression.  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
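
/* Note on the indexing above: subexprs[0] is the constant zero and
   subexprs[1] is the value being multiplied (per the seeding comment),
   so entry->lhs and entry->rhs of each table entry name earlier
   results by position.  A hypothetical sequence for multiplying by 5,
   for instance, could be encoded as one shift entry (src << 2)
   followed by an add entry combining that result with subexprs[1].  */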
/* bsearch helper function.  */
tilepro_compare_multipliers (const void *key, const void *t)

  return *(const int *) key -
    ((const struct tilepro_multiply_insn_seq *) t)->multiplier;

/* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
   ...  */
static const struct tilepro_multiply_insn_seq *
tilepro_find_multiply_insn_seq_for_constant (int multiplier)

  return ((const struct tilepro_multiply_insn_seq *)
	  bsearch (&multiplier, tilepro_multiply_insn_seq_table,
		   tilepro_multiply_insn_seq_table_size,
		   sizeof tilepro_multiply_insn_seq_table[0],
		   tilepro_compare_multipliers));
/* Try to expand a constant multiply in SImode by looking it up in a
   precompiled table.  OP0 is the result operand, OP1 is the source
   operand, and MULTIPLIER is the value of the constant.  Return true
   ...  */
tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)

  /* See if we have precomputed an efficient way to multiply by this
     constant.  */
  const struct tilepro_multiply_insn_seq *seq =
    tilepro_find_multiply_insn_seq_for_constant (multiplier);

      tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
/* Expand the mulsi pattern.  */
tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)

  if (CONST_INT_P (op2))

      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
      return tilepro_expand_const_mulsi (op0, op1, n);
/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  */
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)

  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));

      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));

  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));

      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));

/* Implement smulsi3_highpart.  */
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)

  tilepro_expand_high_multiply (op0, op1, op2, true);

/* Implement umulsi3_highpart.  */
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)

  tilepro_expand_high_multiply (op0, op1, op2, false);
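
/* The sequence above reconstructs the high 32 bits of the 64-bit
   product from 16x16 partial products: mulhh supplies the high*high
   term, the two mulhl cross terms are shifted left by 16 and added
   into the low word, and the LTU comparisons (tmp7, tmp8) capture the
   carries out of those additions.  The carries, the upper halves of
   the cross terms (arithmetic or logical right shifts by 16 depending
   on signedness), and the mulhh term are then summed to form
   RESULT.  */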
/* Compare and branches  */

/* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)

  rtx operands[2], lo_half[2], hi_half[2];
  rtx tmp, tmp0, tmp1, tmp2;

  /* Reduce the number of cases we need to handle by reversing the
     operands.  */

      /* We handle these compares directly.  */

      /* Reverse the operands.  */

      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  split_di (operands, 2, lo_half, hi_half);

  if (!reg_or_0_operand (lo_half[0], SImode))
    lo_half[0] = force_reg (SImode, lo_half[0]);

  if (!reg_or_0_operand (hi_half[0], SImode))
    hi_half[0] = force_reg (SImode, hi_half[0]);

  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
    lo_half[1] = force_reg (SImode, lo_half[1]);

  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
    hi_half[1] = force_reg (SImode, hi_half[1]);

  tmp0 = gen_reg_rtx (SImode);
  tmp1 = gen_reg_rtx (SImode);
  tmp2 = gen_reg_rtx (SImode);

      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_andsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));

      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      if (operands[1] == const0_rtx)

	  emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));

	  emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
	  emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));

      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
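
/* Pattern used above for the DImode orderings: compare the high
   halves (signed or unsigned as appropriate), separately test the
   high halves for equality with seq, compare the low halves unsigned,
   and combine with mvnz so the low-half result is used only when the
   high halves are equal.  EQ/NE instead combine the two halves'
   seq/sne results with and/or.  */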
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Return the final comparison, or NULL if we can't
   ...  */
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			     machine_mode cmp_mode)

  if (cmp_mode == DImode)

      return tilepro_emit_setcc_internal_di (res, code, op0, op1);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

      /* We have these compares.  */

      /* We do not have these compares, so we reverse the
	 ...  */

      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  if (!reg_or_0_operand (op0, SImode))
    op0 = force_reg (SImode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
    op1 = force_reg (SImode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (res, gen_rtx_fmt_ee (code, SImode, op0, op1)));
/* Implement cstore patterns.  */
tilepro_emit_setcc (rtx operands[], machine_mode cmp_mode)

  tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
			       operands[2], operands[3], cmp_mode);

/* Return whether CODE is a signed comparison.  */
signed_compare_p (enum rtx_code code)

  return (code == EQ || code == NE || code == LT || code == LE
	  || code == GT || code == GE);
/* Generate the comparison for an SImode conditional branch.  */
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		      machine_mode cmp_mode, bool eq_ne_only)

  enum rtx_code branch_code;

  /* Check for a compare against zero using a comparison we can do
     ...  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))

      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

      /* We have these compares.  */

      /* These must be reversed (except NE, but let's
	 ...  */
      code = reverse_condition (code);

  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))

      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     ...  */
	  if (!(symbolic_operand (op0, VOIDmode)
		|| (REG_P (op0) && REG_POINTER (op0))))

	      /* To compare against MIN_INT, we add MIN_INT and check
		 ...  */
	      if (n != -2147483647 - 1)

		  op0 = force_reg (SImode, op0);
		  temp = gen_reg_rtx (SImode);
		  emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
		  return gen_rtx_fmt_ee (reverse_condition (branch_code),
					 VOIDmode, temp, const0_rtx);

	  /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
	     ...  */
	  int first = exact_log2 (code == LTU ? n : n + 1);

	      op0 = force_reg (SImode, op0);
	      temp = gen_reg_rtx (SImode);
	      emit_move_insn (temp,
			      gen_rtx_LSHIFTRT (SImode, op0,
						gen_int_si (first)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
/* Generate the comparison for a conditional branch.  */
tilepro_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)

  tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],

  rtx branch_rtx = gen_rtx_SET (pc_rtx,
				gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,

  emit_jump_insn (branch_rtx);
/* Implement the movsicc pattern.  */
tilepro_emit_conditional_move (rtx cmp)

  tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
			GET_MODE (XEXP (cmp, 0)), true);

/* Return true if INSN is annotated with a REG_BR_PROB note that
   indicates it's a branch that's predicted taken.  */
cbranch_predicted_p (rtx_insn *insn)

  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

      int pred_val = XINT (x, 0);

      return pred_val >= REG_BR_PROB_BASE / 2;
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
tilepro_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
					   int regop, bool netreg_p,
					   bool reverse_predicted)

  static char buf[64];
  sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
	   (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
	   netreg_p ? 'N' : 'r', regop);
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
tilepro_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
				    const char *rev_opcode,
				    int regop, bool netreg_p)

  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())

      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);

  if (is_simple_branch)

      /* Just a simple conditional branch.  */
      tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;

  tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,

  output_asm_insn (branch_if_false, operands);

  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
2524 /* Output assembly code for a conditional branch instruction. */
2526 tilepro_output_cbranch (rtx_insn
*insn
, rtx
*operands
, bool reversed
)
2528 enum rtx_code code
= GET_CODE (operands
[1]);
2530 const char *rev_opcode
;
2533 code
= reverse_condition (code
);
2551 rev_opcode
= "blez";
2559 rev_opcode
= "bgez";
2566 tilepro_output_cbranch_with_opcode (insn
, operands
, opcode
, rev_opcode
,
2571 /* Implement the tablejump pattern. */
2573 tilepro_expand_tablejump (rtx op0
, rtx op1
)
2577 rtx table
= gen_rtx_LABEL_REF (Pmode
, op1
);
2578 rtx temp
= gen_reg_rtx (Pmode
);
2579 rtx text_label_symbol
= tilepro_text_label_symbol ();
2580 rtx text_label_rtx
= tilepro_text_label_rtx ();
2582 emit_insn (gen_addli_pcrel (temp
, text_label_rtx
,
2583 table
, text_label_symbol
));
2584 emit_insn (gen_auli_pcrel (temp
, temp
, table
, text_label_symbol
));
2585 emit_move_insn (temp
,
2586 gen_rtx_PLUS (Pmode
,
2587 convert_to_mode (Pmode
, op0
, false),
2592 emit_jump_insn (gen_tablejump_aux (op0
, op1
));
/* Expand a builtin vector binary op, by calling gen function GEN with
   operands in the proper modes.  DEST is converted to DEST_MODE, and
   src0 and src1 (if DO_SRC1 is true) are converted to SRC_MODE.  */
void
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
                                     machine_mode dest_mode,
                                     rtx dest,
                                     machine_mode src_mode,
                                     rtx src0, rtx src1, bool do_src1)
{
  dest = gen_lowpart (dest_mode, dest);

  if (src0 == const0_rtx)
    src0 = CONST0_RTX (src_mode);
  else
    src0 = gen_lowpart (src_mode, src0);

  if (do_src1)
    {
      if (src1 == const0_rtx)
        src1 = CONST0_RTX (src_mode);
      else
        src1 = gen_lowpart (src_mode, src1);
    }

  emit_insn ((*gen) (dest, src0, src1));
}
2628 struct tile_builtin_info
2630 enum insn_code icode
;
2634 static struct tile_builtin_info tilepro_builtin_info
[TILEPRO_BUILTIN_max
] = {
2635 { CODE_FOR_addsi3
, NULL
}, /* add */
2636 { CODE_FOR_insn_addb
, NULL
}, /* addb */
2637 { CODE_FOR_insn_addbs_u
, NULL
}, /* addbs_u */
2638 { CODE_FOR_insn_addh
, NULL
}, /* addh */
2639 { CODE_FOR_insn_addhs
, NULL
}, /* addhs */
2640 { CODE_FOR_insn_addib
, NULL
}, /* addib */
2641 { CODE_FOR_insn_addih
, NULL
}, /* addih */
2642 { CODE_FOR_insn_addlis
, NULL
}, /* addlis */
2643 { CODE_FOR_ssaddsi3
, NULL
}, /* adds */
2644 { CODE_FOR_insn_adiffb_u
, NULL
}, /* adiffb_u */
2645 { CODE_FOR_insn_adiffh
, NULL
}, /* adiffh */
2646 { CODE_FOR_andsi3
, NULL
}, /* and */
2647 { CODE_FOR_insn_auli
, NULL
}, /* auli */
2648 { CODE_FOR_insn_avgb_u
, NULL
}, /* avgb_u */
2649 { CODE_FOR_insn_avgh
, NULL
}, /* avgh */
2650 { CODE_FOR_insn_bitx
, NULL
}, /* bitx */
2651 { CODE_FOR_bswapsi2
, NULL
}, /* bytex */
2652 { CODE_FOR_clzsi2
, NULL
}, /* clz */
2653 { CODE_FOR_insn_crc32_32
, NULL
}, /* crc32_32 */
2654 { CODE_FOR_insn_crc32_8
, NULL
}, /* crc32_8 */
2655 { CODE_FOR_ctzsi2
, NULL
}, /* ctz */
2656 { CODE_FOR_insn_drain
, NULL
}, /* drain */
2657 { CODE_FOR_insn_dtlbpr
, NULL
}, /* dtlbpr */
2658 { CODE_FOR_insn_dword_align
, NULL
}, /* dword_align */
2659 { CODE_FOR_insn_finv
, NULL
}, /* finv */
2660 { CODE_FOR_insn_flush
, NULL
}, /* flush */
2661 { CODE_FOR_insn_fnop
, NULL
}, /* fnop */
2662 { CODE_FOR_insn_icoh
, NULL
}, /* icoh */
2663 { CODE_FOR_insn_ill
, NULL
}, /* ill */
2664 { CODE_FOR_insn_info
, NULL
}, /* info */
2665 { CODE_FOR_insn_infol
, NULL
}, /* infol */
2666 { CODE_FOR_insn_inthb
, NULL
}, /* inthb */
2667 { CODE_FOR_insn_inthh
, NULL
}, /* inthh */
2668 { CODE_FOR_insn_intlb
, NULL
}, /* intlb */
2669 { CODE_FOR_insn_intlh
, NULL
}, /* intlh */
2670 { CODE_FOR_insn_inv
, NULL
}, /* inv */
2671 { CODE_FOR_insn_lb
, NULL
}, /* lb */
2672 { CODE_FOR_insn_lb_u
, NULL
}, /* lb_u */
2673 { CODE_FOR_insn_lh
, NULL
}, /* lh */
2674 { CODE_FOR_insn_lh_u
, NULL
}, /* lh_u */
2675 { CODE_FOR_insn_lnk
, NULL
}, /* lnk */
2676 { CODE_FOR_insn_lw
, NULL
}, /* lw */
2677 { CODE_FOR_insn_lw_na
, NULL
}, /* lw_na */
2678 { CODE_FOR_insn_lb_L2
, NULL
}, /* lb_L2 */
2679 { CODE_FOR_insn_lb_u_L2
, NULL
}, /* lb_u_L2 */
2680 { CODE_FOR_insn_lh_L2
, NULL
}, /* lh_L2 */
2681 { CODE_FOR_insn_lh_u_L2
, NULL
}, /* lh_u_L2 */
2682 { CODE_FOR_insn_lw_L2
, NULL
}, /* lw_L2 */
2683 { CODE_FOR_insn_lw_na_L2
, NULL
}, /* lw_na_L2 */
2684 { CODE_FOR_insn_lb_miss
, NULL
}, /* lb_miss */
2685 { CODE_FOR_insn_lb_u_miss
, NULL
}, /* lb_u_miss */
2686 { CODE_FOR_insn_lh_miss
, NULL
}, /* lh_miss */
2687 { CODE_FOR_insn_lh_u_miss
, NULL
}, /* lh_u_miss */
2688 { CODE_FOR_insn_lw_miss
, NULL
}, /* lw_miss */
2689 { CODE_FOR_insn_lw_na_miss
, NULL
}, /* lw_na_miss */
2690 { CODE_FOR_insn_maxb_u
, NULL
}, /* maxb_u */
2691 { CODE_FOR_insn_maxh
, NULL
}, /* maxh */
2692 { CODE_FOR_insn_maxib_u
, NULL
}, /* maxib_u */
2693 { CODE_FOR_insn_maxih
, NULL
}, /* maxih */
2694 { CODE_FOR_memory_barrier
, NULL
}, /* mf */
2695 { CODE_FOR_insn_mfspr
, NULL
}, /* mfspr */
2696 { CODE_FOR_insn_minb_u
, NULL
}, /* minb_u */
2697 { CODE_FOR_insn_minh
, NULL
}, /* minh */
2698 { CODE_FOR_insn_minib_u
, NULL
}, /* minib_u */
2699 { CODE_FOR_insn_minih
, NULL
}, /* minih */
2700 { CODE_FOR_insn_mm
, NULL
}, /* mm */
2701 { CODE_FOR_insn_mnz
, NULL
}, /* mnz */
2702 { CODE_FOR_insn_mnzb
, NULL
}, /* mnzb */
2703 { CODE_FOR_insn_mnzh
, NULL
}, /* mnzh */
2704 { CODE_FOR_movsi
, NULL
}, /* move */
2705 { CODE_FOR_insn_movelis
, NULL
}, /* movelis */
2706 { CODE_FOR_insn_mtspr
, NULL
}, /* mtspr */
2707 { CODE_FOR_insn_mulhh_ss
, NULL
}, /* mulhh_ss */
2708 { CODE_FOR_insn_mulhh_su
, NULL
}, /* mulhh_su */
2709 { CODE_FOR_insn_mulhh_uu
, NULL
}, /* mulhh_uu */
2710 { CODE_FOR_insn_mulhha_ss
, NULL
}, /* mulhha_ss */
2711 { CODE_FOR_insn_mulhha_su
, NULL
}, /* mulhha_su */
2712 { CODE_FOR_insn_mulhha_uu
, NULL
}, /* mulhha_uu */
2713 { CODE_FOR_insn_mulhhsa_uu
, NULL
}, /* mulhhsa_uu */
2714 { CODE_FOR_insn_mulhl_ss
, NULL
}, /* mulhl_ss */
2715 { CODE_FOR_insn_mulhl_su
, NULL
}, /* mulhl_su */
2716 { CODE_FOR_insn_mulhl_us
, NULL
}, /* mulhl_us */
2717 { CODE_FOR_insn_mulhl_uu
, NULL
}, /* mulhl_uu */
2718 { CODE_FOR_insn_mulhla_ss
, NULL
}, /* mulhla_ss */
2719 { CODE_FOR_insn_mulhla_su
, NULL
}, /* mulhla_su */
2720 { CODE_FOR_insn_mulhla_us
, NULL
}, /* mulhla_us */
2721 { CODE_FOR_insn_mulhla_uu
, NULL
}, /* mulhla_uu */
2722 { CODE_FOR_insn_mulhlsa_uu
, NULL
}, /* mulhlsa_uu */
2723 { CODE_FOR_insn_mulll_ss
, NULL
}, /* mulll_ss */
2724 { CODE_FOR_insn_mulll_su
, NULL
}, /* mulll_su */
2725 { CODE_FOR_insn_mulll_uu
, NULL
}, /* mulll_uu */
2726 { CODE_FOR_insn_mullla_ss
, NULL
}, /* mullla_ss */
2727 { CODE_FOR_insn_mullla_su
, NULL
}, /* mullla_su */
2728 { CODE_FOR_insn_mullla_uu
, NULL
}, /* mullla_uu */
2729 { CODE_FOR_insn_mulllsa_uu
, NULL
}, /* mulllsa_uu */
2730 { CODE_FOR_insn_mvnz
, NULL
}, /* mvnz */
2731 { CODE_FOR_insn_mvz
, NULL
}, /* mvz */
2732 { CODE_FOR_insn_mz
, NULL
}, /* mz */
2733 { CODE_FOR_insn_mzb
, NULL
}, /* mzb */
2734 { CODE_FOR_insn_mzh
, NULL
}, /* mzh */
2735 { CODE_FOR_insn_nap
, NULL
}, /* nap */
2736 { CODE_FOR_nop
, NULL
}, /* nop */
2737 { CODE_FOR_insn_nor
, NULL
}, /* nor */
2738 { CODE_FOR_iorsi3
, NULL
}, /* or */
2739 { CODE_FOR_insn_packbs_u
, NULL
}, /* packbs_u */
2740 { CODE_FOR_insn_packhb
, NULL
}, /* packhb */
2741 { CODE_FOR_insn_packhs
, NULL
}, /* packhs */
2742 { CODE_FOR_insn_packlb
, NULL
}, /* packlb */
2743 { CODE_FOR_popcountsi2
, NULL
}, /* pcnt */
2744 { CODE_FOR_insn_prefetch
, NULL
}, /* prefetch */
2745 { CODE_FOR_insn_prefetch_L1
, NULL
}, /* prefetch_L1 */
2746 { CODE_FOR_rotlsi3
, NULL
}, /* rl */
2747 { CODE_FOR_insn_s1a
, NULL
}, /* s1a */
2748 { CODE_FOR_insn_s2a
, NULL
}, /* s2a */
2749 { CODE_FOR_insn_s3a
, NULL
}, /* s3a */
2750 { CODE_FOR_insn_sadab_u
, NULL
}, /* sadab_u */
2751 { CODE_FOR_insn_sadah
, NULL
}, /* sadah */
2752 { CODE_FOR_insn_sadah_u
, NULL
}, /* sadah_u */
2753 { CODE_FOR_insn_sadb_u
, NULL
}, /* sadb_u */
2754 { CODE_FOR_insn_sadh
, NULL
}, /* sadh */
2755 { CODE_FOR_insn_sadh_u
, NULL
}, /* sadh_u */
2756 { CODE_FOR_insn_sb
, NULL
}, /* sb */
2757 { CODE_FOR_insn_seq
, NULL
}, /* seq */
2758 { CODE_FOR_insn_seqb
, NULL
}, /* seqb */
2759 { CODE_FOR_insn_seqh
, NULL
}, /* seqh */
2760 { CODE_FOR_insn_seqib
, NULL
}, /* seqib */
2761 { CODE_FOR_insn_seqih
, NULL
}, /* seqih */
2762 { CODE_FOR_insn_sh
, NULL
}, /* sh */
2763 { CODE_FOR_ashlsi3
, NULL
}, /* shl */
2764 { CODE_FOR_insn_shlb
, NULL
}, /* shlb */
2765 { CODE_FOR_insn_shlh
, NULL
}, /* shlh */
2766 { CODE_FOR_insn_shlb
, NULL
}, /* shlib */
2767 { CODE_FOR_insn_shlh
, NULL
}, /* shlih */
2768 { CODE_FOR_lshrsi3
, NULL
}, /* shr */
2769 { CODE_FOR_insn_shrb
, NULL
}, /* shrb */
2770 { CODE_FOR_insn_shrh
, NULL
}, /* shrh */
2771 { CODE_FOR_insn_shrb
, NULL
}, /* shrib */
2772 { CODE_FOR_insn_shrh
, NULL
}, /* shrih */
2773 { CODE_FOR_insn_slt
, NULL
}, /* slt */
2774 { CODE_FOR_insn_slt_u
, NULL
}, /* slt_u */
2775 { CODE_FOR_insn_sltb
, NULL
}, /* sltb */
2776 { CODE_FOR_insn_sltb_u
, NULL
}, /* sltb_u */
2777 { CODE_FOR_insn_slte
, NULL
}, /* slte */
2778 { CODE_FOR_insn_slte_u
, NULL
}, /* slte_u */
2779 { CODE_FOR_insn_slteb
, NULL
}, /* slteb */
2780 { CODE_FOR_insn_slteb_u
, NULL
}, /* slteb_u */
2781 { CODE_FOR_insn_slteh
, NULL
}, /* slteh */
2782 { CODE_FOR_insn_slteh_u
, NULL
}, /* slteh_u */
2783 { CODE_FOR_insn_slth
, NULL
}, /* slth */
2784 { CODE_FOR_insn_slth_u
, NULL
}, /* slth_u */
2785 { CODE_FOR_insn_sltib
, NULL
}, /* sltib */
2786 { CODE_FOR_insn_sltib_u
, NULL
}, /* sltib_u */
2787 { CODE_FOR_insn_sltih
, NULL
}, /* sltih */
2788 { CODE_FOR_insn_sltih_u
, NULL
}, /* sltih_u */
2789 { CODE_FOR_insn_sne
, NULL
}, /* sne */
2790 { CODE_FOR_insn_sneb
, NULL
}, /* sneb */
2791 { CODE_FOR_insn_sneh
, NULL
}, /* sneh */
2792 { CODE_FOR_ashrsi3
, NULL
}, /* sra */
2793 { CODE_FOR_insn_srab
, NULL
}, /* srab */
2794 { CODE_FOR_insn_srah
, NULL
}, /* srah */
2795 { CODE_FOR_insn_srab
, NULL
}, /* sraib */
2796 { CODE_FOR_insn_srah
, NULL
}, /* sraih */
2797 { CODE_FOR_subsi3
, NULL
}, /* sub */
2798 { CODE_FOR_insn_subb
, NULL
}, /* subb */
2799 { CODE_FOR_insn_subbs_u
, NULL
}, /* subbs_u */
2800 { CODE_FOR_insn_subh
, NULL
}, /* subh */
2801 { CODE_FOR_insn_subhs
, NULL
}, /* subhs */
2802 { CODE_FOR_sssubsi3
, NULL
}, /* subs */
2803 { CODE_FOR_insn_sw
, NULL
}, /* sw */
2804 { CODE_FOR_insn_tblidxb0
, NULL
}, /* tblidxb0 */
2805 { CODE_FOR_insn_tblidxb1
, NULL
}, /* tblidxb1 */
2806 { CODE_FOR_insn_tblidxb2
, NULL
}, /* tblidxb2 */
2807 { CODE_FOR_insn_tblidxb3
, NULL
}, /* tblidxb3 */
2808 { CODE_FOR_insn_tns
, NULL
}, /* tns */
2809 { CODE_FOR_insn_wh64
, NULL
}, /* wh64 */
2810 { CODE_FOR_xorsi3
, NULL
}, /* xor */
2811 { CODE_FOR_tilepro_network_barrier
, NULL
}, /* network_barrier */
2812 { CODE_FOR_tilepro_idn0_receive
, NULL
}, /* idn0_receive */
2813 { CODE_FOR_tilepro_idn1_receive
, NULL
}, /* idn1_receive */
2814 { CODE_FOR_tilepro_idn_send
, NULL
}, /* idn_send */
2815 { CODE_FOR_tilepro_sn_receive
, NULL
}, /* sn_receive */
2816 { CODE_FOR_tilepro_sn_send
, NULL
}, /* sn_send */
2817 { CODE_FOR_tilepro_udn0_receive
, NULL
}, /* udn0_receive */
2818 { CODE_FOR_tilepro_udn1_receive
, NULL
}, /* udn1_receive */
2819 { CODE_FOR_tilepro_udn2_receive
, NULL
}, /* udn2_receive */
2820 { CODE_FOR_tilepro_udn3_receive
, NULL
}, /* udn3_receive */
2821 { CODE_FOR_tilepro_udn_send
, NULL
}, /* udn_send */
2825 struct tilepro_builtin_def
2828 enum tilepro_builtin code
;
2830 /* The first character is the return type. Subsequent characters
2831 are the argument types. See char_to_type. */
2836 static const struct tilepro_builtin_def tilepro_builtins
[] = {
2837 { "__insn_add", TILEPRO_INSN_ADD
, true, "lll" },
2838 { "__insn_addb", TILEPRO_INSN_ADDB
, true, "lll" },
2839 { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U
, false, "lll" },
2840 { "__insn_addh", TILEPRO_INSN_ADDH
, true, "lll" },
2841 { "__insn_addhs", TILEPRO_INSN_ADDHS
, false, "lll" },
2842 { "__insn_addi", TILEPRO_INSN_ADD
, true, "lll" },
2843 { "__insn_addib", TILEPRO_INSN_ADDIB
, true, "lll" },
2844 { "__insn_addih", TILEPRO_INSN_ADDIH
, true, "lll" },
2845 { "__insn_addli", TILEPRO_INSN_ADD
, true, "lll" },
2846 { "__insn_addlis", TILEPRO_INSN_ADDLIS
, false, "lll" },
2847 { "__insn_adds", TILEPRO_INSN_ADDS
, false, "lll" },
2848 { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U
, true, "lll" },
2849 { "__insn_adiffh", TILEPRO_INSN_ADIFFH
, true, "lll" },
2850 { "__insn_and", TILEPRO_INSN_AND
, true, "lll" },
2851 { "__insn_andi", TILEPRO_INSN_AND
, true, "lll" },
2852 { "__insn_auli", TILEPRO_INSN_AULI
, true, "lll" },
2853 { "__insn_avgb_u", TILEPRO_INSN_AVGB_U
, true, "lll" },
2854 { "__insn_avgh", TILEPRO_INSN_AVGH
, true, "lll" },
2855 { "__insn_bitx", TILEPRO_INSN_BITX
, true, "ll" },
2856 { "__insn_bytex", TILEPRO_INSN_BYTEX
, true, "ll" },
2857 { "__insn_clz", TILEPRO_INSN_CLZ
, true, "ll" },
2858 { "__insn_crc32_32", TILEPRO_INSN_CRC32_32
, true, "lll" },
2859 { "__insn_crc32_8", TILEPRO_INSN_CRC32_8
, true, "lll" },
2860 { "__insn_ctz", TILEPRO_INSN_CTZ
, true, "ll" },
2861 { "__insn_drain", TILEPRO_INSN_DRAIN
, false, "v" },
2862 { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR
, false, "vl" },
2863 { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN
, true, "lllk" },
2864 { "__insn_finv", TILEPRO_INSN_FINV
, false, "vk" },
2865 { "__insn_flush", TILEPRO_INSN_FLUSH
, false, "vk" },
2866 { "__insn_fnop", TILEPRO_INSN_FNOP
, false, "v" },
2867 { "__insn_icoh", TILEPRO_INSN_ICOH
, false, "vk" },
2868 { "__insn_ill", TILEPRO_INSN_ILL
, false, "v" },
2869 { "__insn_info", TILEPRO_INSN_INFO
, false, "vl" },
2870 { "__insn_infol", TILEPRO_INSN_INFOL
, false, "vl" },
2871 { "__insn_inthb", TILEPRO_INSN_INTHB
, true, "lll" },
2872 { "__insn_inthh", TILEPRO_INSN_INTHH
, true, "lll" },
2873 { "__insn_intlb", TILEPRO_INSN_INTLB
, true, "lll" },
2874 { "__insn_intlh", TILEPRO_INSN_INTLH
, true, "lll" },
2875 { "__insn_inv", TILEPRO_INSN_INV
, false, "vp" },
2876 { "__insn_lb", TILEPRO_INSN_LB
, false, "lk" },
2877 { "__insn_lb_u", TILEPRO_INSN_LB_U
, false, "lk" },
2878 { "__insn_lh", TILEPRO_INSN_LH
, false, "lk" },
2879 { "__insn_lh_u", TILEPRO_INSN_LH_U
, false, "lk" },
2880 { "__insn_lnk", TILEPRO_INSN_LNK
, true, "l" },
2881 { "__insn_lw", TILEPRO_INSN_LW
, false, "lk" },
2882 { "__insn_lw_na", TILEPRO_INSN_LW_NA
, false, "lk" },
2883 { "__insn_lb_L2", TILEPRO_INSN_LB_L2
, false, "lk" },
2884 { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2
, false, "lk" },
2885 { "__insn_lh_L2", TILEPRO_INSN_LH_L2
, false, "lk" },
2886 { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2
, false, "lk" },
2887 { "__insn_lw_L2", TILEPRO_INSN_LW_L2
, false, "lk" },
2888 { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2
, false, "lk" },
2889 { "__insn_lb_miss", TILEPRO_INSN_LB_MISS
, false, "lk" },
2890 { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS
, false, "lk" },
2891 { "__insn_lh_miss", TILEPRO_INSN_LH_MISS
, false, "lk" },
2892 { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS
, false, "lk" },
2893 { "__insn_lw_miss", TILEPRO_INSN_LW_MISS
, false, "lk" },
2894 { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS
, false, "lk" },
2895 { "__insn_maxb_u", TILEPRO_INSN_MAXB_U
, true, "lll" },
2896 { "__insn_maxh", TILEPRO_INSN_MAXH
, true, "lll" },
2897 { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U
, true, "lll" },
2898 { "__insn_maxih", TILEPRO_INSN_MAXIH
, true, "lll" },
2899 { "__insn_mf", TILEPRO_INSN_MF
, false, "v" },
2900 { "__insn_mfspr", TILEPRO_INSN_MFSPR
, false, "ll" },
2901 { "__insn_minb_u", TILEPRO_INSN_MINB_U
, true, "lll" },
2902 { "__insn_minh", TILEPRO_INSN_MINH
, true, "lll" },
2903 { "__insn_minib_u", TILEPRO_INSN_MINIB_U
, true, "lll" },
2904 { "__insn_minih", TILEPRO_INSN_MINIH
, true, "lll" },
2905 { "__insn_mm", TILEPRO_INSN_MM
, true, "lllll" },
2906 { "__insn_mnz", TILEPRO_INSN_MNZ
, true, "lll" },
2907 { "__insn_mnzb", TILEPRO_INSN_MNZB
, true, "lll" },
2908 { "__insn_mnzh", TILEPRO_INSN_MNZH
, true, "lll" },
2909 { "__insn_move", TILEPRO_INSN_MOVE
, true, "ll" },
2910 { "__insn_movei", TILEPRO_INSN_MOVE
, true, "ll" },
2911 { "__insn_moveli", TILEPRO_INSN_MOVE
, true, "ll" },
2912 { "__insn_movelis", TILEPRO_INSN_MOVELIS
, false, "ll" },
2913 { "__insn_mtspr", TILEPRO_INSN_MTSPR
, false, "vll" },
2914 { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS
, true, "lll" },
2915 { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU
, true, "lll" },
2916 { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU
, true, "lll" },
2917 { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS
, true, "llll" },
2918 { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU
, true, "llll" },
2919 { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU
, true, "llll" },
2920 { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU
, true, "llll" },
2921 { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS
, true, "lll" },
2922 { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU
, true, "lll" },
2923 { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US
, true, "lll" },
2924 { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU
, true, "lll" },
2925 { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS
, true, "llll" },
2926 { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU
, true, "llll" },
2927 { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US
, true, "llll" },
2928 { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU
, true, "llll" },
2929 { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU
, true, "llll" },
2930 { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS
, true, "lll" },
2931 { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU
, true, "lll" },
2932 { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU
, true, "lll" },
2933 { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS
, true, "llll" },
2934 { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU
, true, "llll" },
2935 { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU
, true, "llll" },
2936 { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU
, true, "llll" },
2937 { "__insn_mvnz", TILEPRO_INSN_MVNZ
, true, "llll" },
2938 { "__insn_mvz", TILEPRO_INSN_MVZ
, true, "llll" },
2939 { "__insn_mz", TILEPRO_INSN_MZ
, true, "lll" },
2940 { "__insn_mzb", TILEPRO_INSN_MZB
, true, "lll" },
2941 { "__insn_mzh", TILEPRO_INSN_MZH
, true, "lll" },
2942 { "__insn_nap", TILEPRO_INSN_NAP
, false, "v" },
2943 { "__insn_nop", TILEPRO_INSN_NOP
, true, "v" },
2944 { "__insn_nor", TILEPRO_INSN_NOR
, true, "lll" },
2945 { "__insn_or", TILEPRO_INSN_OR
, true, "lll" },
2946 { "__insn_ori", TILEPRO_INSN_OR
, true, "lll" },
2947 { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U
, false, "lll" },
2948 { "__insn_packhb", TILEPRO_INSN_PACKHB
, true, "lll" },
2949 { "__insn_packhs", TILEPRO_INSN_PACKHS
, false, "lll" },
2950 { "__insn_packlb", TILEPRO_INSN_PACKLB
, true, "lll" },
2951 { "__insn_pcnt", TILEPRO_INSN_PCNT
, true, "ll" },
2952 { "__insn_prefetch", TILEPRO_INSN_PREFETCH
, false, "vk" },
2953 { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1
, false, "vk" },
2954 { "__insn_rl", TILEPRO_INSN_RL
, true, "lll" },
2955 { "__insn_rli", TILEPRO_INSN_RL
, true, "lll" },
2956 { "__insn_s1a", TILEPRO_INSN_S1A
, true, "lll" },
2957 { "__insn_s2a", TILEPRO_INSN_S2A
, true, "lll" },
2958 { "__insn_s3a", TILEPRO_INSN_S3A
, true, "lll" },
2959 { "__insn_sadab_u", TILEPRO_INSN_SADAB_U
, true, "llll" },
2960 { "__insn_sadah", TILEPRO_INSN_SADAH
, true, "llll" },
2961 { "__insn_sadah_u", TILEPRO_INSN_SADAH_U
, true, "llll" },
2962 { "__insn_sadb_u", TILEPRO_INSN_SADB_U
, true, "lll" },
2963 { "__insn_sadh", TILEPRO_INSN_SADH
, true, "lll" },
2964 { "__insn_sadh_u", TILEPRO_INSN_SADH_U
, true, "lll" },
2965 { "__insn_sb", TILEPRO_INSN_SB
, false, "vpl" },
2966 { "__insn_seq", TILEPRO_INSN_SEQ
, true, "lll" },
2967 { "__insn_seqb", TILEPRO_INSN_SEQB
, true, "lll" },
2968 { "__insn_seqh", TILEPRO_INSN_SEQH
, true, "lll" },
2969 { "__insn_seqi", TILEPRO_INSN_SEQ
, true, "lll" },
2970 { "__insn_seqib", TILEPRO_INSN_SEQIB
, true, "lll" },
2971 { "__insn_seqih", TILEPRO_INSN_SEQIH
, true, "lll" },
2972 { "__insn_sh", TILEPRO_INSN_SH
, false, "vpl" },
2973 { "__insn_shl", TILEPRO_INSN_SHL
, true, "lll" },
2974 { "__insn_shlb", TILEPRO_INSN_SHLB
, true, "lll" },
2975 { "__insn_shlh", TILEPRO_INSN_SHLH
, true, "lll" },
2976 { "__insn_shli", TILEPRO_INSN_SHL
, true, "lll" },
2977 { "__insn_shlib", TILEPRO_INSN_SHLIB
, true, "lll" },
2978 { "__insn_shlih", TILEPRO_INSN_SHLIH
, true, "lll" },
2979 { "__insn_shr", TILEPRO_INSN_SHR
, true, "lll" },
2980 { "__insn_shrb", TILEPRO_INSN_SHRB
, true, "lll" },
2981 { "__insn_shrh", TILEPRO_INSN_SHRH
, true, "lll" },
2982 { "__insn_shri", TILEPRO_INSN_SHR
, true, "lll" },
2983 { "__insn_shrib", TILEPRO_INSN_SHRIB
, true, "lll" },
2984 { "__insn_shrih", TILEPRO_INSN_SHRIH
, true, "lll" },
2985 { "__insn_slt", TILEPRO_INSN_SLT
, true, "lll" },
2986 { "__insn_slt_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2987 { "__insn_sltb", TILEPRO_INSN_SLTB
, true, "lll" },
2988 { "__insn_sltb_u", TILEPRO_INSN_SLTB_U
, true, "lll" },
2989 { "__insn_slte", TILEPRO_INSN_SLTE
, true, "lll" },
2990 { "__insn_slte_u", TILEPRO_INSN_SLTE_U
, true, "lll" },
2991 { "__insn_slteb", TILEPRO_INSN_SLTEB
, true, "lll" },
2992 { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U
, true, "lll" },
2993 { "__insn_slteh", TILEPRO_INSN_SLTEH
, true, "lll" },
2994 { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U
, true, "lll" },
2995 { "__insn_slth", TILEPRO_INSN_SLTH
, true, "lll" },
2996 { "__insn_slth_u", TILEPRO_INSN_SLTH_U
, true, "lll" },
2997 { "__insn_slti", TILEPRO_INSN_SLT
, true, "lll" },
2998 { "__insn_slti_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2999 { "__insn_sltib", TILEPRO_INSN_SLTIB
, true, "lll" },
3000 { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U
, true, "lll" },
3001 { "__insn_sltih", TILEPRO_INSN_SLTIH
, true, "lll" },
3002 { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U
, true, "lll" },
3003 { "__insn_sne", TILEPRO_INSN_SNE
, true, "lll" },
3004 { "__insn_sneb", TILEPRO_INSN_SNEB
, true, "lll" },
3005 { "__insn_sneh", TILEPRO_INSN_SNEH
, true, "lll" },
3006 { "__insn_sra", TILEPRO_INSN_SRA
, true, "lll" },
3007 { "__insn_srab", TILEPRO_INSN_SRAB
, true, "lll" },
3008 { "__insn_srah", TILEPRO_INSN_SRAH
, true, "lll" },
3009 { "__insn_srai", TILEPRO_INSN_SRA
, true, "lll" },
3010 { "__insn_sraib", TILEPRO_INSN_SRAIB
, true, "lll" },
3011 { "__insn_sraih", TILEPRO_INSN_SRAIH
, true, "lll" },
3012 { "__insn_sub", TILEPRO_INSN_SUB
, true, "lll" },
3013 { "__insn_subb", TILEPRO_INSN_SUBB
, true, "lll" },
3014 { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U
, false, "lll" },
3015 { "__insn_subh", TILEPRO_INSN_SUBH
, true, "lll" },
3016 { "__insn_subhs", TILEPRO_INSN_SUBHS
, false, "lll" },
3017 { "__insn_subs", TILEPRO_INSN_SUBS
, false, "lll" },
3018 { "__insn_sw", TILEPRO_INSN_SW
, false, "vpl" },
3019 { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0
, true, "lll" },
3020 { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1
, true, "lll" },
3021 { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2
, true, "lll" },
3022 { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3
, true, "lll" },
3023 { "__insn_tns", TILEPRO_INSN_TNS
, false, "lp" },
3024 { "__insn_wh64", TILEPRO_INSN_WH64
, false, "vp" },
3025 { "__insn_xor", TILEPRO_INSN_XOR
, true, "lll" },
3026 { "__insn_xori", TILEPRO_INSN_XOR
, true, "lll" },
3027 { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER
, false, "v" },
3028 { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE
, false, "l" },
3029 { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE
, false, "l" },
3030 { "__tile_idn_send", TILEPRO_IDN_SEND
, false, "vl" },
3031 { "__tile_sn_receive", TILEPRO_SN_RECEIVE
, false, "l" },
3032 { "__tile_sn_send", TILEPRO_SN_SEND
, false, "vl" },
3033 { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE
, false, "l" },
3034 { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE
, false, "l" },
3035 { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE
, false, "l" },
3036 { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE
, false, "l" },
3037 { "__tile_udn_send", TILEPRO_UDN_SEND
, false, "vl" },
/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
        build_pointer_type (build_qualified_type (void_type_node,
                                                  TYPE_QUAL_CONST
                                                  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'l':
      return long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}
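/* Illustrative example: the signature string "vpl" used for __insn_sw
   in the table above decodes, via char_to_type, to a builtin of type
   void (volatile void *, unsigned long).  */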
3075 /* Implement TARGET_INIT_BUILTINS. */
3077 tilepro_init_builtins (void)
3081 for (i
= 0; i
< ARRAY_SIZE (tilepro_builtins
); i
++)
3083 const struct tilepro_builtin_def
*p
= &tilepro_builtins
[i
];
3084 tree ftype
, ret_type
, arg_type_list
= void_list_node
;
3088 for (j
= strlen (p
->type
) - 1; j
> 0; j
--)
3091 tree_cons (NULL_TREE
, char_to_type (p
->type
[j
]), arg_type_list
);
3094 ret_type
= char_to_type (p
->type
[0]);
3096 ftype
= build_function_type (ret_type
, arg_type_list
);
3098 decl
= add_builtin_function (p
->name
, ftype
, p
->code
, BUILT_IN_MD
,
3102 TREE_READONLY (decl
) = 1;
3103 TREE_NOTHROW (decl
) = 1;
3105 if (tilepro_builtin_info
[p
->code
].fndecl
== NULL
)
3106 tilepro_builtin_info
[p
->code
].fndecl
= decl
;
3111 /* Implement TARGET_EXPAND_BUILTIN. */
3113 tilepro_expand_builtin (tree exp
,
3115 rtx subtarget ATTRIBUTE_UNUSED
,
3116 machine_mode mode ATTRIBUTE_UNUSED
,
3117 int ignore ATTRIBUTE_UNUSED
)
3119 #define MAX_BUILTIN_ARGS 4
3121 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
3122 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3124 call_expr_arg_iterator iter
;
3125 enum insn_code icode
;
3126 rtx op
[MAX_BUILTIN_ARGS
+ 1], pat
;
3131 if (fcode
>= TILEPRO_BUILTIN_max
)
3132 internal_error ("bad builtin fcode");
3133 icode
= tilepro_builtin_info
[fcode
].icode
;
3135 internal_error ("bad builtin icode");
3137 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
3140 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3142 const struct insn_operand_data
*insn_op
;
3144 if (arg
== error_mark_node
)
3146 if (opnum
> MAX_BUILTIN_ARGS
)
3149 insn_op
= &insn_data
[icode
].operand
[opnum
];
3151 op
[opnum
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, EXPAND_NORMAL
);
3153 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3154 op
[opnum
] = copy_to_mode_reg (insn_op
->mode
, op
[opnum
]);
3156 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3158 /* We still failed to meet the predicate even after moving
3159 into a register. Assume we needed an immediate. */
3160 error_at (EXPR_LOCATION (exp
),
3161 "operand must be an immediate of the right size");
3170 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3172 || GET_MODE (target
) != tmode
3173 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3174 target
= gen_reg_rtx (tmode
);
3178 fn
= GEN_FCN (icode
);
3182 pat
= fn (NULL_RTX
);
3188 pat
= fn (op
[0], op
[1]);
3191 pat
= fn (op
[0], op
[1], op
[2]);
3194 pat
= fn (op
[0], op
[1], op
[2], op
[3]);
3197 pat
= fn (op
[0], op
[1], op
[2], op
[3], op
[4]);
3205 /* If we are generating a prefetch, tell the scheduler not to move
3207 if (GET_CODE (pat
) == PREFETCH
)
3208 PREFETCH_SCHEDULE_BARRIER_P (pat
) = true;
/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEPRO_BUILTIN_max)
    return error_mark_node;

  return tilepro_builtin_info[code].fndecl;
}
/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!fixed_regs[regno] && !call_used_regs[regno]
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
          || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
        {
          if (regno == EH_RETURN_DATA_REGNO (i))
            return true;
        }
    }

  return false;
}
/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilepro_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}
/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
          & -STACK_BOUNDARY / BITS_PER_UNIT);
}
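/* Worked example (illustrative): with a STACK_BOUNDARY of 64 bits the
   rounding unit is 8 bytes, so a raw frame size of 20 becomes
   (20 + 8 - 1) & -8 == 24, keeping the stack pointer aligned.  */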
/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx_insn *
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
                  HOST_WIDE_INT cfa_offset)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  rtx mov = gen_movsi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (Pmode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
  rtx real = gen_rtx_SET (cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}
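/* Usage note (values illustrative): even when the store address is
   held in a round-robin scratch register, the REG_CFA_OFFSET note
   built above still describes the slot to the unwinder as an offset
   from the CFA, e.g. "CFA-8 contains r30", so the scheduling-friendly
   address temporaries do not confuse the DWARF CFI.  */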
/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx_insn *
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movsi (reg, mem));
}
3333 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3334 including sequences. */
3336 set_frame_related_p (void)
3338 rtx_insn
*seq
= get_insns ();
3349 while (insn
!= NULL_RTX
)
3351 RTX_FRAME_RELATED_P (insn
) = 1;
3352 insn
= NEXT_INSN (insn
);
3354 seq
= emit_insn (seq
);
3358 seq
= emit_insn (seq
);
3359 RTX_FRAME_RELATED_P (seq
) = 1;
3365 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3367 /* This emits code for 'sp += offset'.
3369 The ABI only allows us to modify 'sp' in a single 'addi' or
3370 'addli', so the backtracer understands it. Larger amounts cannot
3371 use those instructions, so are added by placing the offset into a
3372 large register and using 'add'.
3374 This happens after reload, so we need to expand it ourselves. */
3376 emit_sp_adjust (int offset
, int *next_scratch_regno
, bool frame_related
,
3380 rtx imm_rtx
= gen_int_si (offset
);
3383 if (satisfies_constraint_J (imm_rtx
))
3385 /* We can add this using a single addi or addli. */
3390 rtx tmp
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3391 tilepro_expand_set_const32 (tmp
, imm_rtx
);
3395 /* Actually adjust the stack pointer. */
3396 insn
= emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3398 REG_NOTES (insn
) = reg_notes
;
3400 /* Describe what just happened in a way that dwarf understands. */
3403 rtx real
= gen_rtx_SET (stack_pointer_rtx
,
3404 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3406 RTX_FRAME_RELATED_P (insn
) = 1;
3407 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, real
);
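/* Illustrative sketch of the two cases emit_sp_adjust handles above
   (register names and constants hypothetical):

       sp += 64      ->  addi  sp, sp, 64
       sp += 100000  ->  <materialize 100000 in a scratch reg, say r29>
                         add   sp, sp, r29

   Small offsets stay in the single, backtracer-friendly addi/addli
   form; larger ones go through a scratch register filled by
   tilepro_expand_set_const32.  */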
/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilepro_current_function_is_leaf (void)
{
  return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
}
/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilepro_saved_regs_size ()
                    + crtl->outgoing_args_size
                    + crtl->args.pretend_args_size);

  if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
int
tilepro_can_use_return_insn_p (void)
{
  return (reload_completed
          && cfun->static_chain_decl == 0
          && compute_total_frame_size () == 0
          && tilepro_current_function_is_leaf ()
          && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
}
3455 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'. If there
3456 is a frame pointer, it computes the value relative to
3457 that. Otherwise it uses the stack pointer. */
3459 compute_frame_addr (int offset_from_fp
, int *next_scratch_regno
)
3461 rtx base_reg_rtx
, tmp_reg_rtx
, offset_rtx
;
3462 int offset_from_base
;
3464 if (frame_pointer_needed
)
3466 base_reg_rtx
= hard_frame_pointer_rtx
;
3467 offset_from_base
= offset_from_fp
;
3471 int offset_from_sp
= compute_total_frame_size () + offset_from_fp
;
3472 base_reg_rtx
= stack_pointer_rtx
;
3473 offset_from_base
= offset_from_sp
;
3476 if (offset_from_base
== 0)
3477 return base_reg_rtx
;
3479 /* Compute the new value of the stack pointer. */
3480 tmp_reg_rtx
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3481 offset_rtx
= gen_int_si (offset_from_base
);
3483 if (!tilepro_expand_addsi (tmp_reg_rtx
, base_reg_rtx
, offset_rtx
))
3485 emit_insn (gen_rtx_SET (tmp_reg_rtx
,
3486 gen_rtx_PLUS (Pmode
, base_reg_rtx
,
/* The stack frame looks like this:

         +-------------+
         |    ...      |
         |  incoming   |
         | stack args  |
   AP -> +-------------+
         | caller's HFP|
         +-------------+
         | lr save     |
  HFP -> +-------------+
         |  var args   |
         |  reg save   | crtl->args.pretend_args_size bytes
         +-------------+
         |    ...      |
         | saved regs  | tilepro_saved_regs_size() bytes
   FP -> +-------------+
         |    ...      |
         |   vars      | get_frame_size() bytes
         +-------------+
         |    ...      |
         |  outgoing   |
         | stack args  | crtl->outgoing_args_size bytes
         +-------------+
         | HFP         | 4 bytes (only here if nonleaf / alloca)
         +-------------+
         | callee lr   | 4 bytes (only here if nonleaf / alloca)
         |   save      |
   SP -> +-------------+

   HFP == incoming SP.

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
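/* Illustrative example (all numbers made up): a nonleaf function with
   8 bytes of callee-saved registers, 20 bytes of local variables and
   8 bytes of outgoing stack arguments needs
   8 + 20 + 8 + STACK_POINTER_OFFSET bytes, which round_frame_size ()
   rounds up to the stack boundary before the prologue below allocates
   it.  */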
3532 tilepro_expand_prologue (void)
3534 #define ROUND_ROBIN_SIZE 4
3535 /* We round-robin through four scratch registers to hold temporary
3536 addresses for saving registers, to make instruction scheduling
3538 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3539 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3542 unsigned int which_scratch
;
3543 int offset
, start_offset
, regno
;
3545 /* A register that holds a copy of the incoming fp. */
3546 int fp_copy_regno
= -1;
3548 /* A register that holds a copy of the incoming sp. */
3549 int sp_copy_regno
= -1;
3551 /* Next scratch register number to hand out (postdecrementing). */
3552 int next_scratch_regno
= 29;
3554 int total_size
= compute_total_frame_size ();
3556 if (flag_stack_usage_info
)
3557 current_function_static_stack_size
= total_size
;
3559 /* Save lr first in its special location because code after this
3560 might use the link register as a scratch register. */
3561 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
) || crtl
->calls_eh_return
)
3562 FRP (frame_emit_store (TILEPRO_LINK_REGNUM
, TILEPRO_LINK_REGNUM
,
3563 stack_pointer_rtx
, stack_pointer_rtx
, 0));
3565 if (total_size
== 0)
3567 /* Load the PIC register if needed. */
3568 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3569 load_pic_register (false);
3574 cfa
= stack_pointer_rtx
;
3576 if (frame_pointer_needed
)
3578 fp_copy_regno
= next_scratch_regno
--;
3580 /* Copy the old frame pointer aside so we can save it later. */
3581 insn
= FRP (emit_move_insn (gen_rtx_REG (word_mode
, fp_copy_regno
),
3582 hard_frame_pointer_rtx
));
3583 add_reg_note (insn
, REG_CFA_REGISTER
, NULL_RTX
);
3585 /* Set up the frame pointer. */
3586 insn
= FRP (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
3587 add_reg_note (insn
, REG_CFA_DEF_CFA
, hard_frame_pointer_rtx
);
3588 cfa
= hard_frame_pointer_rtx
;
3589 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
;
3591 /* fp holds a copy of the incoming sp, in case we need to store
3593 sp_copy_regno
= HARD_FRAME_POINTER_REGNUM
;
3595 else if (!tilepro_current_function_is_leaf ())
3597 /* Copy the old stack pointer aside so we can save it later. */
3598 sp_copy_regno
= next_scratch_regno
--;
3599 emit_move_insn (gen_rtx_REG (Pmode
, sp_copy_regno
),
3603 if (tilepro_current_function_is_leaf ())
3605 /* No need to store chain pointer to caller's frame. */
3606 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3607 !frame_pointer_needed
, NULL_RTX
);
3611 /* Save the frame pointer (incoming sp value) to support
3612 backtracing. First we need to create an rtx with the store
3614 rtx chain_addr
= gen_rtx_REG (Pmode
, next_scratch_regno
--);
3615 rtx size_rtx
= gen_int_si (-(total_size
- UNITS_PER_WORD
));
3617 if (add_operand (size_rtx
, Pmode
))
3619 /* Expose more parallelism by computing this value from the
3620 original stack pointer, not the one after we have pushed
3622 rtx p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, size_rtx
);
3623 emit_insn (gen_rtx_SET (chain_addr
, p
));
3624 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3625 !frame_pointer_needed
, NULL_RTX
);
3629 /* The stack frame is large, so just store the incoming sp
3630 value at *(new_sp + UNITS_PER_WORD). */
3632 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3633 !frame_pointer_needed
, NULL_RTX
);
3634 p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3635 GEN_INT (UNITS_PER_WORD
));
3636 emit_insn (gen_rtx_SET (chain_addr
, p
));
3639 /* Save our frame pointer for backtrace chaining. */
3640 emit_insn (gen_movsi (gen_frame_mem (SImode
, chain_addr
),
3641 gen_rtx_REG (SImode
, sp_copy_regno
)));
3644 /* Compute where to start storing registers we need to save. */
3645 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3646 offset
= start_offset
;
3648 /* Store all registers that need saving. */
3650 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3651 if (need_to_save_reg (regno
))
3653 rtx r
= reg_save_addr
[which_scratch
];
3655 int cfa_offset
= frame_pointer_needed
? offset
: total_size
+ offset
;
3659 rtx p
= compute_frame_addr (offset
, &next_scratch_regno
);
3660 r
= gen_rtx_REG (word_mode
, next_scratch_regno
--);
3661 reg_save_addr
[which_scratch
] = r
;
3663 emit_insn (gen_rtx_SET (r
, p
));
3667 /* Advance to the next stack slot to store this register. */
3668 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3669 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3670 emit_insn (gen_rtx_SET (r
, p
));
3673 /* Save this register to the stack (but use the old fp value
3674 we copied aside if appropriate). */
3675 from_regno
= (fp_copy_regno
>= 0
3677 HARD_FRAME_POINTER_REGNUM
) ? fp_copy_regno
: regno
;
3678 FRP (frame_emit_store (from_regno
, regno
, r
, cfa
, cfa_offset
));
3680 offset
-= UNITS_PER_WORD
;
3681 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3684 /* If profiling, force that to happen after the frame is set up. */
3686 emit_insn (gen_blockage ());
3688 /* Load the PIC register if needed. */
3689 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3690 load_pic_register (false);
3694 /* Implement the epilogue and sibcall_epilogue patterns. SIBCALL_P is
3695 true for a sibcall_epilogue pattern, and false for an epilogue
3698 tilepro_expand_epilogue (bool sibcall_p
)
3700 /* We round-robin through four scratch registers to hold temporary
3701 addresses for saving registers, to make instruction scheduling
3703 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3704 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3706 rtx_insn
*last_insn
, *insn
;
3707 unsigned int which_scratch
;
3708 int offset
, start_offset
, regno
;
3709 rtx cfa_restores
= NULL_RTX
;
3711 /* A register that holds a copy of the incoming fp. */
3712 int fp_copy_regno
= -1;
3714 /* Next scratch register number to hand out (postdecrementing). */
3715 int next_scratch_regno
= 29;
3717 int total_size
= compute_total_frame_size ();
3719 last_insn
= get_last_insn ();
3721 /* Load lr first since we are going to need it first. */
3723 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
))
3725 insn
= frame_emit_load (TILEPRO_LINK_REGNUM
,
3726 compute_frame_addr (0, &next_scratch_regno
),
3730 if (total_size
== 0)
3734 RTX_FRAME_RELATED_P (insn
) = 1;
3735 REG_NOTES (insn
) = cfa_restores
;
3740 /* Compute where to start restoring registers. */
3741 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3742 offset
= start_offset
;
3744 if (frame_pointer_needed
)
3745 fp_copy_regno
= next_scratch_regno
--;
3747 /* Restore all callee-saved registers. */
3749 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3750 if (need_to_save_reg (regno
))
3752 rtx r
= reg_save_addr
[which_scratch
];
3755 r
= compute_frame_addr (offset
, &next_scratch_regno
);
3756 reg_save_addr
[which_scratch
] = r
;
3760 /* Advance to the next stack slot to store this
3762 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3763 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3764 emit_insn (gen_rtx_SET (r
, p
));
3767 if (fp_copy_regno
>= 0 && regno
== HARD_FRAME_POINTER_REGNUM
)
3768 frame_emit_load (fp_copy_regno
, r
, NULL
);
3770 frame_emit_load (regno
, r
, &cfa_restores
);
3772 offset
-= UNITS_PER_WORD
;
3773 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3776 if (!tilepro_current_function_is_leaf ())
3778 alloc_reg_note (REG_CFA_RESTORE
, stack_pointer_rtx
, cfa_restores
);
3780 emit_insn (gen_blockage ());
3782 if (frame_pointer_needed
)
3784 /* Restore the old stack pointer by copying from the frame
3786 insn
= emit_insn (gen_sp_restore (stack_pointer_rtx
,
3787 hard_frame_pointer_rtx
));
3788 RTX_FRAME_RELATED_P (insn
) = 1;
3789 REG_NOTES (insn
) = cfa_restores
;
3790 add_reg_note (insn
, REG_CFA_DEF_CFA
, stack_pointer_rtx
);
3794 insn
= emit_sp_adjust (total_size
, &next_scratch_regno
, true,
3798 if (crtl
->calls_eh_return
)
3799 emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3800 EH_RETURN_STACKADJ_RTX
));
3802 /* Restore the old frame pointer. */
3803 if (frame_pointer_needed
)
3805 insn
= emit_move_insn (hard_frame_pointer_rtx
,
3806 gen_rtx_REG (Pmode
, fp_copy_regno
));
3807 add_reg_note (insn
, REG_CFA_RESTORE
, hard_frame_pointer_rtx
);
3810 /* Mark the pic registers as live outside of the function. */
3813 emit_use (cfun
->machine
->text_label_rtx
);
3814 emit_use (cfun
->machine
->got_rtx
);
3820 /* Emit the actual 'return' instruction. */
3821 emit_jump_insn (gen__return ());
3825 emit_use (gen_rtx_REG (Pmode
, TILEPRO_LINK_REGNUM
));
3828 /* Mark all insns we just emitted as frame-related. */
3829 for (; last_insn
!= NULL_RTX
; last_insn
= next_insn (last_insn
))
3830 RTX_FRAME_RELATED_P (last_insn
) = 1;
3833 #undef ROUND_ROBIN_SIZE
3836 /* Implement INITIAL_ELIMINATION_OFFSET. */
3838 tilepro_initial_elimination_offset (int from
, int to
)
3840 int total_size
= compute_total_frame_size ();
3842 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3844 return (total_size
- crtl
->args
.pretend_args_size
3845 - tilepro_saved_regs_size ());
3847 else if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3849 return -(crtl
->args
.pretend_args_size
+ tilepro_saved_regs_size ());
3851 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3853 return STACK_POINTER_OFFSET
+ total_size
;
3855 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3857 return STACK_POINTER_OFFSET
;
/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
}


/* Implement EH_RETURN_HANDLER_RTX.  */
rtx
tilepro_eh_return_handler_rtx (void)
{
  /* The MEM needs to be volatile to prevent it from being
     deleted.  */
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
static bool
tilepro_frame_pointer_required (void)
{
  return crtl->calls_eh_return || cfun->calls_alloca;
}
/* Scheduling and reorg.  */

/* Return the length of INSN.  LENGTH is the initial length computed
   by attributes in the machine-description file.  This is where we
   account for bundles.  */
int
tilepro_adjust_insn_length (rtx_insn *insn, int length)
{
  machine_mode mode = GET_MODE (insn);

  /* A non-terminating instruction in a bundle has length 0.  */
  if (mode == SImode)
    return 0;

  /* By default, there is no length adjustment.  */
  return length;
}


/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
3949 /* Return the rtx for the jump target. */
3951 get_jump_target (rtx branch
)
3953 if (CALL_P (branch
))
3956 call
= PATTERN (branch
);
3958 if (GET_CODE (call
) == PARALLEL
)
3959 call
= XVECEXP (call
, 0, 0);
3961 if (GET_CODE (call
) == SET
)
3962 call
= SET_SRC (call
);
3964 if (GET_CODE (call
) == CALL
)
3965 return XEXP (XEXP (call
, 0), 0);
3970 /* Implement TARGET_SCHED_ADJUST_COST. */
3972 tilepro_sched_adjust_cost (rtx_insn
*insn
, rtx link
, rtx_insn
*dep_insn
,
3975 /* If we have a true dependence, INSN is a call, and DEP_INSN
3976 defines a register that is needed by the call (argument or stack
3977 pointer), set its latency to 0 so that it can be bundled with
3978 the call. Explicitly check for and exclude the case when
3979 DEP_INSN defines the target of the jump. */
3980 if (CALL_P (insn
) && REG_NOTE_KIND (link
) == REG_DEP_TRUE
)
3982 rtx target
= get_jump_target (insn
);
3983 if (!REG_P (target
) || !set_of (target
, dep_insn
))
3991 /* Skip over irrelevant NOTEs and such and look for the next insn we
3992 would consider bundling. */
3994 next_insn_to_bundle (rtx_insn
*r
, rtx_insn
*end
)
3996 for (; r
!= end
; r
= NEXT_INSN (r
))
3998 if (NONDEBUG_INSN_P (r
)
3999 && GET_CODE (PATTERN (r
)) != USE
4000 && GET_CODE (PATTERN (r
)) != CLOBBER
)
4008 /* Go through all insns, and use the information generated during
4009 scheduling to generate SEQUENCEs to represent bundles of
4010 instructions issued simultaneously. */
4012 tilepro_gen_bundles (void)
4015 FOR_EACH_BB_FN (bb
, cfun
)
4017 rtx_insn
*insn
, *next
;
4018 rtx_insn
*end
= NEXT_INSN (BB_END (bb
));
4020 for (insn
= next_insn_to_bundle (BB_HEAD (bb
), end
); insn
; insn
= next
)
4022 next
= next_insn_to_bundle (NEXT_INSN (insn
), end
);
4024 /* Never wrap {} around inline asm. */
4025 if (GET_CODE (PATTERN (insn
)) != ASM_INPUT
)
4027 if (next
== NULL_RTX
|| GET_MODE (next
) == TImode
4028 /* NOTE: The scheduler incorrectly believes a call
4029 insn can execute in the same cycle as the insn
4030 after the call. This is of course impossible.
4031 Really we need to fix the scheduler somehow, so
4032 the code after the call gets scheduled
4036 /* Mark current insn as the end of a bundle. */
4037 PUT_MODE (insn
, QImode
);
4041 /* Mark it as part of a bundle. */
4042 PUT_MODE (insn
, SImode
);
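/* Illustrative example (insns made up): a cycle that issues
   { add r1, r2, r3 ; lw r4, r5 ; bnz r6, .L1 } ends up with the first
   two insns in SImode (inside a bundle) and the final one in QImode
   (closing the bundle), which is how the assembly output knows where
   to place the surrounding "{ ... }".  */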
4050 /* Helper function for tilepro_fixup_pcrel_references. */
4052 replace_pc_relative_symbol_ref (rtx_insn
*insn
, rtx opnds
[4], bool first_insn_p
)
4054 rtx_insn
*new_insns
;
4062 emit_insn (gen_add_got16 (opnds
[0], tilepro_got_rtx (),
4064 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4071 emit_insn (gen_addhi_got32 (opnds
[0], tilepro_got_rtx (),
4076 emit_insn (gen_addlo_got32 (opnds
[0], opnds
[1], opnds
[2]));
4077 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4081 new_insns
= get_insns ();
4085 emit_insn_before (new_insns
, insn
);
4091 /* Returns whether INSN is a pc-relative addli insn. */
4093 match_addli_pcrel (rtx_insn
*insn
)
4095 rtx pattern
= PATTERN (insn
);
4098 if (GET_CODE (pattern
) != SET
)
4101 if (GET_CODE (SET_SRC (pattern
)) != LO_SUM
)
4104 if (GET_CODE (XEXP (SET_SRC (pattern
), 1)) != CONST
)
4107 unspec
= XEXP (XEXP (SET_SRC (pattern
), 1), 0);
4109 return (GET_CODE (unspec
) == UNSPEC
4110 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4114 /* Helper function for tilepro_fixup_pcrel_references. */
4116 replace_addli_pcrel (rtx_insn
*insn
)
4118 rtx pattern
= PATTERN (insn
);
4124 gcc_assert (GET_CODE (pattern
) == SET
);
4125 opnds
[0] = SET_DEST (pattern
);
4127 set_src
= SET_SRC (pattern
);
4128 gcc_assert (GET_CODE (set_src
) == LO_SUM
);
4129 gcc_assert (GET_CODE (XEXP (set_src
, 1)) == CONST
);
4130 opnds
[1] = XEXP (set_src
, 0);
4132 unspec
= XEXP (XEXP (set_src
, 1), 0);
4133 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4134 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4135 opnds
[2] = XVECEXP (unspec
, 0, 0);
4136 opnds
[3] = XVECEXP (unspec
, 0, 1);
4138 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4139 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4142 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4144 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4148 /* Returns whether INSN is a pc-relative auli insn. */
4150 match_auli_pcrel (rtx_insn
*insn
)
4152 rtx pattern
= PATTERN (insn
);
4156 if (GET_CODE (pattern
) != SET
)
4159 if (GET_CODE (SET_SRC (pattern
)) != PLUS
)
4162 high
= XEXP (SET_SRC (pattern
), 1);
4164 if (GET_CODE (high
) != HIGH
4165 || GET_CODE (XEXP (high
, 0)) != CONST
)
4168 unspec
= XEXP (XEXP (high
, 0), 0);
4170 return (GET_CODE (unspec
) == UNSPEC
4171 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4175 /* Helper function for tilepro_fixup_pcrel_references. */
4177 replace_auli_pcrel (rtx_insn
*insn
)
4179 rtx pattern
= PATTERN (insn
);
4186 gcc_assert (GET_CODE (pattern
) == SET
);
4187 opnds
[0] = SET_DEST (pattern
);
4189 set_src
= SET_SRC (pattern
);
4190 gcc_assert (GET_CODE (set_src
) == PLUS
);
4191 opnds
[1] = XEXP (set_src
, 0);
4193 high
= XEXP (set_src
, 1);
4194 gcc_assert (GET_CODE (high
) == HIGH
);
4195 gcc_assert (GET_CODE (XEXP (high
, 0)) == CONST
);
4197 unspec
= XEXP (XEXP (high
, 0), 0);
4198 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4199 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4200 opnds
[2] = XVECEXP (unspec
, 0, 0);
4201 opnds
[3] = XVECEXP (unspec
, 0, 1);
4203 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4204 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4207 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4209 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
   going through the GOT when the symbol is local to the compilation
   unit.  But such a symbol requires that the common text_label that
   we generate at the beginning of the function be in the same section
   as the reference to the SYMBOL_REF.  This may not be true if we
   generate hot/cold sections.  This function looks for such cases and
   replaces such references with the longer sequence going through the
   GOT.

   We expect one of the following two instruction sequences:
   addli tmp1, txt_label_reg, lo16(sym - txt_label)
   auli  tmp2, tmp1, ha16(sym - txt_label)

   or

   auli  tmp1, txt_label_reg, ha16(sym - txt_label)
   addli tmp2, tmp1, lo16(sym - txt_label)

   If we're compiling -fpic, we replace the first instruction with
   nothing, and the second instruction with:

   addli tmp2, got_rtx, got(sym)
   lw    tmp2, tmp2

   If we're compiling -fPIC, we replace the first instruction with:

   auli  tmp1, got_rtx, got_ha16(sym)

   and the second instruction with:

   addli tmp2, tmp1, got_lo16(sym)
   lw    tmp2, tmp2

   Note that we're careful to disturb the instruction sequence as
   little as possible, since it's very late in the compilation
   process.  */
static void
tilepro_fixup_pcrel_references (void)
{
4251 rtx_insn
*insn
, *next_insn
;
4252 bool same_section_as_entry
= true;
4254 for (insn
= get_insns (); insn
; insn
= next_insn
)
4256 next_insn
= NEXT_INSN (insn
);
4258 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_SWITCH_TEXT_SECTIONS
)
4260 same_section_as_entry
= !same_section_as_entry
;
4264 if (same_section_as_entry
)
4268 && GET_CODE (PATTERN (insn
)) != USE
4269 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
4272 if (match_addli_pcrel (insn
))
4273 replace_addli_pcrel (insn
);
4274 else if (match_auli_pcrel (insn
))
4275 replace_auli_pcrel (insn
);
4280 /* Ensure that no var tracking notes are emitted in the middle of a
4281 three-instruction bundle. */
4283 reorder_var_tracking_notes (void)
4286 FOR_EACH_BB_FN (bb
, cfun
)
4288 rtx_insn
*insn
, *next
;
4289 rtx_insn
*queue
= NULL
;
4290 bool in_bundle
= false;
4292 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4294 next
= NEXT_INSN (insn
);
4298 /* Emit queued up notes at the last instruction of a bundle. */
4299 if (GET_MODE (insn
) == QImode
)
4303 rtx_insn
*next_queue
= PREV_INSN (queue
);
4304 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4305 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4306 SET_NEXT_INSN (insn
) = queue
;
4307 SET_PREV_INSN (queue
) = insn
;
4312 else if (GET_MODE (insn
) == SImode
)
4315 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4319 rtx_insn
*prev
= PREV_INSN (insn
);
4320 SET_PREV_INSN (next
) = prev
;
4321 SET_NEXT_INSN (prev
) = next
;
4323 SET_PREV_INSN (insn
) = queue
;
4332 /* Perform machine dependent operations on the rtl chain INSNS. */
4334 tilepro_reorg (void)
4336 /* We are freeing block_for_insn in the toplev to keep compatibility
4337 with old MDEP_REORGS that are not CFG based. Recompute it
4339 compute_bb_for_insn ();
4341 if (flag_reorder_blocks_and_partition
)
4343 tilepro_fixup_pcrel_references ();
4346 if (flag_schedule_insns_after_reload
)
4350 timevar_push (TV_SCHED2
);
4352 timevar_pop (TV_SCHED2
);
4354 /* Examine the schedule to group into bundles. */
4355 tilepro_gen_bundles ();
4360 if (flag_var_tracking
)
4362 timevar_push (TV_VAR_TRACKING
);
4363 variable_tracking_main ();
4364 reorder_var_tracking_notes ();
4365 timevar_pop (TV_VAR_TRACKING
);
4368 df_finish_pass (false);
/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
}
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			     HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			     tree function)
{
  rtx this_rtx, funexp;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
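/* For a typical thunk (small DELTA, zero VCALL_OFFSET) the code
   generated by tilepro_asm_output_mi_thunk amounts to a single add of
   DELTA to the incoming "this" register followed by a sibcall jump to
   FUNCTION; with a nonzero VCALL_OFFSET it additionally loads the
   vtable pointer through "this", fetches the adjustment stored at
   VCALL_OFFSET in the vtable, and adds that in before the jump.  */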
/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilepro_asm_trampoline_template (FILE *file)
{
  fprintf (file, "\tlnk r10\n");
  fprintf (file, "\taddi r10, r10, 32\n");
  fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
  fprintf (file, "\tlw r10, r10\n");
  fprintf (file, "\tjr r11\n");
  fprintf (file, "\t.word 0 # <function address>\n");
  fprintf (file, "\t.word 0 # <static chain value>\n");
}
/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
			TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
			TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
					      TRAMPOLINE_SIZE));

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
		     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
		     end_addr, Pmode);
}
/* Implement TARGET_PRINT_OPERAND.  */
static void
tilepro_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("z", file);
	  break;
	case NE:
	  fputs ("nz", file);
	  break;
	default:
	  output_operand_lossage ("invalid %%c operand");
	}
      return;

    case 'C':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("nz", file);
	  break;
	case NE:
	  fputs ("z", file);
	  break;
	default:
	  output_operand_lossage ("invalid %%C operand");
	}
      return;

    case 'h':
      {
	/* Print the high 16 bits of a 32-bit constant.  */
	HOST_WIDE_INT i;
	if (CONST_INT_P (x))
	  i = INTVAL (x);
	else if (GET_CODE (x) == CONST_DOUBLE)
	  i = CONST_DOUBLE_LOW (x);
	else
	  {
	    output_operand_lossage ("invalid %%h operand");
	    return;
	  }
	i = trunc_int_for_mode (i >> 16, HImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'H':
      {
	rtx addr = NULL;
	const char *opstr = NULL;
	bool pcrel = false;
	if (GET_CODE (x) == CONST
	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
	  {
	    addr = XVECEXP (XEXP (x, 0), 0, 0);
	    switch (XINT (XEXP (x, 0), 1))
	      {
	      case UNSPEC_GOT32_SYM:
		opstr = "got_ha16";
		break;
	      case UNSPEC_PCREL_SYM:
		opstr = "ha16";
		pcrel = true;
		break;
	      case UNSPEC_TLS_GD:
		opstr = "tls_gd_ha16";
		break;
	      case UNSPEC_TLS_IE:
		opstr = "tls_ie_ha16";
		break;
	      case UNSPEC_TLS_LE:
		opstr = "tls_le_ha16";
		break;
	      default:
		output_operand_lossage ("invalid %%H operand");
	      }
	  }
	else
	  {
	    addr = x;
	    opstr = "ha16";
	  }

	fputs (opstr, file);
	fputc ('(', file);
	output_addr_const (file, addr);

	if (pcrel)
	  {
	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
	    fputs (" - " , file);
	    output_addr_const (file, addr2);
	  }

	fputc (')', file);
	return;
      }

    case 'I':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
	{
	  output_operand_lossage ("invalid %%I operand");
	  return;
	}

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = true;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'i':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
	{
	  output_operand_lossage ("invalid %%i operand");
	  return;
	}

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = false;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'j':
      {
	/* Print the low 8 bits of a constant.  */
	HOST_WIDE_INT i;
	if (CONST_INT_P (x))
	  i = INTVAL (x);
	else if (GET_CODE (x) == CONST_DOUBLE)
	  i = CONST_DOUBLE_LOW (x);
	else if (GET_CODE (x) == CONST_VECTOR
		 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
	  i = INTVAL (CONST_VECTOR_ELT (x, 0));
	else
	  {
	    output_operand_lossage ("invalid %%j operand");
	    return;
	  }
	i = trunc_int_for_mode (i, QImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'L':
      {
	rtx addr = NULL;
	const char *opstr = NULL;
	bool pcrel = false;
	if (GET_CODE (x) == CONST
	    && GET_CODE (XEXP (x, 0)) == UNSPEC)
	  {
	    addr = XVECEXP (XEXP (x, 0), 0, 0);
	    switch (XINT (XEXP (x, 0), 1))
	      {
	      case UNSPEC_GOT16_SYM:
		opstr = "got";
		break;
	      case UNSPEC_GOT32_SYM:
		opstr = "got_lo16";
		break;
	      case UNSPEC_PCREL_SYM:
		opstr = "lo16";
		pcrel = true;
		break;
	      case UNSPEC_TLS_GD:
		opstr = "tls_gd_lo16";
		break;
	      case UNSPEC_TLS_IE:
		opstr = "tls_ie_lo16";
		break;
	      case UNSPEC_TLS_LE:
		opstr = "tls_le_lo16";
		break;
	      default:
		output_operand_lossage ("invalid %%L operand");
	      }
	  }
	else
	  {
	    addr = x;
	    opstr = "lo16";
	  }

	fputs (opstr, file);
	fputc ('(', file);
	output_addr_const (file, addr);

	if (pcrel)
	  {
	    rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
	    fputs (" - " , file);
	    output_addr_const (file, addr2);
	  }

	fputc (')', file);
	return;
      }

    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
	    fprintf (file, "plt(");
	  output_addr_const (file, x);
	  if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
	    fprintf (file, ")");
	}
      else
	output_addr_const (file, x);
      return;

    case 'P':
      {
	/* Print a 32-bit constant plus one.  */
	HOST_WIDE_INT i;
	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%P operand");
	    return;
	  }
	i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'M':
      {
	/* Print an mm-style bit range.  */
	int first_bit, last_bit;

	if (!CONST_INT_P (x)
	    || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
					    &last_bit))
	  {
	    output_operand_lossage ("invalid %%M operand");
	    return;
	  }

	fprintf (file, "%d, %d", first_bit, last_bit);
	return;
      }

    case 'N':
      {
	const char *reg = NULL;

	/* Print a network register.  */
	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%N operand");
	    return;
	  }

	switch (INTVAL (x))
	  {
	  case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
	  case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
	  case TILEPRO_NETREG_SN:   reg = "sn";   break;
	  case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
	  case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
	  case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
	  case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
	  default: gcc_unreachable ();
	  }

	fprintf (file, reg);
	return;
      }

    case 't':
      {
	/* Log base 2 of a power of two.  */
	HOST_WIDE_INT i;
	HOST_WIDE_INT n;

	if (!CONST_INT_P (x))
	  {
	    output_operand_lossage ("invalid %%t operand");
	    return;
	  }
	n = trunc_int_for_mode (INTVAL (x), SImode);
	i = exact_log2 (n);
	if (i < 0)
	  {
	    output_operand_lossage ("invalid %%t operand '"
				    HOST_WIDE_INT_PRINT_DEC "'", n);
	    return;
	  }

	fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
	return;
      }

    case 'r':
      /* In this case we need a register.  Use 'zero' if the
	 operand is const0_rtx.  */
      if (x == const0_rtx
	  || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
	{
	  fputs ("zero", file);
	  return;
	}
      else if (!REG_P (x))
	{
	  output_operand_lossage ("invalid %%r operand");
	  return;
	}
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
	{
	  fprintf (file, "%s", reg_names[REGNO (x)]);
	  return;
	}
      else if (MEM_P (x))
	{
	  output_memory_reference_mode = VOIDmode;
	  output_address (XEXP (x, 0));
	  return;
	}
      else
	{
	  output_addr_const (file, x);
	  return;
	}
    }

  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
			  code, code);
}
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilepro_print_operand_address (FILE *file, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (output_memory_reference_mode);

      gcc_assert (output_memory_reference_mode != VOIDmode);

      if (output_memory_autoinc_first)
	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
	fprintf (file, "%d",
		 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (output_memory_reference_mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
	fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilepro_print_operand (file, addr, 'r');
}
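/* For an auto-increment address the two fragments above are emitted in
   order by the insn template: the %I operand prints the base register
   name and the %i operand prints the byte adjustment, e.g. 4 for an
   SImode POST_INC and -4 for an SImode POST_DEC, or the constant term
   of a POST_MODIFY.  */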
/* Machine mode of current insn, for determining curly brace
   encapsulation.  */
static machine_mode insn_mode;

/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx_insn *insn)
{
  /* Record this for tilepro_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}
/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilepro_in_bundle = false;

/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilepro_gen_bundles.  */
const char *
tilepro_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilepro_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilepro_in_bundle = true;
    }

  if (tilepro_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilepro_in_bundle = false;

      return buf;
    }
  else
    return pseudo ? "" : code;
}
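/* The net effect on the assembly output is that the instructions of a
   bundle are wrapped in curly braces, along the lines of (mnemonics
   illustrative only):

	{
	addi r1, r2, 8
	lw r3, r4
	}

   while instructions outside any bundle are emitted unchanged.  */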
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilepro_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
	       "\t{\n"
	       "\tmove\tr10, lr\n"
	       "\tjal\tplt(%s)\n"
	       "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
	       "\t{\n"
	       "\tmove\tr10, lr\n"
	       "\tjal\t%s\n"
	       "\t}\n", MCOUNT_NAME);
    }

  tilepro_in_bundle = false;
}
/* Implement TARGET_ASM_FILE_END.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}
#undef  TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilepro_option_override

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p

#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p

#undef  TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory

#undef  TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilepro_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilepro_function_value

#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilepro_libcall_value

#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p

#undef  TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list

#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs

#undef  TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilepro_rtx_costs

/* Limit to what we can reach in one addli.  */
#undef  TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef  TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767
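/* addli takes a signed 16-bit immediate, so section anchors must stay
   within [-32768, 32767] bytes of the referenced object.  */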
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p

#undef  TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p

#undef  TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address

#undef  TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilepro_init_builtins

#undef  TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilepro_builtin_decl

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required

#undef  TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef  TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg

#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template

#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilepro_print_operand

#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address

#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilepro_file_end

#undef  TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilepro.h"