1 /* Subroutines used for code generation on the Tilera TILEPro.
2 Copyright (C) 2011-2014 Free Software Foundation, Inc.
3 Contributed by Walter Lee (walt@tilera.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "insn-config.h"
29 #include "insn-attr.h"
32 #include "langhooks.h"
34 #include "sched-int.h"
35 #include "sel-sched.h"
37 #include "tm-constrs.h"
39 #include "target-def.h"
44 #include "hash-table.h"
47 #include "basic-block.h"
48 #include "tree-ssa-alias.h"
49 #include "internal-fn.h"
50 #include "gimple-fold.h"
52 #include "gimple-expr.h"
55 #include "stringpool.h"
56 #include "stor-layout.h"
61 #include "tilepro-builtins.h"
62 #include "tilepro-multiply.h"
63 #include "diagnostic.h"
66 /* SYMBOL_REF for GOT */
67 static GTY(()) rtx g_got_symbol
= NULL
;
69 /* In case of a POST_INC or POST_DEC memory reference, we must report
70 the mode of the memory reference from TARGET_PRINT_OPERAND to
71 TARGET_PRINT_OPERAND_ADDRESS. */
72 static enum machine_mode output_memory_reference_mode
;
74 /* Report whether we're printing out the first address fragment of a
75 POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
76 TARGET_PRINT_OPERAND_ADDRESS. */
77 static bool output_memory_autoinc_first
;
83 /* Implement TARGET_OPTION_OVERRIDE. */
85 tilepro_option_override (void)
87 /* When modulo scheduling is enabled, we still rely on regular
88 scheduler for bundling. */
89 if (flag_modulo_sched
)
90 flag_resched_modulo_sched
= 1;
95 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
97 tilepro_scalar_mode_supported_p (enum machine_mode mode
)
117 /* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
119 tile_vector_mode_supported_p (enum machine_mode mode
)
121 return mode
== V4QImode
|| mode
== V2HImode
;
125 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
127 tilepro_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED
,
128 rtx x ATTRIBUTE_UNUSED
)
134 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
136 tilepro_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
142 /* Implement TARGET_PASS_BY_REFERENCE. Variable sized types are
143 passed by reference. */
145 tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED
,
146 enum machine_mode mode ATTRIBUTE_UNUSED
,
147 const_tree type
, bool named ATTRIBUTE_UNUSED
)
149 return (type
&& TYPE_SIZE (type
)
150 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
);
154 /* Implement TARGET_RETURN_IN_MEMORY. */
156 tilepro_return_in_memory (const_tree type
, const_tree fndecl ATTRIBUTE_UNUSED
)
158 return !IN_RANGE (int_size_in_bytes (type
),
159 0, TILEPRO_NUM_RETURN_REGS
* UNITS_PER_WORD
);
163 /* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
165 tilepro_function_arg_boundary (enum machine_mode mode
, const_tree type
)
167 unsigned int alignment
;
169 alignment
= type
? TYPE_ALIGN (type
) : GET_MODE_ALIGNMENT (mode
);
170 if (alignment
< PARM_BOUNDARY
)
171 alignment
= PARM_BOUNDARY
;
172 if (alignment
> STACK_BOUNDARY
)
173 alignment
= STACK_BOUNDARY
;
178 /* Implement TARGET_FUNCTION_ARG. */
180 tilepro_function_arg (cumulative_args_t cum_v
,
181 enum machine_mode mode
,
182 const_tree type
, bool named ATTRIBUTE_UNUSED
)
184 CUMULATIVE_ARGS cum
= *get_cumulative_args (cum_v
);
185 int byte_size
= ((mode
== BLKmode
)
186 ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
));
187 bool doubleword_aligned_p
;
189 if (cum
>= TILEPRO_NUM_ARG_REGS
)
192 /* See whether the argument has doubleword alignment. */
193 doubleword_aligned_p
=
194 tilepro_function_arg_boundary (mode
, type
) > BITS_PER_WORD
;
196 if (doubleword_aligned_p
)
199 /* The ABI does not allow parameters to be passed partially in reg
200 and partially in stack. */
201 if ((cum
+ (byte_size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
202 > TILEPRO_NUM_ARG_REGS
)
205 return gen_rtx_REG (mode
, cum
);
209 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
211 tilepro_function_arg_advance (cumulative_args_t cum_v
,
212 enum machine_mode mode
,
213 const_tree type
, bool named ATTRIBUTE_UNUSED
)
215 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
217 int byte_size
= ((mode
== BLKmode
)
218 ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
));
219 int word_size
= (byte_size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
220 bool doubleword_aligned_p
;
222 /* See whether the argument has doubleword alignment. */
223 doubleword_aligned_p
=
224 tilepro_function_arg_boundary (mode
, type
) > BITS_PER_WORD
;
226 if (doubleword_aligned_p
)
229 /* If the current argument does not fit in the pretend_args space,
231 if (*cum
< TILEPRO_NUM_ARG_REGS
232 && *cum
+ word_size
> TILEPRO_NUM_ARG_REGS
)
233 *cum
= TILEPRO_NUM_ARG_REGS
;
239 /* Implement TARGET_FUNCTION_VALUE. */
241 tilepro_function_value (const_tree valtype
, const_tree fn_decl_or_type
,
242 bool outgoing ATTRIBUTE_UNUSED
)
244 enum machine_mode mode
;
247 mode
= TYPE_MODE (valtype
);
248 unsigned_p
= TYPE_UNSIGNED (valtype
);
250 mode
= promote_function_mode (valtype
, mode
, &unsigned_p
,
253 return gen_rtx_REG (mode
, 0);
257 /* Implement TARGET_LIBCALL_VALUE. */
259 tilepro_libcall_value (enum machine_mode mode
,
260 const_rtx fun ATTRIBUTE_UNUSED
)
262 return gen_rtx_REG (mode
, 0);
266 /* Implement FUNCTION_VALUE_REGNO_P. */
268 tilepro_function_value_regno_p (const unsigned int regno
)
270 return regno
< TILEPRO_NUM_RETURN_REGS
;
274 /* Implement TARGET_BUILD_BUILTIN_VA_LIST. */
276 tilepro_build_builtin_va_list (void)
278 tree f_args
, f_skip
, record
, type_decl
;
281 record
= lang_hooks
.types
.make_type (RECORD_TYPE
);
283 type_decl
= build_decl (BUILTINS_LOCATION
, TYPE_DECL
,
284 get_identifier ("__va_list_tag"), record
);
286 f_args
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
287 get_identifier ("__args"), ptr_type_node
);
288 f_skip
= build_decl (BUILTINS_LOCATION
, FIELD_DECL
,
289 get_identifier ("__skip"), ptr_type_node
);
291 DECL_FIELD_CONTEXT (f_args
) = record
;
293 DECL_FIELD_CONTEXT (f_skip
) = record
;
295 TREE_CHAIN (record
) = type_decl
;
296 TYPE_NAME (record
) = type_decl
;
297 TYPE_FIELDS (record
) = f_args
;
298 TREE_CHAIN (f_args
) = f_skip
;
300 /* We know this is being padded and we want it too. It is an
301 internal type so hide the warnings from the user. */
305 layout_type (record
);
309 /* The correct type is an array type of one element. */
314 /* Implement TARGET_EXPAND_BUILTIN_VA_START. */
316 tilepro_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
321 f_args
= TYPE_FIELDS (TREE_TYPE (valist
));
322 f_skip
= TREE_CHAIN (f_args
);
325 build3 (COMPONENT_REF
, TREE_TYPE (f_args
), valist
, f_args
, NULL_TREE
);
327 build3 (COMPONENT_REF
, TREE_TYPE (f_skip
), valist
, f_skip
, NULL_TREE
);
329 /* Find the __args area. */
330 t
= make_tree (TREE_TYPE (args
), virtual_incoming_args_rtx
);
331 t
= fold_build_pointer_plus_hwi (t
,
333 (crtl
->args
.info
- TILEPRO_NUM_ARG_REGS
));
335 if (crtl
->args
.pretend_args_size
> 0)
336 t
= fold_build_pointer_plus_hwi (t
, -STACK_POINTER_OFFSET
);
338 t
= build2 (MODIFY_EXPR
, TREE_TYPE (args
), args
, t
);
339 TREE_SIDE_EFFECTS (t
) = 1;
340 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
342 /* Find the __skip area. */
343 t
= make_tree (TREE_TYPE (skip
), virtual_incoming_args_rtx
);
344 t
= fold_build_pointer_plus_hwi (t
, -STACK_POINTER_OFFSET
);
345 t
= build2 (MODIFY_EXPR
, TREE_TYPE (skip
), skip
, t
);
346 TREE_SIDE_EFFECTS (t
) = 1;
347 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
351 /* Implement TARGET_SETUP_INCOMING_VARARGS. */
353 tilepro_setup_incoming_varargs (cumulative_args_t cum
,
354 enum machine_mode mode
,
355 tree type
, int *pretend_args
, int no_rtl
)
357 CUMULATIVE_ARGS local_cum
= *get_cumulative_args (cum
);
360 /* The caller has advanced CUM up to, but not beyond, the last named
361 argument. Advance a local copy of CUM past the last "real" named
362 argument, to find out how many registers are left over. */
363 targetm
.calls
.function_arg_advance (pack_cumulative_args (&local_cum
),
365 first_reg
= local_cum
;
367 if (local_cum
< TILEPRO_NUM_ARG_REGS
)
369 *pretend_args
= UNITS_PER_WORD
* (TILEPRO_NUM_ARG_REGS
- first_reg
);
373 alias_set_type set
= get_varargs_alias_set ();
375 gen_rtx_MEM (BLKmode
, plus_constant (Pmode
, \
376 virtual_incoming_args_rtx
,
377 -STACK_POINTER_OFFSET
-
379 (TILEPRO_NUM_ARG_REGS
-
381 MEM_NOTRAP_P (tmp
) = 1;
382 set_mem_alias_set (tmp
, set
);
383 move_block_from_reg (first_reg
, tmp
,
384 TILEPRO_NUM_ARG_REGS
- first_reg
);
392 /* Implement TARGET_GIMPLIFY_VA_ARG_EXPR. Gimplify va_arg by updating
393 the va_list structure VALIST as required to retrieve an argument of
394 type TYPE, and returning that argument.
396 ret = va_arg(VALIST, TYPE);
398 generates code equivalent to:
400 paddedsize = (sizeof(TYPE) + 3) & -4;
401 if ((VALIST.__args + paddedsize > VALIST.__skip)
402 & (VALIST.__args <= VALIST.__skip))
403 addr = VALIST.__skip + STACK_POINTER_OFFSET;
405 addr = VALIST.__args;
406 VALIST.__args = addr + paddedsize;
407 ret = *(TYPE *)addr; */
409 tilepro_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
* pre_p
,
410 gimple_seq
* post_p ATTRIBUTE_UNUSED
)
414 HOST_WIDE_INT size
, rsize
;
416 bool pass_by_reference_p
;
418 f_args
= TYPE_FIELDS (va_list_type_node
);
419 f_skip
= TREE_CHAIN (f_args
);
422 build3 (COMPONENT_REF
, TREE_TYPE (f_args
), valist
, f_args
, NULL_TREE
);
424 build3 (COMPONENT_REF
, TREE_TYPE (f_skip
), valist
, f_skip
, NULL_TREE
);
426 addr
= create_tmp_var (ptr_type_node
, "va_arg");
428 /* if an object is dynamically sized, a pointer to it is passed
429 instead of the object itself. */
430 pass_by_reference_p
= pass_by_reference (NULL
, TYPE_MODE (type
), type
,
433 if (pass_by_reference_p
)
434 type
= build_pointer_type (type
);
436 size
= int_size_in_bytes (type
);
437 rsize
= ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
) * UNITS_PER_WORD
;
439 /* If the alignment of the type is greater than the default for a
440 parameter, align to STACK_BOUNDARY. */
441 if (TYPE_ALIGN (type
) > PARM_BOUNDARY
)
443 /* Assert the only case we generate code for: when
444 stack boundary = 2 * parm boundary. */
445 gcc_assert (STACK_BOUNDARY
== PARM_BOUNDARY
* 2);
447 tmp
= build2 (BIT_AND_EXPR
, sizetype
,
448 fold_convert (sizetype
, unshare_expr (args
)),
449 size_int (PARM_BOUNDARY
/ 8));
450 tmp
= build2 (POINTER_PLUS_EXPR
, ptr_type_node
,
451 unshare_expr (args
), tmp
);
453 gimplify_assign (unshare_expr (args
), tmp
, pre_p
);
456 /* Build conditional expression to calculate addr. The expression
457 will be gimplified later. */
458 tmp
= fold_build_pointer_plus_hwi (unshare_expr (args
), rsize
);
459 tmp
= build2 (TRUTH_AND_EXPR
, boolean_type_node
,
460 build2 (GT_EXPR
, boolean_type_node
, tmp
, unshare_expr (skip
)),
461 build2 (LE_EXPR
, boolean_type_node
, unshare_expr (args
),
462 unshare_expr (skip
)));
464 tmp
= build3 (COND_EXPR
, ptr_type_node
, tmp
,
465 build2 (POINTER_PLUS_EXPR
, ptr_type_node
, unshare_expr (skip
),
466 size_int (STACK_POINTER_OFFSET
)),
467 unshare_expr (args
));
469 gimplify_assign (addr
, tmp
, pre_p
);
471 /* Update VALIST.__args. */
472 tmp
= fold_build_pointer_plus_hwi (addr
, rsize
);
473 gimplify_assign (unshare_expr (args
), tmp
, pre_p
);
475 addr
= fold_convert (build_pointer_type (type
), addr
);
477 if (pass_by_reference_p
)
478 addr
= build_va_arg_indirect_ref (addr
);
480 return build_va_arg_indirect_ref (addr
);
485 /* Implement TARGET_RTX_COSTS. */
487 tilepro_rtx_costs (rtx x
, int code
, int outer_code
, int opno
, int *total
,
493 /* If this is an 8-bit constant, return zero since it can be
494 used nearly anywhere with no cost. If it is a valid operand
495 for an ADD or AND, likewise return 0 if we know it will be
496 used in that context. Otherwise, return 2 since it might be
497 used there later. All other constants take at least two
499 if (satisfies_constraint_I (x
))
504 else if (outer_code
== PLUS
&& add_operand (x
, VOIDmode
))
506 /* Slightly penalize large constants even though we can add
507 them in one instruction, because it forces the use of
508 2-wide bundling mode. */
512 else if (move_operand (x
, SImode
))
514 /* We can materialize in one move. */
515 *total
= COSTS_N_INSNS (1);
520 /* We can materialize in two moves. */
521 *total
= COSTS_N_INSNS (2);
530 *total
= COSTS_N_INSNS (2);
534 *total
= COSTS_N_INSNS (4);
542 /* If outer-code was a sign or zero extension, a cost of
543 COSTS_N_INSNS (1) was already added in, so account for
545 if (outer_code
== ZERO_EXTEND
|| outer_code
== SIGN_EXTEND
)
546 *total
= COSTS_N_INSNS (1);
548 *total
= COSTS_N_INSNS (2);
552 /* Convey that s[123]a are efficient. */
553 if (GET_CODE (XEXP (x
, 0)) == MULT
554 && cint_248_operand (XEXP (XEXP (x
, 0), 1), VOIDmode
))
556 *total
= (rtx_cost (XEXP (XEXP (x
, 0), 0),
557 (enum rtx_code
) outer_code
, opno
, speed
)
558 + rtx_cost (XEXP (x
, 1),
559 (enum rtx_code
) outer_code
, opno
, speed
)
560 + COSTS_N_INSNS (1));
566 *total
= COSTS_N_INSNS (2);
571 if (outer_code
== MULT
)
574 *total
= COSTS_N_INSNS (1);
581 /* These are handled by software and are very expensive. */
582 *total
= COSTS_N_INSNS (100);
586 case UNSPEC_VOLATILE
:
588 int num
= XINT (x
, 1);
590 if (num
<= TILEPRO_LAST_LATENCY_1_INSN
)
591 *total
= COSTS_N_INSNS (1);
592 else if (num
<= TILEPRO_LAST_LATENCY_2_INSN
)
593 *total
= COSTS_N_INSNS (2);
594 else if (num
> TILEPRO_LAST_LATENCY_INSN
)
596 if (outer_code
== PLUS
)
599 *total
= COSTS_N_INSNS (1);
605 case UNSPEC_BLOCKAGE
:
606 case UNSPEC_NETWORK_BARRIER
:
610 case UNSPEC_LNK_AND_LABEL
:
612 case UNSPEC_NETWORK_RECEIVE
:
613 case UNSPEC_NETWORK_SEND
:
614 case UNSPEC_TLS_GD_ADD
:
615 *total
= COSTS_N_INSNS (1);
618 case UNSPEC_TLS_IE_LOAD
:
619 *total
= COSTS_N_INSNS (2);
623 *total
= COSTS_N_INSNS (3);
627 *total
= COSTS_N_INSNS (4);
630 case UNSPEC_LATENCY_L2
:
631 *total
= COSTS_N_INSNS (8);
634 case UNSPEC_TLS_GD_CALL
:
635 *total
= COSTS_N_INSNS (30);
638 case UNSPEC_LATENCY_MISS
:
639 *total
= COSTS_N_INSNS (80);
643 *total
= COSTS_N_INSNS (1);
656 /* Returns an SImode integer rtx with value VAL. */
658 gen_int_si (HOST_WIDE_INT val
)
660 return gen_int_mode (val
, SImode
);
664 /* Create a temporary variable to hold a partial result, to enable
667 create_temp_reg_if_possible (enum machine_mode mode
, rtx default_reg
)
669 return can_create_pseudo_p ()? gen_reg_rtx (mode
) : default_reg
;
673 /* Functions to save and restore machine-specific function data. */
674 static struct machine_function
*
675 tilepro_init_machine_status (void)
677 return ggc_cleared_alloc
<machine_function
> ();
681 /* Do anything needed before RTL is emitted for each function. */
683 tilepro_init_expanders (void)
685 /* Arrange to initialize and mark the machine per-function
687 init_machine_status
= tilepro_init_machine_status
;
689 if (cfun
&& cfun
->machine
&& flag_pic
)
691 static int label_num
= 0;
693 char text_label_name
[32];
695 struct machine_function
*machine
= cfun
->machine
;
697 ASM_GENERATE_INTERNAL_LABEL (text_label_name
, "L_PICLNK", label_num
++);
699 machine
->text_label_symbol
=
700 gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (text_label_name
));
702 machine
->text_label_rtx
=
703 gen_rtx_REG (Pmode
, TILEPRO_PIC_TEXT_LABEL_REGNUM
);
705 machine
->got_rtx
= gen_rtx_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
);
707 machine
->calls_tls_get_addr
= false;
712 /* Return true if X contains a thread-local symbol. */
714 tilepro_tls_referenced_p (rtx x
)
716 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == PLUS
)
717 x
= XEXP (XEXP (x
, 0), 0);
719 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_TLS_MODEL (x
))
722 /* That's all we handle in tilepro_legitimize_tls_address for
728 /* Return true if X requires a scratch register. It is given that
729 flag_pic is on and that X satisfies CONSTANT_P. */
731 tilepro_pic_address_needs_scratch (rtx x
)
733 if (GET_CODE (x
) == CONST
734 && GET_CODE (XEXP (x
, 0)) == PLUS
735 && (GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
736 || GET_CODE (XEXP (XEXP (x
, 0), 0)) == LABEL_REF
)
737 && CONST_INT_P (XEXP (XEXP (x
, 0), 1)))
744 /* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for
745 which we are willing to load the value into a register via a move
746 pattern. TLS cannot be treated as a constant because it can
747 include a function call. */
749 tilepro_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
751 switch (GET_CODE (x
))
755 return !tilepro_tls_referenced_p (x
);
763 /* Return true if the constant value X is a legitimate general operand
764 when generating PIC code. It is given that flag_pic is on and that
765 X satisfies CONSTANT_P. */
767 tilepro_legitimate_pic_operand_p (rtx x
)
769 if (tilepro_pic_address_needs_scratch (x
))
772 if (tilepro_tls_referenced_p (x
))
779 /* Return true if the rtx X can be used as an address operand. */
781 tilepro_legitimate_address_p (enum machine_mode
ARG_UNUSED (mode
), rtx x
,
784 if (GET_CODE (x
) == SUBREG
)
787 switch (GET_CODE (x
))
791 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
798 if (GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
801 if (GET_CODE (XEXP (x
, 1)) != PLUS
)
804 if (!rtx_equal_p (XEXP (x
, 0), XEXP (XEXP (x
, 1), 0)))
807 if (!satisfies_constraint_I (XEXP (XEXP (x
, 1), 1)))
820 /* Check if x is a valid reg. */
825 return REGNO_OK_FOR_BASE_P (REGNO (x
));
831 /* Return the rtx containing SYMBOL_REF to the text label. */
833 tilepro_text_label_symbol (void)
835 return cfun
->machine
->text_label_symbol
;
839 /* Return the register storing the value of the text label. */
841 tilepro_text_label_rtx (void)
843 return cfun
->machine
->text_label_rtx
;
847 /* Return the register storing the value of the global offset
850 tilepro_got_rtx (void)
852 return cfun
->machine
->got_rtx
;
856 /* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_. */
858 tilepro_got_symbol (void)
860 if (g_got_symbol
== NULL
)
861 g_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
867 /* Return a reference to the got to be used by tls references. */
869 tilepro_tls_got (void)
874 crtl
->uses_pic_offset_table
= 1;
875 return tilepro_got_rtx ();
878 temp
= gen_reg_rtx (Pmode
);
879 emit_move_insn (temp
, tilepro_got_symbol ());
885 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
886 this (thread-local) address. */
888 tilepro_legitimize_tls_address (rtx addr
)
892 gcc_assert (can_create_pseudo_p ());
894 if (GET_CODE (addr
) == SYMBOL_REF
)
895 switch (SYMBOL_REF_TLS_MODEL (addr
))
897 case TLS_MODEL_GLOBAL_DYNAMIC
:
898 case TLS_MODEL_LOCAL_DYNAMIC
:
900 rtx r0
, temp1
, temp2
, temp3
, got
;
903 ret
= gen_reg_rtx (Pmode
);
904 r0
= gen_rtx_REG (Pmode
, 0);
905 temp1
= gen_reg_rtx (Pmode
);
906 temp2
= gen_reg_rtx (Pmode
);
907 temp3
= gen_reg_rtx (Pmode
);
909 got
= tilepro_tls_got ();
910 emit_insn (gen_tls_gd_addhi (temp1
, got
, addr
));
911 emit_insn (gen_tls_gd_addlo (temp2
, temp1
, addr
));
912 emit_move_insn (r0
, temp2
);
913 emit_insn (gen_tls_gd_call (addr
));
914 emit_move_insn (temp3
, r0
);
915 last
= emit_insn (gen_tls_gd_add (ret
, temp3
, addr
));
916 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
919 case TLS_MODEL_INITIAL_EXEC
:
921 rtx temp1
, temp2
, temp3
, got
;
924 ret
= gen_reg_rtx (Pmode
);
925 temp1
= gen_reg_rtx (Pmode
);
926 temp2
= gen_reg_rtx (Pmode
);
927 temp3
= gen_reg_rtx (Pmode
);
929 got
= tilepro_tls_got ();
930 emit_insn (gen_tls_ie_addhi (temp1
, got
, addr
));
931 emit_insn (gen_tls_ie_addlo (temp2
, temp1
, addr
));
932 emit_insn (gen_tls_ie_load (temp3
, temp2
, addr
));
937 THREAD_POINTER_REGNUM
),
939 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
942 case TLS_MODEL_LOCAL_EXEC
:
947 ret
= gen_reg_rtx (Pmode
);
948 temp1
= gen_reg_rtx (Pmode
);
950 emit_insn (gen_tls_le_addhi (temp1
,
952 THREAD_POINTER_REGNUM
),
954 last
= emit_insn (gen_tls_le_addlo (ret
, temp1
, addr
));
955 set_unique_reg_note (last
, REG_EQUAL
, copy_rtx (addr
));
961 else if (GET_CODE (addr
) == CONST
)
965 gcc_assert (GET_CODE (XEXP (addr
, 0)) == PLUS
);
967 base
= tilepro_legitimize_tls_address (XEXP (XEXP (addr
, 0), 0));
968 offset
= XEXP (XEXP (addr
, 0), 1);
970 base
= force_operand (base
, NULL_RTX
);
971 ret
= force_reg (Pmode
, gen_rtx_PLUS (Pmode
, base
, offset
));
980 /* Legitimize PIC addresses. If the address is already
981 position-independent, we return ORIG. Newly generated
982 position-independent addresses go into a reg. This is REG if
983 nonzero, otherwise we allocate register(s) as necessary. */
985 tilepro_legitimize_pic_address (rtx orig
,
986 enum machine_mode mode ATTRIBUTE_UNUSED
,
989 if (GET_CODE (orig
) == SYMBOL_REF
)
991 rtx address
, pic_ref
;
995 gcc_assert (can_create_pseudo_p ());
996 reg
= gen_reg_rtx (Pmode
);
999 if (SYMBOL_REF_LOCAL_P (orig
))
1001 /* If not during reload, allocate another temp reg here for
1002 loading in the address, so that these instructions can be
1003 optimized properly. */
1004 rtx temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1005 rtx text_label_symbol
= tilepro_text_label_symbol ();
1006 rtx text_label_rtx
= tilepro_text_label_rtx ();
1008 emit_insn (gen_addli_pcrel (temp_reg
, text_label_rtx
, orig
,
1009 text_label_symbol
));
1010 emit_insn (gen_auli_pcrel (temp_reg
, temp_reg
, orig
,
1011 text_label_symbol
));
1013 /* Note: this is conservative. We use the text_label but we
1014 don't use the pic_offset_table. However, in some cases
1015 we may need the pic_offset_table (see
1016 tilepro_fixup_pcrel_references). */
1017 crtl
->uses_pic_offset_table
= 1;
1021 emit_move_insn (reg
, address
);
1026 /* If not during reload, allocate another temp reg here for
1027 loading in the address, so that these instructions can be
1028 optimized properly. */
1029 rtx temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1031 gcc_assert (flag_pic
);
1034 emit_insn (gen_add_got16 (temp_reg
,
1035 tilepro_got_rtx (), orig
));
1039 rtx temp_reg2
= create_temp_reg_if_possible (Pmode
, reg
);
1040 emit_insn (gen_addhi_got32 (temp_reg2
,
1041 tilepro_got_rtx (), orig
));
1042 emit_insn (gen_addlo_got32 (temp_reg
, temp_reg2
, orig
));
1047 pic_ref
= gen_const_mem (Pmode
, address
);
1048 crtl
->uses_pic_offset_table
= 1;
1049 emit_move_insn (reg
, pic_ref
);
1050 /* The following put a REG_EQUAL note on this insn, so that
1051 it can be optimized by loop. But it causes the label to
1052 be optimized away. */
1053 /* set_unique_reg_note (insn, REG_EQUAL, orig); */
1057 else if (GET_CODE (orig
) == CONST
)
1061 if (GET_CODE (XEXP (orig
, 0)) == PLUS
1062 && XEXP (XEXP (orig
, 0), 0) == tilepro_got_rtx ())
1067 gcc_assert (can_create_pseudo_p ());
1068 reg
= gen_reg_rtx (Pmode
);
1071 gcc_assert (GET_CODE (XEXP (orig
, 0)) == PLUS
);
1072 base
= tilepro_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0), Pmode
,
1075 tilepro_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1), Pmode
,
1076 base
== reg
? 0 : reg
);
1078 if (CONST_INT_P (offset
))
1080 if (can_create_pseudo_p ())
1081 offset
= force_reg (Pmode
, offset
);
1083 /* If we reach here, then something is seriously
1088 if (can_create_pseudo_p ())
1089 return force_reg (Pmode
, gen_rtx_PLUS (Pmode
, base
, offset
));
1093 else if (GET_CODE (orig
) == LABEL_REF
)
1095 rtx address
, temp_reg
;
1096 rtx text_label_symbol
;
1101 gcc_assert (can_create_pseudo_p ());
1102 reg
= gen_reg_rtx (Pmode
);
1105 /* If not during reload, allocate another temp reg here for
1106 loading in the address, so that these instructions can be
1107 optimized properly. */
1108 temp_reg
= create_temp_reg_if_possible (Pmode
, reg
);
1109 text_label_symbol
= tilepro_text_label_symbol ();
1110 text_label_rtx
= tilepro_text_label_rtx ();
1112 emit_insn (gen_addli_pcrel (temp_reg
, text_label_rtx
, orig
,
1113 text_label_symbol
));
1114 emit_insn (gen_auli_pcrel (temp_reg
, temp_reg
, orig
,
1115 text_label_symbol
));
1117 /* Note: this is conservative. We use the text_label but we
1118 don't use the pic_offset_table. */
1119 crtl
->uses_pic_offset_table
= 1;
1123 emit_move_insn (reg
, address
);
1132 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1134 tilepro_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
1135 enum machine_mode mode
)
1137 if (GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
1138 && symbolic_operand (x
, Pmode
) && tilepro_tls_referenced_p (x
))
1140 return tilepro_legitimize_tls_address (x
);
1144 return tilepro_legitimize_pic_address (x
, mode
, 0);
1151 /* Implement TARGET_DELEGITIMIZE_ADDRESS. */
1153 tilepro_delegitimize_address (rtx x
)
1155 x
= delegitimize_mem_from_attrs (x
);
1157 if (GET_CODE (x
) == CONST
&& GET_CODE (XEXP (x
, 0)) == UNSPEC
)
1159 switch (XINT (XEXP (x
, 0), 1))
1161 case UNSPEC_PCREL_SYM
:
1162 case UNSPEC_GOT16_SYM
:
1163 case UNSPEC_GOT32_SYM
:
1166 x
= XVECEXP (XEXP (x
, 0), 0, 0);
1175 /* Emit code to load the PIC register. */
1177 load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED
)
1179 int orig_flag_pic
= flag_pic
;
1181 rtx got_symbol
= tilepro_got_symbol ();
1182 rtx text_label_symbol
= tilepro_text_label_symbol ();
1183 rtx text_label_rtx
= tilepro_text_label_rtx ();
1186 emit_insn (gen_insn_lnk_and_label (text_label_rtx
, text_label_symbol
));
1188 emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
1189 text_label_rtx
, got_symbol
, text_label_symbol
));
1191 emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
1193 got_symbol
, text_label_symbol
));
1195 flag_pic
= orig_flag_pic
;
1197 /* Need to emit this whether or not we obey regdecls, since
1198 setjmp/longjmp can cause life info to screw up. ??? In the case
1199 where we don't obey regdecls, this is not sufficient since we may
1200 not fall out the bottom. */
1201 emit_use (tilepro_got_rtx ());
1205 /* Return the simd variant of the constant NUM of mode MODE, by
1206 replicating it to fill an interger of mode SImode. NUM is first
1207 truncated to fit in MODE. */
1209 tilepro_simd_int (rtx num
, enum machine_mode mode
)
1211 HOST_WIDE_INT n
= 0;
1213 gcc_assert (CONST_INT_P (num
));
1220 n
= 0x01010101 * (n
& 0x000000FF);
1223 n
= 0x00010001 * (n
& 0x0000FFFF);
1233 return gen_int_si (n
);
1237 /* Split one or more DImode RTL references into pairs of SImode
1238 references. The RTL can be REG, offsettable MEM, integer constant,
1239 or CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL
1240 to split and "num" is its length. lo_half and hi_half are output
1241 arrays that parallel "operands". */
1243 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1247 rtx op
= operands
[num
];
1249 /* simplify_subreg refuse to split volatile memory addresses,
1250 but we still have to handle it. */
1253 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1254 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1258 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1259 GET_MODE (op
) == VOIDmode
1260 ? DImode
: GET_MODE (op
), 0);
1261 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1262 GET_MODE (op
) == VOIDmode
1263 ? DImode
: GET_MODE (op
), 4);
1269 /* Returns true iff val can be moved into a register in one
1270 instruction. And if it can, it emits the code to move the
1273 If three_wide_only is true, this insists on an instruction that
1274 works in a bundle containing three instructions. */
1276 expand_set_cint32_one_inst (rtx dest_reg
,
1277 HOST_WIDE_INT val
, bool three_wide_only
)
1279 val
= trunc_int_for_mode (val
, SImode
);
1281 if (val
== trunc_int_for_mode (val
, QImode
))
1284 emit_move_insn (dest_reg
, GEN_INT (val
));
1287 else if (!three_wide_only
)
1289 rtx imm_op
= GEN_INT (val
);
1291 if (satisfies_constraint_J (imm_op
)
1292 || satisfies_constraint_K (imm_op
)
1293 || satisfies_constraint_N (imm_op
)
1294 || satisfies_constraint_P (imm_op
))
1296 emit_move_insn (dest_reg
, imm_op
);
1305 /* Implement SImode rotatert. */
1306 static HOST_WIDE_INT
1307 rotate_right (HOST_WIDE_INT n
, int count
)
1309 unsigned HOST_WIDE_INT x
= n
& 0xFFFFFFFF;
1312 return ((x
>> count
) | (x
<< (32 - count
))) & 0xFFFFFFFF;
1316 /* Return true iff n contains exactly one contiguous sequence of 1
1317 bits, possibly wrapping around from high bits to low bits. */
1319 tilepro_bitfield_operand_p (HOST_WIDE_INT n
, int *first_bit
, int *last_bit
)
1326 for (i
= 0; i
< 32; i
++)
1328 unsigned HOST_WIDE_INT x
= rotate_right (n
, i
);
1332 /* See if x is a power of two minus one, i.e. only consecutive 1
1333 bits starting from bit 0. */
1334 if ((x
& (x
+ 1)) == 0)
1336 if (first_bit
!= NULL
)
1338 if (last_bit
!= NULL
)
1339 *last_bit
= (i
+ exact_log2 (x
^ (x
>> 1))) & 31;
1349 /* Create code to move the CONST_INT value in src_val to dest_reg. */
1351 expand_set_cint32 (rtx dest_reg
, rtx src_val
)
1354 int leading_zeroes
, trailing_zeroes
;
1356 int three_wide_only
;
1359 gcc_assert (CONST_INT_P (src_val
));
1360 val
= trunc_int_for_mode (INTVAL (src_val
), SImode
);
1362 /* See if we can generate the constant in one instruction. */
1363 if (expand_set_cint32_one_inst (dest_reg
, val
, false))
1366 /* Create a temporary variable to hold a partial result, to enable
1368 temp
= create_temp_reg_if_possible (SImode
, dest_reg
);
1370 leading_zeroes
= 31 - floor_log2 (val
& 0xFFFFFFFF);
1371 trailing_zeroes
= exact_log2 (val
& -val
);
1373 lower
= trunc_int_for_mode (val
, HImode
);
1374 upper
= trunc_int_for_mode ((val
- lower
) >> 16, HImode
);
1376 /* First try all three-wide instructions that generate a constant
1377 (i.e. movei) followed by various shifts and rotates. If none of
1378 those work, try various two-wide ways of generating a constant
1379 followed by various shifts and rotates. */
1380 for (three_wide_only
= 1; three_wide_only
>= 0; three_wide_only
--)
1384 if (expand_set_cint32_one_inst (temp
, val
>> trailing_zeroes
,
1387 /* 0xFFFFA500 becomes:
1388 movei temp, 0xFFFFFFA5
1389 shli dest, temp, 8 */
1390 emit_move_insn (dest_reg
,
1391 gen_rtx_ASHIFT (SImode
, temp
,
1392 GEN_INT (trailing_zeroes
)));
1396 if (expand_set_cint32_one_inst (temp
, val
<< leading_zeroes
,
1399 /* 0x7FFFFFFF becomes:
1401 shri dest, temp, 1 */
1402 emit_move_insn (dest_reg
,
1403 gen_rtx_LSHIFTRT (SImode
, temp
,
1404 GEN_INT (leading_zeroes
)));
1408 /* Try rotating a one-instruction immediate, since rotate is
1410 for (count
= 1; count
< 32; count
++)
1412 HOST_WIDE_INT r
= rotate_right (val
, count
);
1413 if (expand_set_cint32_one_inst (temp
, r
, three_wide_only
))
1415 /* 0xFFA5FFFF becomes:
1416 movei temp, 0xFFFFFFA5
1417 rli dest, temp, 16 */
1418 emit_move_insn (dest_reg
,
1419 gen_rtx_ROTATE (SImode
, temp
, GEN_INT (count
)));
1424 if (lower
== trunc_int_for_mode (lower
, QImode
))
1426 /* We failed to use two 3-wide instructions, but the low 16
1427 bits are a small number so just use a 2-wide + 3-wide
1428 auli + addi pair rather than anything more exotic.
1431 auli temp, zero, 0x1234
1432 addi dest, temp, 0x56 */
1437 /* Fallback case: use a auli + addli/addi pair. */
1438 emit_move_insn (temp
, GEN_INT (upper
<< 16));
1439 emit_move_insn (dest_reg
, (gen_rtx_PLUS (SImode
, temp
, GEN_INT (lower
))));
1443 /* Load OP1, a 32-bit constant, into OP0, a register. We know it
1444 can't be done in one insn when we get here, the move expander
1447 tilepro_expand_set_const32 (rtx op0
, rtx op1
)
1449 enum machine_mode mode
= GET_MODE (op0
);
1452 if (CONST_INT_P (op1
))
1454 /* TODO: I don't know if we want to split large constants now,
1455 or wait until later (with a define_split).
1457 Does splitting early help CSE? Does it harm other
1458 optimizations that might fold loads? */
1459 expand_set_cint32 (op0
, op1
);
1463 temp
= create_temp_reg_if_possible (mode
, op0
);
1465 /* A symbol, emit in the traditional way. */
1466 emit_move_insn (temp
, gen_rtx_HIGH (mode
, op1
));
1467 emit_move_insn (op0
, gen_rtx_LO_SUM (mode
, temp
, op1
));
1472 /* Expand a move instruction. Return true if all work is done. */
1474 tilepro_expand_mov (enum machine_mode mode
, rtx
*operands
)
1476 /* Handle sets of MEM first. */
1477 if (MEM_P (operands
[0]))
1479 if (can_create_pseudo_p ())
1480 operands
[0] = validize_mem (operands
[0]);
1482 if (reg_or_0_operand (operands
[1], mode
))
1485 if (!reload_in_progress
)
1486 operands
[1] = force_reg (mode
, operands
[1]);
1489 /* Fixup TLS cases. */
1490 if (CONSTANT_P (operands
[1]) && tilepro_tls_referenced_p (operands
[1]))
1492 operands
[1] = tilepro_legitimize_tls_address (operands
[1]);
1496 /* Fixup PIC cases. */
1497 if (flag_pic
&& CONSTANT_P (operands
[1]))
1499 if (tilepro_pic_address_needs_scratch (operands
[1]))
1500 operands
[1] = tilepro_legitimize_pic_address (operands
[1], mode
, 0);
1502 if (symbolic_operand (operands
[1], mode
))
1504 operands
[1] = tilepro_legitimize_pic_address (operands
[1],
1506 (reload_in_progress
?
1513 /* Fixup for UNSPEC addresses. */
1515 && GET_CODE (operands
[1]) == HIGH
1516 && GET_CODE (XEXP (operands
[1], 0)) == CONST
1517 && GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == UNSPEC
)
1519 rtx unspec
= XEXP (XEXP (operands
[1], 0), 0);
1520 int unspec_num
= XINT (unspec
, 1);
1521 if (unspec_num
== UNSPEC_PCREL_SYM
)
1523 emit_insn (gen_auli_pcrel (operands
[0], const0_rtx
,
1524 XVECEXP (unspec
, 0, 0),
1525 XVECEXP (unspec
, 0, 1)));
1528 else if (flag_pic
== 2 && unspec_num
== UNSPEC_GOT32_SYM
)
1530 emit_insn (gen_addhi_got32 (operands
[0], const0_rtx
,
1531 XVECEXP (unspec
, 0, 0)));
1534 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_GD
)
1536 emit_insn (gen_tls_gd_addhi (operands
[0], const0_rtx
,
1537 XVECEXP (unspec
, 0, 0)));
1540 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_IE
)
1542 emit_insn (gen_tls_ie_addhi (operands
[0], const0_rtx
,
1543 XVECEXP (unspec
, 0, 0)));
1546 else if (HAVE_AS_TLS
&& unspec_num
== UNSPEC_TLS_LE
)
1548 emit_insn (gen_tls_le_addhi (operands
[0], const0_rtx
,
1549 XVECEXP (unspec
, 0, 0)));
1554 /* Accept non-constants and valid constants unmodified. */
1555 if (!CONSTANT_P (operands
[1])
1556 || GET_CODE (operands
[1]) == HIGH
|| move_operand (operands
[1], mode
))
1559 /* Split large integers. */
1560 if (GET_MODE_SIZE (mode
) <= 4)
1562 tilepro_expand_set_const32 (operands
[0], operands
[1]);
1570 /* Expand the "insv" pattern. */
1572 tilepro_expand_insv (rtx operands
[4])
1574 rtx first_rtx
= operands
[2];
1575 HOST_WIDE_INT first
= INTVAL (first_rtx
);
1576 HOST_WIDE_INT width
= INTVAL (operands
[1]);
1577 rtx v
= operands
[3];
1579 /* Shift the inserted bits into position. */
1582 if (CONST_INT_P (v
))
1584 /* Shift the constant into mm position. */
1585 v
= gen_int_si (INTVAL (v
) << first
);
1589 /* Shift over the value to be inserted. */
1590 rtx tmp
= gen_reg_rtx (SImode
);
1591 emit_insn (gen_ashlsi3 (tmp
, v
, first_rtx
));
1596 /* Insert the shifted bits using an 'mm' insn. */
1597 emit_insn (gen_insn_mm (operands
[0], v
, operands
[0], first_rtx
,
1598 GEN_INT (first
+ width
- 1)));
1602 /* Expand unaligned loads. */
1604 tilepro_expand_unaligned_load (rtx dest_reg
, rtx mem
, HOST_WIDE_INT bitsize
,
1605 HOST_WIDE_INT bit_offset
, bool sign
)
1607 enum machine_mode mode
;
1608 rtx addr_lo
, addr_hi
;
1609 rtx mem_lo
, mem_hi
, hi
;
1610 rtx mema
, wide_result
;
1611 int last_byte_offset
;
1612 HOST_WIDE_INT byte_offset
= bit_offset
/ BITS_PER_UNIT
;
1614 mode
= GET_MODE (dest_reg
);
1616 hi
= gen_reg_rtx (mode
);
1618 if (bitsize
== 2 * BITS_PER_UNIT
&& (bit_offset
% BITS_PER_UNIT
) == 0)
1622 /* When just loading a two byte value, we can load the two bytes
1623 individually and combine them efficiently. */
1625 mem_lo
= adjust_address (mem
, QImode
, byte_offset
);
1626 mem_hi
= adjust_address (mem
, QImode
, byte_offset
+ 1);
1628 lo
= gen_reg_rtx (mode
);
1629 emit_insn (gen_zero_extendqisi2 (lo
, mem_lo
));
1633 rtx tmp
= gen_reg_rtx (mode
);
1635 /* Do a signed load of the second byte then shift and OR it
1637 emit_insn (gen_extendqisi2 (gen_lowpart (SImode
, hi
), mem_hi
));
1638 emit_insn (gen_ashlsi3 (gen_lowpart (SImode
, tmp
),
1639 gen_lowpart (SImode
, hi
), GEN_INT (8)));
1640 emit_insn (gen_iorsi3 (gen_lowpart (SImode
, dest_reg
),
1641 gen_lowpart (SImode
, lo
),
1642 gen_lowpart (SImode
, tmp
)));
1646 /* Do two unsigned loads and use intlb to interleave
1648 emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode
, hi
), mem_hi
));
1649 emit_insn (gen_insn_intlb (gen_lowpart (SImode
, dest_reg
),
1650 gen_lowpart (SImode
, hi
),
1651 gen_lowpart (SImode
, lo
)));
1657 mema
= XEXP (mem
, 0);
1659 /* AND addresses cannot be in any alias set, since they may
1660 implicitly alias surrounding code. Ideally we'd have some alias
1661 set that covered all types except those with alignment 8 or
1663 addr_lo
= force_reg (Pmode
, plus_constant (Pmode
, mema
, byte_offset
));
1664 mem_lo
= change_address (mem
, mode
,
1665 gen_rtx_AND (Pmode
, addr_lo
, GEN_INT (-4)));
1666 set_mem_alias_set (mem_lo
, 0);
1668 /* Load the high word at an address that will not fault if the low
1669 address is aligned and at the very end of a page. */
1670 last_byte_offset
= (bit_offset
+ bitsize
- 1) / BITS_PER_UNIT
;
1671 addr_hi
= force_reg (Pmode
, plus_constant (Pmode
, mema
, last_byte_offset
));
1672 mem_hi
= change_address (mem
, mode
,
1673 gen_rtx_AND (Pmode
, addr_hi
, GEN_INT (-4)));
1674 set_mem_alias_set (mem_hi
, 0);
1678 addr_lo
= make_safe_from (addr_lo
, dest_reg
);
1679 wide_result
= dest_reg
;
1683 wide_result
= gen_reg_rtx (mode
);
1686 /* Load hi first in case dest_reg is used in mema. */
1687 emit_move_insn (hi
, mem_hi
);
1688 emit_move_insn (wide_result
, mem_lo
);
1690 emit_insn (gen_insn_dword_align (gen_lowpart (SImode
, wide_result
),
1691 gen_lowpart (SImode
, wide_result
),
1692 gen_lowpart (SImode
, hi
), addr_lo
));
1697 extract_bit_field (gen_lowpart (SImode
, wide_result
),
1698 bitsize
, bit_offset
% BITS_PER_UNIT
,
1699 !sign
, gen_lowpart (SImode
, dest_reg
),
1702 if (extracted
!= dest_reg
)
1703 emit_move_insn (dest_reg
, gen_lowpart (SImode
, extracted
));
1708 /* Expand unaligned stores. */
1710 tilepro_expand_unaligned_store (rtx mem
, rtx src
, HOST_WIDE_INT bitsize
,
1711 HOST_WIDE_INT bit_offset
)
1713 HOST_WIDE_INT byte_offset
= bit_offset
/ BITS_PER_UNIT
;
1714 HOST_WIDE_INT bytesize
= bitsize
/ BITS_PER_UNIT
;
1715 HOST_WIDE_INT shift_amt
;
1720 for (i
= 0, shift_amt
= 0; i
< bytesize
; i
++, shift_amt
+= BITS_PER_UNIT
)
1722 mem_addr
= adjust_address (mem
, QImode
, byte_offset
+ i
);
1726 store_val
= expand_simple_binop (SImode
, LSHIFTRT
,
1727 gen_lowpart (SImode
, src
),
1728 GEN_INT (shift_amt
), NULL
, 1,
1730 store_val
= gen_lowpart (QImode
, store_val
);
1734 store_val
= gen_lowpart (QImode
, src
);
1737 emit_move_insn (mem_addr
, store_val
);
1742 /* Implement the movmisalign patterns. One of the operands is a
1743 memory that is not naturally aligned. Emit instructions to load
1746 tilepro_expand_movmisalign (enum machine_mode mode
, rtx
*operands
)
1748 if (MEM_P (operands
[1]))
1752 if (register_operand (operands
[0], mode
))
1755 tmp
= gen_reg_rtx (mode
);
1757 tilepro_expand_unaligned_load (tmp
, operands
[1],
1758 GET_MODE_BITSIZE (mode
), 0, true);
1760 if (tmp
!= operands
[0])
1761 emit_move_insn (operands
[0], tmp
);
1763 else if (MEM_P (operands
[0]))
1765 if (!reg_or_0_operand (operands
[1], mode
))
1766 operands
[1] = force_reg (mode
, operands
[1]);
1768 tilepro_expand_unaligned_store (operands
[0], operands
[1],
1769 GET_MODE_BITSIZE (mode
), 0);
1776 /* Implement the addsi3 pattern. */
1778 tilepro_expand_addsi (rtx op0
, rtx op1
, rtx op2
)
1784 /* Skip anything that only takes one instruction. */
1785 if (add_operand (op2
, SImode
))
1788 /* We can only optimize ints here (it should be impossible to get
1789 here with any other type, but it is harmless to check. */
1790 if (!CONST_INT_P (op2
))
1793 temp
= create_temp_reg_if_possible (SImode
, op0
);
1795 high
= (n
+ (n
& 0x8000)) & ~0xffff;
1797 emit_move_insn (temp
, gen_rtx_PLUS (SImode
, op1
, gen_int_si (high
)));
1798 emit_move_insn (op0
, gen_rtx_PLUS (SImode
, temp
, gen_int_si (n
- high
)));
1804 /* Implement the allocate_stack pattern (alloca). */
1806 tilepro_allocate_stack (rtx op0
, rtx op1
)
1808 /* Technically the correct way to initialize chain_loc is with
1809 * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
1810 * sets the alias_set to that of a frame reference. Some of our
1811 * tests rely on some unsafe assumption about when the chaining
1812 * update is done, we need to be conservative about reordering the
1813 * chaining instructions.
1815 rtx fp_addr
= gen_reg_rtx (Pmode
);
1816 rtx fp_value
= gen_reg_rtx (Pmode
);
1819 emit_move_insn (fp_addr
, gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
1820 GEN_INT (UNITS_PER_WORD
)));
1822 fp_loc
= gen_frame_mem (Pmode
, fp_addr
);
1824 emit_move_insn (fp_value
, fp_loc
);
1826 op1
= force_reg (Pmode
, op1
);
1828 emit_move_insn (stack_pointer_rtx
,
1829 gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, op1
));
1831 emit_move_insn (fp_addr
, gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
1832 GEN_INT (UNITS_PER_WORD
)));
1834 fp_loc
= gen_frame_mem (Pmode
, fp_addr
);
1836 emit_move_insn (fp_loc
, fp_value
);
1838 emit_move_insn (op0
, virtual_stack_dynamic_rtx
);
1845 /* Returns the insn_code in ENTRY. */
1846 static enum insn_code
1847 tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
1850 return tilepro_multiply_insn_seq_decode_opcode
[entry
->compressed_opcode
];
1854 /* Returns the length of the 'op' array. */
1856 tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq
*seq
)
1858 /* The array either uses all of its allocated slots or is terminated
1859 by a bogus opcode. Either way, the array size is the index of the
1860 last valid opcode plus one. */
1862 for (i
= tilepro_multiply_insn_seq_MAX_OPERATIONS
- 1; i
>= 0; i
--)
1863 if (tilepro_multiply_get_opcode (&seq
->op
[i
]) != CODE_FOR_nothing
)
1866 /* An empty array is not allowed. */
1871 /* We precompute a number of expression trees for multiplying by
1872 constants. This generates code for such an expression tree by
1873 walking through the nodes in the tree (which are conveniently
1874 pre-linearized) and emitting an instruction for each one. */
1876 tilepro_expand_constant_multiply_given_sequence (rtx result
, rtx src
,
1878 tilepro_multiply_insn_seq
1884 /* Keep track of the subexpressions computed so far, so later
1885 instructions can refer to them. We seed the array with zero and
1886 the value being multiplied. */
1887 int num_subexprs
= 2;
1888 rtx subexprs
[tilepro_multiply_insn_seq_MAX_OPERATIONS
+ 2];
1889 subexprs
[0] = const0_rtx
;
1892 /* Determine how many instructions we are going to generate. */
1893 num_ops
= tilepro_multiply_get_num_ops (seq
);
1894 gcc_assert (num_ops
> 0
1895 && num_ops
<= tilepro_multiply_insn_seq_MAX_OPERATIONS
);
1897 for (i
= 0; i
< num_ops
; i
++)
1899 const struct tilepro_multiply_insn_seq_entry
*entry
= &seq
->op
[i
];
1901 /* Figure out where to store the output of this instruction. */
1902 const bool is_last_op
= (i
+ 1 == num_ops
);
1903 rtx out
= is_last_op
? result
: gen_reg_rtx (SImode
);
1905 enum insn_code opcode
= tilepro_multiply_get_opcode (entry
);
1906 if (opcode
== CODE_FOR_ashlsi3
)
1908 /* Handle shift by immediate. This is a special case because
1909 the meaning of the second operand is a constant shift
1910 count rather than an operand index. */
1912 /* Make sure the shift count is in range. Zero should not
1914 const int shift_count
= entry
->rhs
;
1915 gcc_assert (shift_count
> 0 && shift_count
< 32);
1917 /* Emit the actual instruction. */
1918 emit_insn (GEN_FCN (opcode
)
1919 (out
, subexprs
[entry
->lhs
],
1920 gen_rtx_CONST_INT (SImode
, shift_count
)));
1924 /* Handle a normal two-operand instruction, such as add or
1927 /* Make sure we are referring to a previously computed
1929 gcc_assert (entry
->rhs
< num_subexprs
);
1931 /* Emit the actual instruction. */
1932 emit_insn (GEN_FCN (opcode
)
1933 (out
, subexprs
[entry
->lhs
], subexprs
[entry
->rhs
]));
1936 /* Record this subexpression for use by later expressions. */
1937 subexprs
[num_subexprs
++] = out
;
1942 /* bsearch helper function. */
1944 tilepro_compare_multipliers (const void *key
, const void *t
)
1946 return *(const int *) key
-
1947 ((const struct tilepro_multiply_insn_seq
*) t
)->multiplier
;
1951 /* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
1953 static const struct tilepro_multiply_insn_seq
*
1954 tilepro_find_multiply_insn_seq_for_constant (int multiplier
)
1956 return ((const struct tilepro_multiply_insn_seq
*)
1957 bsearch (&multiplier
, tilepro_multiply_insn_seq_table
,
1958 tilepro_multiply_insn_seq_table_size
,
1959 sizeof tilepro_multiply_insn_seq_table
[0],
1960 tilepro_compare_multipliers
));
1964 /* Try to a expand constant multiply in SImode by looking it up in a
1965 precompiled table. OP0 is the result operand, OP1 is the source
1966 operand, and MULTIPLIER is the value of the constant. Return true
1969 tilepro_expand_const_mulsi (rtx op0
, rtx op1
, int multiplier
)
1971 /* See if we have precomputed an efficient way to multiply by this
1973 const struct tilepro_multiply_insn_seq
*seq
=
1974 tilepro_find_multiply_insn_seq_for_constant (multiplier
);
1977 tilepro_expand_constant_multiply_given_sequence (op0
, op1
, seq
);
1985 /* Expand the mulsi pattern. */
1987 tilepro_expand_mulsi (rtx op0
, rtx op1
, rtx op2
)
1989 if (CONST_INT_P (op2
))
1991 HOST_WIDE_INT n
= trunc_int_for_mode (INTVAL (op2
), SImode
);
1992 return tilepro_expand_const_mulsi (op0
, op1
, n
);
1998 /* Expand a high multiply pattern in SImode. RESULT, OP1, OP2 are the
1999 operands, and SIGN is true if it's a signed multiply, and false if
2000 it's an unsigned multiply. */
2002 tilepro_expand_high_multiply (rtx result
, rtx op1
, rtx op2
, bool sign
)
2004 rtx tmp0
= gen_reg_rtx (SImode
);
2005 rtx tmp1
= gen_reg_rtx (SImode
);
2006 rtx tmp2
= gen_reg_rtx (SImode
);
2007 rtx tmp3
= gen_reg_rtx (SImode
);
2008 rtx tmp4
= gen_reg_rtx (SImode
);
2009 rtx tmp5
= gen_reg_rtx (SImode
);
2010 rtx tmp6
= gen_reg_rtx (SImode
);
2011 rtx tmp7
= gen_reg_rtx (SImode
);
2012 rtx tmp8
= gen_reg_rtx (SImode
);
2013 rtx tmp9
= gen_reg_rtx (SImode
);
2014 rtx tmp10
= gen_reg_rtx (SImode
);
2015 rtx tmp11
= gen_reg_rtx (SImode
);
2016 rtx tmp12
= gen_reg_rtx (SImode
);
2017 rtx tmp13
= gen_reg_rtx (SImode
);
2018 rtx result_lo
= gen_reg_rtx (SImode
);
2022 emit_insn (gen_insn_mulhl_su (tmp0
, op1
, op2
));
2023 emit_insn (gen_insn_mulhl_su (tmp1
, op2
, op1
));
2024 emit_insn (gen_insn_mulll_uu (tmp2
, op1
, op2
));
2025 emit_insn (gen_insn_mulhh_ss (tmp3
, op1
, op2
));
2029 emit_insn (gen_insn_mulhl_uu (tmp0
, op1
, op2
));
2030 emit_insn (gen_insn_mulhl_uu (tmp1
, op2
, op1
));
2031 emit_insn (gen_insn_mulll_uu (tmp2
, op1
, op2
));
2032 emit_insn (gen_insn_mulhh_uu (tmp3
, op1
, op2
));
2035 emit_move_insn (tmp4
, (gen_rtx_ASHIFT (SImode
, tmp0
, GEN_INT (16))));
2037 emit_move_insn (tmp5
, (gen_rtx_ASHIFT (SImode
, tmp1
, GEN_INT (16))));
2039 emit_move_insn (tmp6
, (gen_rtx_PLUS (SImode
, tmp4
, tmp5
)));
2040 emit_move_insn (result_lo
, (gen_rtx_PLUS (SImode
, tmp2
, tmp6
)));
2042 emit_move_insn (tmp7
, gen_rtx_LTU (SImode
, tmp6
, tmp4
));
2043 emit_move_insn (tmp8
, gen_rtx_LTU (SImode
, result_lo
, tmp2
));
2047 emit_move_insn (tmp9
, (gen_rtx_ASHIFTRT (SImode
, tmp0
, GEN_INT (16))));
2048 emit_move_insn (tmp10
, (gen_rtx_ASHIFTRT (SImode
, tmp1
, GEN_INT (16))));
2052 emit_move_insn (tmp9
, (gen_rtx_LSHIFTRT (SImode
, tmp0
, GEN_INT (16))));
2053 emit_move_insn (tmp10
, (gen_rtx_LSHIFTRT (SImode
, tmp1
, GEN_INT (16))));
2056 emit_move_insn (tmp11
, (gen_rtx_PLUS (SImode
, tmp3
, tmp7
)));
2057 emit_move_insn (tmp12
, (gen_rtx_PLUS (SImode
, tmp8
, tmp9
)));
2058 emit_move_insn (tmp13
, (gen_rtx_PLUS (SImode
, tmp11
, tmp12
)));
2059 emit_move_insn (result
, (gen_rtx_PLUS (SImode
, tmp13
, tmp10
)));
2063 /* Implement smulsi3_highpart. */
2065 tilepro_expand_smulsi3_highpart (rtx op0
, rtx op1
, rtx op2
)
2067 tilepro_expand_high_multiply (op0
, op1
, op2
, true);
2071 /* Implement umulsi3_highpart. */
2073 tilepro_expand_umulsi3_highpart (rtx op0
, rtx op1
, rtx op2
)
2075 tilepro_expand_high_multiply (op0
, op1
, op2
, false);
2080 /* Compare and branches */
2082 /* Helper function to handle DImode for tilepro_emit_setcc_internal. */
2084 tilepro_emit_setcc_internal_di (rtx res
, enum rtx_code code
, rtx op0
, rtx op1
)
2086 rtx operands
[2], lo_half
[2], hi_half
[2];
2087 rtx tmp
, tmp0
, tmp1
, tmp2
;
2090 /* Reduce the number of cases we need to handle by reversing the
2100 /* We handle these compares directly. */
2107 /* Reverse the operands. */
2112 /* We should not have called this with any other code. */
2118 code
= swap_condition (code
);
2119 tmp
= op0
, op0
= op1
, op1
= tmp
;
2125 split_di (operands
, 2, lo_half
, hi_half
);
2127 if (!reg_or_0_operand (lo_half
[0], SImode
))
2128 lo_half
[0] = force_reg (SImode
, lo_half
[0]);
2130 if (!reg_or_0_operand (hi_half
[0], SImode
))
2131 hi_half
[0] = force_reg (SImode
, hi_half
[0]);
2133 if (!CONST_INT_P (lo_half
[1]) && !register_operand (lo_half
[1], SImode
))
2134 lo_half
[1] = force_reg (SImode
, lo_half
[1]);
2136 if (!CONST_INT_P (hi_half
[1]) && !register_operand (hi_half
[1], SImode
))
2137 hi_half
[1] = force_reg (SImode
, hi_half
[1]);
2139 tmp0
= gen_reg_rtx (SImode
);
2140 tmp1
= gen_reg_rtx (SImode
);
2141 tmp2
= gen_reg_rtx (SImode
);
2146 emit_insn (gen_insn_seq (tmp0
, lo_half
[0], lo_half
[1]));
2147 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2148 emit_insn (gen_andsi3 (res
, tmp0
, tmp1
));
2152 emit_insn (gen_insn_sne (tmp0
, lo_half
[0], lo_half
[1]));
2153 emit_insn (gen_insn_sne (tmp1
, hi_half
[0], hi_half
[1]));
2154 emit_insn (gen_iorsi3 (res
, tmp0
, tmp1
));
2158 emit_insn (gen_insn_slte (tmp0
, hi_half
[0], hi_half
[1]));
2159 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2160 emit_insn (gen_insn_slte_u (tmp2
, lo_half
[0], lo_half
[1]));
2161 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2164 if (operands
[1] == const0_rtx
)
2166 emit_insn (gen_lshrsi3 (res
, hi_half
[0], GEN_INT (31)));
2171 emit_insn (gen_insn_slt (tmp0
, hi_half
[0], hi_half
[1]));
2172 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2173 emit_insn (gen_insn_slt_u (tmp2
, lo_half
[0], lo_half
[1]));
2174 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2178 emit_insn (gen_insn_slte_u (tmp0
, hi_half
[0], hi_half
[1]));
2179 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2180 emit_insn (gen_insn_slte_u (tmp2
, lo_half
[0], lo_half
[1]));
2181 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2184 emit_insn (gen_insn_slt_u (tmp0
, hi_half
[0], hi_half
[1]));
2185 emit_insn (gen_insn_seq (tmp1
, hi_half
[0], hi_half
[1]));
2186 emit_insn (gen_insn_slt_u (tmp2
, lo_half
[0], lo_half
[1]));
2187 emit_insn (gen_insn_mvnz (res
, tmp0
, tmp1
, tmp2
));
2197 /* Certain simplifications can be done to make invalid setcc
2198 operations valid. Return the final comparison, or NULL if we can't
2201 tilepro_emit_setcc_internal (rtx res
, enum rtx_code code
, rtx op0
, rtx op1
,
2202 enum machine_mode cmp_mode
)
2207 if (cmp_mode
== DImode
)
2209 return tilepro_emit_setcc_internal_di (res
, code
, op0
, op1
);
2212 /* The general case: fold the comparison code to the types of
2213 compares that we have, choosing the branch as necessary. */
2223 /* We have these compares. */
2230 /* We do not have these compares, so we reverse the
2236 /* We should not have called this with any other code. */
2242 code
= swap_condition (code
);
2243 tmp
= op0
, op0
= op1
, op1
= tmp
;
2246 if (!reg_or_0_operand (op0
, SImode
))
2247 op0
= force_reg (SImode
, op0
);
2249 if (!CONST_INT_P (op1
) && !register_operand (op1
, SImode
))
2250 op1
= force_reg (SImode
, op1
);
2252 /* Return the setcc comparison. */
2253 emit_insn (gen_rtx_SET (VOIDmode
, res
,
2254 gen_rtx_fmt_ee (code
, SImode
, op0
, op1
)));
2260 /* Implement cstore patterns. */
2262 tilepro_emit_setcc (rtx operands
[], enum machine_mode cmp_mode
)
2265 tilepro_emit_setcc_internal (operands
[0], GET_CODE (operands
[1]),
2266 operands
[2], operands
[3], cmp_mode
);
2270 /* Return whether CODE is a signed comparison. */
2272 signed_compare_p (enum rtx_code code
)
2274 return (code
== EQ
|| code
== NE
|| code
== LT
|| code
== LE
2275 || code
== GT
|| code
== GE
);
2279 /* Generate the comparison for an SImode conditional branch. */
2281 tilepro_emit_cc_test (enum rtx_code code
, rtx op0
, rtx op1
,
2282 enum machine_mode cmp_mode
, bool eq_ne_only
)
2284 enum rtx_code branch_code
;
2287 /* Check for a compare against zero using a comparison we can do
2289 if (cmp_mode
!= DImode
2290 && op1
== const0_rtx
2291 && (code
== EQ
|| code
== NE
2292 || (!eq_ne_only
&& signed_compare_p (code
))))
2294 op0
= force_reg (SImode
, op0
);
2295 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, const0_rtx
);
2298 /* The general case: fold the comparison code to the types of
2299 compares that we have, choosing the branch as necessary. */
2307 /* We have these compares. */
2316 /* These must be reversed (except NE, but let's
2318 code
= reverse_condition (code
);
2326 if (cmp_mode
!= DImode
2327 && CONST_INT_P (op1
) && (!satisfies_constraint_I (op1
) || code
== LEU
))
2329 HOST_WIDE_INT n
= trunc_int_for_mode (INTVAL (op1
), SImode
);
2334 /* Subtract off the value we want to compare against and see
2335 if we get zero. This is cheaper than creating a constant
2336 in a register. Except that subtracting -128 is more
2337 expensive than seqi to -128, so we leave that alone. */
2338 /* ??? Don't do this when comparing against symbols,
2339 otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
2340 0), which will be declared false out of hand (at least
2342 if (!(symbolic_operand (op0
, VOIDmode
)
2343 || (REG_P (op0
) && REG_POINTER (op0
))))
2345 /* To compare against MIN_INT, we add MIN_INT and check
2348 if (n
!= -2147483647 - 1)
2353 op0
= force_reg (SImode
, op0
);
2354 temp
= gen_reg_rtx (SImode
);
2355 emit_insn (gen_addsi3 (temp
, op0
, gen_int_si (add
)));
2356 return gen_rtx_fmt_ee (reverse_condition (branch_code
),
2357 VOIDmode
, temp
, const0_rtx
);
2367 /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
2370 int first
= exact_log2 (code
== LTU
? n
: n
+ 1);
2373 op0
= force_reg (SImode
, op0
);
2374 temp
= gen_reg_rtx (SImode
);
2375 emit_move_insn (temp
,
2376 gen_rtx_LSHIFTRT (SImode
, op0
,
2377 gen_int_si (first
)));
2378 return gen_rtx_fmt_ee (reverse_condition (branch_code
),
2379 VOIDmode
, temp
, const0_rtx
);
2389 /* Compute a flag saying whether we should branch. */
2390 temp
= gen_reg_rtx (SImode
);
2391 tilepro_emit_setcc_internal (temp
, code
, op0
, op1
, cmp_mode
);
2393 /* Return the branch comparison. */
2394 return gen_rtx_fmt_ee (branch_code
, VOIDmode
, temp
, const0_rtx
);
2398 /* Generate the comparison for a conditional branch. */
2400 tilepro_emit_conditional_branch (rtx operands
[], enum machine_mode cmp_mode
)
2403 tilepro_emit_cc_test (GET_CODE (operands
[0]), operands
[1], operands
[2],
2405 rtx branch_rtx
= gen_rtx_SET (VOIDmode
, pc_rtx
,
2406 gen_rtx_IF_THEN_ELSE (VOIDmode
, cmp_rtx
,
2411 emit_jump_insn (branch_rtx
);
2415 /* Implement the movsicc pattern. */
2417 tilepro_emit_conditional_move (rtx cmp
)
2420 tilepro_emit_cc_test (GET_CODE (cmp
), XEXP (cmp
, 0), XEXP (cmp
, 1),
2421 GET_MODE (XEXP (cmp
, 0)), true);
2425 /* Return true if INSN is annotated with a REG_BR_PROB note that
2426 indicates it's a branch that's predicted taken. */
2428 cbranch_predicted_p (rtx_insn
*insn
)
2430 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2434 int pred_val
= XINT (x
, 0);
2436 return pred_val
>= REG_BR_PROB_BASE
/ 2;
2443 /* Output assembly code for a specific branch instruction, appending
2444 the branch prediction flag to the opcode if appropriate. */
2446 tilepro_output_simple_cbranch_with_opcode (rtx_insn
*insn
, const char *opcode
,
2447 int regop
, bool netreg_p
,
2448 bool reverse_predicted
)
2450 static char buf
[64];
2451 sprintf (buf
, "%s%s\t%%%c%d, %%l0", opcode
,
2452 (cbranch_predicted_p (insn
) ^ reverse_predicted
) ? "t" : "",
2453 netreg_p
? 'N' : 'r', regop
);
2458 /* Output assembly code for a specific branch instruction, appending
2459 the branch prediction flag to the opcode if appropriate. */
2461 tilepro_output_cbranch_with_opcode (rtx_insn
*insn
, rtx
*operands
,
2463 const char *rev_opcode
,
2464 int regop
, bool netreg_p
)
2466 const char *branch_if_false
;
2467 rtx taken
, not_taken
;
2468 bool is_simple_branch
;
2470 gcc_assert (LABEL_P (operands
[0]));
2472 is_simple_branch
= true;
2473 if (INSN_ADDRESSES_SET_P ())
2475 int from_addr
= INSN_ADDRESSES (INSN_UID (insn
));
2476 int to_addr
= INSN_ADDRESSES (INSN_UID (operands
[0]));
2477 int delta
= to_addr
- from_addr
;
2478 is_simple_branch
= IN_RANGE (delta
, -524288, 524280);
2481 if (is_simple_branch
)
2483 /* Just a simple conditional branch. */
2485 tilepro_output_simple_cbranch_with_opcode (insn
, opcode
, regop
,
2489 /* Generate a reversed branch around a direct jump. This fallback
2490 does not use branch-likely instructions. */
2491 not_taken
= gen_label_rtx ();
2492 taken
= operands
[0];
2494 /* Generate the reversed branch to NOT_TAKEN. */
2495 operands
[0] = not_taken
;
2497 tilepro_output_simple_cbranch_with_opcode (insn
, rev_opcode
, regop
,
2499 output_asm_insn (branch_if_false
, operands
);
2501 output_asm_insn ("j\t%l0", &taken
);
2503 /* Output NOT_TAKEN. */
2504 targetm
.asm_out
.internal_label (asm_out_file
, "L",
2505 CODE_LABEL_NUMBER (not_taken
));
2510 /* Output assembly code for a conditional branch instruction. */
2512 tilepro_output_cbranch (rtx_insn
*insn
, rtx
*operands
, bool reversed
)
2514 enum rtx_code code
= GET_CODE (operands
[1]);
2516 const char *rev_opcode
;
2519 code
= reverse_condition (code
);
2537 rev_opcode
= "blez";
2545 rev_opcode
= "bgez";
2552 tilepro_output_cbranch_with_opcode (insn
, operands
, opcode
, rev_opcode
,
2557 /* Implement the tablejump pattern. */
2559 tilepro_expand_tablejump (rtx op0
, rtx op1
)
2563 rtx table
= gen_rtx_LABEL_REF (Pmode
, op1
);
2564 rtx temp
= gen_reg_rtx (Pmode
);
2565 rtx text_label_symbol
= tilepro_text_label_symbol ();
2566 rtx text_label_rtx
= tilepro_text_label_rtx ();
2568 emit_insn (gen_addli_pcrel (temp
, text_label_rtx
,
2569 table
, text_label_symbol
));
2570 emit_insn (gen_auli_pcrel (temp
, temp
, table
, text_label_symbol
));
2571 emit_move_insn (temp
,
2572 gen_rtx_PLUS (Pmode
,
2573 convert_to_mode (Pmode
, op0
, false),
2578 emit_jump_insn (gen_tablejump_aux (op0
, op1
));
2582 /* Expand a builtin vector binary op, by calling gen function GEN with
2583 operands in the proper modes. DEST is converted to DEST_MODE, and
2584 src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
2586 tilepro_expand_builtin_vector_binop (rtx (*gen
) (rtx
, rtx
, rtx
),
2587 enum machine_mode dest_mode
,
2589 enum machine_mode src_mode
,
2590 rtx src0
, rtx src1
, bool do_src1
)
2592 dest
= gen_lowpart (dest_mode
, dest
);
2594 if (src0
== const0_rtx
)
2595 src0
= CONST0_RTX (src_mode
);
2597 src0
= gen_lowpart (src_mode
, src0
);
2601 if (src1
== const0_rtx
)
2602 src1
= CONST0_RTX (src_mode
);
2604 src1
= gen_lowpart (src_mode
, src1
);
2607 emit_insn ((*gen
) (dest
, src0
, src1
));
2614 struct tile_builtin_info
2616 enum insn_code icode
;
2620 static struct tile_builtin_info tilepro_builtin_info
[TILEPRO_BUILTIN_max
] = {
2621 { CODE_FOR_addsi3
, NULL
}, /* add */
2622 { CODE_FOR_insn_addb
, NULL
}, /* addb */
2623 { CODE_FOR_insn_addbs_u
, NULL
}, /* addbs_u */
2624 { CODE_FOR_insn_addh
, NULL
}, /* addh */
2625 { CODE_FOR_insn_addhs
, NULL
}, /* addhs */
2626 { CODE_FOR_insn_addib
, NULL
}, /* addib */
2627 { CODE_FOR_insn_addih
, NULL
}, /* addih */
2628 { CODE_FOR_insn_addlis
, NULL
}, /* addlis */
2629 { CODE_FOR_ssaddsi3
, NULL
}, /* adds */
2630 { CODE_FOR_insn_adiffb_u
, NULL
}, /* adiffb_u */
2631 { CODE_FOR_insn_adiffh
, NULL
}, /* adiffh */
2632 { CODE_FOR_andsi3
, NULL
}, /* and */
2633 { CODE_FOR_insn_auli
, NULL
}, /* auli */
2634 { CODE_FOR_insn_avgb_u
, NULL
}, /* avgb_u */
2635 { CODE_FOR_insn_avgh
, NULL
}, /* avgh */
2636 { CODE_FOR_insn_bitx
, NULL
}, /* bitx */
2637 { CODE_FOR_bswapsi2
, NULL
}, /* bytex */
2638 { CODE_FOR_clzsi2
, NULL
}, /* clz */
2639 { CODE_FOR_insn_crc32_32
, NULL
}, /* crc32_32 */
2640 { CODE_FOR_insn_crc32_8
, NULL
}, /* crc32_8 */
2641 { CODE_FOR_ctzsi2
, NULL
}, /* ctz */
2642 { CODE_FOR_insn_drain
, NULL
}, /* drain */
2643 { CODE_FOR_insn_dtlbpr
, NULL
}, /* dtlbpr */
2644 { CODE_FOR_insn_dword_align
, NULL
}, /* dword_align */
2645 { CODE_FOR_insn_finv
, NULL
}, /* finv */
2646 { CODE_FOR_insn_flush
, NULL
}, /* flush */
2647 { CODE_FOR_insn_fnop
, NULL
}, /* fnop */
2648 { CODE_FOR_insn_icoh
, NULL
}, /* icoh */
2649 { CODE_FOR_insn_ill
, NULL
}, /* ill */
2650 { CODE_FOR_insn_info
, NULL
}, /* info */
2651 { CODE_FOR_insn_infol
, NULL
}, /* infol */
2652 { CODE_FOR_insn_inthb
, NULL
}, /* inthb */
2653 { CODE_FOR_insn_inthh
, NULL
}, /* inthh */
2654 { CODE_FOR_insn_intlb
, NULL
}, /* intlb */
2655 { CODE_FOR_insn_intlh
, NULL
}, /* intlh */
2656 { CODE_FOR_insn_inv
, NULL
}, /* inv */
2657 { CODE_FOR_insn_lb
, NULL
}, /* lb */
2658 { CODE_FOR_insn_lb_u
, NULL
}, /* lb_u */
2659 { CODE_FOR_insn_lh
, NULL
}, /* lh */
2660 { CODE_FOR_insn_lh_u
, NULL
}, /* lh_u */
2661 { CODE_FOR_insn_lnk
, NULL
}, /* lnk */
2662 { CODE_FOR_insn_lw
, NULL
}, /* lw */
2663 { CODE_FOR_insn_lw_na
, NULL
}, /* lw_na */
2664 { CODE_FOR_insn_lb_L2
, NULL
}, /* lb_L2 */
2665 { CODE_FOR_insn_lb_u_L2
, NULL
}, /* lb_u_L2 */
2666 { CODE_FOR_insn_lh_L2
, NULL
}, /* lh_L2 */
2667 { CODE_FOR_insn_lh_u_L2
, NULL
}, /* lh_u_L2 */
2668 { CODE_FOR_insn_lw_L2
, NULL
}, /* lw_L2 */
2669 { CODE_FOR_insn_lw_na_L2
, NULL
}, /* lw_na_L2 */
2670 { CODE_FOR_insn_lb_miss
, NULL
}, /* lb_miss */
2671 { CODE_FOR_insn_lb_u_miss
, NULL
}, /* lb_u_miss */
2672 { CODE_FOR_insn_lh_miss
, NULL
}, /* lh_miss */
2673 { CODE_FOR_insn_lh_u_miss
, NULL
}, /* lh_u_miss */
2674 { CODE_FOR_insn_lw_miss
, NULL
}, /* lw_miss */
2675 { CODE_FOR_insn_lw_na_miss
, NULL
}, /* lw_na_miss */
2676 { CODE_FOR_insn_maxb_u
, NULL
}, /* maxb_u */
2677 { CODE_FOR_insn_maxh
, NULL
}, /* maxh */
2678 { CODE_FOR_insn_maxib_u
, NULL
}, /* maxib_u */
2679 { CODE_FOR_insn_maxih
, NULL
}, /* maxih */
2680 { CODE_FOR_memory_barrier
, NULL
}, /* mf */
2681 { CODE_FOR_insn_mfspr
, NULL
}, /* mfspr */
2682 { CODE_FOR_insn_minb_u
, NULL
}, /* minb_u */
2683 { CODE_FOR_insn_minh
, NULL
}, /* minh */
2684 { CODE_FOR_insn_minib_u
, NULL
}, /* minib_u */
2685 { CODE_FOR_insn_minih
, NULL
}, /* minih */
2686 { CODE_FOR_insn_mm
, NULL
}, /* mm */
2687 { CODE_FOR_insn_mnz
, NULL
}, /* mnz */
2688 { CODE_FOR_insn_mnzb
, NULL
}, /* mnzb */
2689 { CODE_FOR_insn_mnzh
, NULL
}, /* mnzh */
2690 { CODE_FOR_movsi
, NULL
}, /* move */
2691 { CODE_FOR_insn_movelis
, NULL
}, /* movelis */
2692 { CODE_FOR_insn_mtspr
, NULL
}, /* mtspr */
2693 { CODE_FOR_insn_mulhh_ss
, NULL
}, /* mulhh_ss */
2694 { CODE_FOR_insn_mulhh_su
, NULL
}, /* mulhh_su */
2695 { CODE_FOR_insn_mulhh_uu
, NULL
}, /* mulhh_uu */
2696 { CODE_FOR_insn_mulhha_ss
, NULL
}, /* mulhha_ss */
2697 { CODE_FOR_insn_mulhha_su
, NULL
}, /* mulhha_su */
2698 { CODE_FOR_insn_mulhha_uu
, NULL
}, /* mulhha_uu */
2699 { CODE_FOR_insn_mulhhsa_uu
, NULL
}, /* mulhhsa_uu */
2700 { CODE_FOR_insn_mulhl_ss
, NULL
}, /* mulhl_ss */
2701 { CODE_FOR_insn_mulhl_su
, NULL
}, /* mulhl_su */
2702 { CODE_FOR_insn_mulhl_us
, NULL
}, /* mulhl_us */
2703 { CODE_FOR_insn_mulhl_uu
, NULL
}, /* mulhl_uu */
2704 { CODE_FOR_insn_mulhla_ss
, NULL
}, /* mulhla_ss */
2705 { CODE_FOR_insn_mulhla_su
, NULL
}, /* mulhla_su */
2706 { CODE_FOR_insn_mulhla_us
, NULL
}, /* mulhla_us */
2707 { CODE_FOR_insn_mulhla_uu
, NULL
}, /* mulhla_uu */
2708 { CODE_FOR_insn_mulhlsa_uu
, NULL
}, /* mulhlsa_uu */
2709 { CODE_FOR_insn_mulll_ss
, NULL
}, /* mulll_ss */
2710 { CODE_FOR_insn_mulll_su
, NULL
}, /* mulll_su */
2711 { CODE_FOR_insn_mulll_uu
, NULL
}, /* mulll_uu */
2712 { CODE_FOR_insn_mullla_ss
, NULL
}, /* mullla_ss */
2713 { CODE_FOR_insn_mullla_su
, NULL
}, /* mullla_su */
2714 { CODE_FOR_insn_mullla_uu
, NULL
}, /* mullla_uu */
2715 { CODE_FOR_insn_mulllsa_uu
, NULL
}, /* mulllsa_uu */
2716 { CODE_FOR_insn_mvnz
, NULL
}, /* mvnz */
2717 { CODE_FOR_insn_mvz
, NULL
}, /* mvz */
2718 { CODE_FOR_insn_mz
, NULL
}, /* mz */
2719 { CODE_FOR_insn_mzb
, NULL
}, /* mzb */
2720 { CODE_FOR_insn_mzh
, NULL
}, /* mzh */
2721 { CODE_FOR_insn_nap
, NULL
}, /* nap */
2722 { CODE_FOR_nop
, NULL
}, /* nop */
2723 { CODE_FOR_insn_nor
, NULL
}, /* nor */
2724 { CODE_FOR_iorsi3
, NULL
}, /* or */
2725 { CODE_FOR_insn_packbs_u
, NULL
}, /* packbs_u */
2726 { CODE_FOR_insn_packhb
, NULL
}, /* packhb */
2727 { CODE_FOR_insn_packhs
, NULL
}, /* packhs */
2728 { CODE_FOR_insn_packlb
, NULL
}, /* packlb */
2729 { CODE_FOR_popcountsi2
, NULL
}, /* pcnt */
2730 { CODE_FOR_insn_prefetch
, NULL
}, /* prefetch */
2731 { CODE_FOR_insn_prefetch_L1
, NULL
}, /* prefetch_L1 */
2732 { CODE_FOR_rotlsi3
, NULL
}, /* rl */
2733 { CODE_FOR_insn_s1a
, NULL
}, /* s1a */
2734 { CODE_FOR_insn_s2a
, NULL
}, /* s2a */
2735 { CODE_FOR_insn_s3a
, NULL
}, /* s3a */
2736 { CODE_FOR_insn_sadab_u
, NULL
}, /* sadab_u */
2737 { CODE_FOR_insn_sadah
, NULL
}, /* sadah */
2738 { CODE_FOR_insn_sadah_u
, NULL
}, /* sadah_u */
2739 { CODE_FOR_insn_sadb_u
, NULL
}, /* sadb_u */
2740 { CODE_FOR_insn_sadh
, NULL
}, /* sadh */
2741 { CODE_FOR_insn_sadh_u
, NULL
}, /* sadh_u */
2742 { CODE_FOR_insn_sb
, NULL
}, /* sb */
2743 { CODE_FOR_insn_seq
, NULL
}, /* seq */
2744 { CODE_FOR_insn_seqb
, NULL
}, /* seqb */
2745 { CODE_FOR_insn_seqh
, NULL
}, /* seqh */
2746 { CODE_FOR_insn_seqib
, NULL
}, /* seqib */
2747 { CODE_FOR_insn_seqih
, NULL
}, /* seqih */
2748 { CODE_FOR_insn_sh
, NULL
}, /* sh */
2749 { CODE_FOR_ashlsi3
, NULL
}, /* shl */
2750 { CODE_FOR_insn_shlb
, NULL
}, /* shlb */
2751 { CODE_FOR_insn_shlh
, NULL
}, /* shlh */
2752 { CODE_FOR_insn_shlb
, NULL
}, /* shlib */
2753 { CODE_FOR_insn_shlh
, NULL
}, /* shlih */
2754 { CODE_FOR_lshrsi3
, NULL
}, /* shr */
2755 { CODE_FOR_insn_shrb
, NULL
}, /* shrb */
2756 { CODE_FOR_insn_shrh
, NULL
}, /* shrh */
2757 { CODE_FOR_insn_shrb
, NULL
}, /* shrib */
2758 { CODE_FOR_insn_shrh
, NULL
}, /* shrih */
2759 { CODE_FOR_insn_slt
, NULL
}, /* slt */
2760 { CODE_FOR_insn_slt_u
, NULL
}, /* slt_u */
2761 { CODE_FOR_insn_sltb
, NULL
}, /* sltb */
2762 { CODE_FOR_insn_sltb_u
, NULL
}, /* sltb_u */
2763 { CODE_FOR_insn_slte
, NULL
}, /* slte */
2764 { CODE_FOR_insn_slte_u
, NULL
}, /* slte_u */
2765 { CODE_FOR_insn_slteb
, NULL
}, /* slteb */
2766 { CODE_FOR_insn_slteb_u
, NULL
}, /* slteb_u */
2767 { CODE_FOR_insn_slteh
, NULL
}, /* slteh */
2768 { CODE_FOR_insn_slteh_u
, NULL
}, /* slteh_u */
2769 { CODE_FOR_insn_slth
, NULL
}, /* slth */
2770 { CODE_FOR_insn_slth_u
, NULL
}, /* slth_u */
2771 { CODE_FOR_insn_sltib
, NULL
}, /* sltib */
2772 { CODE_FOR_insn_sltib_u
, NULL
}, /* sltib_u */
2773 { CODE_FOR_insn_sltih
, NULL
}, /* sltih */
2774 { CODE_FOR_insn_sltih_u
, NULL
}, /* sltih_u */
2775 { CODE_FOR_insn_sne
, NULL
}, /* sne */
2776 { CODE_FOR_insn_sneb
, NULL
}, /* sneb */
2777 { CODE_FOR_insn_sneh
, NULL
}, /* sneh */
2778 { CODE_FOR_ashrsi3
, NULL
}, /* sra */
2779 { CODE_FOR_insn_srab
, NULL
}, /* srab */
2780 { CODE_FOR_insn_srah
, NULL
}, /* srah */
2781 { CODE_FOR_insn_srab
, NULL
}, /* sraib */
2782 { CODE_FOR_insn_srah
, NULL
}, /* sraih */
2783 { CODE_FOR_subsi3
, NULL
}, /* sub */
2784 { CODE_FOR_insn_subb
, NULL
}, /* subb */
2785 { CODE_FOR_insn_subbs_u
, NULL
}, /* subbs_u */
2786 { CODE_FOR_insn_subh
, NULL
}, /* subh */
2787 { CODE_FOR_insn_subhs
, NULL
}, /* subhs */
2788 { CODE_FOR_sssubsi3
, NULL
}, /* subs */
2789 { CODE_FOR_insn_sw
, NULL
}, /* sw */
2790 { CODE_FOR_insn_tblidxb0
, NULL
}, /* tblidxb0 */
2791 { CODE_FOR_insn_tblidxb1
, NULL
}, /* tblidxb1 */
2792 { CODE_FOR_insn_tblidxb2
, NULL
}, /* tblidxb2 */
2793 { CODE_FOR_insn_tblidxb3
, NULL
}, /* tblidxb3 */
2794 { CODE_FOR_insn_tns
, NULL
}, /* tns */
2795 { CODE_FOR_insn_wh64
, NULL
}, /* wh64 */
2796 { CODE_FOR_xorsi3
, NULL
}, /* xor */
2797 { CODE_FOR_tilepro_network_barrier
, NULL
}, /* network_barrier */
2798 { CODE_FOR_tilepro_idn0_receive
, NULL
}, /* idn0_receive */
2799 { CODE_FOR_tilepro_idn1_receive
, NULL
}, /* idn1_receive */
2800 { CODE_FOR_tilepro_idn_send
, NULL
}, /* idn_send */
2801 { CODE_FOR_tilepro_sn_receive
, NULL
}, /* sn_receive */
2802 { CODE_FOR_tilepro_sn_send
, NULL
}, /* sn_send */
2803 { CODE_FOR_tilepro_udn0_receive
, NULL
}, /* udn0_receive */
2804 { CODE_FOR_tilepro_udn1_receive
, NULL
}, /* udn1_receive */
2805 { CODE_FOR_tilepro_udn2_receive
, NULL
}, /* udn2_receive */
2806 { CODE_FOR_tilepro_udn3_receive
, NULL
}, /* udn3_receive */
2807 { CODE_FOR_tilepro_udn_send
, NULL
}, /* udn_send */
2811 struct tilepro_builtin_def
2814 enum tilepro_builtin code
;
2816 /* The first character is the return type. Subsequent characters
2817 are the argument types. See char_to_type. */
2822 static const struct tilepro_builtin_def tilepro_builtins
[] = {
2823 { "__insn_add", TILEPRO_INSN_ADD
, true, "lll" },
2824 { "__insn_addb", TILEPRO_INSN_ADDB
, true, "lll" },
2825 { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U
, false, "lll" },
2826 { "__insn_addh", TILEPRO_INSN_ADDH
, true, "lll" },
2827 { "__insn_addhs", TILEPRO_INSN_ADDHS
, false, "lll" },
2828 { "__insn_addi", TILEPRO_INSN_ADD
, true, "lll" },
2829 { "__insn_addib", TILEPRO_INSN_ADDIB
, true, "lll" },
2830 { "__insn_addih", TILEPRO_INSN_ADDIH
, true, "lll" },
2831 { "__insn_addli", TILEPRO_INSN_ADD
, true, "lll" },
2832 { "__insn_addlis", TILEPRO_INSN_ADDLIS
, false, "lll" },
2833 { "__insn_adds", TILEPRO_INSN_ADDS
, false, "lll" },
2834 { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U
, true, "lll" },
2835 { "__insn_adiffh", TILEPRO_INSN_ADIFFH
, true, "lll" },
2836 { "__insn_and", TILEPRO_INSN_AND
, true, "lll" },
2837 { "__insn_andi", TILEPRO_INSN_AND
, true, "lll" },
2838 { "__insn_auli", TILEPRO_INSN_AULI
, true, "lll" },
2839 { "__insn_avgb_u", TILEPRO_INSN_AVGB_U
, true, "lll" },
2840 { "__insn_avgh", TILEPRO_INSN_AVGH
, true, "lll" },
2841 { "__insn_bitx", TILEPRO_INSN_BITX
, true, "ll" },
2842 { "__insn_bytex", TILEPRO_INSN_BYTEX
, true, "ll" },
2843 { "__insn_clz", TILEPRO_INSN_CLZ
, true, "ll" },
2844 { "__insn_crc32_32", TILEPRO_INSN_CRC32_32
, true, "lll" },
2845 { "__insn_crc32_8", TILEPRO_INSN_CRC32_8
, true, "lll" },
2846 { "__insn_ctz", TILEPRO_INSN_CTZ
, true, "ll" },
2847 { "__insn_drain", TILEPRO_INSN_DRAIN
, false, "v" },
2848 { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR
, false, "vl" },
2849 { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN
, true, "lllk" },
2850 { "__insn_finv", TILEPRO_INSN_FINV
, false, "vk" },
2851 { "__insn_flush", TILEPRO_INSN_FLUSH
, false, "vk" },
2852 { "__insn_fnop", TILEPRO_INSN_FNOP
, false, "v" },
2853 { "__insn_icoh", TILEPRO_INSN_ICOH
, false, "vk" },
2854 { "__insn_ill", TILEPRO_INSN_ILL
, false, "v" },
2855 { "__insn_info", TILEPRO_INSN_INFO
, false, "vl" },
2856 { "__insn_infol", TILEPRO_INSN_INFOL
, false, "vl" },
2857 { "__insn_inthb", TILEPRO_INSN_INTHB
, true, "lll" },
2858 { "__insn_inthh", TILEPRO_INSN_INTHH
, true, "lll" },
2859 { "__insn_intlb", TILEPRO_INSN_INTLB
, true, "lll" },
2860 { "__insn_intlh", TILEPRO_INSN_INTLH
, true, "lll" },
2861 { "__insn_inv", TILEPRO_INSN_INV
, false, "vp" },
2862 { "__insn_lb", TILEPRO_INSN_LB
, false, "lk" },
2863 { "__insn_lb_u", TILEPRO_INSN_LB_U
, false, "lk" },
2864 { "__insn_lh", TILEPRO_INSN_LH
, false, "lk" },
2865 { "__insn_lh_u", TILEPRO_INSN_LH_U
, false, "lk" },
2866 { "__insn_lnk", TILEPRO_INSN_LNK
, true, "l" },
2867 { "__insn_lw", TILEPRO_INSN_LW
, false, "lk" },
2868 { "__insn_lw_na", TILEPRO_INSN_LW_NA
, false, "lk" },
2869 { "__insn_lb_L2", TILEPRO_INSN_LB_L2
, false, "lk" },
2870 { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2
, false, "lk" },
2871 { "__insn_lh_L2", TILEPRO_INSN_LH_L2
, false, "lk" },
2872 { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2
, false, "lk" },
2873 { "__insn_lw_L2", TILEPRO_INSN_LW_L2
, false, "lk" },
2874 { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2
, false, "lk" },
2875 { "__insn_lb_miss", TILEPRO_INSN_LB_MISS
, false, "lk" },
2876 { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS
, false, "lk" },
2877 { "__insn_lh_miss", TILEPRO_INSN_LH_MISS
, false, "lk" },
2878 { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS
, false, "lk" },
2879 { "__insn_lw_miss", TILEPRO_INSN_LW_MISS
, false, "lk" },
2880 { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS
, false, "lk" },
2881 { "__insn_maxb_u", TILEPRO_INSN_MAXB_U
, true, "lll" },
2882 { "__insn_maxh", TILEPRO_INSN_MAXH
, true, "lll" },
2883 { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U
, true, "lll" },
2884 { "__insn_maxih", TILEPRO_INSN_MAXIH
, true, "lll" },
2885 { "__insn_mf", TILEPRO_INSN_MF
, false, "v" },
2886 { "__insn_mfspr", TILEPRO_INSN_MFSPR
, false, "ll" },
2887 { "__insn_minb_u", TILEPRO_INSN_MINB_U
, true, "lll" },
2888 { "__insn_minh", TILEPRO_INSN_MINH
, true, "lll" },
2889 { "__insn_minib_u", TILEPRO_INSN_MINIB_U
, true, "lll" },
2890 { "__insn_minih", TILEPRO_INSN_MINIH
, true, "lll" },
2891 { "__insn_mm", TILEPRO_INSN_MM
, true, "lllll" },
2892 { "__insn_mnz", TILEPRO_INSN_MNZ
, true, "lll" },
2893 { "__insn_mnzb", TILEPRO_INSN_MNZB
, true, "lll" },
2894 { "__insn_mnzh", TILEPRO_INSN_MNZH
, true, "lll" },
2895 { "__insn_move", TILEPRO_INSN_MOVE
, true, "ll" },
2896 { "__insn_movei", TILEPRO_INSN_MOVE
, true, "ll" },
2897 { "__insn_moveli", TILEPRO_INSN_MOVE
, true, "ll" },
2898 { "__insn_movelis", TILEPRO_INSN_MOVELIS
, false, "ll" },
2899 { "__insn_mtspr", TILEPRO_INSN_MTSPR
, false, "vll" },
2900 { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS
, true, "lll" },
2901 { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU
, true, "lll" },
2902 { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU
, true, "lll" },
2903 { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS
, true, "llll" },
2904 { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU
, true, "llll" },
2905 { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU
, true, "llll" },
2906 { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU
, true, "llll" },
2907 { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS
, true, "lll" },
2908 { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU
, true, "lll" },
2909 { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US
, true, "lll" },
2910 { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU
, true, "lll" },
2911 { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS
, true, "llll" },
2912 { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU
, true, "llll" },
2913 { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US
, true, "llll" },
2914 { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU
, true, "llll" },
2915 { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU
, true, "llll" },
2916 { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS
, true, "lll" },
2917 { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU
, true, "lll" },
2918 { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU
, true, "lll" },
2919 { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS
, true, "llll" },
2920 { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU
, true, "llll" },
2921 { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU
, true, "llll" },
2922 { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU
, true, "llll" },
2923 { "__insn_mvnz", TILEPRO_INSN_MVNZ
, true, "llll" },
2924 { "__insn_mvz", TILEPRO_INSN_MVZ
, true, "llll" },
2925 { "__insn_mz", TILEPRO_INSN_MZ
, true, "lll" },
2926 { "__insn_mzb", TILEPRO_INSN_MZB
, true, "lll" },
2927 { "__insn_mzh", TILEPRO_INSN_MZH
, true, "lll" },
2928 { "__insn_nap", TILEPRO_INSN_NAP
, false, "v" },
2929 { "__insn_nop", TILEPRO_INSN_NOP
, true, "v" },
2930 { "__insn_nor", TILEPRO_INSN_NOR
, true, "lll" },
2931 { "__insn_or", TILEPRO_INSN_OR
, true, "lll" },
2932 { "__insn_ori", TILEPRO_INSN_OR
, true, "lll" },
2933 { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U
, false, "lll" },
2934 { "__insn_packhb", TILEPRO_INSN_PACKHB
, true, "lll" },
2935 { "__insn_packhs", TILEPRO_INSN_PACKHS
, false, "lll" },
2936 { "__insn_packlb", TILEPRO_INSN_PACKLB
, true, "lll" },
2937 { "__insn_pcnt", TILEPRO_INSN_PCNT
, true, "ll" },
2938 { "__insn_prefetch", TILEPRO_INSN_PREFETCH
, false, "vk" },
2939 { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1
, false, "vk" },
2940 { "__insn_rl", TILEPRO_INSN_RL
, true, "lll" },
2941 { "__insn_rli", TILEPRO_INSN_RL
, true, "lll" },
2942 { "__insn_s1a", TILEPRO_INSN_S1A
, true, "lll" },
2943 { "__insn_s2a", TILEPRO_INSN_S2A
, true, "lll" },
2944 { "__insn_s3a", TILEPRO_INSN_S3A
, true, "lll" },
2945 { "__insn_sadab_u", TILEPRO_INSN_SADAB_U
, true, "llll" },
2946 { "__insn_sadah", TILEPRO_INSN_SADAH
, true, "llll" },
2947 { "__insn_sadah_u", TILEPRO_INSN_SADAH_U
, true, "llll" },
2948 { "__insn_sadb_u", TILEPRO_INSN_SADB_U
, true, "lll" },
2949 { "__insn_sadh", TILEPRO_INSN_SADH
, true, "lll" },
2950 { "__insn_sadh_u", TILEPRO_INSN_SADH_U
, true, "lll" },
2951 { "__insn_sb", TILEPRO_INSN_SB
, false, "vpl" },
2952 { "__insn_seq", TILEPRO_INSN_SEQ
, true, "lll" },
2953 { "__insn_seqb", TILEPRO_INSN_SEQB
, true, "lll" },
2954 { "__insn_seqh", TILEPRO_INSN_SEQH
, true, "lll" },
2955 { "__insn_seqi", TILEPRO_INSN_SEQ
, true, "lll" },
2956 { "__insn_seqib", TILEPRO_INSN_SEQIB
, true, "lll" },
2957 { "__insn_seqih", TILEPRO_INSN_SEQIH
, true, "lll" },
2958 { "__insn_sh", TILEPRO_INSN_SH
, false, "vpl" },
2959 { "__insn_shl", TILEPRO_INSN_SHL
, true, "lll" },
2960 { "__insn_shlb", TILEPRO_INSN_SHLB
, true, "lll" },
2961 { "__insn_shlh", TILEPRO_INSN_SHLH
, true, "lll" },
2962 { "__insn_shli", TILEPRO_INSN_SHL
, true, "lll" },
2963 { "__insn_shlib", TILEPRO_INSN_SHLIB
, true, "lll" },
2964 { "__insn_shlih", TILEPRO_INSN_SHLIH
, true, "lll" },
2965 { "__insn_shr", TILEPRO_INSN_SHR
, true, "lll" },
2966 { "__insn_shrb", TILEPRO_INSN_SHRB
, true, "lll" },
2967 { "__insn_shrh", TILEPRO_INSN_SHRH
, true, "lll" },
2968 { "__insn_shri", TILEPRO_INSN_SHR
, true, "lll" },
2969 { "__insn_shrib", TILEPRO_INSN_SHRIB
, true, "lll" },
2970 { "__insn_shrih", TILEPRO_INSN_SHRIH
, true, "lll" },
2971 { "__insn_slt", TILEPRO_INSN_SLT
, true, "lll" },
2972 { "__insn_slt_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2973 { "__insn_sltb", TILEPRO_INSN_SLTB
, true, "lll" },
2974 { "__insn_sltb_u", TILEPRO_INSN_SLTB_U
, true, "lll" },
2975 { "__insn_slte", TILEPRO_INSN_SLTE
, true, "lll" },
2976 { "__insn_slte_u", TILEPRO_INSN_SLTE_U
, true, "lll" },
2977 { "__insn_slteb", TILEPRO_INSN_SLTEB
, true, "lll" },
2978 { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U
, true, "lll" },
2979 { "__insn_slteh", TILEPRO_INSN_SLTEH
, true, "lll" },
2980 { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U
, true, "lll" },
2981 { "__insn_slth", TILEPRO_INSN_SLTH
, true, "lll" },
2982 { "__insn_slth_u", TILEPRO_INSN_SLTH_U
, true, "lll" },
2983 { "__insn_slti", TILEPRO_INSN_SLT
, true, "lll" },
2984 { "__insn_slti_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2985 { "__insn_sltib", TILEPRO_INSN_SLTIB
, true, "lll" },
2986 { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U
, true, "lll" },
2987 { "__insn_sltih", TILEPRO_INSN_SLTIH
, true, "lll" },
2988 { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U
, true, "lll" },
2989 { "__insn_sne", TILEPRO_INSN_SNE
, true, "lll" },
2990 { "__insn_sneb", TILEPRO_INSN_SNEB
, true, "lll" },
2991 { "__insn_sneh", TILEPRO_INSN_SNEH
, true, "lll" },
2992 { "__insn_sra", TILEPRO_INSN_SRA
, true, "lll" },
2993 { "__insn_srab", TILEPRO_INSN_SRAB
, true, "lll" },
2994 { "__insn_srah", TILEPRO_INSN_SRAH
, true, "lll" },
2995 { "__insn_srai", TILEPRO_INSN_SRA
, true, "lll" },
2996 { "__insn_sraib", TILEPRO_INSN_SRAIB
, true, "lll" },
2997 { "__insn_sraih", TILEPRO_INSN_SRAIH
, true, "lll" },
2998 { "__insn_sub", TILEPRO_INSN_SUB
, true, "lll" },
2999 { "__insn_subb", TILEPRO_INSN_SUBB
, true, "lll" },
3000 { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U
, false, "lll" },
3001 { "__insn_subh", TILEPRO_INSN_SUBH
, true, "lll" },
3002 { "__insn_subhs", TILEPRO_INSN_SUBHS
, false, "lll" },
3003 { "__insn_subs", TILEPRO_INSN_SUBS
, false, "lll" },
3004 { "__insn_sw", TILEPRO_INSN_SW
, false, "vpl" },
3005 { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0
, true, "lll" },
3006 { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1
, true, "lll" },
3007 { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2
, true, "lll" },
3008 { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3
, true, "lll" },
3009 { "__insn_tns", TILEPRO_INSN_TNS
, false, "lp" },
3010 { "__insn_wh64", TILEPRO_INSN_WH64
, false, "vp" },
3011 { "__insn_xor", TILEPRO_INSN_XOR
, true, "lll" },
3012 { "__insn_xori", TILEPRO_INSN_XOR
, true, "lll" },
3013 { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER
, false, "v" },
3014 { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE
, false, "l" },
3015 { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE
, false, "l" },
3016 { "__tile_idn_send", TILEPRO_IDN_SEND
, false, "vl" },
3017 { "__tile_sn_receive", TILEPRO_SN_RECEIVE
, false, "l" },
3018 { "__tile_sn_send", TILEPRO_SN_SEND
, false, "vl" },
3019 { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE
, false, "l" },
3020 { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE
, false, "l" },
3021 { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE
, false, "l" },
3022 { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE
, false, "l" },
3023 { "__tile_udn_send", TILEPRO_UDN_SEND
, false, "vl" },
3027 /* Convert a character in a builtin type string to a tree type. */
3029 char_to_type (char c
)
3031 static tree volatile_ptr_type_node
= NULL
;
3032 static tree volatile_const_ptr_type_node
= NULL
;
3034 if (volatile_ptr_type_node
== NULL
)
3036 volatile_ptr_type_node
=
3037 build_pointer_type (build_qualified_type (void_type_node
,
3038 TYPE_QUAL_VOLATILE
));
3039 volatile_const_ptr_type_node
=
3040 build_pointer_type (build_qualified_type (void_type_node
,
3042 | TYPE_QUAL_VOLATILE
));
3048 return void_type_node
;
3050 return long_unsigned_type_node
;
3052 return volatile_ptr_type_node
;
3054 return volatile_const_ptr_type_node
;
3061 /* Implement TARGET_INIT_BUILTINS. */
3063 tilepro_init_builtins (void)
3067 for (i
= 0; i
< ARRAY_SIZE (tilepro_builtins
); i
++)
3069 const struct tilepro_builtin_def
*p
= &tilepro_builtins
[i
];
3070 tree ftype
, ret_type
, arg_type_list
= void_list_node
;
3074 for (j
= strlen (p
->type
) - 1; j
> 0; j
--)
3077 tree_cons (NULL_TREE
, char_to_type (p
->type
[j
]), arg_type_list
);
3080 ret_type
= char_to_type (p
->type
[0]);
3082 ftype
= build_function_type (ret_type
, arg_type_list
);
3084 decl
= add_builtin_function (p
->name
, ftype
, p
->code
, BUILT_IN_MD
,
3088 TREE_READONLY (decl
) = 1;
3089 TREE_NOTHROW (decl
) = 1;
3091 if (tilepro_builtin_info
[p
->code
].fndecl
== NULL
)
3092 tilepro_builtin_info
[p
->code
].fndecl
= decl
;
3097 /* Implement TARGET_EXPAND_BUILTIN. */
3099 tilepro_expand_builtin (tree exp
,
3101 rtx subtarget ATTRIBUTE_UNUSED
,
3102 enum machine_mode mode ATTRIBUTE_UNUSED
,
3103 int ignore ATTRIBUTE_UNUSED
)
3105 #define MAX_BUILTIN_ARGS 4
3107 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
3108 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3110 call_expr_arg_iterator iter
;
3111 enum insn_code icode
;
3112 rtx op
[MAX_BUILTIN_ARGS
+ 1], pat
;
3117 if (fcode
>= TILEPRO_BUILTIN_max
)
3118 internal_error ("bad builtin fcode");
3119 icode
= tilepro_builtin_info
[fcode
].icode
;
3121 internal_error ("bad builtin icode");
3123 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
3126 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
3128 const struct insn_operand_data
*insn_op
;
3130 if (arg
== error_mark_node
)
3132 if (opnum
> MAX_BUILTIN_ARGS
)
3135 insn_op
= &insn_data
[icode
].operand
[opnum
];
3137 op
[opnum
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, EXPAND_NORMAL
);
3139 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3140 op
[opnum
] = copy_to_mode_reg (insn_op
->mode
, op
[opnum
]);
3142 if (!(*insn_op
->predicate
) (op
[opnum
], insn_op
->mode
))
3144 /* We still failed to meet the predicate even after moving
3145 into a register. Assume we needed an immediate. */
3146 error_at (EXPR_LOCATION (exp
),
3147 "operand must be an immediate of the right size");
3156 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3158 || GET_MODE (target
) != tmode
3159 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3160 target
= gen_reg_rtx (tmode
);
3164 fn
= GEN_FCN (icode
);
3168 pat
= fn (NULL_RTX
);
3174 pat
= fn (op
[0], op
[1]);
3177 pat
= fn (op
[0], op
[1], op
[2]);
3180 pat
= fn (op
[0], op
[1], op
[2], op
[3]);
3183 pat
= fn (op
[0], op
[1], op
[2], op
[3], op
[4]);
3191 /* If we are generating a prefetch, tell the scheduler not to move
3193 if (GET_CODE (pat
) == PREFETCH
)
3194 PREFETCH_SCHEDULE_BARRIER_P (pat
) = true;
3205 /* Implement TARGET_BUILTIN_DECL. */
3207 tilepro_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
3209 if (code
>= TILEPRO_BUILTIN_max
)
3210 return error_mark_node
;
3212 return tilepro_builtin_info
[code
].fndecl
;
3219 /* Return whether REGNO needs to be saved in the stack frame. */
3221 need_to_save_reg (unsigned int regno
)
3223 if (!fixed_regs
[regno
] && !call_used_regs
[regno
]
3224 && df_regs_ever_live_p (regno
))
3228 && (regno
== PIC_OFFSET_TABLE_REGNUM
3229 || regno
== TILEPRO_PIC_TEXT_LABEL_REGNUM
)
3230 && (crtl
->uses_pic_offset_table
|| crtl
->saves_all_registers
))
3233 if (crtl
->calls_eh_return
)
3236 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; i
++)
3238 if (regno
== EH_RETURN_DATA_REGNO (i
))
3247 /* Return the size of the register savev area. This function is only
3248 correct starting with local register allocation */
3250 tilepro_saved_regs_size (void)
3252 int reg_save_size
= 0;
3254 int offset_to_frame
;
3257 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
3258 if (need_to_save_reg (regno
))
3259 reg_save_size
+= UNITS_PER_WORD
;
3261 /* Pad out the register save area if necessary to make
3262 frame_pointer_rtx be as aligned as the stack pointer. */
3263 offset_to_frame
= crtl
->args
.pretend_args_size
+ reg_save_size
;
3264 align_mask
= (STACK_BOUNDARY
/ BITS_PER_UNIT
) - 1;
3265 reg_save_size
+= (-offset_to_frame
) & align_mask
;
3267 return reg_save_size
;
3271 /* Round up frame size SIZE. */
3273 round_frame_size (int size
)
3275 return ((size
+ STACK_BOUNDARY
/ BITS_PER_UNIT
- 1)
3276 & -STACK_BOUNDARY
/ BITS_PER_UNIT
);
3280 /* Emit a store in the stack frame to save REGNO at address ADDR, and
3281 emit the corresponding REG_CFA_OFFSET note described by CFA and
3282 CFA_OFFSET. Return the emitted insn. */
3284 frame_emit_store (int regno
, int regno_note
, rtx addr
, rtx cfa
,
3287 rtx reg
= gen_rtx_REG (Pmode
, regno
);
3288 rtx mem
= gen_frame_mem (Pmode
, addr
);
3289 rtx mov
= gen_movsi (mem
, reg
);
3291 /* Describe what just happened in a way that dwarf understands. We
3292 use temporary registers to hold the address to make scheduling
3293 easier, and use the REG_CFA_OFFSET to describe the address as an
3294 offset from the CFA. */
3295 rtx reg_note
= gen_rtx_REG (Pmode
, regno_note
);
3296 rtx cfa_relative_addr
= gen_rtx_PLUS (Pmode
, cfa
, gen_int_si (cfa_offset
));
3297 rtx cfa_relative_mem
= gen_frame_mem (Pmode
, cfa_relative_addr
);
3298 rtx real
= gen_rtx_SET (VOIDmode
, cfa_relative_mem
, reg_note
);
3299 add_reg_note (mov
, REG_CFA_OFFSET
, real
);
3301 return emit_insn (mov
);
3305 /* Emit a load in the stack frame to load REGNO from address ADDR.
3306 Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3307 non-null. Return the emitted insn. */
3309 frame_emit_load (int regno
, rtx addr
, rtx
*cfa_restores
)
3311 rtx reg
= gen_rtx_REG (Pmode
, regno
);
3312 rtx mem
= gen_frame_mem (Pmode
, addr
);
3314 *cfa_restores
= alloc_reg_note (REG_CFA_RESTORE
, reg
, *cfa_restores
);
3315 return emit_insn (gen_movsi (reg
, mem
));
3319 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3320 including sequences. */
3322 set_frame_related_p (void)
3324 rtx_insn
*seq
= get_insns ();
3335 while (insn
!= NULL_RTX
)
3337 RTX_FRAME_RELATED_P (insn
) = 1;
3338 insn
= NEXT_INSN (insn
);
3340 seq
= emit_insn (seq
);
3344 seq
= emit_insn (seq
);
3345 RTX_FRAME_RELATED_P (seq
) = 1;
3351 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3353 /* This emits code for 'sp += offset'.
3355 The ABI only allows us to modify 'sp' in a single 'addi' or
3356 'addli', so the backtracer understands it. Larger amounts cannot
3357 use those instructions, so are added by placing the offset into a
3358 large register and using 'add'.
3360 This happens after reload, so we need to expand it ourselves. */
3362 emit_sp_adjust (int offset
, int *next_scratch_regno
, bool frame_related
,
3366 rtx imm_rtx
= gen_int_si (offset
);
3369 if (satisfies_constraint_J (imm_rtx
))
3371 /* We can add this using a single addi or addli. */
3376 rtx tmp
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3377 tilepro_expand_set_const32 (tmp
, imm_rtx
);
3381 /* Actually adjust the stack pointer. */
3382 insn
= emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3384 REG_NOTES (insn
) = reg_notes
;
3386 /* Describe what just happened in a way that dwarf understands. */
3389 rtx real
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
3390 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3392 RTX_FRAME_RELATED_P (insn
) = 1;
3393 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, real
);
3400 /* Return whether the current function is leaf. This takes into
3401 account whether the function calls tls_get_addr. */
3403 tilepro_current_function_is_leaf (void)
3405 return crtl
->is_leaf
&& !cfun
->machine
->calls_tls_get_addr
;
3409 /* Return the frame size. */
3411 compute_total_frame_size (void)
3413 int total_size
= (get_frame_size () + tilepro_saved_regs_size ()
3414 + crtl
->outgoing_args_size
3415 + crtl
->args
.pretend_args_size
);
3417 if (!tilepro_current_function_is_leaf () || cfun
->calls_alloca
)
3419 /* Make room for save area in callee. */
3420 total_size
+= STACK_POINTER_OFFSET
;
3423 return round_frame_size (total_size
);
3427 /* Return nonzero if this function is known to have a null epilogue.
3428 This allows the optimizer to omit jumps to jumps if no stack was
3431 tilepro_can_use_return_insn_p (void)
3433 return (reload_completed
3434 && cfun
->static_chain_decl
== 0
3435 && compute_total_frame_size () == 0
3436 && tilepro_current_function_is_leaf ()
3437 && !crtl
->profile
&& !df_regs_ever_live_p (TILEPRO_LINK_REGNUM
));
3441 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'. If there
3442 is a frame pointer, it computes the value relative to
3443 that. Otherwise it uses the stack pointer. */
3445 compute_frame_addr (int offset_from_fp
, int *next_scratch_regno
)
3447 rtx base_reg_rtx
, tmp_reg_rtx
, offset_rtx
;
3448 int offset_from_base
;
3450 if (frame_pointer_needed
)
3452 base_reg_rtx
= hard_frame_pointer_rtx
;
3453 offset_from_base
= offset_from_fp
;
3457 int offset_from_sp
= compute_total_frame_size () + offset_from_fp
;
3458 base_reg_rtx
= stack_pointer_rtx
;
3459 offset_from_base
= offset_from_sp
;
3462 if (offset_from_base
== 0)
3463 return base_reg_rtx
;
3465 /* Compute the new value of the stack pointer. */
3466 tmp_reg_rtx
= gen_rtx_REG (Pmode
, (*next_scratch_regno
)--);
3467 offset_rtx
= gen_int_si (offset_from_base
);
3469 if (!tilepro_expand_addsi (tmp_reg_rtx
, base_reg_rtx
, offset_rtx
))
3471 emit_insn (gen_rtx_SET (VOIDmode
, tmp_reg_rtx
,
3472 gen_rtx_PLUS (Pmode
, base_reg_rtx
,
/* The stack frame looks like this (grows downward; exact slot layout
   reconstructed from the prologue/epilogue code below -- verify
   against the original source):

   AP  -> +-------------+  (incoming args above)
          |   reg save  |  crtl->args.pretend_args_size bytes
   HFP -> +-------------+
          |  saved regs |  tilepro_saved_regs_size() bytes
   FP  -> +-------------+
          |     vars    |  get_frame_size() bytes
          |  stack args |  crtl->outgoing_args_size bytes
          |     HFP     |  4 bytes (only here if nonleaf / alloca)
          |  callee lr  |  4 bytes (only here if nonleaf / alloca)
   SP  -> +-------------+

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
3518 tilepro_expand_prologue (void)
3520 #define ROUND_ROBIN_SIZE 4
3521 /* We round-robin through four scratch registers to hold temporary
3522 addresses for saving registers, to make instruction scheduling
3524 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3525 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3528 unsigned int which_scratch
;
3529 int offset
, start_offset
, regno
;
3531 /* A register that holds a copy of the incoming fp. */
3532 int fp_copy_regno
= -1;
3534 /* A register that holds a copy of the incoming sp. */
3535 int sp_copy_regno
= -1;
3537 /* Next scratch register number to hand out (postdecrementing). */
3538 int next_scratch_regno
= 29;
3540 int total_size
= compute_total_frame_size ();
3542 if (flag_stack_usage_info
)
3543 current_function_static_stack_size
= total_size
;
3545 /* Save lr first in its special location because code after this
3546 might use the link register as a scratch register. */
3547 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
) || crtl
->calls_eh_return
)
3548 FRP (frame_emit_store (TILEPRO_LINK_REGNUM
, TILEPRO_LINK_REGNUM
,
3549 stack_pointer_rtx
, stack_pointer_rtx
, 0));
3551 if (total_size
== 0)
3553 /* Load the PIC register if needed. */
3554 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3555 load_pic_register (false);
3560 cfa
= stack_pointer_rtx
;
3562 if (frame_pointer_needed
)
3564 fp_copy_regno
= next_scratch_regno
--;
3566 /* Copy the old frame pointer aside so we can save it later. */
3567 insn
= FRP (emit_move_insn (gen_rtx_REG (word_mode
, fp_copy_regno
),
3568 hard_frame_pointer_rtx
));
3569 add_reg_note (insn
, REG_CFA_REGISTER
, NULL_RTX
);
3571 /* Set up the frame pointer. */
3572 insn
= FRP (emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
));
3573 add_reg_note (insn
, REG_CFA_DEF_CFA
, hard_frame_pointer_rtx
);
3574 cfa
= hard_frame_pointer_rtx
;
3575 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM
) = STACK_BOUNDARY
;
3577 /* fp holds a copy of the incoming sp, in case we need to store
3579 sp_copy_regno
= HARD_FRAME_POINTER_REGNUM
;
3581 else if (!tilepro_current_function_is_leaf ())
3583 /* Copy the old stack pointer aside so we can save it later. */
3584 sp_copy_regno
= next_scratch_regno
--;
3585 emit_move_insn (gen_rtx_REG (Pmode
, sp_copy_regno
),
3589 if (tilepro_current_function_is_leaf ())
3591 /* No need to store chain pointer to caller's frame. */
3592 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3593 !frame_pointer_needed
, NULL_RTX
);
3597 /* Save the frame pointer (incoming sp value) to support
3598 backtracing. First we need to create an rtx with the store
3600 rtx chain_addr
= gen_rtx_REG (Pmode
, next_scratch_regno
--);
3601 rtx size_rtx
= gen_int_si (-(total_size
- UNITS_PER_WORD
));
3603 if (add_operand (size_rtx
, Pmode
))
3605 /* Expose more parallelism by computing this value from the
3606 original stack pointer, not the one after we have pushed
3608 rtx p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, size_rtx
);
3609 emit_insn (gen_rtx_SET (VOIDmode
, chain_addr
, p
));
3610 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3611 !frame_pointer_needed
, NULL_RTX
);
3615 /* The stack frame is large, so just store the incoming sp
3616 value at *(new_sp + UNITS_PER_WORD). */
3618 emit_sp_adjust (-total_size
, &next_scratch_regno
,
3619 !frame_pointer_needed
, NULL_RTX
);
3620 p
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3621 GEN_INT (UNITS_PER_WORD
));
3622 emit_insn (gen_rtx_SET (VOIDmode
, chain_addr
, p
));
3625 /* Save our frame pointer for backtrace chaining. */
3626 emit_insn (gen_movsi (gen_frame_mem (SImode
, chain_addr
),
3627 gen_rtx_REG (SImode
, sp_copy_regno
)));
3630 /* Compute where to start storing registers we need to save. */
3631 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3632 offset
= start_offset
;
3634 /* Store all registers that need saving. */
3636 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3637 if (need_to_save_reg (regno
))
3639 rtx r
= reg_save_addr
[which_scratch
];
3641 int cfa_offset
= frame_pointer_needed
? offset
: total_size
+ offset
;
3645 rtx p
= compute_frame_addr (offset
, &next_scratch_regno
);
3646 r
= gen_rtx_REG (word_mode
, next_scratch_regno
--);
3647 reg_save_addr
[which_scratch
] = r
;
3649 emit_insn (gen_rtx_SET (VOIDmode
, r
, p
));
3653 /* Advance to the next stack slot to store this register. */
3654 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3655 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3656 emit_insn (gen_rtx_SET (VOIDmode
, r
, p
));
3659 /* Save this register to the stack (but use the old fp value
3660 we copied aside if appropriate). */
3661 from_regno
= (fp_copy_regno
>= 0
3663 HARD_FRAME_POINTER_REGNUM
) ? fp_copy_regno
: regno
;
3664 FRP (frame_emit_store (from_regno
, regno
, r
, cfa
, cfa_offset
));
3666 offset
-= UNITS_PER_WORD
;
3667 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3670 /* If profiling, force that to happen after the frame is set up. */
3672 emit_insn (gen_blockage ());
3674 /* Load the PIC register if needed. */
3675 if (flag_pic
&& crtl
->uses_pic_offset_table
)
3676 load_pic_register (false);
3680 /* Implement the epilogue and sibcall_epilogue patterns. SIBCALL_P is
3681 true for a sibcall_epilogue pattern, and false for an epilogue
3684 tilepro_expand_epilogue (bool sibcall_p
)
3686 /* We round-robin through four scratch registers to hold temporary
3687 addresses for saving registers, to make instruction scheduling
3689 rtx reg_save_addr
[ROUND_ROBIN_SIZE
] = {
3690 NULL_RTX
, NULL_RTX
, NULL_RTX
, NULL_RTX
3692 rtx_insn
*last_insn
, *insn
;
3693 unsigned int which_scratch
;
3694 int offset
, start_offset
, regno
;
3695 rtx cfa_restores
= NULL_RTX
;
3697 /* A register that holds a copy of the incoming fp. */
3698 int fp_copy_regno
= -1;
3700 /* Next scratch register number to hand out (postdecrementing). */
3701 int next_scratch_regno
= 29;
3703 int total_size
= compute_total_frame_size ();
3705 last_insn
= get_last_insn ();
3707 /* Load lr first since we are going to need it first. */
3709 if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM
))
3711 insn
= frame_emit_load (TILEPRO_LINK_REGNUM
,
3712 compute_frame_addr (0, &next_scratch_regno
),
3716 if (total_size
== 0)
3720 RTX_FRAME_RELATED_P (insn
) = 1;
3721 REG_NOTES (insn
) = cfa_restores
;
3726 /* Compute where to start restoring registers. */
3727 start_offset
= -crtl
->args
.pretend_args_size
- UNITS_PER_WORD
;
3728 offset
= start_offset
;
3730 if (frame_pointer_needed
)
3731 fp_copy_regno
= next_scratch_regno
--;
3733 /* Restore all callee-saved registers. */
3735 for (regno
= FIRST_PSEUDO_REGISTER
- 1; regno
>= 0; regno
--)
3736 if (need_to_save_reg (regno
))
3738 rtx r
= reg_save_addr
[which_scratch
];
3741 r
= compute_frame_addr (offset
, &next_scratch_regno
);
3742 reg_save_addr
[which_scratch
] = r
;
3746 /* Advance to the next stack slot to store this
3748 int stride
= ROUND_ROBIN_SIZE
* -UNITS_PER_WORD
;
3749 rtx p
= gen_rtx_PLUS (Pmode
, r
, GEN_INT (stride
));
3750 emit_insn (gen_rtx_SET (VOIDmode
, r
, p
));
3753 if (fp_copy_regno
>= 0 && regno
== HARD_FRAME_POINTER_REGNUM
)
3754 frame_emit_load (fp_copy_regno
, r
, NULL
);
3756 frame_emit_load (regno
, r
, &cfa_restores
);
3758 offset
-= UNITS_PER_WORD
;
3759 which_scratch
= (which_scratch
+ 1) % ROUND_ROBIN_SIZE
;
3762 if (!tilepro_current_function_is_leaf ())
3764 alloc_reg_note (REG_CFA_RESTORE
, stack_pointer_rtx
, cfa_restores
);
3766 emit_insn (gen_blockage ());
3768 if (frame_pointer_needed
)
3770 /* Restore the old stack pointer by copying from the frame
3772 insn
= emit_insn (gen_sp_restore (stack_pointer_rtx
,
3773 hard_frame_pointer_rtx
));
3774 RTX_FRAME_RELATED_P (insn
) = 1;
3775 REG_NOTES (insn
) = cfa_restores
;
3776 add_reg_note (insn
, REG_CFA_DEF_CFA
, stack_pointer_rtx
);
3780 insn
= emit_sp_adjust (total_size
, &next_scratch_regno
, true,
3784 if (crtl
->calls_eh_return
)
3785 emit_insn (gen_sp_adjust (stack_pointer_rtx
, stack_pointer_rtx
,
3786 EH_RETURN_STACKADJ_RTX
));
3788 /* Restore the old frame pointer. */
3789 if (frame_pointer_needed
)
3791 insn
= emit_move_insn (hard_frame_pointer_rtx
,
3792 gen_rtx_REG (Pmode
, fp_copy_regno
));
3793 add_reg_note (insn
, REG_CFA_RESTORE
, hard_frame_pointer_rtx
);
3796 /* Mark the pic registers as live outside of the function. */
3799 emit_use (cfun
->machine
->text_label_rtx
);
3800 emit_use (cfun
->machine
->got_rtx
);
3806 /* Emit the actual 'return' instruction. */
3807 emit_jump_insn (gen__return ());
3811 emit_use (gen_rtx_REG (Pmode
, TILEPRO_LINK_REGNUM
));
3814 /* Mark all insns we just emitted as frame-related. */
3815 for (; last_insn
!= NULL_RTX
; last_insn
= next_insn (last_insn
))
3816 RTX_FRAME_RELATED_P (last_insn
) = 1;
3819 #undef ROUND_ROBIN_SIZE
3822 /* Implement INITIAL_ELIMINATION_OFFSET. */
3824 tilepro_initial_elimination_offset (int from
, int to
)
3826 int total_size
= compute_total_frame_size ();
3828 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3830 return (total_size
- crtl
->args
.pretend_args_size
3831 - tilepro_saved_regs_size ());
3833 else if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3835 return -(crtl
->args
.pretend_args_size
+ tilepro_saved_regs_size ());
3837 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
3839 return STACK_POINTER_OFFSET
+ total_size
;
3841 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
3843 return STACK_POINTER_OFFSET
;
3850 /* Return an RTX indicating where the return address to the
3851 calling function can be found. */
3853 tilepro_return_addr (int count
, rtx frame ATTRIBUTE_UNUSED
)
3858 return get_hard_reg_initial_val (Pmode
, TILEPRO_LINK_REGNUM
);
3862 /* Implement EH_RETURN_HANDLER_RTX. */
3864 tilepro_eh_return_handler_rtx (void)
3866 /* The MEM needs to be volatile to prevent it from being
3868 rtx tmp
= gen_frame_mem (Pmode
, hard_frame_pointer_rtx
);
3869 MEM_VOLATILE_P (tmp
) = true;
3877 /* Implemnet TARGET_CONDITIONAL_REGISTER_USAGE. */
3879 tilepro_conditional_register_usage (void)
3881 global_regs
[TILEPRO_NETORDER_REGNUM
] = 1;
3882 /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used. It is a
3883 member of fixed_regs, and therefore must be member of
3884 call_used_regs, but it is not a member of call_really_used_regs[]
3885 because it is not clobbered by a call. */
3886 if (TILEPRO_PIC_TEXT_LABEL_REGNUM
!= INVALID_REGNUM
)
3888 fixed_regs
[TILEPRO_PIC_TEXT_LABEL_REGNUM
] = 1;
3889 call_used_regs
[TILEPRO_PIC_TEXT_LABEL_REGNUM
] = 1;
3891 if (PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
3893 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3894 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3899 /* Implement TARGET_FRAME_POINTER_REQUIRED. */
3901 tilepro_frame_pointer_required (void)
3903 return crtl
->calls_eh_return
|| cfun
->calls_alloca
;
3908 /* Scheduling and reorg */
3910 /* Return the length of INSN. LENGTH is the initial length computed
3911 by attributes in the machine-description file. This is where we
3912 account for bundles. */
3914 tilepro_adjust_insn_length (rtx_insn
*insn
, int length
)
3916 enum machine_mode mode
= GET_MODE (insn
);
3918 /* A non-termininating instruction in a bundle has length 0. */
3922 /* By default, there is not length adjustment. */
/* Implement TARGET_SCHED_ISSUE_RATE.  TILEPro can issue up to three
   instructions per bundle.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
3935 /* Return the rtx for the jump target. */
3937 get_jump_target (rtx branch
)
3939 if (CALL_P (branch
))
3942 call
= PATTERN (branch
);
3944 if (GET_CODE (call
) == PARALLEL
)
3945 call
= XVECEXP (call
, 0, 0);
3947 if (GET_CODE (call
) == SET
)
3948 call
= SET_SRC (call
);
3950 if (GET_CODE (call
) == CALL
)
3951 return XEXP (XEXP (call
, 0), 0);
3956 /* Implement TARGET_SCHED_ADJUST_COST. */
3958 tilepro_sched_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3960 /* If we have a true dependence, INSN is a call, and DEP_INSN
3961 defines a register that is needed by the call (argument or stack
3962 pointer), set its latency to 0 so that it can be bundled with
3963 the call. Explicitly check for and exclude the case when
3964 DEP_INSN defines the target of the jump. */
3965 if (CALL_P (insn
) && REG_NOTE_KIND (link
) == REG_DEP_TRUE
)
3967 rtx target
= get_jump_target (insn
);
3968 if (!REG_P (target
) || !set_of (target
, dep_insn
))
3976 /* Skip over irrelevant NOTEs and such and look for the next insn we
3977 would consider bundling. */
3979 next_insn_to_bundle (rtx_insn
*r
, rtx_insn
*end
)
3981 for (; r
!= end
; r
= NEXT_INSN (r
))
3983 if (NONDEBUG_INSN_P (r
)
3984 && GET_CODE (PATTERN (r
)) != USE
3985 && GET_CODE (PATTERN (r
)) != CLOBBER
)
3993 /* Go through all insns, and use the information generated during
3994 scheduling to generate SEQUENCEs to represent bundles of
3995 instructions issued simultaneously. */
3997 tilepro_gen_bundles (void)
4000 FOR_EACH_BB_FN (bb
, cfun
)
4002 rtx_insn
*insn
, *next
;
4003 rtx_insn
*end
= NEXT_INSN (BB_END (bb
));
4005 for (insn
= next_insn_to_bundle (BB_HEAD (bb
), end
); insn
; insn
= next
)
4007 next
= next_insn_to_bundle (NEXT_INSN (insn
), end
);
4009 /* Never wrap {} around inline asm. */
4010 if (GET_CODE (PATTERN (insn
)) != ASM_INPUT
)
4012 if (next
== NULL_RTX
|| GET_MODE (next
) == TImode
4013 /* NOTE: The scheduler incorrectly believes a call
4014 insn can execute in the same cycle as the insn
4015 after the call. This is of course impossible.
4016 Really we need to fix the scheduler somehow, so
4017 the code after the call gets scheduled
4021 /* Mark current insn as the end of a bundle. */
4022 PUT_MODE (insn
, QImode
);
4026 /* Mark it as part of a bundle. */
4027 PUT_MODE (insn
, SImode
);
4035 /* Helper function for tilepro_fixup_pcrel_references. */
4037 replace_pc_relative_symbol_ref (rtx_insn
*insn
, rtx opnds
[4], bool first_insn_p
)
4039 rtx_insn
*new_insns
;
4047 emit_insn (gen_add_got16 (opnds
[0], tilepro_got_rtx (),
4049 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4056 emit_insn (gen_addhi_got32 (opnds
[0], tilepro_got_rtx (),
4061 emit_insn (gen_addlo_got32 (opnds
[0], opnds
[1], opnds
[2]));
4062 emit_insn (gen_insn_lw (opnds
[0], opnds
[0]));
4066 new_insns
= get_insns ();
4070 emit_insn_before (new_insns
, insn
);
4076 /* Returns whether INSN is a pc-relative addli insn. */
4078 match_addli_pcrel (rtx_insn
*insn
)
4080 rtx pattern
= PATTERN (insn
);
4083 if (GET_CODE (pattern
) != SET
)
4086 if (GET_CODE (SET_SRC (pattern
)) != LO_SUM
)
4089 if (GET_CODE (XEXP (SET_SRC (pattern
), 1)) != CONST
)
4092 unspec
= XEXP (XEXP (SET_SRC (pattern
), 1), 0);
4094 return (GET_CODE (unspec
) == UNSPEC
4095 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4099 /* Helper function for tilepro_fixup_pcrel_references. */
4101 replace_addli_pcrel (rtx_insn
*insn
)
4103 rtx pattern
= PATTERN (insn
);
4109 gcc_assert (GET_CODE (pattern
) == SET
);
4110 opnds
[0] = SET_DEST (pattern
);
4112 set_src
= SET_SRC (pattern
);
4113 gcc_assert (GET_CODE (set_src
) == LO_SUM
);
4114 gcc_assert (GET_CODE (XEXP (set_src
, 1)) == CONST
);
4115 opnds
[1] = XEXP (set_src
, 0);
4117 unspec
= XEXP (XEXP (set_src
, 1), 0);
4118 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4119 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4120 opnds
[2] = XVECEXP (unspec
, 0, 0);
4121 opnds
[3] = XVECEXP (unspec
, 0, 1);
4123 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4124 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4127 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4129 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4133 /* Returns whether INSN is a pc-relative auli insn. */
4135 match_auli_pcrel (rtx_insn
*insn
)
4137 rtx pattern
= PATTERN (insn
);
4141 if (GET_CODE (pattern
) != SET
)
4144 if (GET_CODE (SET_SRC (pattern
)) != PLUS
)
4147 high
= XEXP (SET_SRC (pattern
), 1);
4149 if (GET_CODE (high
) != HIGH
4150 || GET_CODE (XEXP (high
, 0)) != CONST
)
4153 unspec
= XEXP (XEXP (high
, 0), 0);
4155 return (GET_CODE (unspec
) == UNSPEC
4156 && XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4160 /* Helper function for tilepro_fixup_pcrel_references. */
4162 replace_auli_pcrel (rtx_insn
*insn
)
4164 rtx pattern
= PATTERN (insn
);
4171 gcc_assert (GET_CODE (pattern
) == SET
);
4172 opnds
[0] = SET_DEST (pattern
);
4174 set_src
= SET_SRC (pattern
);
4175 gcc_assert (GET_CODE (set_src
) == PLUS
);
4176 opnds
[1] = XEXP (set_src
, 0);
4178 high
= XEXP (set_src
, 1);
4179 gcc_assert (GET_CODE (high
) == HIGH
);
4180 gcc_assert (GET_CODE (XEXP (high
, 0)) == CONST
);
4182 unspec
= XEXP (XEXP (high
, 0), 0);
4183 gcc_assert (GET_CODE (unspec
) == UNSPEC
);
4184 gcc_assert (XINT (unspec
, 1) == UNSPEC_PCREL_SYM
);
4185 opnds
[2] = XVECEXP (unspec
, 0, 0);
4186 opnds
[3] = XVECEXP (unspec
, 0, 1);
4188 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4189 if (GET_CODE (opnds
[2]) != SYMBOL_REF
)
4192 first_insn_p
= (opnds
[1] == tilepro_text_label_rtx ());
4194 replace_pc_relative_symbol_ref (insn
, opnds
, first_insn_p
);
4198 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4199 going through the GOT when the symbol is local to the compilation
4200 unit. But such a symbol requires that the common text_label that
4201 we generate at the beginning of the function be in the same section
4202 as the reference to the SYMBOL_REF. This may not be true if we
4203 generate hot/cold sections. This function looks for such cases and
4204 replaces such references with the longer sequence going through the
4207 We expect one of the following two instruction sequences:
4208 addli tmp1, txt_label_reg, lo16(sym - txt_label)
4209 auli tmp2, tmp1, ha16(sym - txt_label)
4211 auli tmp1, txt_label_reg, ha16(sym - txt_label)
4212 addli tmp2, tmp1, lo16(sym - txt_label)
4214 If we're compiling -fpic, we replace the first instruction with
4215 nothing, and the second instruction with:
4217 addli tmp2, got_rtx, got(sym)
4220 If we're compiling -fPIC, we replace the first instruction with:
4222 auli tmp1, got_rtx, got_ha16(sym)
4224 and the second instruction with:
4226 addli tmp2, tmp1, got_lo16(sym)
4229 Note that we're careful to disturb the instruction sequence as
4230 little as possible, since it's very late in the compilation
4234 tilepro_fixup_pcrel_references (void)
4236 rtx_insn
*insn
, *next_insn
;
4237 bool same_section_as_entry
= true;
4239 for (insn
= get_insns (); insn
; insn
= next_insn
)
4241 next_insn
= NEXT_INSN (insn
);
4243 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_SWITCH_TEXT_SECTIONS
)
4245 same_section_as_entry
= !same_section_as_entry
;
4249 if (same_section_as_entry
)
4253 && GET_CODE (PATTERN (insn
)) != USE
4254 && GET_CODE (PATTERN (insn
)) != CLOBBER
))
4257 if (match_addli_pcrel (insn
))
4258 replace_addli_pcrel (insn
);
4259 else if (match_auli_pcrel (insn
))
4260 replace_auli_pcrel (insn
);
4265 /* Ensure that no var tracking notes are emitted in the middle of a
4266 three-instruction bundle. */
4268 reorder_var_tracking_notes (void)
4271 FOR_EACH_BB_FN (bb
, cfun
)
4273 rtx_insn
*insn
, *next
;
4274 rtx_insn
*queue
= NULL
;
4275 bool in_bundle
= false;
4277 for (insn
= BB_HEAD (bb
); insn
!= BB_END (bb
); insn
= next
)
4279 next
= NEXT_INSN (insn
);
4283 /* Emit queued up notes at the last instruction of a bundle. */
4284 if (GET_MODE (insn
) == QImode
)
4288 rtx_insn
*next_queue
= PREV_INSN (queue
);
4289 SET_PREV_INSN (NEXT_INSN (insn
)) = queue
;
4290 SET_NEXT_INSN (queue
) = NEXT_INSN (insn
);
4291 SET_NEXT_INSN (insn
) = queue
;
4292 SET_PREV_INSN (queue
) = insn
;
4297 else if (GET_MODE (insn
) == SImode
)
4300 else if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_VAR_LOCATION
)
4304 rtx_insn
*prev
= PREV_INSN (insn
);
4305 SET_PREV_INSN (next
) = prev
;
4306 SET_NEXT_INSN (prev
) = next
;
4308 SET_PREV_INSN (insn
) = queue
;
4317 /* Perform machine dependent operations on the rtl chain INSNS. */
4319 tilepro_reorg (void)
4321 /* We are freeing block_for_insn in the toplev to keep compatibility
4322 with old MDEP_REORGS that are not CFG based. Recompute it
4324 compute_bb_for_insn ();
4326 if (flag_reorder_blocks_and_partition
)
4328 tilepro_fixup_pcrel_references ();
4331 if (flag_schedule_insns_after_reload
)
4335 timevar_push (TV_SCHED2
);
4337 timevar_pop (TV_SCHED2
);
4339 /* Examine the schedule to group into bundles. */
4340 tilepro_gen_bundles ();
4345 if (flag_var_tracking
)
4347 timevar_push (TV_VAR_TRACKING
);
4348 variable_tracking_main ();
4349 reorder_var_tracking_notes ();
4350 timevar_pop (TV_VAR_TRACKING
);
4353 df_finish_pass (false);
4360 /* Select a format to encode pointers in exception handling data.
4361 CODE is 0 for data, 1 for code labels, 2 for function pointers.
4362 GLOBAL is true if the symbol may be affected by dynamic
4365 tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED
, int global
)
4367 return (global
? DW_EH_PE_indirect
: 0) | DW_EH_PE_pcrel
| DW_EH_PE_sdata4
;
4371 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4373 tilepro_asm_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
4374 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
4377 rtx this_rtx
, funexp
;
4380 /* Pretend to be a post-reload pass while generating rtl. */
4381 reload_completed
= 1;
4383 /* Mark the end of the (empty) prologue. */
4384 emit_note (NOTE_INSN_PROLOGUE_END
);
4386 /* Find the "this" pointer. If the function returns a structure,
4387 the structure return pointer is in $1. */
4388 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
4389 this_rtx
= gen_rtx_REG (Pmode
, 1);
4391 this_rtx
= gen_rtx_REG (Pmode
, 0);
4393 /* Add DELTA to THIS_RTX. */
4394 emit_insn (gen_addsi3 (this_rtx
, this_rtx
, GEN_INT (delta
)));
4396 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4401 tmp
= gen_rtx_REG (Pmode
, 29);
4402 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this_rtx
));
4404 emit_insn (gen_addsi3 (tmp
, tmp
, GEN_INT (vcall_offset
)));
4406 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
4408 emit_insn (gen_addsi3 (this_rtx
, this_rtx
, tmp
));
4411 /* Generate a tail call to the target function. */
4412 if (!TREE_USED (function
))
4414 assemble_external (function
);
4415 TREE_USED (function
) = 1;
4417 funexp
= XEXP (DECL_RTL (function
), 0);
4418 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
4419 insn
= emit_call_insn (gen_sibcall (funexp
, const0_rtx
));
4420 SIBLING_CALL_P (insn
) = 1;
4422 /* Run just enough of rest_of_compilation to get the insns emitted.
4423 There's not really enough bulk here to make other passes such as
4424 instruction scheduling worth while. Note that use_thunk calls
4425 assemble_start_function and assemble_end_function.
4427 We don't currently bundle, but the instruciton sequence is all
4428 serial except for the tail call, so we're only wasting one cycle.
4430 insn
= get_insns ();
4431 shorten_branches (insn
);
4432 final_start_function (insn
, file
, 1);
4433 final (insn
, file
, 1);
4434 final_end_function ();
4436 /* Stop pretending to be a post-reload pass. */
4437 reload_completed
= 0;
4441 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
4443 tilepro_asm_trampoline_template (FILE *file
)
4445 fprintf (file
, "\tlnk r10\n");
4446 fprintf (file
, "\taddi r10, r10, 32\n");
4447 fprintf (file
, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode
));
4448 fprintf (file
, "\tlw r10, r10\n");
4449 fprintf (file
, "\tjr r11\n");
4450 fprintf (file
, "\t.word 0 # <function address>\n");
4451 fprintf (file
, "\t.word 0 # <static chain value>\n");
4455 /* Implement TARGET_TRAMPOLINE_INIT. */
4457 tilepro_trampoline_init (rtx m_tramp
, tree fndecl
, rtx static_chain
)
4461 rtx begin_addr
, end_addr
;
4462 int ptr_mode_size
= GET_MODE_SIZE (ptr_mode
);
4464 fnaddr
= copy_to_reg (XEXP (DECL_RTL (fndecl
), 0));
4465 chaddr
= copy_to_reg (static_chain
);
4467 emit_block_move (m_tramp
, assemble_trampoline_template (),
4468 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
4470 mem
= adjust_address (m_tramp
, ptr_mode
,
4471 TRAMPOLINE_SIZE
- 2 * ptr_mode_size
);
4472 emit_move_insn (mem
, fnaddr
);
4473 mem
= adjust_address (m_tramp
, ptr_mode
,
4474 TRAMPOLINE_SIZE
- ptr_mode_size
);
4475 emit_move_insn (mem
, chaddr
);
4477 /* Get pointers to the beginning and end of the code block. */
4478 begin_addr
= force_reg (Pmode
, XEXP (m_tramp
, 0));
4479 end_addr
= force_reg (Pmode
, plus_constant (Pmode
, XEXP (m_tramp
, 0),
4482 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__clear_cache"),
4483 LCT_NORMAL
, VOIDmode
, 2, begin_addr
, Pmode
,
/* NOTE(review): this block is an extraction-garbled fragment of
   tilepro_print_operand — many original lines (the switch on CODE,
   case labels, braces) were dropped during extraction, and the
   original file's line numbers have leaked into the text.  The code
   below is preserved byte-for-byte; recover the full function from
   gcc/config/tilepro/tilepro.c before building.  */
4488 /* Implement TARGET_PRINT_OPERAND. */
/* Prints operand X to FILE under the operand-modifier CODE (the
   letter after '%' in the insn template).  Recognizable modifiers
   visible below: c/C (compare ops), h/H (high 16 bits / ha16
   relocs), I/i (auto-inc memory), j (low 8 bits), L (lo16 relocs),
   M (bit range), N (network register), P (constant plus one),
   t (log2), r (register), plus memory and default cases.  */
4490 tilepro_print_operand (FILE *file
, rtx x
, int code
)
4495 /* Print the compare operator opcode for conditional moves. */
4496 switch (GET_CODE (x
))
4505 output_operand_lossage ("invalid %%c operand");
4510 /* Print the compare operator opcode for conditional moves. */
4511 switch (GET_CODE (x
))
4520 output_operand_lossage ("invalid %%C operand");
4526 /* Print the high 16 bits of a 32-bit constant. */
4528 if (CONST_INT_P (x
))
4530 else if (GET_CODE (x
) == CONST_DOUBLE
)
4531 i
= CONST_DOUBLE_LOW (x
);
4534 output_operand_lossage ("invalid %%h operand");
4537 i
= trunc_int_for_mode (i
>> 16, HImode
);
4538 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
/* %H: print a ha16()-style relocation operator around ADDR;
   opstr selects got_ha16 / pc-relative / TLS variants.  */
4545 const char *opstr
= NULL
;
4547 if (GET_CODE (x
) == CONST
4548 && GET_CODE (XEXP (x
, 0)) == UNSPEC
)
4550 addr
= XVECEXP (XEXP (x
, 0), 0, 0);
4551 switch (XINT (XEXP (x
, 0), 1))
4553 case UNSPEC_GOT32_SYM
:
4556 case UNSPEC_PCREL_SYM
:
4561 opstr
= "tls_gd_ha16";
4564 opstr
= "tls_ie_ha16";
4567 opstr
= "tls_le_ha16";
4570 output_operand_lossage ("invalid %%H operand");
4579 fputs (opstr
, file
);
4581 output_addr_const (file
, addr
);
/* Pc-relative unspecs carry a second operand: the text label to
   subtract.  */
4585 rtx addr2
= XVECEXP (XEXP (x
, 0), 0, 1);
4586 fputs (" - " , file
);
4587 output_addr_const (file
, addr2
);
4595 /* Print an auto-inc memory operand. */
4598 output_operand_lossage ("invalid %%I operand");
/* %I prints the address-register half of a post-inc/dec reference;
   communicates with tilepro_print_operand_address through the two
   file-scope flags.  */
4602 output_memory_reference_mode
= GET_MODE (x
);
4603 output_memory_autoinc_first
= true;
4604 output_address (XEXP (x
, 0));
4605 output_memory_reference_mode
= VOIDmode
;
4609 /* Print an auto-inc memory operand. */
4612 output_operand_lossage ("invalid %%i operand");
/* %i prints the increment half of the same reference.  */
4616 output_memory_reference_mode
= GET_MODE (x
);
4617 output_memory_autoinc_first
= false;
4618 output_address (XEXP (x
, 0));
4619 output_memory_reference_mode
= VOIDmode
;
4624 /* Print the low 8 bits of a constant. */
4626 if (CONST_INT_P (x
))
4628 else if (GET_CODE (x
) == CONST_DOUBLE
)
4629 i
= CONST_DOUBLE_LOW (x
);
4630 else if (GET_CODE (x
) == CONST_VECTOR
4631 && CONST_INT_P (CONST_VECTOR_ELT (x
, 0)))
4632 i
= INTVAL (CONST_VECTOR_ELT (x
, 0));
4635 output_operand_lossage ("invalid %%j operand");
4638 i
= trunc_int_for_mode (i
, QImode
);
4639 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
/* %L: lo16()-style relocation operator, mirroring %H above.  */
4646 const char *opstr
= NULL
;
4648 if (GET_CODE (x
) == CONST
4649 && GET_CODE (XEXP (x
, 0)) == UNSPEC
)
4651 addr
= XVECEXP (XEXP (x
, 0), 0, 0);
4652 switch (XINT (XEXP (x
, 0), 1))
4654 case UNSPEC_GOT16_SYM
:
4657 case UNSPEC_GOT32_SYM
:
4660 case UNSPEC_PCREL_SYM
:
4665 opstr
= "tls_gd_lo16";
4668 opstr
= "tls_ie_lo16";
4671 opstr
= "tls_le_lo16";
4674 output_operand_lossage ("invalid %%L operand");
4683 fputs (opstr
, file
);
4685 output_addr_const (file
, addr
);
4689 rtx addr2
= XVECEXP (XEXP (x
, 0), 0, 1);
4690 fputs (" - " , file
);
4691 output_addr_const (file
, addr2
);
/* Symbols that are not local under -fpic go through the PLT.  */
4699 if (GET_CODE (x
) == SYMBOL_REF
)
4701 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (x
))
4702 fprintf (file
, "plt(");
4703 output_addr_const (file
, x
);
4704 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (x
))
4705 fprintf (file
, ")");
4708 output_addr_const (file
, x
);
4713 /* Print a 32-bit constant plus one. */
4715 if (!CONST_INT_P (x
))
4717 output_operand_lossage ("invalid %%P operand");
4720 i
= trunc_int_for_mode (INTVAL (x
) + 1, SImode
);
4721 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4727 /* Print an mm-style bit range. */
4728 int first_bit
, last_bit
;
4730 if (!CONST_INT_P (x
)
4731 || !tilepro_bitfield_operand_p (INTVAL (x
), &first_bit
,
4734 output_operand_lossage ("invalid %%M operand");
4738 fprintf (file
, "%d, %d", first_bit
, last_bit
);
4744 const char *reg
= NULL
;
4746 /* Print a network register. */
4747 if (!CONST_INT_P (x
))
4749 output_operand_lossage ("invalid %%N operand");
4755 case TILEPRO_NETREG_IDN0
: reg
= "idn0"; break;
4756 case TILEPRO_NETREG_IDN1
: reg
= "idn1"; break;
4757 case TILEPRO_NETREG_SN
: reg
= "sn"; break;
4758 case TILEPRO_NETREG_UDN0
: reg
= "udn0"; break;
4759 case TILEPRO_NETREG_UDN1
: reg
= "udn1"; break;
4760 case TILEPRO_NETREG_UDN2
: reg
= "udn2"; break;
4761 case TILEPRO_NETREG_UDN3
: reg
= "udn3"; break;
4762 default: gcc_unreachable ();
4765 fprintf (file
, reg
);
4771 /* Log base 2 of a power of two. */
4775 if (!CONST_INT_P (x
))
4777 output_operand_lossage ("invalid %%t operand");
4780 n
= trunc_int_for_mode (INTVAL (x
), SImode
)
;
4784 output_operand_lossage ("invalid %%t operand '"
4785 HOST_WIDE_INT_PRINT_DEC
"'", n
);
4789 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, i
);
4795 /* In this case we need a register. Use 'zero' if the
4796 operand is const0_rtx. */
4798 || (GET_MODE (x
) != VOIDmode
&& x
== CONST0_RTX (GET_MODE (x
))))
4800 fputs ("zero", file
);
4803 else if (!REG_P (x
))
4805 output_operand_lossage ("invalid %%r operand");
4813 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
/* Plain MEM operand: print its address.  */
4818 output_memory_reference_mode
= VOIDmode
;
4819 output_address (XEXP (x
, 0));
4824 output_addr_const (file
, x
);
4831 output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
4836 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
4838 tilepro_print_operand_address (FILE *file
, rtx addr
)
4840 if (GET_CODE (addr
) == POST_DEC
4841 || GET_CODE (addr
) == POST_INC
)
4843 int offset
= GET_MODE_SIZE (output_memory_reference_mode
);
4845 gcc_assert (output_memory_reference_mode
!= VOIDmode
);
4847 if (output_memory_autoinc_first
)
4848 fprintf (file
, "%s", reg_names
[REGNO (XEXP (addr
, 0))]);
4850 fprintf (file
, "%d",
4851 GET_CODE (addr
) == POST_DEC
? -offset
: offset
);
4853 else if (GET_CODE (addr
) == POST_MODIFY
)
4855 gcc_assert (output_memory_reference_mode
!= VOIDmode
);
4857 gcc_assert (GET_CODE (XEXP (addr
, 1)) == PLUS
);
4859 if (output_memory_autoinc_first
)
4860 fprintf (file
, "%s", reg_names
[REGNO (XEXP (addr
, 0))]);
4862 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
4863 INTVAL (XEXP (XEXP (addr
, 1), 1)));
4866 tilepro_print_operand (file
, addr
, 'r');
4870 /* Machine mode of current insn, for determining curly brace
4872 static enum machine_mode insn_mode
;
4875 /* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
4877 tilepro_final_prescan_insn (rtx_insn
*insn
)
4879 /* Record this for tilepro_asm_output_opcode to examine. */
4880 insn_mode
= GET_MODE (insn
);
4884 /* While emitting asm, are we currently inside '{' for a bundle? */
4885 static bool tilepro_in_bundle
= false;
4887 /* Implement ASM_OUTPUT_OPCODE. Prepend/append curly braces as
4888 appropriate given the bundling information recorded by
4889 tilepro_gen_bundles. */
4891 tilepro_asm_output_opcode (FILE *stream
, const char *code
)
4893 bool pseudo
= !strcmp (code
, "pseudo");
4895 if (!tilepro_in_bundle
&& insn_mode
== SImode
)
4897 /* Start a new bundle. */
4898 fprintf (stream
, "{\n\t");
4899 tilepro_in_bundle
= true;
4902 if (tilepro_in_bundle
&& insn_mode
== QImode
)
4904 /* Close an existing bundle. */
4905 static char buf
[100];
4907 gcc_assert (strlen (code
) + 3 + 1 < sizeof (buf
));
4909 strcpy (buf
, pseudo
? "" : code
);
4910 strcat (buf
, "\n\t}");
4911 tilepro_in_bundle
= false;
4917 return pseudo
? "" : code
;
4922 /* Output assembler code to FILE to increment profiler label # LABELNO
4923 for profiling a function entry. */
4925 tilepro_function_profiler (FILE *file
, int labelno ATTRIBUTE_UNUSED
)
4927 if (tilepro_in_bundle
)
4929 fprintf (file
, "\t}\n");
4938 "\t}\n", MCOUNT_NAME
);
4946 "\t}\n", MCOUNT_NAME
);
4949 tilepro_in_bundle
= false;
4953 /* Implement TARGET_ASM_FILE_END. */
4955 tilepro_file_end (void)
4957 if (NEED_INDICATE_EXEC_STACK
)
4958 file_end_indicate_exec_stack ();
4962 #undef TARGET_HAVE_TLS
4963 #define TARGET_HAVE_TLS HAVE_AS_TLS
4965 #undef TARGET_OPTION_OVERRIDE
4966 #define TARGET_OPTION_OVERRIDE tilepro_option_override
4968 #undef TARGET_SCALAR_MODE_SUPPORTED_P
4969 #define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p
4971 #undef TARGET_VECTOR_MODE_SUPPORTED_P
4972 #define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p
4974 #undef TARGET_CANNOT_FORCE_CONST_MEM
4975 #define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem
4977 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
4978 #define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall
4980 #undef TARGET_PASS_BY_REFERENCE
4981 #define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference
4983 #undef TARGET_RETURN_IN_MEMORY
4984 #define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory
4986 #undef TARGET_FUNCTION_ARG_BOUNDARY
4987 #define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary
4989 #undef TARGET_FUNCTION_ARG
4990 #define TARGET_FUNCTION_ARG tilepro_function_arg
4992 #undef TARGET_FUNCTION_ARG_ADVANCE
4993 #define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance
4995 #undef TARGET_FUNCTION_VALUE
4996 #define TARGET_FUNCTION_VALUE tilepro_function_value
4998 #undef TARGET_LIBCALL_VALUE
4999 #define TARGET_LIBCALL_VALUE tilepro_libcall_value
5001 #undef TARGET_FUNCTION_VALUE_REGNO_P
5002 #define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p
5004 #undef TARGET_PROMOTE_FUNCTION_MODE
5005 #define TARGET_PROMOTE_FUNCTION_MODE \
5006 default_promote_function_mode_always_promote
5008 #undef TARGET_PROMOTE_PROTOTYPES
5009 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
5011 #undef TARGET_BUILD_BUILTIN_VA_LIST
5012 #define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list
5014 #undef TARGET_EXPAND_BUILTIN_VA_START
5015 #define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start
5017 #undef TARGET_SETUP_INCOMING_VARARGS
5018 #define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs
5020 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
5021 #define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr
5023 #undef TARGET_RTX_COSTS
5024 #define TARGET_RTX_COSTS tilepro_rtx_costs
5026 /* Limit to what we can reach in one addli. */
5027 #undef TARGET_MIN_ANCHOR_OFFSET
5028 #define TARGET_MIN_ANCHOR_OFFSET -32768
5029 #undef TARGET_MAX_ANCHOR_OFFSET
5030 #define TARGET_MAX_ANCHOR_OFFSET 32767
5032 #undef TARGET_LEGITIMATE_CONSTANT_P
5033 #define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p
5035 #undef TARGET_LEGITIMATE_ADDRESS_P
5036 #define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p
5038 #undef TARGET_LEGITIMIZE_ADDRESS
5039 #define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address
5041 #undef TARGET_DELEGITIMIZE_ADDRESS
5042 #define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address
5044 #undef TARGET_INIT_BUILTINS
5045 #define TARGET_INIT_BUILTINS tilepro_init_builtins
5047 #undef TARGET_BUILTIN_DECL
5048 #define TARGET_BUILTIN_DECL tilepro_builtin_decl
5050 #undef TARGET_EXPAND_BUILTIN
5051 #define TARGET_EXPAND_BUILTIN tilepro_expand_builtin
5053 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5054 #define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage
5056 #undef TARGET_FRAME_POINTER_REQUIRED
5057 #define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required
5059 #undef TARGET_DELAY_SCHED2
5060 #define TARGET_DELAY_SCHED2 true
5062 #undef TARGET_DELAY_VARTRACK
5063 #define TARGET_DELAY_VARTRACK true
5065 #undef TARGET_SCHED_ISSUE_RATE
5066 #define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate
5068 #undef TARGET_SCHED_ADJUST_COST
5069 #define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost
5071 #undef TARGET_MACHINE_DEPENDENT_REORG
5072 #define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg
5074 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5075 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5076 hook_bool_const_tree_hwi_hwi_const_tree_true
5078 #undef TARGET_ASM_OUTPUT_MI_THUNK
5079 #define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk
5081 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5082 #define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template
5084 #undef TARGET_TRAMPOLINE_INIT
5085 #define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init
5087 #undef TARGET_PRINT_OPERAND
5088 #define TARGET_PRINT_OPERAND tilepro_print_operand
5090 #undef TARGET_PRINT_OPERAND_ADDRESS
5091 #define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address
5093 #undef TARGET_ASM_FILE_END
5094 #define TARGET_ASM_FILE_END tilepro_file_end
5096 #undef TARGET_CAN_USE_DOLOOP_P
5097 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
5099 struct gcc_target targetm
= TARGET_INITIALIZER
;
5101 #include "gt-tilepro.h"