/* Default target hook functions.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "common/common-target.h"
#include "langhooks.h"
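
/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P.  */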
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
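
/* The default implementation of TARGET_UNSPEC_MAY_TRAP_P: treat an UNSPEC
   as possibly trapping if it does floating-point arithmetic (when
   -ftrapping-math is in effect) or if any of its operands may trap.  */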
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
      return 1;

  return 0;
}
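
/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE: only
   promote in the FOR_RETURN == 2 case, otherwise leave MODE unchanged.  */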
machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                               machine_mode mode,
                               int *punsignedp ATTRIBUTE_UNUSED,
                               const_tree funtype ATTRIBUTE_UNUSED,
                               int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}
machine_mode
default_promote_function_mode_always_promote (const_tree type,
                                              machine_mode mode,
                                              int *punsignedp,
                                              const_tree funtype ATTRIBUTE_UNUSED,
                                              int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
                          const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}
bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
                                         machine_mode)
{
  return false;
}

bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}
rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t,
                                const function_arg_info &, int *, int)
{
}
/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
          != default_setup_incoming_varargs);
}
scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}
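
/* For example, with SHIFT_COUNT_TRUNCATED a shift count in SImode is
   truncated to the low 5 bits (mask 31) and in DImode to the low 6 bits
   (mask 63); targets that do not truncate shift counts get a mask of 0,
   meaning no guarantee.  */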
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */

machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;
  tree sizetype_size;
  tree type_align;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
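
/* For example, on a target where sizeof (size_t) is 8, an array of a type
   aligned to 16 bytes gets a 16-byte cookie, while an array of plain ints
   gets the minimum 8-byte cookie.  */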
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
                                           const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg.mode, arg.type);
}
/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
                          machine_mode mode ATTRIBUTE_UNUSED,
                          const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}
/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
                               machine_mode /*mode*/,
                               rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */

tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}

/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}
/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
        return true;
      if (precision == SHORT_TYPE_SIZE)
        return true;
      if (precision == INT_TYPE_SIZE)
        return true;
      if (precision == LONG_TYPE_SIZE)
        return true;
      if (precision == LONG_LONG_TYPE_SIZE)
        return true;
      if (precision == 2 * BITS_PER_WORD)
        return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
        return true;
      if (precision == DOUBLE_TYPE_SIZE)
        return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
        return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}
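
/* For example, on a target with 32-bit ints and 32-bit words, SImode is
   supported because it matches INT_TYPE_SIZE, and DImode is supported as
   the 2 * BITS_PER_WORD double-word case handled by optabs.c.  */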
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
480 /* Return the machine mode to use for the type _FloatN, if EXTENDED is
481 false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
483 opt_scalar_float_mode
484 default_floatn_mode (int n
, bool extended
)
488 opt_scalar_float_mode cand1
, cand2
;
489 scalar_float_mode mode
;
511 /* Those are the only valid _FloatNx types. */
514 if (cand1
.exists (&mode
)
515 && REAL_MODE_FORMAT (mode
)->ieee_bits
> n
516 && targetm
.scalar_mode_supported_p (mode
)
517 && targetm
.libgcc_floating_mode_supported_p (mode
))
519 if (cand2
.exists (&mode
)
520 && REAL_MODE_FORMAT (mode
)->ieee_bits
> n
521 && targetm
.scalar_mode_supported_p (mode
)
522 && targetm
.libgcc_floating_mode_supported_p (mode
))
527 opt_scalar_float_mode cand
;
528 scalar_float_mode mode
;
532 /* Always enable _Float16 if we have basic support for the mode.
533 Targets can control the range and precision of operations on
534 the _Float16 type using TARGET_C_EXCESS_PRECISION. */
561 if (cand
.exists (&mode
)
562 && REAL_MODE_FORMAT (mode
)->ieee_bits
== n
563 && targetm
.scalar_mode_supported_p (mode
)
564 && targetm
.libgcc_floating_mode_supported_p (mode
))
567 return opt_scalar_float_mode ();
/* Define this to return true if the _Floatn and _Floatnx built-in functions
   should implicitly enable the built-in function without the __builtin_ prefix
   in addition to the normal built-in function with the __builtin_ prefix.  The
   default is to only enable built-in functions without the __builtin_ prefix
   for the GNU C language.  The argument FUNC is the enum built_in_function
   id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}
/* Make some target macros usable by target-independent code.  */

bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}
/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}
/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Vectorized conversion.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
                                       tree dest_type ATTRIBUTE_UNUSED,
                                       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype,
                                    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}
743 default_builtin_reciprocal (tree
)
749 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
750 cumulative_args_t ca ATTRIBUTE_UNUSED
,
751 machine_mode mode ATTRIBUTE_UNUSED
,
752 const_tree type ATTRIBUTE_UNUSED
, bool named ATTRIBUTE_UNUSED
)
758 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
759 cumulative_args_t ca ATTRIBUTE_UNUSED
,
760 machine_mode mode ATTRIBUTE_UNUSED
,
761 const_tree type ATTRIBUTE_UNUSED
, bool named ATTRIBUTE_UNUSED
)
767 hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t
,
768 const function_arg_info
&)
774 hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t
,
775 const function_arg_info
&)
781 hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED
,
782 tree ATTRIBUTE_UNUSED
)
787 default_function_arg_advance (cumulative_args_t
, const function_arg_info
&)
792 /* Default implementation of TARGET_FUNCTION_ARG_OFFSET. */
795 default_function_arg_offset (machine_mode
, const_tree
)
800 /* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
801 upward, but pad short args downward on big-endian machines. */
804 default_function_arg_padding (machine_mode mode
, const_tree type
)
806 if (!BYTES_BIG_ENDIAN
)
809 unsigned HOST_WIDE_INT size
;
812 if (!type
|| TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
814 size
= int_size_in_bytes (type
);
817 /* Targets with variable-sized modes must override this hook
818 and handle variable-sized modes explicitly. */
819 size
= GET_MODE_SIZE (mode
).to_constant ();
821 if (size
< (PARM_BOUNDARY
/ BITS_PER_UNIT
))
828 default_function_arg (cumulative_args_t
, const function_arg_info
&)
834 default_function_incoming_arg (cumulative_args_t
, const function_arg_info
&)
840 default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED
,
841 const_tree type ATTRIBUTE_UNUSED
)
843 return PARM_BOUNDARY
;
847 default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED
,
848 const_tree type ATTRIBUTE_UNUSED
)
850 return PARM_BOUNDARY
;
854 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED
)
859 hook_invalid_arg_for_unprototyped_fn (
860 const_tree typelist ATTRIBUTE_UNUSED
,
861 const_tree funcdecl ATTRIBUTE_UNUSED
,
862 const_tree val ATTRIBUTE_UNUSED
)
867 /* Initialize the stack protection decls. */
869 /* Stack protection related decls living in libgcc. */
870 static GTY(()) tree stack_chk_guard_decl
;
873 default_stack_protect_guard (void)
875 tree t
= stack_chk_guard_decl
;
881 t
= build_decl (UNKNOWN_LOCATION
,
882 VAR_DECL
, get_identifier ("__stack_chk_guard"),
886 DECL_EXTERNAL (t
) = 1;
888 TREE_THIS_VOLATILE (t
) = 1;
889 DECL_ARTIFICIAL (t
) = 1;
890 DECL_IGNORED_P (t
) = 1;
892 /* Do not share RTL as the declaration is visible outside of
895 RTX_FLAG (x
, used
) = 1;
897 stack_chk_guard_decl
= t
;
903 static GTY(()) tree stack_chk_fail_decl
;
906 default_external_stack_protect_fail (void)
908 tree t
= stack_chk_fail_decl
;
912 t
= build_function_type_list (void_type_node
, NULL_TREE
);
913 t
= build_decl (UNKNOWN_LOCATION
,
914 FUNCTION_DECL
, get_identifier ("__stack_chk_fail"), t
);
917 DECL_EXTERNAL (t
) = 1;
919 TREE_THIS_VOLATILE (t
) = 1;
920 TREE_NOTHROW (t
) = 1;
921 DECL_ARTIFICIAL (t
) = 1;
922 DECL_IGNORED_P (t
) = 1;
923 DECL_VISIBILITY (t
) = VISIBILITY_DEFAULT
;
924 DECL_VISIBILITY_SPECIFIED (t
) = 1;
926 stack_chk_fail_decl
= t
;
929 return build_call_expr (t
, 0);
933 default_hidden_stack_protect_fail (void)
935 #ifndef HAVE_GAS_HIDDEN
936 return default_external_stack_protect_fail ();
938 tree t
= stack_chk_fail_decl
;
941 return default_external_stack_protect_fail ();
945 t
= build_function_type_list (void_type_node
, NULL_TREE
);
946 t
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
,
947 get_identifier ("__stack_chk_fail_local"), t
);
950 DECL_EXTERNAL (t
) = 1;
952 TREE_THIS_VOLATILE (t
) = 1;
953 TREE_NOTHROW (t
) = 1;
954 DECL_ARTIFICIAL (t
) = 1;
955 DECL_IGNORED_P (t
) = 1;
956 DECL_VISIBILITY_SPECIFIED (t
) = 1;
957 DECL_VISIBILITY (t
) = VISIBILITY_HIDDEN
;
959 stack_chk_fail_decl
= t
;
962 return build_call_expr (t
, 0);
967 hook_bool_const_rtx_commutative_p (const_rtx x
,
968 int outer_code ATTRIBUTE_UNUSED
)
970 return COMMUTATIVE_P (x
);
974 default_function_value (const_tree ret_type ATTRIBUTE_UNUSED
,
975 const_tree fn_decl_or_type
,
976 bool outgoing ATTRIBUTE_UNUSED
)
978 /* The old interface doesn't handle receiving the function type. */
980 && !DECL_P (fn_decl_or_type
))
981 fn_decl_or_type
= NULL
;
983 #ifdef FUNCTION_VALUE
984 return FUNCTION_VALUE (ret_type
, fn_decl_or_type
);
991 default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED
,
992 const_rtx fun ATTRIBUTE_UNUSED
)
995 return LIBCALL_VALUE (MACRO_MODE (mode
));
1001 /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
1004 default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED
)
1006 #ifdef FUNCTION_VALUE_REGNO_P
1007 return FUNCTION_VALUE_REGNO_P (regno
);
1014 default_internal_arg_pointer (void)
1016 /* If the reg that the virtual arg pointer will be translated into is
1017 not a fixed reg or is the stack pointer, make a copy of the virtual
1018 arg pointer, and address parms via the copy. The frame pointer is
1019 considered fixed even though it is not marked as such. */
1020 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
1021 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
1022 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
)))
1023 return copy_to_reg (virtual_incoming_args_rtx
);
1025 return virtual_incoming_args_rtx
;
rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
        issued_error = true;
        sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
1057 default_trampoline_init (rtx
ARG_UNUSED (m_tramp
), tree
ARG_UNUSED (t_func
),
1058 rtx
ARG_UNUSED (r_chain
))
1060 sorry ("nested function trampolines not supported on this target");
1064 default_return_pops_args (tree
, tree
, poly_int64
)
1070 default_branch_target_register_class (void)
1076 default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED
,
1078 reg_class_t best_cl ATTRIBUTE_UNUSED
)
1084 default_lra_p (void)
1090 default_register_priority (int hard_regno ATTRIBUTE_UNUSED
)
1096 default_register_usage_leveling_p (void)
1102 default_different_addr_displacement_p (void)
1108 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED
, rtx x ATTRIBUTE_UNUSED
,
1109 reg_class_t reload_class_i ATTRIBUTE_UNUSED
,
1110 machine_mode reload_mode ATTRIBUTE_UNUSED
,
1111 secondary_reload_info
*sri
)
1113 enum reg_class rclass
= NO_REGS
;
1114 enum reg_class reload_class
= (enum reg_class
) reload_class_i
;
1116 if (sri
->prev_sri
&& sri
->prev_sri
->t_icode
!= CODE_FOR_nothing
)
1118 sri
->icode
= sri
->prev_sri
->t_icode
;
1121 #ifdef SECONDARY_INPUT_RELOAD_CLASS
1123 rclass
= SECONDARY_INPUT_RELOAD_CLASS (reload_class
,
1124 MACRO_MODE (reload_mode
), x
);
1126 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1128 rclass
= SECONDARY_OUTPUT_RELOAD_CLASS (reload_class
,
1129 MACRO_MODE (reload_mode
), x
);
1131 if (rclass
!= NO_REGS
)
1133 enum insn_code icode
1134 = direct_optab_handler (in_p
? reload_in_optab
: reload_out_optab
,
1137 if (icode
!= CODE_FOR_nothing
1138 && !insn_operand_matches (icode
, in_p
, x
))
1139 icode
= CODE_FOR_nothing
;
1140 else if (icode
!= CODE_FOR_nothing
)
1142 const char *insn_constraint
, *scratch_constraint
;
1143 enum reg_class insn_class
, scratch_class
;
1145 gcc_assert (insn_data
[(int) icode
].n_operands
== 3);
1146 insn_constraint
= insn_data
[(int) icode
].operand
[!in_p
].constraint
;
1147 if (!*insn_constraint
)
1148 insn_class
= ALL_REGS
;
1153 gcc_assert (*insn_constraint
== '=');
1156 insn_class
= (reg_class_for_constraint
1157 (lookup_constraint (insn_constraint
)));
1158 gcc_assert (insn_class
!= NO_REGS
);
1161 scratch_constraint
= insn_data
[(int) icode
].operand
[2].constraint
;
1162 /* The scratch register's constraint must start with "=&",
1163 except for an input reload, where only "=" is necessary,
1164 and where it might be beneficial to re-use registers from
1166 gcc_assert (scratch_constraint
[0] == '='
1167 && (in_p
|| scratch_constraint
[1] == '&'));
1168 scratch_constraint
++;
1169 if (*scratch_constraint
== '&')
1170 scratch_constraint
++;
1171 scratch_class
= (reg_class_for_constraint
1172 (lookup_constraint (scratch_constraint
)));
1174 if (reg_class_subset_p (reload_class
, insn_class
))
1176 gcc_assert (scratch_class
== rclass
);
1180 rclass
= insn_class
;
1183 if (rclass
== NO_REGS
)
1186 sri
->t_icode
= icode
;
/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}
/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}
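
/* That is, with -fpic the mask is 3, so constants needing either kind of
   relocation are kept out of read-only sections; without -fpic the mask is
   0 and such constants may stay in read-only data.  */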
/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}
1221 /* By default, do no modification. */
1222 tree
default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED
,
1228 /* The default implementation of TARGET_STATIC_RTX_ALIGNMENT. */
1231 default_static_rtx_alignment (machine_mode mode
)
1233 return GET_MODE_ALIGNMENT (mode
);
1236 /* The default implementation of TARGET_CONSTANT_ALIGNMENT. */
1239 default_constant_alignment (const_tree
, HOST_WIDE_INT align
)
1244 /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
1245 to at least BITS_PER_WORD but otherwise makes no changes. */
1248 constant_alignment_word_strings (const_tree exp
, HOST_WIDE_INT align
)
1250 if (TREE_CODE (exp
) == STRING_CST
)
1251 return MAX (align
, BITS_PER_WORD
);
1255 /* Default to natural alignment for vector types, bounded by
1256 MAX_OFILE_ALIGNMENT. */
1259 default_vector_alignment (const_tree type
)
1261 unsigned HOST_WIDE_INT align
= MAX_OFILE_ALIGNMENT
;
1262 tree size
= TYPE_SIZE (type
);
1263 if (tree_fits_uhwi_p (size
))
1264 align
= tree_to_uhwi (size
);
1266 return align
< MAX_OFILE_ALIGNMENT
? align
: MAX_OFILE_ALIGNMENT
;
1269 /* The default implementation of
1270 TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT. */
1273 default_preferred_vector_alignment (const_tree type
)
1275 return TYPE_ALIGN (type
);
1278 /* By default assume vectors of element TYPE require a multiple of the natural
1279 alignment of TYPE. TYPE is naturally aligned if IS_PACKED is false. */
1281 default_builtin_vector_alignment_reachable (const_tree
/*type*/, bool is_packed
)
/* By default, assume that a target supports any factor of misalignment
   memory access if it supports movmisalign pattern.
   is_packed is true if the memory access is defined in a packed struct.  */

bool
default_builtin_support_vector_misalignment (machine_mode mode,
                                             const_tree type ATTRIBUTE_UNUSED,
                                             int misalignment ATTRIBUTE_UNUSED,
                                             bool is_packed ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}
1303 /* By default, only attempt to parallelize bitwise operations, and
1304 possibly adds/subtracts using bit-twiddling. */
1307 default_preferred_simd_mode (scalar_mode
)
1312 /* By default do not split reductions further. */
1315 default_split_reduction (machine_mode mode
)
1320 /* By default only the size derived from the preferred vector mode
1324 default_autovectorize_vector_sizes (vector_sizes
*, bool)
1328 /* By default a vector of integers is used as a mask. */
1331 default_get_mask_mode (poly_uint64 nunits
, poly_uint64 vector_size
)
1333 unsigned int elem_size
= vector_element_size (vector_size
, nunits
);
1334 scalar_int_mode elem_mode
1335 = smallest_int_mode_for_size (elem_size
* BITS_PER_UNIT
);
1336 machine_mode vector_mode
;
1338 gcc_assert (known_eq (elem_size
* nunits
, vector_size
));
1340 if (mode_for_vector (elem_mode
, nunits
).exists (&vector_mode
)
1341 && VECTOR_MODE_P (vector_mode
)
1342 && targetm
.vector_mode_supported_p (vector_mode
))
1345 return opt_machine_mode ();
/* By default consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}
1356 /* By default, the cost model accumulates three separate costs (prologue,
1357 loop body, and epilogue) for a vectorized loop or block. So allocate an
1358 array of three unsigned ints, set it to zero, and return its address. */
1361 default_init_cost (class loop
*loop_info ATTRIBUTE_UNUSED
)
1363 unsigned *cost
= XNEWVEC (unsigned, 3);
1364 cost
[vect_prologue
] = cost
[vect_body
] = cost
[vect_epilogue
] = 0;
1368 /* By default, the cost model looks up the cost of the given statement
1369 kind and mode, multiplies it by the occurrence count, accumulates
1370 it into the cost specified by WHERE, and returns the cost added. */
1373 default_add_stmt_cost (void *data
, int count
, enum vect_cost_for_stmt kind
,
1374 class _stmt_vec_info
*stmt_info
, int misalign
,
1375 enum vect_cost_model_location where
)
1377 unsigned *cost
= (unsigned *) data
;
1378 unsigned retval
= 0;
1380 tree vectype
= stmt_info
? stmt_vectype (stmt_info
) : NULL_TREE
;
1381 int stmt_cost
= targetm
.vectorize
.builtin_vectorization_cost (kind
, vectype
,
1383 /* Statements in an inner loop relative to the loop being
1384 vectorized are weighted more heavily. The value here is
1385 arbitrary and could potentially be improved with analysis. */
1386 if (where
== vect_body
&& stmt_info
&& stmt_in_inner_loop_p (stmt_info
))
1387 count
*= 50; /* FIXME. */
1389 retval
= (unsigned) (count
* stmt_cost
);
1390 cost
[where
] += retval
;
1395 /* By default, the cost model just returns the accumulated costs. */
1398 default_finish_cost (void *data
, unsigned *prologue_cost
,
1399 unsigned *body_cost
, unsigned *epilogue_cost
)
1401 unsigned *cost
= (unsigned *) data
;
1402 *prologue_cost
= cost
[vect_prologue
];
1403 *body_cost
= cost
[vect_body
];
1404 *epilogue_cost
= cost
[vect_epilogue
];
1407 /* Free the cost data. */
1410 default_destroy_cost_data (void *data
)
1415 /* Determine whether or not a pointer mode is valid. Assume defaults
1416 of ptr_mode or Pmode - can be overridden. */
1418 default_valid_pointer_mode (scalar_int_mode mode
)
1420 return (mode
== ptr_mode
|| mode
== Pmode
);
/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */

bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
1450 /* Return the mode for a pointer to a given ADDRSPACE,
1451 defaulting to ptr_mode for all address spaces. */
1454 default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED
)
1459 /* Return the mode for an address in a given ADDRSPACE,
1460 defaulting to Pmode for all address spaces. */
1463 default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED
)
1468 /* Named address space version of valid_pointer_mode.
1469 To match the above, the same modes apply to all address spaces. */
1472 default_addr_space_valid_pointer_mode (scalar_int_mode mode
,
1473 addr_space_t as ATTRIBUTE_UNUSED
)
1475 return targetm
.valid_pointer_mode (mode
);
1478 /* Some places still assume that all pointer or address modes are the
1479 standard Pmode and ptr_mode. These optimizations become invalid if
1480 the target actually supports multiple different modes. For now,
1481 we disable such optimizations on such targets, using this function. */
1484 target_default_pointer_address_modes_p (void)
1486 if (targetm
.addr_space
.address_mode
!= default_addr_space_address_mode
)
1488 if (targetm
.addr_space
.pointer_mode
!= default_addr_space_pointer_mode
)
1494 /* Named address space version of legitimate_address_p.
1495 By default, all address spaces have the same form. */
1498 default_addr_space_legitimate_address_p (machine_mode mode
, rtx mem
,
1500 addr_space_t as ATTRIBUTE_UNUSED
)
1502 return targetm
.legitimate_address_p (mode
, mem
, strict
);
1505 /* Named address space version of LEGITIMIZE_ADDRESS.
1506 By default, all address spaces have the same form. */
1509 default_addr_space_legitimize_address (rtx x
, rtx oldx
, machine_mode mode
,
1510 addr_space_t as ATTRIBUTE_UNUSED
)
1512 return targetm
.legitimize_address (x
, oldx
, mode
);
1515 /* The default hook for determining if one named address space is a subset of
1516 another and to return which address space to use as the common address
1520 default_addr_space_subset_p (addr_space_t subset
, addr_space_t superset
)
1522 return (subset
== superset
);
1525 /* The default hook for determining if 0 within a named address
1526 space is a valid address. */
1529 default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED
)
1534 /* The default hook for debugging the address space is to return the
1535 address space number to indicate DW_AT_address_class. */
1537 default_addr_space_debug (addr_space_t as
)
1542 /* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
1543 Don't complain about any address space. */
1546 default_addr_space_diagnose_usage (addr_space_t
, location_t
)
1551 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1552 called for targets with only a generic address space. */
1555 default_addr_space_convert (rtx op ATTRIBUTE_UNUSED
,
1556 tree from_type ATTRIBUTE_UNUSED
,
1557 tree to_type ATTRIBUTE_UNUSED
)
/* The default implementation of TARGET_HARD_REGNO_NREGS.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}
1573 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED
)
1578 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1581 default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
,
1582 addr_space_t addrspace ATTRIBUTE_UNUSED
)
1588 default_target_option_valid_attribute_p (tree
ARG_UNUSED (fndecl
),
1589 tree
ARG_UNUSED (name
),
1590 tree
ARG_UNUSED (args
),
1591 int ARG_UNUSED (flags
))
1593 warning (OPT_Wattributes
,
1594 "target attribute is not supported on this machine");
1600 default_target_option_pragma_parse (tree
ARG_UNUSED (args
),
1601 tree
ARG_UNUSED (pop_target
))
1603 /* If args is NULL the caller is handle_pragma_pop_options (). In that case,
1604 emit no warning because "#pragma GCC pop_target" is valid on targets that
1605 do not have the "target" pragma. */
1607 warning (OPT_Wpragmas
,
1608 "%<#pragma GCC target%> is not supported for this machine");
1614 default_target_can_inline_p (tree caller
, tree callee
)
1616 tree callee_opts
= DECL_FUNCTION_SPECIFIC_TARGET (callee
);
1617 tree caller_opts
= DECL_FUNCTION_SPECIFIC_TARGET (caller
);
1619 callee_opts
= target_option_default_node
;
1621 caller_opts
= target_option_default_node
;
1623 /* If both caller and callee have attributes, assume that if the
1624 pointer is different, the two functions have different target
1625 options since build_target_option_node uses a hash table for the
1627 return callee_opts
== caller_opts
;
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}
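
/* That is, a jump table is only considered once a switch has at least
   four distinct case values (five if the target has no casesi insn).  */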
/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */

bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

/* By default assume that libc does not have a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}
/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                          reg_class_t rclass ATTRIBUTE_UNUSED,
                          bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}
/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                            reg_class_t from ATTRIBUTE_UNUSED,
                            reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
                             (enum reg_class) from, (enum reg_class) to);
#endif
}
/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}

/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x)
{
  return x.coeffs[0];
}
/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

static unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
                                        unsigned int alignment,
                                        enum by_pieces_operation op,
                                        bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    default:
      gcc_unreachable ();
    }

  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
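
/* For example, on a target with MOVE_MAX_PIECES of 8 and a move ratio of
   15, a well-aligned 64-byte copy needs 8 piece moves, which is below the
   ratio, so it is expanded inline; much larger copies fall back to the
   cpymem optab or a call to memcpy.  */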
/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  return 1;
}
1801 /* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
1802 entry. If RECORD_P is true and the target supports named sections,
1803 the location of the NOPs will be recorded in a special object section
1804 called "__patchable_function_entries". This routine may be called
1805 twice per function to put NOPs before and after the function
1809 default_print_patchable_function_entry (FILE *file
,
1810 unsigned HOST_WIDE_INT patch_area_size
,
1813 const char *nop_templ
= 0;
1815 rtx_insn
*my_nop
= make_insn_raw (gen_nop ());
1817 /* We use the template alone, relying on the (currently sane) assumption
1818 that the NOP template does not have variable operands. */
1819 code_num
= recog_memoized (my_nop
);
1820 nop_templ
= get_insn_template (code_num
, my_nop
);
1822 if (record_p
&& targetm_common
.have_named_sections
)
1825 static int patch_area_number
;
1826 section
*previous_section
= in_section
;
1827 const char *asm_op
= integer_asm_op (POINTER_SIZE_UNITS
, false);
1829 gcc_assert (asm_op
!= NULL
);
1830 patch_area_number
++;
1831 ASM_GENERATE_INTERNAL_LABEL (buf
, "LPFE", patch_area_number
);
1833 switch_to_section (get_section ("__patchable_function_entries",
1834 SECTION_WRITE
| SECTION_RELRO
, NULL
));
1835 fputs (asm_op
, file
);
1836 assemble_name_raw (file
, buf
);
1839 switch_to_section (previous_section
);
1840 ASM_OUTPUT_LABEL (file
, buf
);
1844 for (i
= 0; i
< patch_area_size
; ++i
)
1845 fprintf (file
, "\t%s\n", nop_templ
);
1849 default_profile_before_prologue (void)
1851 #ifdef PROFILE_BEFORE_PROLOGUE
1858 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1861 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
,
1864 #ifdef PREFERRED_RELOAD_CLASS
1865 return (reg_class_t
) PREFERRED_RELOAD_CLASS (x
, (enum reg_class
) rclass
);
1871 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1874 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED
,
1880 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
1882 default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED
)
1887 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1890 default_class_likely_spilled_p (reg_class_t rclass
)
1892 return (reg_class_size
[(int) rclass
] == 1);
1895 /* The default implementation of TARGET_CLASS_MAX_NREGS. */
1898 default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED
,
1899 machine_mode mode ATTRIBUTE_UNUSED
)
1901 #ifdef CLASS_MAX_NREGS
1902 return (unsigned char) CLASS_MAX_NREGS ((enum reg_class
) rclass
,
1905 /* Targets with variable-sized modes must provide their own definition
1907 unsigned int size
= GET_MODE_SIZE (mode
).to_constant ();
1908 return (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1912 /* Determine the debugging unwind mechanism for the target. */
1914 enum unwind_info_type
1915 default_debug_unwind_info (void)
1917 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1918 /* ??? Change all users to the hook, then poison this. */
1919 #ifdef DWARF2_FRAME_INFO
1920 if (DWARF2_FRAME_INFO
)
1924 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1925 #ifdef DWARF2_DEBUGGING_INFO
1926 if (write_symbols
== DWARF2_DEBUG
|| write_symbols
== VMS_AND_DWARF2_DEBUG
)
1933 /* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
1934 must define this hook. */
1937 default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
1942 /* Determine the correct mode for a Dwarf frame register that represents
1946 default_dwarf_frame_reg_mode (int regno
)
1948 machine_mode save_mode
= reg_raw_mode
[regno
];
1950 if (targetm
.hard_regno_call_part_clobbered (NULL
, regno
, save_mode
))
1951 save_mode
= choose_hard_reg_mode (regno
, 1, true);
1955 /* To be used by targets where reg_raw_mode doesn't return the right
1956 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1959 default_get_reg_raw_mode (int regno
)
1961 /* Targets must override this hook if the underlying register is
1963 return as_a
<fixed_size_mode
> (reg_raw_mode
[regno
]);
1966 /* Return true if a leaf function should stay leaf even with profiling
1970 default_keep_leaf_when_profiled ()
1975 /* Return true if the state of option OPTION should be stored in PCH files
1976 and checked by default_pch_valid_p. Store the option's current state
1980 option_affects_pch_p (int option
, struct cl_option_state
*state
)
1982 if ((cl_options
[option
].flags
& CL_TARGET
) == 0)
1984 if ((cl_options
[option
].flags
& CL_PCH_IGNORE
) != 0)
1986 if (option_flag_var (option
, &global_options
) == &target_flags
)
1987 if (targetm
.check_pch_target_flags
)
1989 return get_option_state (&global_options
, option
, state
);
1992 /* Default version of get_pch_validity.
1993 By default, every flag difference is fatal; that will be mostly right for
1994 most targets, but completely right for very few. */
1997 default_get_pch_validity (size_t *sz
)
1999 struct cl_option_state state
;
2004 if (targetm
.check_pch_target_flags
)
2005 *sz
+= sizeof (target_flags
);
2006 for (i
= 0; i
< cl_options_count
; i
++)
2007 if (option_affects_pch_p (i
, &state
))
2010 result
= r
= XNEWVEC (char, *sz
);
2014 if (targetm
.check_pch_target_flags
)
2016 memcpy (r
, &target_flags
, sizeof (target_flags
));
2017 r
+= sizeof (target_flags
);
2020 for (i
= 0; i
< cl_options_count
; i
++)
2021 if (option_affects_pch_p (i
, &state
))
2023 memcpy (r
, state
.data
, state
.size
);
2030 /* Return a message which says that a PCH file was created with a different
2031 setting of OPTION. */
2034 pch_option_mismatch (const char *option
)
2036 return xasprintf (_("created and used with differing settings of '%s'"),
2040 /* Default version of pch_valid_p. */
2043 default_pch_valid_p (const void *data_p
, size_t len
)
2045 struct cl_option_state state
;
2046 const char *data
= (const char *)data_p
;
2049 /* -fpic and -fpie also usually make a PCH invalid. */
2050 if (data
[0] != flag_pic
)
2051 return _("created and used with different settings of %<-fpic%>");
2052 if (data
[1] != flag_pie
)
2053 return _("created and used with different settings of %<-fpie%>");
2056 /* Check target_flags. */
2057 if (targetm
.check_pch_target_flags
)
2062 memcpy (&tf
, data
, sizeof (target_flags
));
2063 data
+= sizeof (target_flags
);
2064 len
-= sizeof (target_flags
);
2065 r
= targetm
.check_pch_target_flags (tf
);
2070 for (i
= 0; i
< cl_options_count
; i
++)
2071 if (option_affects_pch_p (i
, &state
))
2073 if (memcmp (data
, state
.data
, state
.size
) != 0)
2074 return pch_option_mismatch (cl_options
[i
].opt_text
);
2082 /* Default version of cstore_mode. */
2085 default_cstore_mode (enum insn_code icode
)
2087 return as_a
<scalar_int_mode
> (insn_data
[(int) icode
].operand
[0].mode
);
2090 /* Default version of member_type_forces_blk. */
2093 default_member_type_forces_blk (const_tree
, machine_mode
)
2099 default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED
,
2100 rtx ptr ATTRIBUTE_UNUSED
,
2101 rtx bnd ATTRIBUTE_UNUSED
)
2107 default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED
,
2108 rtx addr ATTRIBUTE_UNUSED
,
2109 rtx bounds ATTRIBUTE_UNUSED
,
2110 rtx to ATTRIBUTE_UNUSED
)
2116 default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED
)
2122 default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED
,
2123 rtx bounds ATTRIBUTE_UNUSED
)
2128 /* Default version of canonicalize_comparison. */
2131 default_canonicalize_comparison (int *, rtx
*, rtx
*, bool)
2135 /* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV. */
2138 default_atomic_assign_expand_fenv (tree
*, tree
*, tree
*)
2142 #ifndef PAD_VARARGS_DOWN
2143 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
2146 /* Build an indirect-ref expression over the given TREE, which represents a
2147 piece of a va_arg() expansion. */
2149 build_va_arg_indirect_ref (tree addr
)
2151 addr
= build_simple_mem_ref_loc (EXPR_LOCATION (addr
), addr
);
2155 /* The "standard" implementation of va_arg: read the value from the
2156 current (padded) address and increment by the (padded) size. */
2159 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
2162 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
2163 unsigned HOST_WIDE_INT align
, boundary
;
2166 /* All of the alignment and movement below is for args-grow-up machines.
2167 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
2168 implement their own specialized gimplify_va_arg_expr routines. */
2169 if (ARGS_GROW_DOWNWARD
)
2172 indirect
= pass_va_arg_by_reference (type
);
2174 type
= build_pointer_type (type
);
2176 if (targetm
.calls
.split_complex_arg
2177 && TREE_CODE (type
) == COMPLEX_TYPE
2178 && targetm
.calls
.split_complex_arg (type
))
2180 tree real_part
, imag_part
;
2182 real_part
= std_gimplify_va_arg_expr (valist
,
2183 TREE_TYPE (type
), pre_p
, NULL
);
2184 real_part
= get_initialized_tmp_var (real_part
, pre_p
, NULL
);
2186 imag_part
= std_gimplify_va_arg_expr (unshare_expr (valist
),
2187 TREE_TYPE (type
), pre_p
, NULL
);
2188 imag_part
= get_initialized_tmp_var (imag_part
, pre_p
, NULL
);
2190 return build2 (COMPLEX_EXPR
, type
, real_part
, imag_part
);
2193 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
2194 boundary
= targetm
.calls
.function_arg_boundary (TYPE_MODE (type
), type
);
2196 /* When we align parameter on stack for caller, if the parameter
2197 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
2198 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
2199 here with caller. */
2200 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
2201 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
2203 boundary
/= BITS_PER_UNIT
;
2205 /* Hoist the valist value into a temporary for the moment. */
2206 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
2208 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
2209 requires greater alignment, we must perform dynamic alignment. */
2210 if (boundary
> align
2211 && !TYPE_EMPTY_P (type
)
2212 && !integer_zerop (TYPE_SIZE (type
)))
2214 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
2215 fold_build_pointer_plus_hwi (valist_tmp
, boundary
- 1));
2216 gimplify_and_add (t
, pre_p
);
2218 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
2219 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (valist
),
2221 build_int_cst (TREE_TYPE (valist
), -boundary
)));
2222 gimplify_and_add (t
, pre_p
);
2227 /* If the actual alignment is less than the alignment of the type,
2228 adjust the type accordingly so that we don't assume strict alignment
2229 when dereferencing the pointer. */
2230 boundary
*= BITS_PER_UNIT
;
2231 if (boundary
< TYPE_ALIGN (type
))
2233 type
= build_variant_type_copy (type
);
2234 SET_TYPE_ALIGN (type
, boundary
);
2237 /* Compute the rounded size of the type. */
2238 type_size
= arg_size_in_bytes (type
);
2239 rounded_size
= round_up (type_size
, align
);
2241 /* Reduce rounded_size so it's sharable with the postqueue. */
2242 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2246 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
2248 /* Small args are padded downward. */
2249 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
2250 rounded_size
, size_int (align
));
2251 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
2252 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
2253 addr
= fold_build_pointer_plus (addr
, t
);
2256 /* Compute new value for AP. */
2257 t
= fold_build_pointer_plus (valist_tmp
, rounded_size
);
2258 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
2259 gimplify_and_add (t
, pre_p
);
2261 addr
= fold_convert (build_pointer_type (type
), addr
);
2264 addr
= build_va_arg_indirect_ref (addr
);
2266 return build_va_arg_indirect_ref (addr
);
2269 /* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
2270 not support nested low-overhead loops. */
2273 can_use_doloop_if_innermost (const widest_int
&, const widest_int
&,
2274 unsigned int loop_depth
, bool)
2276 return loop_depth
== 1;
2279 /* Default implementation of TARGET_OPTAB_SUPPORTED_P. */
2282 default_optab_supported_p (int, machine_mode
, machine_mode
, optimization_type
)
2287 /* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST. */
2290 default_max_noce_ifcvt_seq_cost (edge e
)
2292 bool predictable_p
= predictable_edge_p (e
);
2294 enum compiler_param param
2296 ? PARAM_MAX_RTL_IF_CONVERSION_PREDICTABLE_COST
2297 : PARAM_MAX_RTL_IF_CONVERSION_UNPREDICTABLE_COST
);
2299 /* If we have a parameter set, use that, otherwise take a guess using
2301 if (global_options_set
.x_param_values
[param
])
2302 return PARAM_VALUE (param
);
2304 return BRANCH_COST (true, predictable_p
) * COSTS_N_INSNS (3);
/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}
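
/* For example, this is 32 on a WORD_REGISTER_OPERATIONS target with 32-bit
   words, and 8 (a single byte) otherwise.  */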
/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
2323 /* Default implementation for
2324 TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE. */
2326 default_stack_clash_protection_alloca_probe_range (void)
2331 /* The default implementation of TARGET_EARLY_REMAT_MODES. */
2334 default_select_early_remat_modes (sbitmap
)
2338 /* The default implementation of TARGET_PREFERRED_ELSE_VALUE. */
2341 default_preferred_else_value (unsigned, tree type
, unsigned, tree
*)
2343 return build_zero_cst (type
);
2346 /* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE. */
2348 default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED
)
2350 #ifdef HAVE_speculation_barrier
2351 return active
? HAVE_speculation_barrier
: true;
2356 /* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
2357 that can be used on targets that never have speculative execution. */
2359 speculation_safe_value_not_needed (bool active
)
2364 /* Default implementation of the speculation-safe-load builtin. This
2365 implementation simply copies val to result and generates a
2366 speculation_barrier insn, if such a pattern is defined. */
2368 default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED
,
2369 rtx result
, rtx val
,
2370 rtx failval ATTRIBUTE_UNUSED
)
2372 emit_move_insn (result
, val
);
2374 #ifdef HAVE_speculation_barrier
2375 /* Assume the target knows what it is doing: if it defines a
2376 speculation barrier, but it is not enabled, then assume that one
2378 if (HAVE_speculation_barrier
)
2379 emit_insn (gen_speculation_barrier ());
void
default_remove_extra_call_preserved_regs (rtx_insn *, HARD_REG_SET *)
{
}

#include "gt-targhooks.h"