/* Default target hook functions.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      that point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
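/* Illustrative example (not part of the original file): after step 5, a
   backend overrides a hook from its own target .c file rather than
   defining the old macro, along these lines (all names hypothetical):

     static bool
     example_legitimate_address_p (machine_mode mode, rtx x, bool strict)
     {
       ...
     }

     #undef TARGET_LEGITIMATE_ADDRESS_P
     #define TARGET_LEGITIMATE_ADDRESS_P example_legitimate_address_p

     struct gcc_target targetm = TARGET_INITIALIZER;

   See the existing ports for the exact conventions.  */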
#include "coretypes.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "common/common-target.h"
#include "langhooks.h"
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}
machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}
machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
					 machine_mode)
{
  return false;
}

bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}
void
default_setup_incoming_varargs (cumulative_args_t,
				const function_arg_info &, int *, int)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}
scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
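/* Illustrative note (not part of the original file): with the default hook
   above, an array whose element type has 16-byte alignment on a target
   where sizeof (size_t) == 8 gets a 16-byte cookie, i.e.
   max (sizeof (size_t), alignof (type)) bytes are reserved in front of the
   array for operator new[] bookkeeping.  */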
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
					   const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg.mode, arg.type);
}
/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}
/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       machine_mode /*mode*/,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */

tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
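/* Illustrative example (not part of the original file): with
   user_label_prefix == "_", the source-level name "foo" is returned as the
   identifier "_foo", while a name written as "*foo" bypasses prefixing and,
   after strip_name_encoding, is returned as "foo".  */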
/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}
/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}
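/* Illustrative note (not part of the original file): on a typical 32-bit
   target whose widest C integer type is 64 bits, the switch above accepts
   SImode and DImode (the latter matches long long and the
   2 * BITS_PER_WORD double-word case) but rejects TImode, since 128 bits
   matches neither a C type nor a double-word pair.  */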
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  */

opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
	{
	case 32:
#ifdef HAVE_DFmode
	  cand1 = DFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_XFmode
	  cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
	  cand2 = TFmode;
#endif
	  break;

	case 128:
	  break;

	default:
	  /* Those are the only valid _FloatNx types.  */
	  gcc_unreachable ();
	}
      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
	{
	case 16:
	  /* Always enable _Float16 if we have basic support for the mode.
	     Targets can control the range and precision of operations on
	     the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
	  cand = HFmode;
#endif
	  break;

	case 32:
#ifdef HAVE_SFmode
	  cand = SFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_DFmode
	  cand = DFmode;
#endif
	  break;

	case 128:
#ifdef HAVE_TFmode
	  cand = TFmode;
#endif
	  break;

	default:
	  break;
	}
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }
  return opt_scalar_float_mode ();
}
/* Define this to return true if the _Floatn and _Floatnx built-in functions
   should implicitly enable the built-in function without the __builtin_ prefix
   in addition to the normal built-in function with the __builtin_ prefix.  The
   default is to only enable built-in functions without the __builtin_ prefix
   for the GNU C language.  The argument FUNC is the enum builtin_in_function
   id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}
/* Make some target macros useable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise return
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (eg, PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Vectorized conversion.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
				       tree dest_type ATTRIBUTE_UNUSED,
				       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}
tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
					  const function_arg_info &)
{
  return false;
}

int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
				     const function_arg_info &)
{
  return 0;
}

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
				tree ATTRIBUTE_UNUSED)
{
}

void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
			      machine_mode mode ATTRIBUTE_UNUSED,
			      const_tree type ATTRIBUTE_UNUSED,
			      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}

/* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
   upward, but pad short args downward on big-endian machines.  */

pad_direction
default_function_arg_padding (machine_mode mode, const_tree type)
{
  if (!BYTES_BIG_ENDIAN)
    return PAD_UPWARD;

  unsigned HOST_WIDE_INT size;
  if (mode == BLKmode)
    {
      if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return PAD_UPWARD;
      size = int_size_in_bytes (type);
    }
  else
    /* Targets with variable-sized modes must override this hook
       and handle variable-sized modes explicitly.  */
    size = GET_MODE_SIZE (mode).to_constant ();

  if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    return PAD_DOWNWARD;

  return PAD_UPWARD;
}
rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
857 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED
)
862 hook_invalid_arg_for_unprototyped_fn (
863 const_tree typelist ATTRIBUTE_UNUSED
,
864 const_tree funcdecl ATTRIBUTE_UNUSED
,
865 const_tree val ATTRIBUTE_UNUSED
)
870 /* Initialize the stack protection decls. */
872 /* Stack protection related decls living in libgcc. */
873 static GTY(()) tree stack_chk_guard_decl
;
876 default_stack_protect_guard (void)
878 tree t
= stack_chk_guard_decl
;
884 t
= build_decl (UNKNOWN_LOCATION
,
885 VAR_DECL
, get_identifier ("__stack_chk_guard"),
889 DECL_EXTERNAL (t
) = 1;
891 TREE_THIS_VOLATILE (t
) = 1;
892 DECL_ARTIFICIAL (t
) = 1;
893 DECL_IGNORED_P (t
) = 1;
895 /* Do not share RTL as the declaration is visible outside of
898 RTX_FLAG (x
, used
) = 1;
900 stack_chk_guard_decl
= t
;
906 static GTY(()) tree stack_chk_fail_decl
;
909 default_external_stack_protect_fail (void)
911 tree t
= stack_chk_fail_decl
;
915 t
= build_function_type_list (void_type_node
, NULL_TREE
);
916 t
= build_decl (UNKNOWN_LOCATION
,
917 FUNCTION_DECL
, get_identifier ("__stack_chk_fail"), t
);
920 DECL_EXTERNAL (t
) = 1;
922 TREE_THIS_VOLATILE (t
) = 1;
923 TREE_NOTHROW (t
) = 1;
924 DECL_ARTIFICIAL (t
) = 1;
925 DECL_IGNORED_P (t
) = 1;
926 DECL_VISIBILITY (t
) = VISIBILITY_DEFAULT
;
927 DECL_VISIBILITY_SPECIFIED (t
) = 1;
929 stack_chk_fail_decl
= t
;
932 return build_call_expr (t
, 0);
936 default_hidden_stack_protect_fail (void)
938 #ifndef HAVE_GAS_HIDDEN
939 return default_external_stack_protect_fail ();
941 tree t
= stack_chk_fail_decl
;
944 return default_external_stack_protect_fail ();
948 t
= build_function_type_list (void_type_node
, NULL_TREE
);
949 t
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
,
950 get_identifier ("__stack_chk_fail_local"), t
);
953 DECL_EXTERNAL (t
) = 1;
955 TREE_THIS_VOLATILE (t
) = 1;
956 TREE_NOTHROW (t
) = 1;
957 DECL_ARTIFICIAL (t
) = 1;
958 DECL_IGNORED_P (t
) = 1;
959 DECL_VISIBILITY_SPECIFIED (t
) = 1;
960 DECL_VISIBILITY (t
) = VISIBILITY_HIDDEN
;
962 stack_chk_fail_decl
= t
;
965 return build_call_expr (t
, 0);
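/* Illustrative note (not part of the original file): the declarations built
   above correspond to the canary check that -fstack-protector emits in
   function epilogues, conceptually:

     if (<canary slot> != __stack_chk_guard)
       __stack_chk_fail ();

   where __stack_chk_guard and __stack_chk_fail (or the hidden
   __stack_chk_fail_local) are provided by libc/libgcc.  */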
970 hook_bool_const_rtx_commutative_p (const_rtx x
,
971 int outer_code ATTRIBUTE_UNUSED
)
973 return COMMUTATIVE_P (x
);
977 default_function_value (const_tree ret_type ATTRIBUTE_UNUSED
,
978 const_tree fn_decl_or_type
,
979 bool outgoing ATTRIBUTE_UNUSED
)
981 /* The old interface doesn't handle receiving the function type. */
983 && !DECL_P (fn_decl_or_type
))
984 fn_decl_or_type
= NULL
;
986 #ifdef FUNCTION_VALUE
987 return FUNCTION_VALUE (ret_type
, fn_decl_or_type
);
994 default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED
,
995 const_rtx fun ATTRIBUTE_UNUSED
)
998 return LIBCALL_VALUE (MACRO_MODE (mode
));
1004 /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
1007 default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED
)
1009 #ifdef FUNCTION_VALUE_REGNO_P
1010 return FUNCTION_VALUE_REGNO_P (regno
);
1017 default_internal_arg_pointer (void)
1019 /* If the reg that the virtual arg pointer will be translated into is
1020 not a fixed reg or is the stack pointer, make a copy of the virtual
1021 arg pointer, and address parms via the copy. The frame pointer is
1022 considered fixed even though it is not marked as such. */
1023 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
1024 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
1025 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
)))
1026 return copy_to_reg (virtual_incoming_args_rtx
);
1028 return virtual_incoming_args_rtx
;
1032 default_static_chain (const_tree
ARG_UNUSED (fndecl_or_type
), bool incoming_p
)
1036 #ifdef STATIC_CHAIN_INCOMING_REGNUM
1037 return gen_rtx_REG (Pmode
, STATIC_CHAIN_INCOMING_REGNUM
);
1041 #ifdef STATIC_CHAIN_REGNUM
1042 return gen_rtx_REG (Pmode
, STATIC_CHAIN_REGNUM
);
1046 static bool issued_error
;
1049 issued_error
= true;
1050 sorry ("nested functions not supported on this target");
  /* It really doesn't matter what we return here, so long as it
     doesn't cause the rest of the compiler to crash.  */
1055 return gen_rtx_MEM (Pmode
, stack_pointer_rtx
);
1060 default_trampoline_init (rtx
ARG_UNUSED (m_tramp
), tree
ARG_UNUSED (t_func
),
1061 rtx
ARG_UNUSED (r_chain
))
1063 sorry ("nested function trampolines not supported on this target");
1067 default_return_pops_args (tree
, tree
, poly_int64
)
1073 default_branch_target_register_class (void)
1079 default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED
,
1081 reg_class_t best_cl ATTRIBUTE_UNUSED
)
1087 default_lra_p (void)
1093 default_register_priority (int hard_regno ATTRIBUTE_UNUSED
)
1099 default_register_usage_leveling_p (void)
1105 default_different_addr_displacement_p (void)
1111 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED
, rtx x ATTRIBUTE_UNUSED
,
1112 reg_class_t reload_class_i ATTRIBUTE_UNUSED
,
1113 machine_mode reload_mode ATTRIBUTE_UNUSED
,
1114 secondary_reload_info
*sri
)
1116 enum reg_class rclass
= NO_REGS
;
1117 enum reg_class reload_class
= (enum reg_class
) reload_class_i
;
1119 if (sri
->prev_sri
&& sri
->prev_sri
->t_icode
!= CODE_FOR_nothing
)
1121 sri
->icode
= sri
->prev_sri
->t_icode
;
1124 #ifdef SECONDARY_INPUT_RELOAD_CLASS
1126 rclass
= SECONDARY_INPUT_RELOAD_CLASS (reload_class
,
1127 MACRO_MODE (reload_mode
), x
);
1129 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1131 rclass
= SECONDARY_OUTPUT_RELOAD_CLASS (reload_class
,
1132 MACRO_MODE (reload_mode
), x
);
1134 if (rclass
!= NO_REGS
)
1136 enum insn_code icode
1137 = direct_optab_handler (in_p
? reload_in_optab
: reload_out_optab
,
1140 if (icode
!= CODE_FOR_nothing
1141 && !insn_operand_matches (icode
, in_p
, x
))
1142 icode
= CODE_FOR_nothing
;
1143 else if (icode
!= CODE_FOR_nothing
)
1145 const char *insn_constraint
, *scratch_constraint
;
1146 enum reg_class insn_class
, scratch_class
;
1148 gcc_assert (insn_data
[(int) icode
].n_operands
== 3);
1149 insn_constraint
= insn_data
[(int) icode
].operand
[!in_p
].constraint
;
1150 if (!*insn_constraint
)
1151 insn_class
= ALL_REGS
;
1156 gcc_assert (*insn_constraint
== '=');
1159 insn_class
= (reg_class_for_constraint
1160 (lookup_constraint (insn_constraint
)));
1161 gcc_assert (insn_class
!= NO_REGS
);
1164 scratch_constraint
= insn_data
[(int) icode
].operand
[2].constraint
;
1165 /* The scratch register's constraint must start with "=&",
1166 except for an input reload, where only "=" is necessary,
1167 and where it might be beneficial to re-use registers from
1169 gcc_assert (scratch_constraint
[0] == '='
1170 && (in_p
|| scratch_constraint
[1] == '&'));
1171 scratch_constraint
++;
1172 if (*scratch_constraint
== '&')
1173 scratch_constraint
++;
1174 scratch_class
= (reg_class_for_constraint
1175 (lookup_constraint (scratch_constraint
)));
1177 if (reg_class_subset_p (reload_class
, insn_class
))
1179 gcc_assert (scratch_class
== rclass
);
1183 rclass
= insn_class
;
1186 if (rclass
== NO_REGS
)
1189 sri
->t_icode
= icode
;
1194 /* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE. */
1197 default_secondary_memory_needed_mode (machine_mode mode
)
1199 if (!targetm
.lra_p ()
1200 && known_lt (GET_MODE_BITSIZE (mode
), BITS_PER_WORD
)
1201 && INTEGRAL_MODE_P (mode
))
1202 return mode_for_size (BITS_PER_WORD
, GET_MODE_CLASS (mode
), 0).require ();
1206 /* By default, if flag_pic is true, then neither local nor global relocs
1207 should be placed in readonly memory. */
1210 default_reloc_rw_mask (void)
1212 return flag_pic
? 3 : 0;
1215 /* By default, address diff vectors are generated
1216 for jump tables when flag_pic is true. */
1219 default_generate_pic_addr_diff_vec (void)
1224 /* By default, do no modification. */
1225 tree
default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED
,
1231 /* The default implementation of TARGET_STATIC_RTX_ALIGNMENT. */
1234 default_static_rtx_alignment (machine_mode mode
)
1236 return GET_MODE_ALIGNMENT (mode
);
1239 /* The default implementation of TARGET_CONSTANT_ALIGNMENT. */
1242 default_constant_alignment (const_tree
, HOST_WIDE_INT align
)
1247 /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
1248 to at least BITS_PER_WORD but otherwise makes no changes. */
1251 constant_alignment_word_strings (const_tree exp
, HOST_WIDE_INT align
)
1253 if (TREE_CODE (exp
) == STRING_CST
)
1254 return MAX (align
, BITS_PER_WORD
);
1258 /* Default to natural alignment for vector types, bounded by
1259 MAX_OFILE_ALIGNMENT. */
1262 default_vector_alignment (const_tree type
)
1264 unsigned HOST_WIDE_INT align
= MAX_OFILE_ALIGNMENT
;
1265 tree size
= TYPE_SIZE (type
);
1266 if (tree_fits_uhwi_p (size
))
1267 align
= tree_to_uhwi (size
);
1269 return align
< MAX_OFILE_ALIGNMENT
? align
: MAX_OFILE_ALIGNMENT
;
1272 /* The default implementation of
1273 TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT. */
1276 default_preferred_vector_alignment (const_tree type
)
1278 return TYPE_ALIGN (type
);
1281 /* By default assume vectors of element TYPE require a multiple of the natural
1282 alignment of TYPE. TYPE is naturally aligned if IS_PACKED is false. */
1284 default_builtin_vector_alignment_reachable (const_tree
/*type*/, bool is_packed
)
/* By default, assume that a target supports any factor of misalignment
   memory access if it supports the movmisalign pattern.
   IS_PACKED is true if the memory access is defined in a packed struct.  */
1293 default_builtin_support_vector_misalignment (machine_mode mode
,
1301 if (optab_handler (movmisalign_optab
, mode
) != CODE_FOR_nothing
)
1306 /* By default, only attempt to parallelize bitwise operations, and
1307 possibly adds/subtracts using bit-twiddling. */
1310 default_preferred_simd_mode (scalar_mode
)
1315 /* By default do not split reductions further. */
1318 default_split_reduction (machine_mode mode
)
1323 /* By default only the size derived from the preferred vector mode
1327 default_autovectorize_vector_sizes (vector_sizes
*, bool)
1331 /* By default a vector of integers is used as a mask. */
1334 default_get_mask_mode (poly_uint64 nunits
, poly_uint64 vector_size
)
1336 unsigned int elem_size
= vector_element_size (vector_size
, nunits
);
1337 scalar_int_mode elem_mode
1338 = smallest_int_mode_for_size (elem_size
* BITS_PER_UNIT
);
1339 machine_mode vector_mode
;
1341 gcc_assert (known_eq (elem_size
* nunits
, vector_size
));
1343 if (mode_for_vector (elem_mode
, nunits
).exists (&vector_mode
)
1344 && VECTOR_MODE_P (vector_mode
)
1345 && targetm
.vector_mode_supported_p (vector_mode
))
1348 return opt_machine_mode ();
1351 /* By default consider masked stores to be expensive. */
1354 default_empty_mask_is_expensive (unsigned ifn
)
1356 return ifn
== IFN_MASK_STORE
;
1359 /* By default, the cost model accumulates three separate costs (prologue,
1360 loop body, and epilogue) for a vectorized loop or block. So allocate an
1361 array of three unsigned ints, set it to zero, and return its address. */
1364 default_init_cost (class loop
*loop_info ATTRIBUTE_UNUSED
)
1366 unsigned *cost
= XNEWVEC (unsigned, 3);
1367 cost
[vect_prologue
] = cost
[vect_body
] = cost
[vect_epilogue
] = 0;
1371 /* By default, the cost model looks up the cost of the given statement
1372 kind and mode, multiplies it by the occurrence count, accumulates
1373 it into the cost specified by WHERE, and returns the cost added. */
1376 default_add_stmt_cost (void *data
, int count
, enum vect_cost_for_stmt kind
,
1377 class _stmt_vec_info
*stmt_info
, int misalign
,
1378 enum vect_cost_model_location where
)
1380 unsigned *cost
= (unsigned *) data
;
1381 unsigned retval
= 0;
1383 tree vectype
= stmt_info
? stmt_vectype (stmt_info
) : NULL_TREE
;
1384 int stmt_cost
= targetm
.vectorize
.builtin_vectorization_cost (kind
, vectype
,
1386 /* Statements in an inner loop relative to the loop being
1387 vectorized are weighted more heavily. The value here is
1388 arbitrary and could potentially be improved with analysis. */
1389 if (where
== vect_body
&& stmt_info
&& stmt_in_inner_loop_p (stmt_info
))
1390 count
*= 50; /* FIXME. */
1392 retval
= (unsigned) (count
* stmt_cost
);
1393 cost
[where
] += retval
;
1398 /* By default, the cost model just returns the accumulated costs. */
1401 default_finish_cost (void *data
, unsigned *prologue_cost
,
1402 unsigned *body_cost
, unsigned *epilogue_cost
)
1404 unsigned *cost
= (unsigned *) data
;
1405 *prologue_cost
= cost
[vect_prologue
];
1406 *body_cost
= cost
[vect_body
];
1407 *epilogue_cost
= cost
[vect_epilogue
];
1410 /* Free the cost data. */
1413 default_destroy_cost_data (void *data
)
1418 /* Determine whether or not a pointer mode is valid. Assume defaults
1419 of ptr_mode or Pmode - can be overridden. */
1421 default_valid_pointer_mode (scalar_int_mode mode
)
1423 return (mode
== ptr_mode
|| mode
== Pmode
);
/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
1429 default_ref_may_alias_errno (ao_ref
*ref
)
1431 tree base
= ao_ref_base (ref
);
1432 /* The default implementation assumes the errno location is
1433 a declaration of type int or is always accessed via a
1434 pointer to int. We assume that accesses to errno are
1435 not deliberately obfuscated (even in conforming ways). */
1436 if (TYPE_UNSIGNED (TREE_TYPE (base
))
1437 || TYPE_MODE (TREE_TYPE (base
)) != TYPE_MODE (integer_type_node
))
1439 /* The default implementation assumes an errno location
1440 declaration is never defined in the current compilation unit. */
1442 && !TREE_STATIC (base
))
1444 else if (TREE_CODE (base
) == MEM_REF
1445 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
1447 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
1448 return !pi
|| pi
->pt
.anything
|| pi
->pt
.nonlocal
;
1453 /* Return the mode for a pointer to a given ADDRSPACE,
1454 defaulting to ptr_mode for all address spaces. */
1457 default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED
)
1462 /* Return the mode for an address in a given ADDRSPACE,
1463 defaulting to Pmode for all address spaces. */
1466 default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED
)
1471 /* Named address space version of valid_pointer_mode.
1472 To match the above, the same modes apply to all address spaces. */
1475 default_addr_space_valid_pointer_mode (scalar_int_mode mode
,
1476 addr_space_t as ATTRIBUTE_UNUSED
)
1478 return targetm
.valid_pointer_mode (mode
);
1481 /* Some places still assume that all pointer or address modes are the
1482 standard Pmode and ptr_mode. These optimizations become invalid if
1483 the target actually supports multiple different modes. For now,
1484 we disable such optimizations on such targets, using this function. */
1487 target_default_pointer_address_modes_p (void)
1489 if (targetm
.addr_space
.address_mode
!= default_addr_space_address_mode
)
1491 if (targetm
.addr_space
.pointer_mode
!= default_addr_space_pointer_mode
)
1497 /* Named address space version of legitimate_address_p.
1498 By default, all address spaces have the same form. */
1501 default_addr_space_legitimate_address_p (machine_mode mode
, rtx mem
,
1503 addr_space_t as ATTRIBUTE_UNUSED
)
1505 return targetm
.legitimate_address_p (mode
, mem
, strict
);
1508 /* Named address space version of LEGITIMIZE_ADDRESS.
1509 By default, all address spaces have the same form. */
1512 default_addr_space_legitimize_address (rtx x
, rtx oldx
, machine_mode mode
,
1513 addr_space_t as ATTRIBUTE_UNUSED
)
1515 return targetm
.legitimize_address (x
, oldx
, mode
);
1518 /* The default hook for determining if one named address space is a subset of
1519 another and to return which address space to use as the common address
1523 default_addr_space_subset_p (addr_space_t subset
, addr_space_t superset
)
1525 return (subset
== superset
);
1528 /* The default hook for determining if 0 within a named address
1529 space is a valid address. */
1532 default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED
)
1537 /* The default hook for debugging the address space is to return the
1538 address space number to indicate DW_AT_address_class. */
1540 default_addr_space_debug (addr_space_t as
)
1545 /* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
1546 Don't complain about any address space. */
1549 default_addr_space_diagnose_usage (addr_space_t
, location_t
)
1554 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1555 called for targets with only a generic address space. */
1558 default_addr_space_convert (rtx op ATTRIBUTE_UNUSED
,
1559 tree from_type ATTRIBUTE_UNUSED
,
1560 tree to_type ATTRIBUTE_UNUSED
)
/* The default implementation of TARGET_HARD_REGNO_NREGS.  */
1568 default_hard_regno_nregs (unsigned int, machine_mode mode
)
1570 /* Targets with variable-sized modes must provide their own definition
1572 return CEIL (GET_MODE_SIZE (mode
).to_constant (), UNITS_PER_WORD
);
1576 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED
)
1581 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1584 default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED
,
1585 addr_space_t addrspace ATTRIBUTE_UNUSED
)
1591 default_target_option_valid_attribute_p (tree
ARG_UNUSED (fndecl
),
1592 tree
ARG_UNUSED (name
),
1593 tree
ARG_UNUSED (args
),
1594 int ARG_UNUSED (flags
))
1596 warning (OPT_Wattributes
,
1597 "target attribute is not supported on this machine");
1603 default_target_option_pragma_parse (tree
ARG_UNUSED (args
),
1604 tree
ARG_UNUSED (pop_target
))
1606 /* If args is NULL the caller is handle_pragma_pop_options (). In that case,
1607 emit no warning because "#pragma GCC pop_target" is valid on targets that
1608 do not have the "target" pragma. */
1610 warning (OPT_Wpragmas
,
1611 "%<#pragma GCC target%> is not supported for this machine");
1617 default_target_can_inline_p (tree caller
, tree callee
)
1619 tree callee_opts
= DECL_FUNCTION_SPECIFIC_TARGET (callee
);
1620 tree caller_opts
= DECL_FUNCTION_SPECIFIC_TARGET (caller
);
1622 callee_opts
= target_option_default_node
;
1624 caller_opts
= target_option_default_node
;
1626 /* If both caller and callee have attributes, assume that if the
1627 pointer is different, the two functions have different target
1628 options since build_target_option_node uses a hash table for the
1630 return callee_opts
== caller_opts
;
1633 /* If the machine does not have a case insn that compares the bounds,
1634 this means extra overhead for dispatch tables, which raises the
1635 threshold for using them. */
1638 default_case_values_threshold (void)
1640 return (targetm
.have_casesi () ? 4 : 5);
1644 default_have_conditional_execution (void)
1646 return HAVE_conditional_execution
;
/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
1652 default_libc_has_function (enum function_class fn_class
)
1654 if (fn_class
== function_c94
1655 || fn_class
== function_c99_misc
1656 || fn_class
== function_c99_math_complex
)
/* By default, assume that libc does not have a fast implementation.  */
1665 default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED
)
1671 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
1677 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED
)
1683 default_builtin_tm_load_store (tree
ARG_UNUSED (type
))
1688 /* Compute cost of moving registers to/from memory. */
1691 default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
1692 reg_class_t rclass ATTRIBUTE_UNUSED
,
1693 bool in ATTRIBUTE_UNUSED
)
1695 #ifndef MEMORY_MOVE_COST
1696 return (4 + memory_move_secondary_cost (mode
, (enum reg_class
) rclass
, in
));
1698 return MEMORY_MOVE_COST (MACRO_MODE (mode
), (enum reg_class
) rclass
, in
);
1702 /* Compute cost of moving data from a register of class FROM to one of
1706 default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED
,
1707 reg_class_t from ATTRIBUTE_UNUSED
,
1708 reg_class_t to ATTRIBUTE_UNUSED
)
1710 #ifndef REGISTER_MOVE_COST
1713 return REGISTER_MOVE_COST (MACRO_MODE (mode
),
1714 (enum reg_class
) from
, (enum reg_class
) to
);
1718 /* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS. */
1721 default_slow_unaligned_access (machine_mode
, unsigned int)
1723 return STRICT_ALIGNMENT
;
1726 /* The default implementation of TARGET_ESTIMATED_POLY_VALUE. */
1729 default_estimated_poly_value (poly_int64 x
)
1734 /* For hooks which use the MOVE_RATIO macro, this gives the legacy default
1735 behavior. SPEED_P is true if we are compiling for speed. */
1738 get_move_ratio (bool speed_p ATTRIBUTE_UNUSED
)
1740 unsigned int move_ratio
;
1742 move_ratio
= (unsigned int) MOVE_RATIO (speed_p
);
1744 #if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
1746 #else /* No cpymem patterns, pick a default. */
1747 move_ratio
= ((speed_p
) ? 15 : 3);
1753 /* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
1754 used; return FALSE if the cpymem/setmem optab should be expanded, or
1755 a call to memcpy emitted. */
1758 default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size
,
1759 unsigned int alignment
,
1760 enum by_pieces_operation op
,
1763 unsigned int max_size
= 0;
1764 unsigned int ratio
= 0;
1768 case CLEAR_BY_PIECES
:
1769 max_size
= STORE_MAX_PIECES
;
1770 ratio
= CLEAR_RATIO (speed_p
);
1772 case MOVE_BY_PIECES
:
1773 max_size
= MOVE_MAX_PIECES
;
1774 ratio
= get_move_ratio (speed_p
);
1777 max_size
= STORE_MAX_PIECES
;
1778 ratio
= SET_RATIO (speed_p
);
1780 case STORE_BY_PIECES
:
1781 max_size
= STORE_MAX_PIECES
;
1782 ratio
= get_move_ratio (speed_p
);
1784 case COMPARE_BY_PIECES
:
1785 max_size
= COMPARE_MAX_PIECES
;
1786 /* Pick a likely default, just as in get_move_ratio. */
1787 ratio
= speed_p
? 15 : 3;
1791 return by_pieces_ninsns (size
, alignment
, max_size
+ 1, op
) < ratio
;
1794 /* This hook controls code generation for expanding a memcmp operation by
1795 pieces. Return 1 for the normal pattern of compare/jump after each pair
1796 of loads, or a higher number to reduce the number of branches. */
1799 default_compare_by_pieces_branch_ratio (machine_mode
)
/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */
1812 default_print_patchable_function_entry (FILE *file
,
1813 unsigned HOST_WIDE_INT patch_area_size
,
1816 const char *nop_templ
= 0;
1818 rtx_insn
*my_nop
= make_insn_raw (gen_nop ());
1820 /* We use the template alone, relying on the (currently sane) assumption
1821 that the NOP template does not have variable operands. */
1822 code_num
= recog_memoized (my_nop
);
1823 nop_templ
= get_insn_template (code_num
, my_nop
);
1825 if (record_p
&& targetm_common
.have_named_sections
)
1828 static int patch_area_number
;
1829 section
*previous_section
= in_section
;
1830 const char *asm_op
= integer_asm_op (POINTER_SIZE_UNITS
, false);
1832 gcc_assert (asm_op
!= NULL
);
1833 patch_area_number
++;
1834 ASM_GENERATE_INTERNAL_LABEL (buf
, "LPFE", patch_area_number
);
1836 switch_to_section (get_section ("__patchable_function_entries",
1837 SECTION_WRITE
| SECTION_RELRO
, NULL
));
1838 fputs (asm_op
, file
);
1839 assemble_name_raw (file
, buf
);
1842 switch_to_section (previous_section
);
1843 ASM_OUTPUT_LABEL (file
, buf
);
1847 for (i
= 0; i
< patch_area_size
; ++i
)
1848 fprintf (file
, "\t%s\n", nop_templ
);
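/* Illustrative note (not part of the original file): this hook implements
   -fpatchable-function-entry=N[,M], which reserves N NOPs per function,
   M of them before the entry label and N-M after it; hence the "may be
   called twice per function" remark above.  */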
1852 default_profile_before_prologue (void)
1854 #ifdef PROFILE_BEFORE_PROLOGUE
1861 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1864 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED
,
1867 #ifdef PREFERRED_RELOAD_CLASS
1868 return (reg_class_t
) PREFERRED_RELOAD_CLASS (x
, (enum reg_class
) rclass
);
1874 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1877 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED
,
1883 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
1885 default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED
)
1890 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1893 default_class_likely_spilled_p (reg_class_t rclass
)
1895 return (reg_class_size
[(int) rclass
] == 1);
1898 /* The default implementation of TARGET_CLASS_MAX_NREGS. */
1901 default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED
,
1902 machine_mode mode ATTRIBUTE_UNUSED
)
1904 #ifdef CLASS_MAX_NREGS
1905 return (unsigned char) CLASS_MAX_NREGS ((enum reg_class
) rclass
,
1908 /* Targets with variable-sized modes must provide their own definition
1910 unsigned int size
= GET_MODE_SIZE (mode
).to_constant ();
1911 return (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1915 /* Determine the debugging unwind mechanism for the target. */
1917 enum unwind_info_type
1918 default_debug_unwind_info (void)
1920 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1921 /* ??? Change all users to the hook, then poison this. */
1922 #ifdef DWARF2_FRAME_INFO
1923 if (DWARF2_FRAME_INFO
)
1927 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1928 #ifdef DWARF2_DEBUGGING_INFO
1929 if (write_symbols
== DWARF2_DEBUG
|| write_symbols
== VMS_AND_DWARF2_DEBUG
)
1936 /* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
1937 must define this hook. */
1940 default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
1945 /* Determine the correct mode for a Dwarf frame register that represents
1949 default_dwarf_frame_reg_mode (int regno
)
1951 machine_mode save_mode
= reg_raw_mode
[regno
];
1953 if (targetm
.hard_regno_call_part_clobbered (NULL
, regno
, save_mode
))
1954 save_mode
= choose_hard_reg_mode (regno
, 1, true);
1958 /* To be used by targets where reg_raw_mode doesn't return the right
1959 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1962 default_get_reg_raw_mode (int regno
)
1964 /* Targets must override this hook if the underlying register is
1966 return as_a
<fixed_size_mode
> (reg_raw_mode
[regno
]);
1969 /* Return true if a leaf function should stay leaf even with profiling
1973 default_keep_leaf_when_profiled ()
1978 /* Return true if the state of option OPTION should be stored in PCH files
1979 and checked by default_pch_valid_p. Store the option's current state
1983 option_affects_pch_p (int option
, struct cl_option_state
*state
)
1985 if ((cl_options
[option
].flags
& CL_TARGET
) == 0)
1987 if ((cl_options
[option
].flags
& CL_PCH_IGNORE
) != 0)
1989 if (option_flag_var (option
, &global_options
) == &target_flags
)
1990 if (targetm
.check_pch_target_flags
)
1992 return get_option_state (&global_options
, option
, state
);
1995 /* Default version of get_pch_validity.
1996 By default, every flag difference is fatal; that will be mostly right for
1997 most targets, but completely right for very few. */
2000 default_get_pch_validity (size_t *sz
)
2002 struct cl_option_state state
;
2007 if (targetm
.check_pch_target_flags
)
2008 *sz
+= sizeof (target_flags
);
2009 for (i
= 0; i
< cl_options_count
; i
++)
2010 if (option_affects_pch_p (i
, &state
))
2013 result
= r
= XNEWVEC (char, *sz
);
2017 if (targetm
.check_pch_target_flags
)
2019 memcpy (r
, &target_flags
, sizeof (target_flags
));
2020 r
+= sizeof (target_flags
);
2023 for (i
= 0; i
< cl_options_count
; i
++)
2024 if (option_affects_pch_p (i
, &state
))
2026 memcpy (r
, state
.data
, state
.size
);
2033 /* Return a message which says that a PCH file was created with a different
2034 setting of OPTION. */
2037 pch_option_mismatch (const char *option
)
2039 return xasprintf (_("created and used with differing settings of '%s'"),
2043 /* Default version of pch_valid_p. */
2046 default_pch_valid_p (const void *data_p
, size_t len
)
2048 struct cl_option_state state
;
2049 const char *data
= (const char *)data_p
;
2052 /* -fpic and -fpie also usually make a PCH invalid. */
2053 if (data
[0] != flag_pic
)
2054 return _("created and used with different settings of %<-fpic%>");
2055 if (data
[1] != flag_pie
)
2056 return _("created and used with different settings of %<-fpie%>");
2059 /* Check target_flags. */
2060 if (targetm
.check_pch_target_flags
)
2065 memcpy (&tf
, data
, sizeof (target_flags
));
2066 data
+= sizeof (target_flags
);
2067 len
-= sizeof (target_flags
);
2068 r
= targetm
.check_pch_target_flags (tf
);
2073 for (i
= 0; i
< cl_options_count
; i
++)
2074 if (option_affects_pch_p (i
, &state
))
2076 if (memcmp (data
, state
.data
, state
.size
) != 0)
2077 return pch_option_mismatch (cl_options
[i
].opt_text
);
2085 /* Default version of cstore_mode. */
2088 default_cstore_mode (enum insn_code icode
)
2090 return as_a
<scalar_int_mode
> (insn_data
[(int) icode
].operand
[0].mode
);
2093 /* Default version of member_type_forces_blk. */
2096 default_member_type_forces_blk (const_tree
, machine_mode
)
2102 default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED
,
2103 rtx ptr ATTRIBUTE_UNUSED
,
2104 rtx bnd ATTRIBUTE_UNUSED
)
2110 default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED
,
2111 rtx addr ATTRIBUTE_UNUSED
,
2112 rtx bounds ATTRIBUTE_UNUSED
,
2113 rtx to ATTRIBUTE_UNUSED
)
2119 default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED
)
2125 default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED
,
2126 rtx bounds ATTRIBUTE_UNUSED
)
2131 /* Default version of canonicalize_comparison. */
2134 default_canonicalize_comparison (int *, rtx
*, rtx
*, bool)
2138 /* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV. */
2141 default_atomic_assign_expand_fenv (tree
*, tree
*, tree
*)
2145 #ifndef PAD_VARARGS_DOWN
2146 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
2149 /* Build an indirect-ref expression over the given TREE, which represents a
2150 piece of a va_arg() expansion. */
2152 build_va_arg_indirect_ref (tree addr
)
2154 addr
= build_simple_mem_ref_loc (EXPR_LOCATION (addr
), addr
);
2158 /* The "standard" implementation of va_arg: read the value from the
2159 current (padded) address and increment by the (padded) size. */
2162 std_gimplify_va_arg_expr (tree valist
, tree type
, gimple_seq
*pre_p
,
2165 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
2166 unsigned HOST_WIDE_INT align
, boundary
;
2169 /* All of the alignment and movement below is for args-grow-up machines.
2170 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
2171 implement their own specialized gimplify_va_arg_expr routines. */
2172 if (ARGS_GROW_DOWNWARD
)
2175 indirect
= pass_va_arg_by_reference (type
);
2177 type
= build_pointer_type (type
);
2179 if (targetm
.calls
.split_complex_arg
2180 && TREE_CODE (type
) == COMPLEX_TYPE
2181 && targetm
.calls
.split_complex_arg (type
))
2183 tree real_part
, imag_part
;
2185 real_part
= std_gimplify_va_arg_expr (valist
,
2186 TREE_TYPE (type
), pre_p
, NULL
);
2187 real_part
= get_initialized_tmp_var (real_part
, pre_p
, NULL
);
2189 imag_part
= std_gimplify_va_arg_expr (unshare_expr (valist
),
2190 TREE_TYPE (type
), pre_p
, NULL
);
2191 imag_part
= get_initialized_tmp_var (imag_part
, pre_p
, NULL
);
2193 return build2 (COMPLEX_EXPR
, type
, real_part
, imag_part
);
2196 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
2197 boundary
= targetm
.calls
.function_arg_boundary (TYPE_MODE (type
), type
);
2199 /* When we align parameter on stack for caller, if the parameter
2200 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
2201 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
2202 here with caller. */
2203 if (boundary
> MAX_SUPPORTED_STACK_ALIGNMENT
)
2204 boundary
= MAX_SUPPORTED_STACK_ALIGNMENT
;
2206 boundary
/= BITS_PER_UNIT
;
2208 /* Hoist the valist value into a temporary for the moment. */
2209 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
2211 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
2212 requires greater alignment, we must perform dynamic alignment. */
2213 if (boundary
> align
2214 && !TYPE_EMPTY_P (type
)
2215 && !integer_zerop (TYPE_SIZE (type
)))
2217 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
2218 fold_build_pointer_plus_hwi (valist_tmp
, boundary
- 1));
2219 gimplify_and_add (t
, pre_p
);
2221 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
2222 fold_build2 (BIT_AND_EXPR
, TREE_TYPE (valist
),
2224 build_int_cst (TREE_TYPE (valist
), -boundary
)));
2225 gimplify_and_add (t
, pre_p
);
2230 /* If the actual alignment is less than the alignment of the type,
2231 adjust the type accordingly so that we don't assume strict alignment
2232 when dereferencing the pointer. */
2233 boundary
*= BITS_PER_UNIT
;
2234 if (boundary
< TYPE_ALIGN (type
))
2236 type
= build_variant_type_copy (type
);
2237 SET_TYPE_ALIGN (type
, boundary
);
2240 /* Compute the rounded size of the type. */
2241 type_size
= arg_size_in_bytes (type
);
2242 rounded_size
= round_up (type_size
, align
);
2244 /* Reduce rounded_size so it's sharable with the postqueue. */
2245 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
2249 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
2251 /* Small args are padded downward. */
2252 t
= fold_build2_loc (input_location
, GT_EXPR
, sizetype
,
2253 rounded_size
, size_int (align
));
2254 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
2255 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
2256 addr
= fold_build_pointer_plus (addr
, t
);
2259 /* Compute new value for AP. */
2260 t
= fold_build_pointer_plus (valist_tmp
, rounded_size
);
2261 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
2262 gimplify_and_add (t
, pre_p
);
2264 addr
= fold_convert (build_pointer_type (type
), addr
);
2267 addr
= build_va_arg_indirect_ref (addr
);
2269 return build_va_arg_indirect_ref (addr
);
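/* Worked example (not part of the original file): when the argument type
   needs BOUNDARY-byte alignment greater than PARM_BOUNDARY, the two
   MODIFY_EXPRs built above round the va_list pointer up with the usual
   idiom

     ap = (ap + boundary - 1) & -boundary;

   e.g. ap == 0x1004 with boundary == 8 yields 0x1008.  */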
2272 /* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
2273 not support nested low-overhead loops. */
2276 can_use_doloop_if_innermost (const widest_int
&, const widest_int
&,
2277 unsigned int loop_depth
, bool)
2279 return loop_depth
== 1;
2282 /* Default implementation of TARGET_OPTAB_SUPPORTED_P. */
2285 default_optab_supported_p (int, machine_mode
, machine_mode
, optimization_type
)
2290 /* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST. */
2293 default_max_noce_ifcvt_seq_cost (edge e
)
2295 bool predictable_p
= predictable_edge_p (e
);
2297 enum compiler_param param
2299 ? PARAM_MAX_RTL_IF_CONVERSION_PREDICTABLE_COST
2300 : PARAM_MAX_RTL_IF_CONVERSION_UNPREDICTABLE_COST
);
2302 /* If we have a parameter set, use that, otherwise take a guess using
2304 if (global_options_set
.x_param_values
[param
])
2305 return PARAM_VALUE (param
);
2307 return BRANCH_COST (true, predictable_p
) * COSTS_N_INSNS (3);
2310 /* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION. */
2313 default_min_arithmetic_precision (void)
2315 return WORD_REGISTER_OPERATIONS
? BITS_PER_WORD
: BITS_PER_UNIT
;
2318 /* Default implementation of TARGET_C_EXCESS_PRECISION. */
2320 enum flt_eval_method
2321 default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED
)
2323 return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
;
2326 /* Default implementation for
2327 TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE. */
2329 default_stack_clash_protection_alloca_probe_range (void)
2334 /* The default implementation of TARGET_EARLY_REMAT_MODES. */
2337 default_select_early_remat_modes (sbitmap
)
2341 /* The default implementation of TARGET_PREFERRED_ELSE_VALUE. */
2344 default_preferred_else_value (unsigned, tree type
, unsigned, tree
*)
2346 return build_zero_cst (type
);
2349 /* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE. */
2351 default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED
)
2353 #ifdef HAVE_speculation_barrier
2354 return active
? HAVE_speculation_barrier
: true;
2359 /* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
2360 that can be used on targets that never have speculative execution. */
2362 speculation_safe_value_not_needed (bool active
)
2367 /* Default implementation of the speculation-safe-load builtin. This
2368 implementation simply copies val to result and generates a
2369 speculation_barrier insn, if such a pattern is defined. */
2371 default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED
,
2372 rtx result
, rtx val
,
2373 rtx failval ATTRIBUTE_UNUSED
)
2375 emit_move_insn (result
, val
);
2377 #ifdef HAVE_speculation_barrier
2378 /* Assume the target knows what it is doing: if it defines a
2379 speculation barrier, but it is not enabled, then assume that one
2381 if (HAVE_speculation_barrier
)
2382 emit_insn (gen_speculation_barrier ());
2389 default_remove_extra_call_preserved_regs (rtx_insn
*, HARD_REG_SET
*)
2393 #include "gt-targhooks.h"