ASM_OUTPUT_ALIGNED_BSS. */
void
-x86_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
-                        const char *name, unsigned HOST_WIDE_INT size,
-                        int align)
+x86_output_aligned_bss (FILE *file, tree decl, const char *name,
+                        unsigned HOST_WIDE_INT size, int align)
{
if ((ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_MEDIUM_PIC)
&& size > (unsigned int)ix86_section_threshold)
static tree
ix86_handle_cconv_attribute (tree *node, tree name,
                             tree args,
-                            int flags ATTRIBUTE_UNUSED,
+                            int,
                             bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_TYPE
attributes that we expect elsewhere. */
static tree
-ix86_handle_tm_regparm_attribute (tree *node, tree name ATTRIBUTE_UNUSED,
-                                  tree args ATTRIBUTE_UNUSED,
+ix86_handle_tm_regparm_attribute (tree *node, tree, tree,
                                  int flags, bool *no_add_attrs)
{
tree alt;
static bool
ix86_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
-                        const_tree type, bool named ATTRIBUTE_UNUSED)
+                        const_tree type, bool)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
}
static rtx
-ix86_function_value (const_tree valtype, const_tree fntype_or_decl,
-                     bool outgoing ATTRIBUTE_UNUSED)
+ix86_function_value (const_tree valtype, const_tree fntype_or_decl, bool)
{
enum machine_mode mode, orig_mode;
/* Return true iff type is returned in memory. */
static bool
-ix86_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
+ix86_return_in_memory (const_tree type, const_tree fntype)
{
#ifdef SUBTARGET_RETURN_IN_MEMORY
return SUBTARGET_RETURN_IN_MEMORY (type, fntype);
static void
ix86_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
-                             tree type, int *pretend_size ATTRIBUTE_UNUSED,
-                             int no_rtl)
+                             tree type, int *, int no_rtl)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
CUMULATIVE_ARGS next_cum;
/* Reset from the function's potential modifications. */
static void
-ix86_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
-                               HOST_WIDE_INT size ATTRIBUTE_UNUSED)
+ix86_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED, HOST_WIDE_INT)
{
if (pic_offset_table_rtx)
SET_REGNO (pic_offset_table_rtx, REAL_PIC_OFFSET_TABLE_REGNUM);
requires to two regs - that would mean more pseudos with longer
lifetimes. */
static int
-ix86_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
-                   addr_space_t as ATTRIBUTE_UNUSED,
-                   bool speed ATTRIBUTE_UNUSED)
+ix86_address_cost (rtx x, enum machine_mode, addr_space_t, bool)
{
struct ix86_address parts;
int cost = 1;
satisfies CONSTANT_P. */
static bool
-ix86_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
+ix86_legitimate_constant_p (enum machine_mode, rtx x)
{
switch (GET_CODE (x))
{
0 if it should not. */
bool
-ix86_legitimize_reload_address (rtx x,
-                                enum machine_mode mode ATTRIBUTE_UNUSED,
-                                int opnum, int type,
-                                int ind_levels ATTRIBUTE_UNUSED)
+ix86_legitimize_reload_address (rtx x, enum machine_mode, int opnum, int type,
+                                int)
{
/* Reload can generate:
be recognized. */
static bool
-ix86_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
-                           rtx addr, bool strict)
+ix86_legitimate_address_p (enum machine_mode, rtx addr, bool strict)
{
struct ix86_address parts;
rtx base, index, disp;
See comments by legitimize_pic_address in i386.c for details. */
static rtx
-ix86_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
-                         enum machine_mode mode)
+ix86_legitimize_address (rtx x, rtx, enum machine_mode mode)
{
int changed = 0;
unsigned log;
pattern. */
static int
-get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
+get_some_local_dynamic_name_1 (rtx *px, void *)
{
rtx x = *px;
/* Check if a 256bit AVX register is referenced inside of EXP. */
static int
-ix86_check_avx256_register (rtx *pexp, void *data ATTRIBUTE_UNUSED)
+ix86_check_avx256_register (rtx *pexp, void *)
{
rtx exp = *pexp;
/* Check if a 256bit AVX register is referenced in stores. */
static void
-ix86_check_avx256_stores (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
+ix86_check_avx256_stores (rtx dest, const_rtx, void *data)
{
if (ix86_check_avx256_register (&dest, NULL))
{
}
static int
-ix86_mode_priority (int entity ATTRIBUTE_UNUSED, int n)
+ix86_mode_priority (int, int n)
{
return n;
}
appropriate constraints. */
bool
-ix86_unary_operator_ok (enum rtx_code code ATTRIBUTE_UNUSED,
-                        enum machine_mode mode ATTRIBUTE_UNUSED,
+ix86_unary_operator_ok (enum rtx_code,
+                        enum machine_mode,
                        rtx operands[2])
{
/* If one of operands is memory, source and destination must match. */
/* Not used, but eases macroization of patterns. */
void
-ix86_expand_convert_uns_sixf_sse (rtx target ATTRIBUTE_UNUSED,
-                                  rtx input ATTRIBUTE_UNUSED)
+ix86_expand_convert_uns_sixf_sse (rtx, rtx)
{
gcc_unreachable ();
}
Return the appropriate mode to use. */
enum machine_mode
-ix86_fp_compare_mode (enum rtx_code code ATTRIBUTE_UNUSED)
+ix86_fp_compare_mode (enum rtx_code)
{
/* ??? In order to make all comparisons reversible, we do all comparisons
non-trapping when compiling for IEEE. Once gcc is able to distinguish
(2 bytes, vs. 3 for fnstsw+sahf and at least 5 for fnstsw+test). */
enum ix86_fpcmp_strategy
-ix86_fp_comparison_strategy (enum rtx_code code ATTRIBUTE_UNUSED)
+ix86_fp_comparison_strategy (enum rtx_code)
{
/* Do fcomi/sahf based test when profitable. */
/* Prepare for scheduling pass. */
static void
-ix86_sched_init_global (FILE *dump ATTRIBUTE_UNUSED,
-                        int verbose ATTRIBUTE_UNUSED,
-                        int max_uid ATTRIBUTE_UNUSED)
+ix86_sched_init_global (FILE *, int, int)
{
/* Install scheduling hooks for current CPU. Some of these hooks are used
in time-critical parts of the scheduler, so we only set them up when
/* Return the ix86 builtin for CODE. */
static tree
-ix86_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
+ix86_builtin_decl (unsigned code, bool)
{
if (code >= IX86_BUILTIN_MAX)
return error_mark_node;
reciprocal of the function, or NULL_TREE if not available. */
static tree
-ix86_builtin_reciprocal (unsigned int fn, bool md_fn,
-                         bool sqrt ATTRIBUTE_UNUSED)
+ix86_builtin_reciprocal (unsigned int fn, bool md_fn, bool)
{
if (! (TARGET_SSE_MATH && !optimize_insn_for_size_p ()
&& flag_finite_math_only && !flag_trapping_math
static tree
ix86_handle_callee_pop_aggregate_return (tree *node, tree name,
                                         tree args,
-                                        int flags ATTRIBUTE_UNUSED,
+                                        int,
                                         bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_TYPE
/* Handle a "ms_abi" or "sysv" attribute; arguments as in
struct attribute_spec.handler. */
static tree
-ix86_handle_abi_attribute (tree *node, tree name,
-                           tree args ATTRIBUTE_UNUSED,
-                           int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
+ix86_handle_abi_attribute (tree *node, tree name, tree, int,
+                           bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_TYPE
&& TREE_CODE (*node) != METHOD_TYPE
/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
struct attribute_spec.handler. */
static tree
-ix86_handle_struct_attribute (tree *node, tree name,
-                              tree args ATTRIBUTE_UNUSED,
-                              int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
+ix86_handle_struct_attribute (tree *node, tree name, tree, int,
+                              bool *no_add_attrs)
{
tree *type = NULL;
if (DECL_P (*node))
}
static tree
-ix86_handle_fndecl_attribute (tree *node, tree name,
-                              tree args ATTRIBUTE_UNUSED,
-                              int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
+ix86_handle_fndecl_attribute (tree *node, tree name, tree, int,
+                              bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_DECL)
{
/* Determine whether x86_output_mi_thunk can succeed. */
static bool
-x86_can_output_mi_thunk (const_tree thunk ATTRIBUTE_UNUSED,
-                         HOST_WIDE_INT delta ATTRIBUTE_UNUSED,
-                         HOST_WIDE_INT vcall_offset, const_tree function)
+x86_can_output_mi_thunk (const_tree, HOST_WIDE_INT, HOST_WIDE_INT vcall_offset,
+                         const_tree function)
{
/* 64-bit can handle anything. */
if (TARGET_64BIT)
*(*this + vcall_offset) should be added to THIS. */
static void
-x86_output_mi_thunk (FILE *file,
-                     tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
+x86_output_mi_thunk (FILE *file, tree, HOST_WIDE_INT delta,
                     HOST_WIDE_INT vcall_offset, tree function)
{
rtx this_param = x86_this_parameter (function);
/* Return nonzero when P points to register encoded via REX prefix.
Called via for_each_rtx. */
static int
-extended_reg_mentioned_1 (rtx *p, void *data ATTRIBUTE_UNUSED)
+extended_reg_mentioned_1 (rtx *p, void *)
{
unsigned int regno;
if (!REG_P (*p))
with the old cc0-based compiler. */
static tree
-ix86_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED,
-                      tree inputs ATTRIBUTE_UNUSED,
-                      tree clobbers)
+ix86_md_asm_clobbers (tree, tree, tree clobbers)
{
clobbers = tree_cons (NULL_TREE, build_string (5, "flags"),
clobbers);
/* Implement targetm.vectorize.builtin_vectorization_cost. */
static int
ix86_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
-                                 tree vectype,
-                                 int misalign ATTRIBUTE_UNUSED)
+                                 tree vectype, int)
{
unsigned elements;
enabled for other processors. */
static int
-ix86_reassociation_width (unsigned int opc ATTRIBUTE_UNUSED,
-                           enum machine_mode mode)
+ix86_reassociation_width (unsigned int, enum machine_mode mode)
{
int res = 1;
/* Implement targetm.vectorize.init_cost. */
static void *
-ix86_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
+ix86_init_cost (struct loop *)
{
unsigned *cost = XNEWVEC (unsigned, 3);
cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
/* Handle a "shared" attribute;
arguments as in struct attribute_spec.handler. */
tree
-ix86_handle_shared_attribute (tree *node, tree name,
-                              tree args ATTRIBUTE_UNUSED,
-                              int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
+ix86_handle_shared_attribute (tree *node, tree name, tree, int,
+                              bool *no_add_attrs)
{
if (TREE_CODE (*node) != VAR_DECL)
{
/* Handle a "selectany" attribute;
arguments as in struct attribute_spec.handler. */
tree
-ix86_handle_selectany_attribute (tree *node, tree name,
-                                 tree args ATTRIBUTE_UNUSED,
-                                 int flags ATTRIBUTE_UNUSED,
+ix86_handle_selectany_attribute (tree *node, tree name, tree, int,
                                 bool *no_add_attrs)
{
/* The attribute applies only to objects that are initialized and have
user-specified visibility attributes. */
void
-i386_pe_assemble_visibility (tree decl,
-                             int vis ATTRIBUTE_UNUSED)
+i386_pe_assemble_visibility (tree decl, int)
{
if (!decl
|| !lookup_attribute ("visibility", DECL_ATTRIBUTES (decl)))
a file stream. */
tree
-i386_pe_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
+i386_pe_mangle_assembler_name (const char *name)
{
const char *skipped = name + (*name == '*' ? 1 : 0);
const char *stripped = targetm.strip_name_encoding (skipped);
#define SECTION_PE_SHARED SECTION_MACH_DEP
unsigned int
-i386_pe_section_type_flags (tree decl, const char *name, int reloc)
+i386_pe_section_type_flags (tree decl, const char *, int reloc)
{
unsigned int flags;
void
i386_pe_asm_output_aligned_decl_common (FILE *stream, tree decl,
                                        const char *name, HOST_WIDE_INT size,
-                                       HOST_WIDE_INT align ATTRIBUTE_UNUSED)
+                                       HOST_WIDE_INT align)
{
HOST_WIDE_INT rounded;
}
void
-i386_pe_end_function (FILE *f, const char *name ATTRIBUTE_UNUSED,
-                      tree decl ATTRIBUTE_UNUSED)
+i386_pe_end_function (FILE *f, const char *, tree)
{
i386_pe_seh_fini (f);
}
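
The change applied in every hunk above is the same one: now that GCC is built as C++, a parameter the function body never references can simply be left unnamed, which makes the ATTRIBUTE_UNUSED annotation (a wrapper around __attribute__ ((__unused__)) from ansidecl.h) redundant.  A minimal stand-alone sketch of the two styles, using hypothetical functions that are not part of this patch:

/* Old style: keep the parameter name and silence -Wunused-parameter
   explicitly.  */
static int
old_style_cost (int regno, int mode __attribute__ ((__unused__)))
{
  return regno * 2;
}

/* New style: valid C++; the unused parameter is simply left unnamed.  */
static int
new_style_cost (int regno, int)
{
  return regno * 2;
}

Parameters that are still referenced somewhere, even only conditionally (for example fntype in ix86_return_in_memory, used under #ifdef SUBTARGET_RETURN_IN_MEMORY), keep their names.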