/* Default target hook functions.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */

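/* For illustration only: a backend typically overrides one of these hooks
   in its <target>.c file by redefining the corresponding TARGET_* macro
   before TARGET_INITIALIZER is expanded, e.g.

     #undef TARGET_RETURN_IN_MEMORY
     #define TARGET_RETURN_IN_MEMORY my_target_return_in_memory

     struct gcc_target targetm = TARGET_INITIALIZER;

   where my_target_return_in_memory is a hypothetical backend function with
   the same signature as the default hook defined below.  */
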
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "diagnostic-core.h"
#include "target.h"
#include "tm_p.h"
#include "regs.h"
#include "reload.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "intl.h"
#include "opts.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "stringpool.h"
#include "tree-ssanames.h"

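/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P: defer to the
   old GO_IF_LEGITIMATE_ADDRESS macro if the target still defines it.  */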
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

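/* The default implementation of TARGET_UNSPEC_MAY_TRAP_P: the UNSPEC may
   trap if it contains floating-point arithmetic and -ftrapping-math is in
   effect, or if any of its operands may trap.  */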
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}

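/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE; the
   _always_promote variant below applies promote_mode unconditionally.  */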
machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

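/* The default implementation of TARGET_RETURN_IN_MEMORY: return in memory
   exactly those values whose mode is BLKmode.  */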
bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
					 rtx *offset ATTRIBUTE_UNUSED,
					 machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
				machine_mode mode ATTRIBUTE_UNUSED,
				tree type ATTRIBUTE_UNUSED,
				int *pretend_arg_size ATTRIBUTE_UNUSED,
				int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

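/* The default choices for the EH return filter mode, the libgcc comparison
   return and shift count modes, and the unwind word mode.  */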
machine_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

machine_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

machine_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

machine_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
			   machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
	bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}

/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (machine_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case SFmode:
#endif
#ifdef HAVE_DFmode
    case DFmode:
#endif
#ifdef HAVE_XFmode
    case XFmode:
#endif
#ifdef HAVE_TFmode
    case TFmode:
#endif
      return true;

    default:
      return false;
    }
}

/* Make some target macros useable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise return
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g. PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}

/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
				     tree type_out ATTRIBUTE_UNUSED,
				     tree type_in ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Vectorized conversion.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
				       tree dest_type ATTRIBUTE_UNUSED,
				       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  unsigned elements;

  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      elements = TYPE_VECTOR_SUBPARTS (vectype);
      return elements / 2 + 1;

    default:
      gcc_unreachable ();
    }
}

/* Reciprocal.  */

tree
default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
			    bool md_fn ATTRIBUTE_UNUSED,
			    bool sqrt ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

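/* Generic hooks that take a CUMULATIVE_ARGS pointer, a machine mode, a type
   and a bool, and return a fixed value.  */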
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}

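/* The default implementations of TARGET_FUNCTION_ARG_ADVANCE,
   TARGET_FUNCTION_ARG and TARGET_FUNCTION_INCOMING_ARG; targets are
   expected to override these, so the defaults simply abort.  */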
void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
			      machine_mode mode ATTRIBUTE_UNUSED,
			      const_tree type ATTRIBUTE_UNUSED,
			      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      const_tree type ATTRIBUTE_UNUSED,
		      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
			       machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED,
			       bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

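/* The default implementation of TARGET_FUNCTION_VALUE: defer to the old
   FUNCTION_VALUE macro if the target still defines it.  */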
rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (mode);
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

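/* The default implementation of TARGET_STATIC_CHAIN: use
   STATIC_CHAIN_INCOMING_REGNUM or STATIC_CHAIN_REGNUM if the target defines
   them; otherwise nested functions are not supported.  */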
rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

int
default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
			  tree funtype ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  return 0;
}

reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl)
{
  return cl;
}

extern bool
default_lra_p (void)
{
  return false;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}

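/* The default implementation of TARGET_SECONDARY_RELOAD: map the old
   SECONDARY_INPUT_RELOAD_CLASS and SECONDARY_OUTPUT_RELOAD_CLASS macros
   onto the hook interface, consulting the reload_in/reload_out optabs for
   a secondary reload pattern.  */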
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

	}
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}

/* Default to natural alignment for vector types.  */
HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  return tree_to_shwi (TYPE_SIZE (type));
}

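/* By default, assume that the desired vector alignment is reachable only
   for accesses that are not packed and whose type is no wider than a
   pointer.  */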
bool
default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
{
  if (is_packed)
    return false;

  /* Assuming that types whose size is > pointer-size are not guaranteed to be
     naturally aligned.  */
  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
    return false;

  /* Assuming that types whose size is <= pointer-size
     are naturally aligned.  */
  return true;
}

/* By default, assume that a target supports any factor of misaligned
   memory access if it supports the movmisalign pattern.
   is_packed is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
					     const_tree type ATTRIBUTE_UNUSED,
					     int misalignment ATTRIBUTE_UNUSED,
					     bool is_packed ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
  return word_mode;
}

/* By default only the size derived from the preferred vector mode
   is tried.  */

unsigned int
default_autovectorize_vector_sizes (void)
{
  return 0;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

void *
default_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}

/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
		       struct _stmt_vec_info *stmt_info, int misalign,
		       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
								misalign);
  /* Statements in an inner loop relative to the loop being
     vectorized are weighted more heavily.  The value here is
     arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}

/* By default, the cost model just returns the accumulated costs.  */

void
default_finish_cost (void *data, unsigned *prologue_cost,
		     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}

/* Free the cost data.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}

/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (machine_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}

/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
   for the generic address space only.  */

machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
   for the generic address space only.  */

machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return Pmode;
}

/* Named address space version of valid_pointer_mode.  */

bool
default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return (mode == targetm.addr_space.pointer_mode (as)
	    || mode == targetm.addr_space.address_mode (as));

  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}

/* Named address space version of legitimate_address_p.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    gcc_unreachable ();

  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx,
				       machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return x;

  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  warning (OPT_Wpragmas,
	   "#pragma GCC target is not supported for this machine");

  return false;
}

bool
default_target_can_inline_p (tree caller, tree callee)
{
  bool ret = false;
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);

  /* If callee has no option attributes, then it is ok to inline.  */
  if (!callee_opts)
    ret = true;

  /* If caller has no option attributes, but callee does then it is not ok to
     inline.  */
  else if (!caller_opts)
    ret = false;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  else
    ret = (callee_opts == caller_opts);

  return ret;
}

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

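/* The default implementation of TARGET_HAVE_CONDITIONAL_EXECUTION.  */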
bool
default_have_conditional_execution (void)
{
#ifdef HAVE_conditional_execution
  return HAVE_conditional_execution;
#else
  return false;
#endif
}

/* By default we assume that c99 functions are present in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

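/* By default, no special builtins are provided for transactional-memory
   loads and stores of the given type.  */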
tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
#endif
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behaviour.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
  move_ratio = 2;
#else /* No movmem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}

/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the movmem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    }

  return move_by_pieces_ninsns (size, alignment, max_size + 1) < ratio;
}

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}

/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}

/* The default implementation of TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}

/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}

/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}

/* The default implementation of TARGET_CLASS_MAX_NREGS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
#else
  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
#endif
}

/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  return UI_NONE;
}

/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, true);
  return save_mode;
}

/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

machine_mode
default_get_reg_raw_mode (int regno)
{
  return reg_raw_mode[regno];
}

/* Return true if a leaf function should stay leaf even with profiling
   enabled.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}

/* Return true if the state of option OPTION should be stored in PCH files
   and checked by default_pch_valid_p.  Store the option's current state
   in STATE if so.  */

static inline bool
option_affects_pch_p (int option, struct cl_option_state *state)
{
  if ((cl_options[option].flags & CL_TARGET) == 0)
    return false;
  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
    return false;
  if (option_flag_var (option, &global_options) == &target_flags)
    if (targetm.check_pch_target_flags)
      return false;
  return get_option_state (&global_options, option, state);
}

/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of -fpic");
  if (data[1] != flag_pie)
    return _("created and used with different settings of -fpie");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
	len -= state.size;
      }

  return NULL;
}

/* Default version of cstore_mode.  */

machine_mode
default_cstore_mode (enum insn_code icode)
{
  return insn_data[(int) icode].operand[0].mode;
}

/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

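/* Default implementations of the pointer-bounds (CHKP) hooks.  Targets
   without hardware bounds support should never reach the load/store
   variants, which simply abort.  */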
rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
			     rtx ptr ATTRIBUTE_UNUSED,
			     rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      rtx bounds ATTRIBUTE_UNUSED,
			      rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
			       rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}

tree
default_chkp_bound_type (void)
{
  tree res = make_node (POINTER_BOUNDS_TYPE);
  TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
  TYPE_NAME (res) = get_identifier ("__bounds_type");
  SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
  layout_type (res);
  return res;
}

enum machine_mode
default_chkp_bound_mode (void)
{
  return VOIDmode;
}

tree
default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

rtx
default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
				    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
				    bool outgoing ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

tree
default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

int
default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
				tree lb ATTRIBUTE_UNUSED,
				tree ub ATTRIBUTE_UNUSED,
				tree *stmts ATTRIBUTE_UNUSED)
{
  return 0;
}

void
default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
				      enum machine_mode mode ATTRIBUTE_UNUSED,
				      tree type ATTRIBUTE_UNUSED,
				      int *pretend_arg_size ATTRIBUTE_UNUSED,
				      int second_time ATTRIBUTE_UNUSED)
{
}

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

#include "gt-targhooks.h"