/* Default target hook functions.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
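
/* A minimal sketch of step 1, kept under "#if 0" because it is
   illustration only: FOO_ALIGN and default_foo_alignment are
   hypothetical names, not real GCC macros or hooks.  The default hook
   defers to the old macro while a target still defines it, so
   unconverted targets keep their existing behavior; this is the same
   shape as default_legitimate_address_p and default_print_operand
   below.  */
#if 0
unsigned int
default_foo_alignment (machine_mode mode)
{
#ifdef FOO_ALIGN
  /* Step 1: wrap the legacy macro so MI code can call the hook.  */
  return FOO_ALIGN (mode);
#else
  /* Step 5: migrated targets override the hook instead, and the
     macro can eventually be poisoned.  */
  return GET_MODE_ALIGNMENT (mode);
#endif
}
#endif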

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "optabs.h"
#include "regs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "flags.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "common/common-target.h"
#include "reload.h"
#include "intl.h"
#include "opts.h"
#include "gimplify.h"
#include "predict.h"
#include "params.h"
#include "real.h"
#include "langhooks.h"
#include "sbitmap.h"

bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
        return 1;
    }

  return 0;
}

machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                               machine_mode mode,
                               int *punsignedp ATTRIBUTE_UNUSED,
                               const_tree funtype ATTRIBUTE_UNUSED,
                               int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

machine_mode
default_promote_function_mode_always_promote (const_tree type,
                                              machine_mode mode,
                                              int *punsignedp,
                                              const_tree funtype ATTRIBUTE_UNUSED,
                                              int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
                          const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
                                         machine_mode)
{
  return false;
}

bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t,
                                const function_arg_info &, int *, int)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
          != default_setup_incoming_varargs);
}

scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
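
/* A worked example of the computation above, assuming a typical LP64
   target where sizeof (size_t) is 8: for an array of a type with
   4-byte alignment the cookie is max (8, 4) = 8 bytes, while for an
   over-aligned type with 16-byte alignment it is max (8, 16) = 16
   bytes, keeping the array elements that follow the cookie aligned.  */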

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
                                           const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg.mode, arg.type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
                          machine_mode mode ATTRIBUTE_UNUSED,
                          const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
                               machine_mode /*mode*/,
                               rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}

/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
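
/* An illustration of the mangling above, assuming a target whose
   user_label_prefix is "_": a source-level name "foo" yields the
   assembler name "_foo", while a name written as "*foo" (the '*'
   marks it as already being an assembler name) has the '*' skipped
   and is returned as plain "foo" with no prefix added.  */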

/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
        return true;
      if (precision == SHORT_TYPE_SIZE)
        return true;
      if (precision == INT_TYPE_SIZE)
        return true;
      if (precision == LONG_TYPE_SIZE)
        return true;
      if (precision == LONG_LONG_TYPE_SIZE)
        return true;
      if (precision == 2 * BITS_PER_WORD)
        return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
        return true;
      if (precision == DOUBLE_TYPE_SIZE)
        return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
        return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case E_SFmode:
#endif
#ifdef HAVE_DFmode
    case E_DFmode:
#endif
#ifdef HAVE_XFmode
    case E_XFmode:
#endif
#ifdef HAVE_TFmode
    case E_TFmode:
#endif
      return true;

    default:
      return false;
    }
}

/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  */
opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
        {
        case 32:
#ifdef HAVE_DFmode
          cand1 = DFmode;
#endif
          break;

        case 64:
#ifdef HAVE_XFmode
          cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
          cand2 = TFmode;
#endif
          break;

        case 128:
          break;

        default:
          /* Those are the only valid _FloatNx types.  */
          gcc_unreachable ();
        }
      if (cand1.exists (&mode)
          && REAL_MODE_FORMAT (mode)->ieee_bits > n
          && targetm.scalar_mode_supported_p (mode)
          && targetm.libgcc_floating_mode_supported_p (mode))
        return cand1;
      if (cand2.exists (&mode)
          && REAL_MODE_FORMAT (mode)->ieee_bits > n
          && targetm.scalar_mode_supported_p (mode)
          && targetm.libgcc_floating_mode_supported_p (mode))
        return cand2;
    }
  else
    {
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
        {
        case 16:
          /* Always enable _Float16 if we have basic support for the mode.
             Targets can control the range and precision of operations on
             the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
          cand = HFmode;
#endif
          break;

        case 32:
#ifdef HAVE_SFmode
          cand = SFmode;
#endif
          break;

        case 64:
#ifdef HAVE_DFmode
          cand = DFmode;
#endif
          break;

        case 128:
#ifdef HAVE_TFmode
          cand = TFmode;
#endif
          break;

        default:
          break;
        }
      if (cand.exists (&mode)
          && REAL_MODE_FORMAT (mode)->ieee_bits == n
          && targetm.scalar_mode_supported_p (mode)
          && targetm.libgcc_floating_mode_supported_p (mode))
        return cand;
    }
  return opt_scalar_float_mode ();
}
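
/* For illustration, on an assumed x86_64-style target: _Float32 and
   _Float64 map to SFmode and DFmode, whose IEEE widths match N
   exactly; _Float32x maps to DFmode (the first candidate with
   ieee_bits greater than 32); and _Float64x maps to XFmode when the
   80-bit extended mode exists and both the target and libgcc
   support it.  */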

/* Define this to return true if the _FloatN and _FloatNx built-in functions
   should implicitly enable the built-in function without the __builtin_ prefix
   in addition to the normal built-in function with the __builtin_ prefix.  The
   default is to only enable built-in functions without the __builtin_ prefix
   for the GNU C language.  The argument FUNC is the enum built_in_function
   id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}

/* Make some target macros usable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}

/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}

/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Vectorized conversion.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
                                       tree dest_type ATTRIBUTE_UNUSED,
                                       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype,
                                    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}

/* Reciprocal.  */

tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
        cumulative_args_t ca ATTRIBUTE_UNUSED,
        machine_mode mode ATTRIBUTE_UNUSED,
        const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
        cumulative_args_t ca ATTRIBUTE_UNUSED,
        machine_mode mode ATTRIBUTE_UNUSED,
        const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
                                          const function_arg_info &)
{
  return false;
}

int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
                                     const function_arg_info &)
{
  return 0;
}

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
                                tree ATTRIBUTE_UNUSED)
{
}

void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

/* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}

/* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
   upward, but pad short args downward on big-endian machines.  */

pad_direction
default_function_arg_padding (machine_mode mode, const_tree type)
{
  if (!BYTES_BIG_ENDIAN)
    return PAD_UPWARD;

  unsigned HOST_WIDE_INT size;
  if (mode == BLKmode)
    {
      if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return PAD_UPWARD;
      size = int_size_in_bytes (type);
    }
  else
    /* Targets with variable-sized modes must override this hook
       and handle variable-sized modes explicitly.  */
    size = GET_MODE_SIZE (mode).to_constant ();

  if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    return PAD_DOWNWARD;

  return PAD_UPWARD;
}
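
/* A worked example of the rule above, assuming a big-endian target
   with PARM_BOUNDARY of 32 bits: a 1-byte argument is shorter than
   its 4-byte slot (1 < 32 / 8), so it is padded downward and the byte
   lands at the high-address end of the slot, where the least
   significant byte of a big-endian word lives.  Larger arguments are
   padded upward as on little-endian targets.  */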

rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
                               const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
                                     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
        const_tree typelist ATTRIBUTE_UNUSED,
        const_tree funcdecl ATTRIBUTE_UNUSED,
        const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
                      VAR_DECL, get_identifier ("__stack_chk_guard"),
                      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
         current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
                      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
                                   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
                        const_tree fn_decl_or_type,
                        bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
                       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
        issued_error = true;
        sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
                         rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}

reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
                                         reg_class_t cl,
                                         reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}

extern bool
default_lra_p (void)
{
  return true;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                          reg_class_t reload_class_i ATTRIBUTE_UNUSED,
                          machine_mode reload_mode ATTRIBUTE_UNUSED,
                          secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
                                           MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
                                            MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
        = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
                                reload_mode);

      if (icode != CODE_FOR_nothing
          && !insn_operand_matches (icode, in_p, x))
        icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
        {
          const char *insn_constraint, *scratch_constraint;
          enum reg_class insn_class, scratch_class;

          gcc_assert (insn_data[(int) icode].n_operands == 3);
          insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
          if (!*insn_constraint)
            insn_class = ALL_REGS;
          else
            {
              if (in_p)
                {
                  gcc_assert (*insn_constraint == '=');
                  insn_constraint++;
                }
              insn_class = (reg_class_for_constraint
                            (lookup_constraint (insn_constraint)));
              gcc_assert (insn_class != NO_REGS);
            }

          scratch_constraint = insn_data[(int) icode].operand[2].constraint;
          /* The scratch register's constraint must start with "=&",
             except for an input reload, where only "=" is necessary,
             and where it might be beneficial to re-use registers from
             the input.  */
          gcc_assert (scratch_constraint[0] == '='
                      && (in_p || scratch_constraint[1] == '&'));
          scratch_constraint++;
          if (*scratch_constraint == '&')
            scratch_constraint++;
          scratch_class = (reg_class_for_constraint
                           (lookup_constraint (scratch_constraint)));

          if (reg_class_subset_p (reload_class, insn_class))
            {
              gcc_assert (scratch_class == rclass);
              rclass = NO_REGS;
            }
          else
            rclass = insn_class;

        }
      if (rclass == NO_REGS)
        sri->icode = icode;
      else
        sri->t_icode = icode;
    }
  return rclass;
}

/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}

/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
                                         tree id)
{
  return id;
}

/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}

/* The default implementation of TARGET_CONSTANT_ALIGNMENT.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}

/* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
   to at least BITS_PER_WORD but otherwise makes no changes.  */

HOST_WIDE_INT
constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
{
  if (TREE_CODE (exp) == STRING_CST)
    return MAX (align, BITS_PER_WORD);
  return align;
}

/* Default to natural alignment for vector types, bounded by
   MAX_OFILE_ALIGNMENT.  */

HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
  tree size = TYPE_SIZE (type);
  if (tree_fits_uhwi_p (size))
    align = tree_to_uhwi (size);

  return align < MAX_OFILE_ALIGNMENT ? align : MAX_OFILE_ALIGNMENT;
}

/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}

/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}

/* By default, assume that a target supports misaligned memory accesses
   of any factor if it provides a movmisalign pattern.  IS_PACKED is true
   if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
                                             const_tree type ATTRIBUTE_UNUSED,
                                             int misalignment ATTRIBUTE_UNUSED,
                                             bool is_packed ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}

/* By default do not split reductions further.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}

/* By default only the size derived from the preferred vector mode
   is tried.  */

void
default_autovectorize_vector_sizes (vector_sizes *, bool)
{
}

/* By default a vector of integers is used as a mask.  */

opt_machine_mode
default_get_mask_mode (poly_uint64 nunits, poly_uint64 vector_size)
{
  unsigned int elem_size = vector_element_size (vector_size, nunits);
  scalar_int_mode elem_mode
    = smallest_int_mode_for_size (elem_size * BITS_PER_UNIT);
  machine_mode vector_mode;

  gcc_assert (known_eq (elem_size * nunits, vector_size));

  if (mode_for_vector (elem_mode, nunits).exists (&vector_mode)
      && VECTOR_MODE_P (vector_mode)
      && targetm.vector_mode_supported_p (vector_mode))
    return vector_mode;

  return opt_machine_mode ();
}
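
/* For illustration, with an assumed 16-byte vector of four 4-byte
   elements (nunits == 4, vector_size == 16): elem_size is 4, the
   element mode is the 32-bit integer mode SImode, and the mask mode
   requested is the corresponding integer vector mode V4SImode,
   provided the target supports it.  */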

/* By default consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

void *
default_init_cost (class loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}

/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
                       class _stmt_vec_info *stmt_info, int misalign,
                       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
                                                                misalign);
  /* Statements in an inner loop relative to the loop being
     vectorized are weighted more heavily.  The value here is
     arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}

/* By default, the cost model just returns the accumulated costs.  */

void
default_finish_cost (void *data, unsigned *prologue_cost,
                     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}

/* Free the cost data.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}
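
/* A minimal sketch (under "#if 0", illustration only) of how a client
   drives the four cost hooks above: init_cost allocates the
   accumulator, add_stmt_cost is called once per statement and
   location, finish_cost reads the three totals back, and
   destroy_cost_data frees the accumulator.  The statement kinds and
   counts below are made up for the example.  */
#if 0
static void
example_cost_query (void)
{
  void *data = default_init_cost (NULL);
  unsigned prologue, body, epilogue;

  /* Pretend the loop body contains two vector loads and one store.  */
  default_add_stmt_cost (data, 2, vector_load, NULL, 0, vect_body);
  default_add_stmt_cost (data, 1, vector_store, NULL, 0, vect_body);

  default_finish_cost (data, &prologue, &body, &epilogue);
  /* With the default per-statement cost of 1, body is now 3.  */
  default_destroy_cost_data (data);
}
#endif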

/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (scalar_int_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}

/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}

/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
                                       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}

/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
                                         bool strict,
                                         addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
                                       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for determining if 0 within a named address
   space is a valid address.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  return as;
}

/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}


/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
                            tree from_type ATTRIBUTE_UNUSED,
                            tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_HARD_REGNO_NREGS.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
                                  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
                                         tree ARG_UNUSED (name),
                                         tree ARG_UNUSED (args),
                                         int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
           "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
                                    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
             "%<#pragma GCC target%> is not supported for this machine");

  return false;
}

bool
default_target_can_inline_p (tree caller, tree callee)
{
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
  if (! callee_opts)
    callee_opts = target_option_default_node;
  if (! caller_opts)
    caller_opts = target_option_default_node;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  return callee_opts == caller_opts;
}

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}

/* By default we assume that C99 functions are present at run time,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

/* By default, assume that libc does not provide a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                          reg_class_t rclass ATTRIBUTE_UNUSED,
                          bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
                            reg_class_t from ATTRIBUTE_UNUSED,
                            reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
                             (enum reg_class) from, (enum reg_class) to);
#endif
}

/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}

/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x)
{
  return x.coeffs[0];
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}

/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
                                        unsigned int alignment,
                                        enum by_pieces_operation op,
                                        bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
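
/* A worked example of the decision above, with assumed values typical
   of a 64-bit target (MOVE_MAX_PIECES == 8, move ratio 15 when
   optimizing for speed): an aligned 32-byte MOVE_BY_PIECES copy takes
   about four 8-byte pieces, and 4 < 15, so the by-pieces
   infrastructure is used; a 256-byte copy would take about 32 pieces,
   so the cpymem optab or a memcpy call is preferred instead.  */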
1790
1791 /* This hook controls code generation for expanding a memcmp operation by
1792 pieces. Return 1 for the normal pattern of compare/jump after each pair
1793 of loads, or a higher number to reduce the number of branches. */
1794
1795 int
1796 default_compare_by_pieces_branch_ratio (machine_mode)
1797 {
1798 return 1;
1799 }
1800
1801 /* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
1802 entry. If RECORD_P is true and the target supports named sections,
1803 the location of the NOPs will be recorded in a special object section
1804 called "__patchable_function_entries". This routine may be called
1805 twice per function to put NOPs before and after the function
1806 entry. */
1807
1808 void
1809 default_print_patchable_function_entry (FILE *file,
1810 unsigned HOST_WIDE_INT patch_area_size,
1811 bool record_p)
1812 {
1813 const char *nop_templ = 0;
1814 int code_num;
1815 rtx_insn *my_nop = make_insn_raw (gen_nop ());
1816
1817 /* We use the template alone, relying on the (currently sane) assumption
1818 that the NOP template does not have variable operands. */
1819 code_num = recog_memoized (my_nop);
1820 nop_templ = get_insn_template (code_num, my_nop);
1821
1822 if (record_p && targetm_common.have_named_sections)
1823 {
1824 char buf[256];
1825 static int patch_area_number;
1826 section *previous_section = in_section;
1827 const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);
1828
1829 gcc_assert (asm_op != NULL);
1830 patch_area_number++;
1831 ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", patch_area_number);
1832
1833 switch_to_section (get_section ("__patchable_function_entries",
1834 SECTION_WRITE | SECTION_RELRO, NULL));
1835 fputs (asm_op, file);
1836 assemble_name_raw (file, buf);
1837 fputc ('\n', file);
1838
1839 switch_to_section (previous_section);
1840 ASM_OUTPUT_LABEL (file, buf);
1841 }
1842
1843 unsigned i;
1844 for (i = 0; i < patch_area_size; ++i)
1845 fprintf (file, "\t%s\n", nop_templ);
1846 }
1847
1848 bool
1849 default_profile_before_prologue (void)
1850 {
1851 #ifdef PROFILE_BEFORE_PROLOGUE
1852 return true;
1853 #else
1854 return false;
1855 #endif
1856 }
1857
1858 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1859
1860 reg_class_t
1861 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
1862 reg_class_t rclass)
1863 {
1864 #ifdef PREFERRED_RELOAD_CLASS
1865 return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
1866 #else
1867 return rclass;
1868 #endif
1869 }
1870
1871 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1872
1873 reg_class_t
1874 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
1875 reg_class_t rclass)
1876 {
1877 return rclass;
1878 }
1879
1880 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
1881 reg_class_t
1882 default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
1883 {
1884 return NO_REGS;
1885 }
1886
1887 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1888
1889 bool
1890 default_class_likely_spilled_p (reg_class_t rclass)
1891 {
1892 return (reg_class_size[(int) rclass] == 1);
1893 }
1894
1895 /* The default implementation of TARGET_CLASS_MAX_NREGS. */
1896
1897 unsigned char
1898 default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
1899 machine_mode mode ATTRIBUTE_UNUSED)
1900 {
1901 #ifdef CLASS_MAX_NREGS
1902 return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
1903 MACRO_MODE (mode));
1904 #else
1905 /* Targets with variable-sized modes must provide their own definition
1906 of this hook. */
1907 unsigned int size = GET_MODE_SIZE (mode).to_constant ();
1908 return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1909 #endif
1910 }
1911
1912 /* Determine the debugging unwind mechanism for the target. */
1913
1914 enum unwind_info_type
1915 default_debug_unwind_info (void)
1916 {
1917 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1918 /* ??? Change all users to the hook, then poison this. */
1919 #ifdef DWARF2_FRAME_INFO
1920 if (DWARF2_FRAME_INFO)
1921 return UI_DWARF2;
1922 #endif
1923
1924 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1925 #ifdef DWARF2_DEBUGGING_INFO
1926 if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
1927 return UI_DWARF2;
1928 #endif
1929
1930 return UI_NONE;
1931 }
1932
1933 /* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
1934 must define this hook. */
1935
1936 unsigned int
1937 default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
1938 {
1939 gcc_unreachable ();
1940 }
1941
1942 /* Determine the correct mode for a Dwarf frame register that represents
1943 register REGNO. */
1944
1945 machine_mode
1946 default_dwarf_frame_reg_mode (int regno)
1947 {
1948 machine_mode save_mode = reg_raw_mode[regno];
1949
1950 if (targetm.hard_regno_call_part_clobbered (NULL, regno, save_mode))
1951 save_mode = choose_hard_reg_mode (regno, 1, true);
1952 return save_mode;
1953 }
1954
1955 /* To be used by targets where reg_raw_mode doesn't return the right
1956 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1957
1958 fixed_size_mode
1959 default_get_reg_raw_mode (int regno)
1960 {
1961 /* Targets must override this hook if the underlying register is
1962 variable-sized. */
1963 return as_a <fixed_size_mode> (reg_raw_mode[regno]);
1964 }
1965
1966 /* Return true if a leaf function should stay leaf even with profiling
1967 enabled. */
1968
1969 bool
1970 default_keep_leaf_when_profiled ()
1971 {
1972 return false;
1973 }
1974
1975 /* Return true if the state of option OPTION should be stored in PCH files
1976 and checked by default_pch_valid_p. Store the option's current state
1977 in STATE if so. */
1978
1979 static inline bool
1980 option_affects_pch_p (int option, struct cl_option_state *state)
1981 {
1982 if ((cl_options[option].flags & CL_TARGET) == 0)
1983 return false;
1984 if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
1985 return false;
1986 if (option_flag_var (option, &global_options) == &target_flags)
1987 if (targetm.check_pch_target_flags)
1988 return false;
1989 return get_option_state (&global_options, option, state);
1990 }
1991
1992 /* Default version of get_pch_validity.
1993 By default, every flag difference is fatal; that will be mostly right for
1994 most targets, but completely right for very few. */
1995
1996 void *
1997 default_get_pch_validity (size_t *sz)
1998 {
1999 struct cl_option_state state;
2000 size_t i;
2001 char *result, *r;
2002
2003 *sz = 2;
2004 if (targetm.check_pch_target_flags)
2005 *sz += sizeof (target_flags);
2006 for (i = 0; i < cl_options_count; i++)
2007 if (option_affects_pch_p (i, &state))
2008 *sz += state.size;
2009
2010 result = r = XNEWVEC (char, *sz);
2011 r[0] = flag_pic;
2012 r[1] = flag_pie;
2013 r += 2;
2014 if (targetm.check_pch_target_flags)
2015 {
2016 memcpy (r, &target_flags, sizeof (target_flags));
2017 r += sizeof (target_flags);
2018 }
2019
2020 for (i = 0; i < cl_options_count; i++)
2021 if (option_affects_pch_p (i, &state))
2022 {
2023 memcpy (r, state.data, state.size);
2024 r += state.size;
2025 }
2026
2027 return result;
2028 }

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
                    option);
}

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *) data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
        return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
        if (memcmp (data, state.data, state.size) != 0)
          return pch_option_mismatch (cl_options[i].opt_text);
        data += state.size;
        len -= state.size;
      }

  return NULL;
}

/* Default version of cstore_mode.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}

/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

/* Default versions of the hooks for loading and storing pointer bounds.
   There is no generic bounds support, so reaching any of these defaults
   indicates a bug: targets that use bounds must provide their own
   implementations.  */

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
                             rtx ptr ATTRIBUTE_UNUSED,
                             rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              rtx bounds ATTRIBUTE_UNUSED,
                              rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
                               rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
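
/* Worked example for the PAD_VARARGS_DOWN logic in
   std_gimplify_va_arg_expr below: on a big-endian target with 8-byte
   PARM_BOUNDARY slots, a 1-byte structure argument still occupies a
   full slot, with its value in the slot's last byte; the read address
   is therefore advanced by rounded_size - type_size == 7 before the
   dereference.  */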

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
                                            TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p, NULL);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
                                            TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When the caller aligns a parameter on the stack, an alignment
     requirement beyond MAX_SUPPORTED_STACK_ALIGNMENT is capped at
     MAX_SUPPORTED_STACK_ALIGNMENT, so cap it here as well to keep the
     callee in sync with the caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
                               valist_tmp,
                               build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;
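
  /* E.g. with boundary == 16 and an incoming pointer value of 0x1008,
     the two statements above compute (0x1008 + 15) & -16 == 0x1010,
     the next 16-byte-aligned address.  */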

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                           rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
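
/* In outline, for a type needing no dynamic alignment or indirection,
   the sequence built above behaves like this illustrative C pseudocode
   (AP is the va_list pointer; the names are ours, not GCC's):

     tmp  = AP;                      // hoisted valist value
     addr = tmp;                     // plus big-endian pad, if any
     AP   = tmp + rounded_size;      // advance for the next va_arg
     result = *(type *) addr;        // the value itself  */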

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
                             unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

/* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}

/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  enum compiler_param param
    = (predictable_p
       ? PARAM_MAX_RTL_IF_CONVERSION_PREDICTABLE_COST
       : PARAM_MAX_RTL_IF_CONVERSION_UNPREDICTABLE_COST);

  /* If we have a parameter set, use that, otherwise take a guess using
     BRANCH_COST.  */
  if (global_options_set.x_param_values[param])
    return PARAM_VALUE (param);
  else
    return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
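
/* For instance, with the default BRANCH_COST of 1 for a predictable
   branch, the fallback above allows a replacement sequence costing up
   to 1 * COSTS_N_INSNS (3), i.e. roughly three simple instructions;
   targets with more expensive branches scale the budget accordingly.  */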

/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}

/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
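
/* FLT_EVAL_METHOD_PROMOTE_TO_FLOAT corresponds to a C FLT_EVAL_METHOD
   of 0: operations and constants are evaluated to the range and
   precision of their nominal type, so the default introduces no excess
   precision.  */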

/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}

/* The default implementation of TARGET_EARLY_REMAT_MODES.  */

void
default_select_early_remat_modes (sbitmap)
{
}

/* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}

/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}

/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  return !active;
}
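
/* A target on which execution is never speculative would typically
   select the hook above in its target definition, e.g.:

     #define TARGET_HAVE_SPECULATION_SAFE_VALUE \
       speculation_safe_value_not_needed

   telling the middle end that __builtin_speculation_safe_value needs
   no barrier there and can degrade to a simple copy of its argument.  */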

/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx result, rtx val,
                                rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
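
/* For reference, user code reaches this hook through the builtin; a
   typical (illustrative) guard against speculative out-of-bounds reads
   looks like:

     int array[500];

     int
     f (unsigned untrusted_index)
     {
       if (untrusted_index < 500)
         return array[__builtin_speculation_safe_value (untrusted_index)];
       return 0;
     }

   With the default hook above, the index is copied and a speculation
   barrier, when available, is emitted before the array load.  */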

/* Default implementation of TARGET_REMOVE_EXTRA_CALL_PRESERVED_REGS.  */

void
default_remove_extra_call_preserved_regs (rtx_insn *, HARD_REG_SET *)
{
}

#include "gt-targhooks.h"