1 /* Default target hook functions.
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* The migration of target macros to target hooks works as follows:
21
22 1. Create a target hook that uses the existing target macros to
23 implement the same functionality.
24
25 2. Convert all the MI files to use the hook instead of the macro.
26
27 3. Repeat for a majority of the remaining target macros. This will
28 take some time.
29
30 4. Tell target maintainers to start migrating.
31
32 5. Eventually convert the backends to override the hook instead of
33 defining the macros. This will take some time too.
34
35 6. At a point TBD, poison the macros. Unmigrated targets will break at
36 this point.
37
38 Note that we expect steps 1-3 to be done by the people that
39 understand what the MI does with each macro, and step 5 to be done
40 by the target maintainers for their respective targets.
41
42 Note that steps 1 and 2 don't have to be done together, but no
43 target can override the new hook until step 2 is complete for it.
44
45 Once the macros are poisoned, we will revert to the old migration
46 rules - migrate the macro, callers, and targets all at once. This
47 comment can thus be removed at that point. */
48
49 #include "config.h"
50 #include "system.h"
51 #include "coretypes.h"
52 #include "target.h"
53 #include "function.h"
54 #include "rtl.h"
55 #include "tree.h"
56 #include "tree-ssa-alias.h"
57 #include "gimple-expr.h"
58 #include "memmodel.h"
59 #include "tm_p.h"
60 #include "stringpool.h"
61 #include "tree-vrp.h"
62 #include "tree-ssanames.h"
63 #include "profile-count.h"
64 #include "optabs.h"
65 #include "regs.h"
66 #include "recog.h"
67 #include "diagnostic-core.h"
68 #include "fold-const.h"
69 #include "stor-layout.h"
70 #include "varasm.h"
71 #include "flags.h"
72 #include "explow.h"
73 #include "calls.h"
74 #include "expr.h"
75 #include "output.h"
76 #include "common/common-target.h"
77 #include "reload.h"
78 #include "intl.h"
79 #include "opts.h"
80 #include "gimplify.h"
81 #include "predict.h"
82 #include "params.h"
83 #include "real.h"
84 #include "langhooks.h"
85 #include "sbitmap.h"
86
87 bool
88 default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
89 rtx addr ATTRIBUTE_UNUSED,
90 bool strict ATTRIBUTE_UNUSED)
91 {
92 #ifdef GO_IF_LEGITIMATE_ADDRESS
93 /* Defer to the old implementation using a goto. */
94 if (strict)
95 return strict_memory_address_p (mode, addr);
96 else
97 return memory_address_p (mode, addr);
98 #else
99 gcc_unreachable ();
100 #endif
101 }
102
103 void
104 default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
105 {
106 #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
107 ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
108 #endif
109 }
110
111 int
112 default_unspec_may_trap_p (const_rtx x, unsigned flags)
113 {
114 int i;
115
116 /* Any floating arithmetic may trap. */
117 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
118 return 1;
119
120 for (i = 0; i < XVECLEN (x, 0); ++i)
121 {
122 if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
123 return 1;
124 }
125
126 return 0;
127 }
128
129 machine_mode
130 default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
131 machine_mode mode,
132 int *punsignedp ATTRIBUTE_UNUSED,
133 const_tree funtype ATTRIBUTE_UNUSED,
134 int for_return ATTRIBUTE_UNUSED)
135 {
136 if (type != NULL_TREE && for_return == 2)
137 return promote_mode (type, mode, punsignedp);
138 return mode;
139 }
140
141 machine_mode
142 default_promote_function_mode_always_promote (const_tree type,
143 machine_mode mode,
144 int *punsignedp,
145 const_tree funtype ATTRIBUTE_UNUSED,
146 int for_return ATTRIBUTE_UNUSED)
147 {
148 return promote_mode (type, mode, punsignedp);
149 }
150
151 machine_mode
152 default_cc_modes_compatible (machine_mode m1, machine_mode m2)
153 {
154 if (m1 == m2)
155 return m1;
156 return VOIDmode;
157 }
158
159 bool
160 default_return_in_memory (const_tree type,
161 const_tree fntype ATTRIBUTE_UNUSED)
162 {
163 return (TYPE_MODE (type) == BLKmode);
164 }
165
166 rtx
167 default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
168 machine_mode mode ATTRIBUTE_UNUSED)
169 {
170 return x;
171 }
172
173 bool
174 default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
175 machine_mode)
176 {
177 return false;
178 }
179
180 bool
181 default_const_not_ok_for_debug_p (rtx x)
182 {
183 if (GET_CODE (x) == UNSPEC)
184 return true;
185 return false;
186 }
187
188 rtx
189 default_expand_builtin_saveregs (void)
190 {
191 error ("%<__builtin_saveregs%> not supported by this target");
192 return const0_rtx;
193 }
194
195 void
196 default_setup_incoming_varargs (cumulative_args_t,
197 const function_arg_info &, int *, int)
198 {
199 }
200
201 /* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
202
203 rtx
204 default_builtin_setjmp_frame_value (void)
205 {
206 return virtual_stack_vars_rtx;
207 }
208
209 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
210
211 bool
212 hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
213 {
214 return false;
215 }
216
217 bool
218 default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
219 {
220 return (targetm.calls.setup_incoming_varargs
221 != default_setup_incoming_varargs);
222 }
223
224 scalar_int_mode
225 default_eh_return_filter_mode (void)
226 {
227 return targetm.unwind_word_mode ();
228 }
229
230 scalar_int_mode
231 default_libgcc_cmp_return_mode (void)
232 {
233 return word_mode;
234 }
235
236 scalar_int_mode
237 default_libgcc_shift_count_mode (void)
238 {
239 return word_mode;
240 }
241
242 scalar_int_mode
243 default_unwind_word_mode (void)
244 {
245 return word_mode;
246 }
247
248 /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
249
250 unsigned HOST_WIDE_INT
251 default_shift_truncation_mask (machine_mode mode)
252 {
253 return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
254 }
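/* As a standalone illustration (not GCC code, names are stand-ins for the
   SHIFT_COUNT_TRUNCATED / GET_MODE_UNIT_BITSIZE macros used above): when a
   target truncates shift counts, the usable mask is the unit bitsize minus
   one; otherwise no truncation may be assumed.  Compilable on its own:

     #include <stdio.h>

     static unsigned mask_for_bitsize (unsigned bitsize, int counts_truncated)
     {
       // Mirrors SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0
       return counts_truncated ? bitsize - 1 : 0;
     }

     int main (void)
     {
       printf ("32-bit, truncated:   0x%x\n", mask_for_bitsize (32, 1)); // 0x1f
       printf ("64-bit, truncated:   0x%x\n", mask_for_bitsize (64, 1)); // 0x3f
       printf ("32-bit, untruncated: 0x%x\n", mask_for_bitsize (32, 0)); // 0
       return 0;
     }
*/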
255
256 /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
257
258 unsigned int
259 default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
260 {
261 return have_insn_for (DIV, mode) ? 3 : 2;
262 }
263
264 /* The default implementation of TARGET_MODE_REP_EXTENDED. */
265
266 int
267 default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
268 {
269 return UNKNOWN;
270 }
271
272 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
273
274 bool
275 hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
276 {
277 return true;
278 }
279
280 /* Return the machine mode for a non-standard constant suffix,
281 or VOIDmode if non-standard suffixes are unsupported. */
282 machine_mode
283 default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
284 {
285 return VOIDmode;
286 }
287
288 /* The generic C++ ABI specifies this is a 64-bit value. */
289 tree
290 default_cxx_guard_type (void)
291 {
292 return long_long_integer_type_node;
293 }
294
295 /* Returns the size of the cookie to use when allocating an array
296 whose elements have the indicated TYPE. Assumes that it is already
297 known that a cookie is needed. */
298
299 tree
300 default_cxx_get_cookie_size (tree type)
301 {
302 tree cookie_size;
303
304 /* We need to allocate an additional max (sizeof (size_t), alignof
305 (true_type)) bytes. */
306 tree sizetype_size;
307 tree type_align;
308
309 sizetype_size = size_in_bytes (sizetype);
310 type_align = size_int (TYPE_ALIGN_UNIT (type));
311 if (tree_int_cst_lt (type_align, sizetype_size))
312 cookie_size = sizetype_size;
313 else
314 cookie_size = type_align;
315
316 return cookie_size;
317 }
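/* Standalone sketch of the rule above (plain C values stand in for GCC
   trees): the cookie is max (sizeof (size_t), alignment of the element
   type), so the element array that follows the cookie stays aligned.

     #include <stdio.h>
     #include <stdalign.h>

     static size_t cookie_size (size_t type_align)
     {
       size_t sizetype_size = sizeof (size_t);
       return type_align < sizetype_size ? sizetype_size : type_align;
     }

     struct over_aligned { alignas (32) char buf[32]; };

     int main (void)
     {
       printf ("double:       %zu\n", cookie_size (alignof (double)));
       printf ("over_aligned: %zu\n", cookie_size (alignof (struct over_aligned)));
       return 0;
     }
*/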
318
319 /* Return true if a parameter must be passed by reference. This version
320 of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
321
322 bool
323 hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
324 const function_arg_info &arg)
325 {
326 return targetm.calls.must_pass_in_stack (arg.mode, arg.type);
327 }
328
329 /* Return true if a parameter follows the callee-copies conventions. This
330 version of the hook is true for all named arguments. */
331
332 bool
333 hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
334 machine_mode mode ATTRIBUTE_UNUSED,
335 const_tree type ATTRIBUTE_UNUSED, bool named)
336 {
337 return named;
338 }
339
340 /* Emit to STREAM the assembler syntax for insn operand X. */
341
342 void
343 default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
344 int code ATTRIBUTE_UNUSED)
345 {
346 #ifdef PRINT_OPERAND
347 PRINT_OPERAND (stream, x, code);
348 #else
349 gcc_unreachable ();
350 #endif
351 }
352
353 /* Emit to STREAM the assembler syntax for an insn operand whose memory
354 address is X. */
355
356 void
357 default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
358 machine_mode /*mode*/,
359 rtx x ATTRIBUTE_UNUSED)
360 {
361 #ifdef PRINT_OPERAND_ADDRESS
362 PRINT_OPERAND_ADDRESS (stream, x);
363 #else
364 gcc_unreachable ();
365 #endif
366 }
367
368 /* Return true if CODE is a valid punctuation character for the
369 `print_operand' hook. */
370
371 bool
372 default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
373 {
374 #ifdef PRINT_OPERAND_PUNCT_VALID_P
375 return PRINT_OPERAND_PUNCT_VALID_P (code);
376 #else
377 return false;
378 #endif
379 }
380
381 /* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME. */
382 tree
383 default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
384 {
385 const char *skipped = name + (*name == '*' ? 1 : 0);
386 const char *stripped = targetm.strip_name_encoding (skipped);
387 if (*name != '*' && user_label_prefix[0])
388 stripped = ACONCAT ((user_label_prefix, stripped, NULL));
389 return get_identifier (stripped);
390 }
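/* Standalone sketch of the prefixing rule above (the target's
   strip_name_encoding step is omitted): a leading '*' suppresses
   user_label_prefix, otherwise the prefix is prepended to the name.

     #include <stdio.h>
     #include <stdlib.h>
     #include <string.h>

     static char *mangle (const char *user_label_prefix, const char *name)
     {
       const char *stripped = name + (*name == '*' ? 1 : 0);
       const char *prefix = (*name != '*') ? user_label_prefix : "";
       char *out = malloc (strlen (prefix) + strlen (stripped) + 1);
       strcpy (out, prefix);
       strcat (out, stripped);
       return out;
     }

     int main (void)
     {
       printf ("%s\n", mangle ("_", "foo"));   // _foo
       printf ("%s\n", mangle ("_", "*foo"));  // foo
       return 0;
     }
*/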
391
392 /* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE. */
393
394 machine_mode
395 default_translate_mode_attribute (machine_mode mode)
396 {
397 return mode;
398 }
399
400 /* True if MODE is valid for the target. By "valid", we mean able to
401 be manipulated in non-trivial ways. In particular, this means all
402 the arithmetic is supported.
403
404 By default we guess this means that any C type is supported. If
405 we can't map the mode back to a type that would be available in C,
406 then reject it. Special case, here, is the double-word arithmetic
407 supported by optabs.c. */
408
409 bool
410 default_scalar_mode_supported_p (scalar_mode mode)
411 {
412 int precision = GET_MODE_PRECISION (mode);
413
414 switch (GET_MODE_CLASS (mode))
415 {
416 case MODE_PARTIAL_INT:
417 case MODE_INT:
418 if (precision == CHAR_TYPE_SIZE)
419 return true;
420 if (precision == SHORT_TYPE_SIZE)
421 return true;
422 if (precision == INT_TYPE_SIZE)
423 return true;
424 if (precision == LONG_TYPE_SIZE)
425 return true;
426 if (precision == LONG_LONG_TYPE_SIZE)
427 return true;
428 if (precision == 2 * BITS_PER_WORD)
429 return true;
430 return false;
431
432 case MODE_FLOAT:
433 if (precision == FLOAT_TYPE_SIZE)
434 return true;
435 if (precision == DOUBLE_TYPE_SIZE)
436 return true;
437 if (precision == LONG_DOUBLE_TYPE_SIZE)
438 return true;
439 return false;
440
441 case MODE_DECIMAL_FLOAT:
442 case MODE_FRACT:
443 case MODE_UFRACT:
444 case MODE_ACCUM:
445 case MODE_UACCUM:
446 return false;
447
448 default:
449 gcc_unreachable ();
450 }
451 }
452
453 /* Return true if libgcc supports floating-point mode MODE (known to
454 be supported as a scalar mode). */
455
456 bool
457 default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
458 {
459 switch (mode)
460 {
461 #ifdef HAVE_SFmode
462 case E_SFmode:
463 #endif
464 #ifdef HAVE_DFmode
465 case E_DFmode:
466 #endif
467 #ifdef HAVE_XFmode
468 case E_XFmode:
469 #endif
470 #ifdef HAVE_TFmode
471 case E_TFmode:
472 #endif
473 return true;
474
475 default:
476 return false;
477 }
478 }
479
480 /* Return the machine mode to use for the type _FloatN, if EXTENDED is
481 false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
482 supported. */
483 opt_scalar_float_mode
484 default_floatn_mode (int n, bool extended)
485 {
486 if (extended)
487 {
488 opt_scalar_float_mode cand1, cand2;
489 scalar_float_mode mode;
490 switch (n)
491 {
492 case 32:
493 #ifdef HAVE_DFmode
494 cand1 = DFmode;
495 #endif
496 break;
497
498 case 64:
499 #ifdef HAVE_XFmode
500 cand1 = XFmode;
501 #endif
502 #ifdef HAVE_TFmode
503 cand2 = TFmode;
504 #endif
505 break;
506
507 case 128:
508 break;
509
510 default:
511 /* Those are the only valid _FloatNx types. */
512 gcc_unreachable ();
513 }
514 if (cand1.exists (&mode)
515 && REAL_MODE_FORMAT (mode)->ieee_bits > n
516 && targetm.scalar_mode_supported_p (mode)
517 && targetm.libgcc_floating_mode_supported_p (mode))
518 return cand1;
519 if (cand2.exists (&mode)
520 && REAL_MODE_FORMAT (mode)->ieee_bits > n
521 && targetm.scalar_mode_supported_p (mode)
522 && targetm.libgcc_floating_mode_supported_p (mode))
523 return cand2;
524 }
525 else
526 {
527 opt_scalar_float_mode cand;
528 scalar_float_mode mode;
529 switch (n)
530 {
531 case 16:
532 /* Always enable _Float16 if we have basic support for the mode.
533 Targets can control the range and precision of operations on
534 the _Float16 type using TARGET_C_EXCESS_PRECISION. */
535 #ifdef HAVE_HFmode
536 cand = HFmode;
537 #endif
538 break;
539
540 case 32:
541 #ifdef HAVE_SFmode
542 cand = SFmode;
543 #endif
544 break;
545
546 case 64:
547 #ifdef HAVE_DFmode
548 cand = DFmode;
549 #endif
550 break;
551
552 case 128:
553 #ifdef HAVE_TFmode
554 cand = TFmode;
555 #endif
556 break;
557
558 default:
559 break;
560 }
561 if (cand.exists (&mode)
562 && REAL_MODE_FORMAT (mode)->ieee_bits == n
563 && targetm.scalar_mode_supported_p (mode)
564 && targetm.libgcc_floating_mode_supported_p (mode))
565 return cand;
566 }
567 return opt_scalar_float_mode ();
568 }
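/* Standalone sketch of the non-extended selection above: a candidate mode
   is accepted only when its IEEE width matches N exactly and the target
   supports it (the mode table and support flags here are illustrative).

     #include <stdio.h>

     struct cand { int n; const char *mode; int ieee_bits; int supported; };

     static const char *floatn_mode (int n, const struct cand *c, int ncand)
     {
       for (int i = 0; i < ncand; i++)
         if (c[i].n == n && c[i].ieee_bits == n && c[i].supported)
           return c[i].mode;
       return NULL;  // opt_scalar_float_mode (): no suitable mode
     }

     int main (void)
     {
       struct cand table[] = {
         { 16, "HFmode", 16, 0 },   // a target without _Float16 support
         { 32, "SFmode", 32, 1 },
         { 64, "DFmode", 64, 1 },
         { 128, "TFmode", 128, 1 },
       };
       for (int n = 16; n <= 128; n *= 2)
         {
           const char *m = floatn_mode (n, table, 4);
           printf ("_Float%d -> %s\n", n, m ? m : "(none)");
         }
       return 0;
     }
*/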
569
570 /* Define this to return true if the _FloatN and _FloatNx built-in functions
571 should implicitly enable the built-in function without the __builtin_ prefix
572 in addition to the normal built-in function with the __builtin_ prefix. The
573 default is to only enable built-in functions without the __builtin_ prefix
574 for the GNU C language. The argument FUNC is the enum built_in_function
575 id of the function to be enabled. */
576
577 bool
578 default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
579 {
580 static bool first_time_p = true;
581 static bool c_or_objective_c;
582
583 if (first_time_p)
584 {
585 first_time_p = false;
586 c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
587 }
588
589 return c_or_objective_c;
590 }
591
592 /* Make some target macros usable by target-independent code. */
593 bool
594 targhook_words_big_endian (void)
595 {
596 return !!WORDS_BIG_ENDIAN;
597 }
598
599 bool
600 targhook_float_words_big_endian (void)
601 {
602 return !!FLOAT_WORDS_BIG_ENDIAN;
603 }
604
605 /* True if the target supports floating-point exceptions and rounding
606 modes. */
607
608 bool
609 default_float_exceptions_rounding_supported_p (void)
610 {
611 #ifdef HAVE_adddf3
612 return HAVE_adddf3;
613 #else
614 return false;
615 #endif
616 }
617
618 /* True if the target supports decimal floating point. */
619
620 bool
621 default_decimal_float_supported_p (void)
622 {
623 return ENABLE_DECIMAL_FLOAT;
624 }
625
626 /* True if the target supports fixed-point arithmetic. */
627
628 bool
629 default_fixed_point_supported_p (void)
630 {
631 return ENABLE_FIXED_POINT;
632 }
633
634 /* True if the target supports GNU indirect functions. */
635
636 bool
637 default_has_ifunc_p (void)
638 {
639 return HAVE_GNU_INDIRECT_FUNCTION;
640 }
641
642 /* Return true if we predict the loop LOOP will be transformed to a
643 low-overhead loop, otherwise return false.
644
645 By default, false is returned, as this hook's applicability should be
646 verified for each target. Target maintainers should re-define the hook
647 if the target can take advantage of it. */
648
649 bool
650 default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
651 {
652 return false;
653 }
654
655 /* Return NULL if INSN is valid within a low-overhead loop, otherwise
656 return an error message.
657
658 This function checks whether a given INSN is valid within a low-overhead
659 loop. If INSN is invalid it returns the reason for that, otherwise it
660 returns NULL. A called function may clobber any special registers required
661 for low-overhead looping. Additionally, some targets (e.g., PPC) use the
662 count register for branch on table instructions. We reject the doloop
663 pattern in these cases. */
664
665 const char *
666 default_invalid_within_doloop (const rtx_insn *insn)
667 {
668 if (CALL_P (insn))
669 return "Function call in loop.";
670
671 if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
672 return "Computed branch in the loop.";
673
674 return NULL;
675 }
676
677 /* Mapping of builtin functions to vectorized variants. */
678
679 tree
680 default_builtin_vectorized_function (unsigned int, tree, tree)
681 {
682 return NULL_TREE;
683 }
684
685 /* Mapping of target builtin functions to vectorized variants. */
686
687 tree
688 default_builtin_md_vectorized_function (tree, tree, tree)
689 {
690 return NULL_TREE;
691 }
692
693 /* Vectorized conversion. */
694
695 tree
696 default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
697 tree dest_type ATTRIBUTE_UNUSED,
698 tree src_type ATTRIBUTE_UNUSED)
699 {
700 return NULL_TREE;
701 }
702
703 /* Default vectorizer cost model values. */
704
705 int
706 default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
707 tree vectype,
708 int misalign ATTRIBUTE_UNUSED)
709 {
710 switch (type_of_cost)
711 {
712 case scalar_stmt:
713 case scalar_load:
714 case scalar_store:
715 case vector_stmt:
716 case vector_load:
717 case vector_store:
718 case vec_to_scalar:
719 case scalar_to_vec:
720 case cond_branch_not_taken:
721 case vec_perm:
722 case vec_promote_demote:
723 return 1;
724
725 case unaligned_load:
726 case unaligned_store:
727 return 2;
728
729 case cond_branch_taken:
730 return 3;
731
732 case vec_construct:
733 return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;
734
735 default:
736 gcc_unreachable ();
737 }
738 }
739
740 /* Reciprocal. */
741
742 tree
743 default_builtin_reciprocal (tree)
744 {
745 return NULL_TREE;
746 }
747
748 bool
749 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
750 cumulative_args_t ca ATTRIBUTE_UNUSED,
751 machine_mode mode ATTRIBUTE_UNUSED,
752 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
753 {
754 return false;
755 }
756
757 bool
758 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
759 cumulative_args_t ca ATTRIBUTE_UNUSED,
760 machine_mode mode ATTRIBUTE_UNUSED,
761 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
762 {
763 return true;
764 }
765
766 bool
767 hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
768 const function_arg_info &)
769 {
770 return false;
771 }
772
773 int
774 hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
775 const function_arg_info &)
776 {
777 return 0;
778 }
779
780 void
781 hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
782 tree ATTRIBUTE_UNUSED)
783 {
784 }
785
786 void
787 default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
788 machine_mode mode ATTRIBUTE_UNUSED,
789 const_tree type ATTRIBUTE_UNUSED,
790 bool named ATTRIBUTE_UNUSED)
791 {
792 gcc_unreachable ();
793 }
794
795 /* Default implementation of TARGET_FUNCTION_ARG_OFFSET. */
796
797 HOST_WIDE_INT
798 default_function_arg_offset (machine_mode, const_tree)
799 {
800 return 0;
801 }
802
803 /* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
804 upward, but pad short args downward on big-endian machines. */
805
806 pad_direction
807 default_function_arg_padding (machine_mode mode, const_tree type)
808 {
809 if (!BYTES_BIG_ENDIAN)
810 return PAD_UPWARD;
811
812 unsigned HOST_WIDE_INT size;
813 if (mode == BLKmode)
814 {
815 if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
816 return PAD_UPWARD;
817 size = int_size_in_bytes (type);
818 }
819 else
820 /* Targets with variable-sized modes must override this hook
821 and handle variable-sized modes explicitly. */
822 size = GET_MODE_SIZE (mode).to_constant ();
823
824 if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
825 return PAD_DOWNWARD;
826
827 return PAD_UPWARD;
828 }
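/* Standalone model of the padding rule above: big-endian targets pad short
   arguments downward so the value occupies the most-significant end of its
   slot; everything else pads upward.  The 8-byte slot width stands in for
   PARM_BOUNDARY / BITS_PER_UNIT.

     #include <stdio.h>

     enum pad { UPWARD, DOWNWARD };

     static enum pad arg_padding (int bytes_big_endian, unsigned size,
                                  unsigned parm_boundary_bytes)
     {
       if (!bytes_big_endian)
         return UPWARD;
       return size < parm_boundary_bytes ? DOWNWARD : UPWARD;
     }

     int main (void)
     {
       // Big-endian target with 64-bit (8-byte) parameter slots.
       printf ("1-byte arg: %s\n",
               arg_padding (1, 1, 8) == DOWNWARD ? "pad down" : "pad up");
       printf ("8-byte arg: %s\n",
               arg_padding (1, 8, 8) == DOWNWARD ? "pad down" : "pad up");
       return 0;
     }
*/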
829
830 rtx
831 default_function_arg (cumulative_args_t, const function_arg_info &)
832 {
833 gcc_unreachable ();
834 }
835
836 rtx
837 default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
838 {
839 gcc_unreachable ();
840 }
841
842 unsigned int
843 default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
844 const_tree type ATTRIBUTE_UNUSED)
845 {
846 return PARM_BOUNDARY;
847 }
848
849 unsigned int
850 default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
851 const_tree type ATTRIBUTE_UNUSED)
852 {
853 return PARM_BOUNDARY;
854 }
855
856 void
857 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
858 {
859 }
860
861 const char *
862 hook_invalid_arg_for_unprototyped_fn (
863 const_tree typelist ATTRIBUTE_UNUSED,
864 const_tree funcdecl ATTRIBUTE_UNUSED,
865 const_tree val ATTRIBUTE_UNUSED)
866 {
867 return NULL;
868 }
869
870 /* Initialize the stack protection decls. */
871
872 /* Stack protection related decls living in libgcc. */
873 static GTY(()) tree stack_chk_guard_decl;
874
875 tree
876 default_stack_protect_guard (void)
877 {
878 tree t = stack_chk_guard_decl;
879
880 if (t == NULL)
881 {
882 rtx x;
883
884 t = build_decl (UNKNOWN_LOCATION,
885 VAR_DECL, get_identifier ("__stack_chk_guard"),
886 ptr_type_node);
887 TREE_STATIC (t) = 1;
888 TREE_PUBLIC (t) = 1;
889 DECL_EXTERNAL (t) = 1;
890 TREE_USED (t) = 1;
891 TREE_THIS_VOLATILE (t) = 1;
892 DECL_ARTIFICIAL (t) = 1;
893 DECL_IGNORED_P (t) = 1;
894
895 /* Do not share RTL as the declaration is visible outside of
896 current function. */
897 x = DECL_RTL (t);
898 RTX_FLAG (x, used) = 1;
899
900 stack_chk_guard_decl = t;
901 }
902
903 return t;
904 }
905
906 static GTY(()) tree stack_chk_fail_decl;
907
908 tree
909 default_external_stack_protect_fail (void)
910 {
911 tree t = stack_chk_fail_decl;
912
913 if (t == NULL_TREE)
914 {
915 t = build_function_type_list (void_type_node, NULL_TREE);
916 t = build_decl (UNKNOWN_LOCATION,
917 FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
918 TREE_STATIC (t) = 1;
919 TREE_PUBLIC (t) = 1;
920 DECL_EXTERNAL (t) = 1;
921 TREE_USED (t) = 1;
922 TREE_THIS_VOLATILE (t) = 1;
923 TREE_NOTHROW (t) = 1;
924 DECL_ARTIFICIAL (t) = 1;
925 DECL_IGNORED_P (t) = 1;
926 DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
927 DECL_VISIBILITY_SPECIFIED (t) = 1;
928
929 stack_chk_fail_decl = t;
930 }
931
932 return build_call_expr (t, 0);
933 }
934
935 tree
936 default_hidden_stack_protect_fail (void)
937 {
938 #ifndef HAVE_GAS_HIDDEN
939 return default_external_stack_protect_fail ();
940 #else
941 tree t = stack_chk_fail_decl;
942
943 if (!flag_pic)
944 return default_external_stack_protect_fail ();
945
946 if (t == NULL_TREE)
947 {
948 t = build_function_type_list (void_type_node, NULL_TREE);
949 t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
950 get_identifier ("__stack_chk_fail_local"), t);
951 TREE_STATIC (t) = 1;
952 TREE_PUBLIC (t) = 1;
953 DECL_EXTERNAL (t) = 1;
954 TREE_USED (t) = 1;
955 TREE_THIS_VOLATILE (t) = 1;
956 TREE_NOTHROW (t) = 1;
957 DECL_ARTIFICIAL (t) = 1;
958 DECL_IGNORED_P (t) = 1;
959 DECL_VISIBILITY_SPECIFIED (t) = 1;
960 DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
961
962 stack_chk_fail_decl = t;
963 }
964
965 return build_call_expr (t, 0);
966 #endif
967 }
968
969 bool
970 hook_bool_const_rtx_commutative_p (const_rtx x,
971 int outer_code ATTRIBUTE_UNUSED)
972 {
973 return COMMUTATIVE_P (x);
974 }
975
976 rtx
977 default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
978 const_tree fn_decl_or_type,
979 bool outgoing ATTRIBUTE_UNUSED)
980 {
981 /* The old interface doesn't handle receiving the function type. */
982 if (fn_decl_or_type
983 && !DECL_P (fn_decl_or_type))
984 fn_decl_or_type = NULL;
985
986 #ifdef FUNCTION_VALUE
987 return FUNCTION_VALUE (ret_type, fn_decl_or_type);
988 #else
989 gcc_unreachable ();
990 #endif
991 }
992
993 rtx
994 default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
995 const_rtx fun ATTRIBUTE_UNUSED)
996 {
997 #ifdef LIBCALL_VALUE
998 return LIBCALL_VALUE (MACRO_MODE (mode));
999 #else
1000 gcc_unreachable ();
1001 #endif
1002 }
1003
1004 /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
1005
1006 bool
1007 default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
1008 {
1009 #ifdef FUNCTION_VALUE_REGNO_P
1010 return FUNCTION_VALUE_REGNO_P (regno);
1011 #else
1012 gcc_unreachable ();
1013 #endif
1014 }
1015
1016 rtx
1017 default_internal_arg_pointer (void)
1018 {
1019 /* If the reg that the virtual arg pointer will be translated into is
1020 not a fixed reg or is the stack pointer, make a copy of the virtual
1021 arg pointer, and address parms via the copy. The frame pointer is
1022 considered fixed even though it is not marked as such. */
1023 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
1024 || ! (fixed_regs[ARG_POINTER_REGNUM]
1025 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
1026 return copy_to_reg (virtual_incoming_args_rtx);
1027 else
1028 return virtual_incoming_args_rtx;
1029 }
1030
1031 rtx
1032 default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
1033 {
1034 if (incoming_p)
1035 {
1036 #ifdef STATIC_CHAIN_INCOMING_REGNUM
1037 return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
1038 #endif
1039 }
1040
1041 #ifdef STATIC_CHAIN_REGNUM
1042 return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
1043 #endif
1044
1045 {
1046 static bool issued_error;
1047 if (!issued_error)
1048 {
1049 issued_error = true;
1050 sorry ("nested functions not supported on this target");
1051 }
1052
1053 /* It really doesn't matter what we return here, so long as it
1054 doesn't cause the rest of the compiler to crash. */
1055 return gen_rtx_MEM (Pmode, stack_pointer_rtx);
1056 }
1057 }
1058
1059 void
1060 default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
1061 rtx ARG_UNUSED (r_chain))
1062 {
1063 sorry ("nested function trampolines not supported on this target");
1064 }
1065
1066 poly_int64
1067 default_return_pops_args (tree, tree, poly_int64)
1068 {
1069 return 0;
1070 }
1071
1072 reg_class_t
1073 default_branch_target_register_class (void)
1074 {
1075 return NO_REGS;
1076 }
1077
1078 reg_class_t
1079 default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
1080 reg_class_t cl,
1081 reg_class_t best_cl ATTRIBUTE_UNUSED)
1082 {
1083 return cl;
1084 }
1085
1086 extern bool
1087 default_lra_p (void)
1088 {
1089 return true;
1090 }
1091
1092 int
1093 default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
1094 {
1095 return 0;
1096 }
1097
1098 extern bool
1099 default_register_usage_leveling_p (void)
1100 {
1101 return false;
1102 }
1103
1104 extern bool
1105 default_different_addr_displacement_p (void)
1106 {
1107 return false;
1108 }
1109
1110 reg_class_t
1111 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
1112 reg_class_t reload_class_i ATTRIBUTE_UNUSED,
1113 machine_mode reload_mode ATTRIBUTE_UNUSED,
1114 secondary_reload_info *sri)
1115 {
1116 enum reg_class rclass = NO_REGS;
1117 enum reg_class reload_class = (enum reg_class) reload_class_i;
1118
1119 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
1120 {
1121 sri->icode = sri->prev_sri->t_icode;
1122 return NO_REGS;
1123 }
1124 #ifdef SECONDARY_INPUT_RELOAD_CLASS
1125 if (in_p)
1126 rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
1127 MACRO_MODE (reload_mode), x);
1128 #endif
1129 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1130 if (! in_p)
1131 rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
1132 MACRO_MODE (reload_mode), x);
1133 #endif
1134 if (rclass != NO_REGS)
1135 {
1136 enum insn_code icode
1137 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
1138 reload_mode);
1139
1140 if (icode != CODE_FOR_nothing
1141 && !insn_operand_matches (icode, in_p, x))
1142 icode = CODE_FOR_nothing;
1143 else if (icode != CODE_FOR_nothing)
1144 {
1145 const char *insn_constraint, *scratch_constraint;
1146 enum reg_class insn_class, scratch_class;
1147
1148 gcc_assert (insn_data[(int) icode].n_operands == 3);
1149 insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
1150 if (!*insn_constraint)
1151 insn_class = ALL_REGS;
1152 else
1153 {
1154 if (in_p)
1155 {
1156 gcc_assert (*insn_constraint == '=');
1157 insn_constraint++;
1158 }
1159 insn_class = (reg_class_for_constraint
1160 (lookup_constraint (insn_constraint)));
1161 gcc_assert (insn_class != NO_REGS);
1162 }
1163
1164 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
1165 /* The scratch register's constraint must start with "=&",
1166 except for an input reload, where only "=" is necessary,
1167 and where it might be beneficial to re-use registers from
1168 the input. */
1169 gcc_assert (scratch_constraint[0] == '='
1170 && (in_p || scratch_constraint[1] == '&'));
1171 scratch_constraint++;
1172 if (*scratch_constraint == '&')
1173 scratch_constraint++;
1174 scratch_class = (reg_class_for_constraint
1175 (lookup_constraint (scratch_constraint)));
1176
1177 if (reg_class_subset_p (reload_class, insn_class))
1178 {
1179 gcc_assert (scratch_class == rclass);
1180 rclass = NO_REGS;
1181 }
1182 else
1183 rclass = insn_class;
1184
1185 }
1186 if (rclass == NO_REGS)
1187 sri->icode = icode;
1188 else
1189 sri->t_icode = icode;
1190 }
1191 return rclass;
1192 }
1193
1194 /* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE. */
1195
1196 machine_mode
1197 default_secondary_memory_needed_mode (machine_mode mode)
1198 {
1199 if (!targetm.lra_p ()
1200 && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
1201 && INTEGRAL_MODE_P (mode))
1202 return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
1203 return mode;
1204 }
1205
1206 /* By default, if flag_pic is true, then neither local nor global relocs
1207 should be placed in readonly memory. */
1208
1209 int
1210 default_reloc_rw_mask (void)
1211 {
1212 return flag_pic ? 3 : 0;
1213 }
1214
1215 /* By default, address diff vectors are generated
1216 for jump tables when flag_pic is true. */
1217
1218 bool
1219 default_generate_pic_addr_diff_vec (void)
1220 {
1221 return flag_pic;
1222 }
1223
1224 /* By default, do no modification. */
1225 tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
1226 tree id)
1227 {
1228 return id;
1229 }
1230
1231 /* The default implementation of TARGET_STATIC_RTX_ALIGNMENT. */
1232
1233 HOST_WIDE_INT
1234 default_static_rtx_alignment (machine_mode mode)
1235 {
1236 return GET_MODE_ALIGNMENT (mode);
1237 }
1238
1239 /* The default implementation of TARGET_CONSTANT_ALIGNMENT. */
1240
1241 HOST_WIDE_INT
1242 default_constant_alignment (const_tree, HOST_WIDE_INT align)
1243 {
1244 return align;
1245 }
1246
1247 /* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
1248 to at least BITS_PER_WORD but otherwise makes no changes. */
1249
1250 HOST_WIDE_INT
1251 constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
1252 {
1253 if (TREE_CODE (exp) == STRING_CST)
1254 return MAX (align, BITS_PER_WORD);
1255 return align;
1256 }
1257
1258 /* Default to natural alignment for vector types, bounded by
1259 MAX_OFILE_ALIGNMENT. */
1260
1261 HOST_WIDE_INT
1262 default_vector_alignment (const_tree type)
1263 {
1264 unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
1265 tree size = TYPE_SIZE (type);
1266 if (tree_fits_uhwi_p (size))
1267 align = tree_to_uhwi (size);
1268
1269 return align < MAX_OFILE_ALIGNMENT ? align : MAX_OFILE_ALIGNMENT;
1270 }
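/* Standalone sketch of the clamping above: a vector's natural alignment is
   its size in bits, bounded by the object-file maximum (the 65536-bit limit
   used here is an illustrative MAX_OFILE_ALIGNMENT).

     #include <stdio.h>

     static unsigned long vector_alignment (unsigned long size_bits,
                                            unsigned long max_ofile)
     {
       return size_bits < max_ofile ? size_bits : max_ofile;
     }

     int main (void)
     {
       printf ("128-bit vector: %lu\n", vector_alignment (128, 65536));
       printf ("1Mbit vector:   %lu\n", vector_alignment (1UL << 20, 65536));
       return 0;
     }
*/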
1271
1272 /* The default implementation of
1273 TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT. */
1274
1275 poly_uint64
1276 default_preferred_vector_alignment (const_tree type)
1277 {
1278 return TYPE_ALIGN (type);
1279 }
1280
1281 /* By default assume vectors of element TYPE require a multiple of the natural
1282 alignment of TYPE. TYPE is naturally aligned if IS_PACKED is false. */
1283 bool
1284 default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
1285 {
1286 return ! is_packed;
1287 }
1288
1289 /* By default, assume that a target supports misaligned memory accesses
1290 of any factor if it supports the movmisalign pattern.
1291 IS_PACKED is true if the memory access is defined in a packed struct. */
1292 bool
1293 default_builtin_support_vector_misalignment (machine_mode mode,
1294 const_tree type
1295 ATTRIBUTE_UNUSED,
1296 int misalignment
1297 ATTRIBUTE_UNUSED,
1298 bool is_packed
1299 ATTRIBUTE_UNUSED)
1300 {
1301 if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
1302 return true;
1303 return false;
1304 }
1305
1306 /* By default, only attempt to parallelize bitwise operations, and
1307 possibly adds/subtracts using bit-twiddling. */
1308
1309 machine_mode
1310 default_preferred_simd_mode (scalar_mode)
1311 {
1312 return word_mode;
1313 }
1314
1315 /* By default do not split reductions further. */
1316
1317 machine_mode
1318 default_split_reduction (machine_mode mode)
1319 {
1320 return mode;
1321 }
1322
1323 /* By default only the size derived from the preferred vector mode
1324 is tried. */
1325
1326 void
1327 default_autovectorize_vector_sizes (vector_sizes *, bool)
1328 {
1329 }
1330
1331 /* By default a vector of integers is used as a mask. */
1332
1333 opt_machine_mode
1334 default_get_mask_mode (poly_uint64 nunits, poly_uint64 vector_size)
1335 {
1336 unsigned int elem_size = vector_element_size (vector_size, nunits);
1337 scalar_int_mode elem_mode
1338 = smallest_int_mode_for_size (elem_size * BITS_PER_UNIT);
1339 machine_mode vector_mode;
1340
1341 gcc_assert (known_eq (elem_size * nunits, vector_size));
1342
1343 if (mode_for_vector (elem_mode, nunits).exists (&vector_mode)
1344 && VECTOR_MODE_P (vector_mode)
1345 && targetm.vector_mode_supported_p (vector_mode))
1346 return vector_mode;
1347
1348 return opt_machine_mode ();
1349 }
1350
1351 /* By default consider masked stores to be expensive. */
1352
1353 bool
1354 default_empty_mask_is_expensive (unsigned ifn)
1355 {
1356 return ifn == IFN_MASK_STORE;
1357 }
1358
1359 /* By default, the cost model accumulates three separate costs (prologue,
1360 loop body, and epilogue) for a vectorized loop or block. So allocate an
1361 array of three unsigned ints, set it to zero, and return its address. */
1362
1363 void *
1364 default_init_cost (class loop *loop_info ATTRIBUTE_UNUSED)
1365 {
1366 unsigned *cost = XNEWVEC (unsigned, 3);
1367 cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
1368 return cost;
1369 }
1370
1371 /* By default, the cost model looks up the cost of the given statement
1372 kind and mode, multiplies it by the occurrence count, accumulates
1373 it into the cost specified by WHERE, and returns the cost added. */
1374
1375 unsigned
1376 default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
1377 class _stmt_vec_info *stmt_info, int misalign,
1378 enum vect_cost_model_location where)
1379 {
1380 unsigned *cost = (unsigned *) data;
1381 unsigned retval = 0;
1382
1383 tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
1384 int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
1385 misalign);
1386 /* Statements in an inner loop relative to the loop being
1387 vectorized are weighted more heavily. The value here is
1388 arbitrary and could potentially be improved with analysis. */
1389 if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
1390 count *= 50; /* FIXME. */
1391
1392 retval = (unsigned) (count * stmt_cost);
1393 cost[where] += retval;
1394
1395 return retval;
1396 }
1397
1398 /* By default, the cost model just returns the accumulated costs. */
1399
1400 void
1401 default_finish_cost (void *data, unsigned *prologue_cost,
1402 unsigned *body_cost, unsigned *epilogue_cost)
1403 {
1404 unsigned *cost = (unsigned *) data;
1405 *prologue_cost = cost[vect_prologue];
1406 *body_cost = cost[vect_body];
1407 *epilogue_cost = cost[vect_epilogue];
1408 }
1409
1410 /* Free the cost data. */
1411
1412 void
1413 default_destroy_cost_data (void *data)
1414 {
1415 free (data);
1416 }
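/* Standalone sketch of the init/add/finish/destroy protocol above, with the
   three buckets as a plain array; the per-statement costs reuse the default
   values from default_builtin_vectorization_cost.

     #include <stdio.h>
     #include <stdlib.h>

     enum where { PROLOGUE, BODY, EPILOGUE };

     static unsigned *init_cost (void)
     {
       return calloc (3, sizeof (unsigned));
     }

     static unsigned add_stmt_cost (unsigned *cost, int count, int stmt_cost,
                                    enum where where, int in_inner_loop)
     {
       if (where == BODY && in_inner_loop)
         count *= 50;  // the same arbitrary inner-loop weighting as above
       unsigned retval = count * stmt_cost;
       cost[where] += retval;
       return retval;
     }

     int main (void)
     {
       unsigned *cost = init_cost ();
       add_stmt_cost (cost, 4, 1, BODY, 0);      // four vector_stmt's
       add_stmt_cost (cost, 1, 2, BODY, 0);      // one unaligned_load
       add_stmt_cost (cost, 1, 1, PROLOGUE, 0);  // one scalar_to_vec
       printf ("prologue=%u body=%u epilogue=%u\n",
               cost[PROLOGUE], cost[BODY], cost[EPILOGUE]);
       free (cost);
       return 0;
     }
*/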
1417
1418 /* Determine whether or not a pointer mode is valid. Assume defaults
1419 of ptr_mode or Pmode - can be overridden. */
1420 bool
1421 default_valid_pointer_mode (scalar_int_mode mode)
1422 {
1423 return (mode == ptr_mode || mode == Pmode);
1424 }
1425
1426 /* Determine whether the memory reference specified by REF may alias
1427 the C library's errno location. */
1428 bool
1429 default_ref_may_alias_errno (ao_ref *ref)
1430 {
1431 tree base = ao_ref_base (ref);
1432 /* The default implementation assumes the errno location is
1433 a declaration of type int or is always accessed via a
1434 pointer to int. We assume that accesses to errno are
1435 not deliberately obfuscated (even in conforming ways). */
1436 if (TYPE_UNSIGNED (TREE_TYPE (base))
1437 || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
1438 return false;
1439 /* The default implementation assumes an errno location
1440 declaration is never defined in the current compilation unit. */
1441 if (DECL_P (base)
1442 && !TREE_STATIC (base))
1443 return true;
1444 else if (TREE_CODE (base) == MEM_REF
1445 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1446 {
1447 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1448 return !pi || pi->pt.anything || pi->pt.nonlocal;
1449 }
1450 return false;
1451 }
1452
1453 /* Return the mode for a pointer to a given ADDRSPACE,
1454 defaulting to ptr_mode for all address spaces. */
1455
1456 scalar_int_mode
1457 default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1458 {
1459 return ptr_mode;
1460 }
1461
1462 /* Return the mode for an address in a given ADDRSPACE,
1463 defaulting to Pmode for all address spaces. */
1464
1465 scalar_int_mode
1466 default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1467 {
1468 return Pmode;
1469 }
1470
1471 /* Named address space version of valid_pointer_mode.
1472 To match the above, the same modes apply to all address spaces. */
1473
1474 bool
1475 default_addr_space_valid_pointer_mode (scalar_int_mode mode,
1476 addr_space_t as ATTRIBUTE_UNUSED)
1477 {
1478 return targetm.valid_pointer_mode (mode);
1479 }
1480
1481 /* Some places still assume that all pointer or address modes are the
1482 standard Pmode and ptr_mode. These optimizations become invalid if
1483 the target actually supports multiple different modes. For now,
1484 we disable such optimizations on such targets, using this function. */
1485
1486 bool
1487 target_default_pointer_address_modes_p (void)
1488 {
1489 if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1490 return false;
1491 if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1492 return false;
1493
1494 return true;
1495 }
1496
1497 /* Named address space version of legitimate_address_p.
1498 By default, all address spaces have the same form. */
1499
1500 bool
1501 default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
1502 bool strict,
1503 addr_space_t as ATTRIBUTE_UNUSED)
1504 {
1505 return targetm.legitimate_address_p (mode, mem, strict);
1506 }
1507
1508 /* Named address space version of LEGITIMIZE_ADDRESS.
1509 By default, all address spaces have the same form. */
1510
1511 rtx
1512 default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
1513 addr_space_t as ATTRIBUTE_UNUSED)
1514 {
1515 return targetm.legitimize_address (x, oldx, mode);
1516 }
1517
1518 /* The default hook for determining if one named address space is a subset of
1519 another and to return which address space to use as the common address
1520 space. */
1521
1522 bool
1523 default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1524 {
1525 return (subset == superset);
1526 }
1527
1528 /* The default hook for determining if 0 within a named address
1529 space is a valid address. */
1530
1531 bool
1532 default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
1533 {
1534 return false;
1535 }
1536
1537 /* The default hook for debugging the address space is to return the
1538 address space number to indicate DW_AT_address_class. */
1539 int
1540 default_addr_space_debug (addr_space_t as)
1541 {
1542 return as;
1543 }
1544
1545 /* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
1546 Don't complain about any address space. */
1547
1548 void
1549 default_addr_space_diagnose_usage (addr_space_t, location_t)
1550 {
1551 }
1552
1553
1554 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1555 called for targets with only a generic address space. */
1556
1557 rtx
1558 default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
1559 tree from_type ATTRIBUTE_UNUSED,
1560 tree to_type ATTRIBUTE_UNUSED)
1561 {
1562 gcc_unreachable ();
1563 }
1564
1565 /* The default implementation of TARGET_HARD_REGNO_NREGS. */
1566
1567 unsigned int
1568 default_hard_regno_nregs (unsigned int, machine_mode mode)
1569 {
1570 /* Targets with variable-sized modes must provide their own definition
1571 of this hook. */
1572 return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
1573 }
1574
1575 bool
1576 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
1577 {
1578 return true;
1579 }
1580
1581 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1582
1583 bool
1584 default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
1585 addr_space_t addrspace ATTRIBUTE_UNUSED)
1586 {
1587 return false;
1588 }
1589
1590 bool
1591 default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
1592 tree ARG_UNUSED (name),
1593 tree ARG_UNUSED (args),
1594 int ARG_UNUSED (flags))
1595 {
1596 warning (OPT_Wattributes,
1597 "target attribute is not supported on this machine");
1598
1599 return false;
1600 }
1601
1602 bool
1603 default_target_option_pragma_parse (tree ARG_UNUSED (args),
1604 tree ARG_UNUSED (pop_target))
1605 {
1606 /* If args is NULL the caller is handle_pragma_pop_options (). In that case,
1607 emit no warning because "#pragma GCC pop_options" is valid on targets that
1608 do not have the "target" pragma. */
1609 if (args)
1610 warning (OPT_Wpragmas,
1611 "%<#pragma GCC target%> is not supported for this machine");
1612
1613 return false;
1614 }
1615
1616 bool
1617 default_target_can_inline_p (tree caller, tree callee)
1618 {
1619 tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1620 tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1621 if (! callee_opts)
1622 callee_opts = target_option_default_node;
1623 if (! caller_opts)
1624 caller_opts = target_option_default_node;
1625
1626 /* If both caller and callee have attributes, assume that if the
1627 pointer is different, the two functions have different target
1628 options since build_target_option_node uses a hash table for the
1629 options. */
1630 return callee_opts == caller_opts;
1631 }
1632
1633 /* If the machine does not have a case insn that compares the bounds,
1634 this means extra overhead for dispatch tables, which raises the
1635 threshold for using them. */
1636
1637 unsigned int
1638 default_case_values_threshold (void)
1639 {
1640 return (targetm.have_casesi () ? 4 : 5);
1641 }
1642
1643 bool
1644 default_have_conditional_execution (void)
1645 {
1646 return HAVE_conditional_execution;
1647 }
1648
1649 /* By default we assume that C99 functions are present at run time,
1650 but sincos is not. */
1651 bool
1652 default_libc_has_function (enum function_class fn_class)
1653 {
1654 if (fn_class == function_c94
1655 || fn_class == function_c99_misc
1656 || fn_class == function_c99_math_complex)
1657 return true;
1658
1659 return false;
1660 }
1661
1662 /* By default assume that libc does not provide a fast implementation. */
1663
1664 bool
1665 default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
1666 {
1667 return false;
1668 }
1669
1670 bool
1671 gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
1672 {
1673 return true;
1674 }
1675
1676 bool
1677 no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
1678 {
1679 return false;
1680 }
1681
1682 tree
1683 default_builtin_tm_load_store (tree ARG_UNUSED (type))
1684 {
1685 return NULL_TREE;
1686 }
1687
1688 /* Compute cost of moving registers to/from memory. */
1689
1690 int
1691 default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
1692 reg_class_t rclass ATTRIBUTE_UNUSED,
1693 bool in ATTRIBUTE_UNUSED)
1694 {
1695 #ifndef MEMORY_MOVE_COST
1696 return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
1697 #else
1698 return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
1699 #endif
1700 }
1701
1702 /* Compute cost of moving data from a register of class FROM to one of
1703 TO, using MODE. */
1704
1705 int
1706 default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
1707 reg_class_t from ATTRIBUTE_UNUSED,
1708 reg_class_t to ATTRIBUTE_UNUSED)
1709 {
1710 #ifndef REGISTER_MOVE_COST
1711 return 2;
1712 #else
1713 return REGISTER_MOVE_COST (MACRO_MODE (mode),
1714 (enum reg_class) from, (enum reg_class) to);
1715 #endif
1716 }
1717
1718 /* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS. */
1719
1720 bool
1721 default_slow_unaligned_access (machine_mode, unsigned int)
1722 {
1723 return STRICT_ALIGNMENT;
1724 }
1725
1726 /* The default implementation of TARGET_ESTIMATED_POLY_VALUE. */
1727
1728 HOST_WIDE_INT
1729 default_estimated_poly_value (poly_int64 x)
1730 {
1731 return x.coeffs[0];
1732 }
1733
1734 /* For hooks which use the MOVE_RATIO macro, this gives the legacy default
1735 behavior. SPEED_P is true if we are compiling for speed. */
1736
1737 unsigned int
1738 get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
1739 {
1740 unsigned int move_ratio;
1741 #ifdef MOVE_RATIO
1742 move_ratio = (unsigned int) MOVE_RATIO (speed_p);
1743 #else
1744 #if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
1745 move_ratio = 2;
1746 #else /* No cpymem patterns, pick a default. */
1747 move_ratio = ((speed_p) ? 15 : 3);
1748 #endif
1749 #endif
1750 return move_ratio;
1751 }
1752
1753 /* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
1754 used; return FALSE if the cpymem/setmem optab should be expanded, or
1755 a call to memcpy emitted. */
1756
1757 bool
1758 default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
1759 unsigned int alignment,
1760 enum by_pieces_operation op,
1761 bool speed_p)
1762 {
1763 unsigned int max_size = 0;
1764 unsigned int ratio = 0;
1765
1766 switch (op)
1767 {
1768 case CLEAR_BY_PIECES:
1769 max_size = STORE_MAX_PIECES;
1770 ratio = CLEAR_RATIO (speed_p);
1771 break;
1772 case MOVE_BY_PIECES:
1773 max_size = MOVE_MAX_PIECES;
1774 ratio = get_move_ratio (speed_p);
1775 break;
1776 case SET_BY_PIECES:
1777 max_size = STORE_MAX_PIECES;
1778 ratio = SET_RATIO (speed_p);
1779 break;
1780 case STORE_BY_PIECES:
1781 max_size = STORE_MAX_PIECES;
1782 ratio = get_move_ratio (speed_p);
1783 break;
1784 case COMPARE_BY_PIECES:
1785 max_size = COMPARE_MAX_PIECES;
1786 /* Pick a likely default, just as in get_move_ratio. */
1787 ratio = speed_p ? 15 : 3;
1788 break;
1789 }
1790
1791 return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
1792 }
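/* Standalone sketch of the decision above: expand by pieces only when the
   estimated instruction count stays below the ratio for the operation.
   The greedy piece-count model here is a crude stand-in for
   by_pieces_ninsns.

     #include <stdio.h>

     static unsigned ninsns (unsigned long size, unsigned max_piece)
     {
       unsigned n = 0;
       for (unsigned piece = max_piece; piece >= 1; piece /= 2)
         {
           n += size / piece;
           size %= piece;
         }
       return n;
     }

     static int use_by_pieces (unsigned long size, unsigned max_piece,
                               unsigned ratio)
     {
       return ninsns (size, max_piece) < ratio;
     }

     int main (void)
     {
       // 8-byte pieces; MOVE_RATIO-style defaults of 15 (speed) and 3 (size).
       printf ("24 bytes, speed: %d\n", use_by_pieces (24, 8, 15)); // 1
       printf ("24 bytes, size:  %d\n", use_by_pieces (24, 8, 3));  // 0
       return 0;
     }
*/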
1793
1794 /* This hook controls code generation for expanding a memcmp operation by
1795 pieces. Return 1 for the normal pattern of compare/jump after each pair
1796 of loads, or a higher number to reduce the number of branches. */
1797
1798 int
1799 default_compare_by_pieces_branch_ratio (machine_mode)
1800 {
1801 return 1;
1802 }
1803
1804 /* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
1805 entry. If RECORD_P is true and the target supports named sections,
1806 the location of the NOPs will be recorded in a special object section
1807 called "__patchable_function_entries". This routine may be called
1808 twice per function to put NOPs before and after the function
1809 entry. */
1810
1811 void
1812 default_print_patchable_function_entry (FILE *file,
1813 unsigned HOST_WIDE_INT patch_area_size,
1814 bool record_p)
1815 {
1816 const char *nop_templ = 0;
1817 int code_num;
1818 rtx_insn *my_nop = make_insn_raw (gen_nop ());
1819
1820 /* We use the template alone, relying on the (currently sane) assumption
1821 that the NOP template does not have variable operands. */
1822 code_num = recog_memoized (my_nop);
1823 nop_templ = get_insn_template (code_num, my_nop);
1824
1825 if (record_p && targetm_common.have_named_sections)
1826 {
1827 char buf[256];
1828 static int patch_area_number;
1829 section *previous_section = in_section;
1830 const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);
1831
1832 gcc_assert (asm_op != NULL);
1833 patch_area_number++;
1834 ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", patch_area_number);
1835
1836 switch_to_section (get_section ("__patchable_function_entries",
1837 SECTION_WRITE | SECTION_RELRO, NULL));
1838 fputs (asm_op, file);
1839 assemble_name_raw (file, buf);
1840 fputc ('\n', file);
1841
1842 switch_to_section (previous_section);
1843 ASM_OUTPUT_LABEL (file, buf);
1844 }
1845
1846 unsigned i;
1847 for (i = 0; i < patch_area_size; ++i)
1848 fprintf (file, "\t%s\n", nop_templ);
1849 }
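/* Standalone sketch of the output produced above (the assembler syntax is
   illustrative, not any particular target's): a pointer-sized record in
   __patchable_function_entries referring back to a label placed in front
   of the run of NOPs.

     #include <stdio.h>

     static void print_patch_area (FILE *f, unsigned size, int record_p)
     {
       static int area_number;
       if (record_p)
         {
           area_number++;
           fprintf (f, "\t.section __patchable_function_entries\n");
           fprintf (f, "\t.quad .LPFE%d\n", area_number);
           fprintf (f, "\t.previous\n");
           fprintf (f, ".LPFE%d:\n", area_number);
         }
       for (unsigned i = 0; i < size; i++)
         fprintf (f, "\tnop\n");
     }

     int main (void)
     {
       print_patch_area (stdout, 2, 1);
       return 0;
     }
*/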
1850
1851 bool
1852 default_profile_before_prologue (void)
1853 {
1854 #ifdef PROFILE_BEFORE_PROLOGUE
1855 return true;
1856 #else
1857 return false;
1858 #endif
1859 }
1860
1861 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1862
1863 reg_class_t
1864 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
1865 reg_class_t rclass)
1866 {
1867 #ifdef PREFERRED_RELOAD_CLASS
1868 return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
1869 #else
1870 return rclass;
1871 #endif
1872 }
1873
1874 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1875
1876 reg_class_t
1877 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
1878 reg_class_t rclass)
1879 {
1880 return rclass;
1881 }
1882
1883 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
1884 reg_class_t
1885 default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
1886 {
1887 return NO_REGS;
1888 }
1889
1890 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1891
1892 bool
1893 default_class_likely_spilled_p (reg_class_t rclass)
1894 {
1895 return (reg_class_size[(int) rclass] == 1);
1896 }
1897
1898 /* The default implementation of TARGET_CLASS_MAX_NREGS. */
1899
1900 unsigned char
1901 default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
1902 machine_mode mode ATTRIBUTE_UNUSED)
1903 {
1904 #ifdef CLASS_MAX_NREGS
1905 return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
1906 MACRO_MODE (mode));
1907 #else
1908 /* Targets with variable-sized modes must provide their own definition
1909 of this hook. */
1910 unsigned int size = GET_MODE_SIZE (mode).to_constant ();
1911 return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1912 #endif
1913 }
1914
1915 /* Determine the debugging unwind mechanism for the target. */
1916
1917 enum unwind_info_type
1918 default_debug_unwind_info (void)
1919 {
1920 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1921 /* ??? Change all users to the hook, then poison this. */
1922 #ifdef DWARF2_FRAME_INFO
1923 if (DWARF2_FRAME_INFO)
1924 return UI_DWARF2;
1925 #endif
1926
1927 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1928 #ifdef DWARF2_DEBUGGING_INFO
1929 if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
1930 return UI_DWARF2;
1931 #endif
1932
1933 return UI_NONE;
1934 }
1935
1936 /* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
1937 must define this hook. */
1938
1939 unsigned int
1940 default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
1941 {
1942 gcc_unreachable ();
1943 }
1944
1945 /* Determine the correct mode for a Dwarf frame register that represents
1946 register REGNO. */
1947
1948 machine_mode
1949 default_dwarf_frame_reg_mode (int regno)
1950 {
1951 machine_mode save_mode = reg_raw_mode[regno];
1952
1953 if (targetm.hard_regno_call_part_clobbered (NULL, regno, save_mode))
1954 save_mode = choose_hard_reg_mode (regno, 1, true);
1955 return save_mode;
1956 }
1957
1958 /* To be used by targets where reg_raw_mode doesn't return the right
1959 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1960
1961 fixed_size_mode
1962 default_get_reg_raw_mode (int regno)
1963 {
1964 /* Targets must override this hook if the underlying register is
1965 variable-sized. */
1966 return as_a <fixed_size_mode> (reg_raw_mode[regno]);
1967 }
1968
1969 /* Return true if a leaf function should stay leaf even with profiling
1970 enabled. */
1971
1972 bool
1973 default_keep_leaf_when_profiled ()
1974 {
1975 return false;
1976 }
1977
1978 /* Return true if the state of option OPTION should be stored in PCH files
1979 and checked by default_pch_valid_p. Store the option's current state
1980 in STATE if so. */
1981
1982 static inline bool
1983 option_affects_pch_p (int option, struct cl_option_state *state)
1984 {
1985 if ((cl_options[option].flags & CL_TARGET) == 0)
1986 return false;
1987 if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
1988 return false;
1989 if (option_flag_var (option, &global_options) == &target_flags)
1990 if (targetm.check_pch_target_flags)
1991 return false;
1992 return get_option_state (&global_options, option, state);
1993 }
1994
1995 /* Default version of get_pch_validity.
1996 By default, every flag difference is fatal; that will be mostly right for
1997 most targets, but completely right for very few. */
1998
1999 void *
2000 default_get_pch_validity (size_t *sz)
2001 {
2002 struct cl_option_state state;
2003 size_t i;
2004 char *result, *r;
2005
2006 *sz = 2;
2007 if (targetm.check_pch_target_flags)
2008 *sz += sizeof (target_flags);
2009 for (i = 0; i < cl_options_count; i++)
2010 if (option_affects_pch_p (i, &state))
2011 *sz += state.size;
2012
2013 result = r = XNEWVEC (char, *sz);
2014 r[0] = flag_pic;
2015 r[1] = flag_pie;
2016 r += 2;
2017 if (targetm.check_pch_target_flags)
2018 {
2019 memcpy (r, &target_flags, sizeof (target_flags));
2020 r += sizeof (target_flags);
2021 }
2022
2023 for (i = 0; i < cl_options_count; i++)
2024 if (option_affects_pch_p (i, &state))
2025 {
2026 memcpy (r, state.data, state.size);
2027 r += state.size;
2028 }
2029
2030 return result;
2031 }
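/* Standalone model of the validity blob built above: two leading bytes for
   flag_pic and flag_pie, optionally the raw target_flags word, then the
   per-option states; reuse compares the saved bytes against the current
   settings (per-option states are omitted from this sketch).

     #include <stdio.h>
     #include <stdlib.h>
     #include <string.h>

     static char *make_blob (char pic, char pie, int target_flags, size_t *sz)
     {
       *sz = 2 + sizeof (int);
       char *b = malloc (*sz);
       b[0] = pic;
       b[1] = pie;
       memcpy (b + 2, &target_flags, sizeof (int));
       return b;
     }

     static const char *check_blob (const char *b, char pic, char pie, int tf)
     {
       if (b[0] != pic) return "different -fpic setting";
       if (b[1] != pie) return "different -fpie setting";
       int saved;
       memcpy (&saved, b + 2, sizeof (int));
       if (saved != tf) return "different target_flags";
       return NULL;
     }

     int main (void)
     {
       size_t sz;
       char *blob = make_blob (1, 0, 0x42, &sz);
       const char *msg = check_blob (blob, 1, 0, 0x43);
       printf ("%s\n", msg ? msg : "PCH valid");
       free (blob);
       return 0;
     }
*/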
2032
2033 /* Return a message which says that a PCH file was created with a different
2034 setting of OPTION. */
2035
2036 static const char *
2037 pch_option_mismatch (const char *option)
2038 {
2039 return xasprintf (_("created and used with differing settings of '%s'"),
2040 option);
2041 }

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *) data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
        return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
        if (memcmp (data, state.data, state.size) != 0)
          return pch_option_mismatch (cl_options[i].opt_text);
        data += state.size;
        len -= state.size;
      }

  return NULL;
}
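
/* For example, if a PCH file was created with some target-specific
   option (say, a hypothetical -mfoo) but is read back without it, the
   loop above finds the first mismatching cl_option_state payload and
   the user sees a diagnostic built from cl_options[i].opt_text:
   "created and used with differing settings of '-mfoo'".  */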

/* Default version of cstore_mode.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}

/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

/* Default stubs for the pointer-bounds hooks.  Any target that
   provides pointer-bounds support must override all four; the
   defaults must never be reached.  */

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
                             rtx ptr ATTRIBUTE_UNUSED,
                             rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              rtx bounds ATTRIBUTE_UNUSED,
                              rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
                               rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
                                            TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p, NULL);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
                                            TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When the caller aligns a parameter on the stack, any alignment
     beyond MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
     MAX_SUPPORTED_STACK_ALIGNMENT, so apply the same clamp here to
     keep the callee's view consistent with the caller's.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* The va_list pointer is aligned to PARM_BOUNDARY.  If the argument
     actually requires greater alignment, we must perform dynamic
     alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
                               valist_tmp,
                               build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;
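
  /* The two statements above implement the standard round-up idiom
     ap = (ap + boundary - 1) & -boundary.  For example, with
     boundary == 16 and an incoming pointer value of 0x1004, adding 15
     gives 0x1013, and masking with -16 (~0xf) gives 0x1010, the next
     16-byte-aligned address.  */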

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                           rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
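
/* Roughly, for a type T the sequence gimplified above corresponds to
   this C sketch (by-reference passing and PAD_VARARGS_DOWN handling
   omitted; purely illustrative, not the exact expansion):

     char *ap_tmp = ap;
     if (boundary > align)
       ap_tmp = (char *) (((uintptr_t) ap_tmp + boundary - 1) & -boundary);
     T *addr = (T *) ap_tmp;
     ap = ap_tmp + ROUND_UP (sizeof (T), align);
     result = *addr;  */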

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
                             unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

/* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}

/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  enum compiler_param param
    = (predictable_p
       ? PARAM_MAX_RTL_IF_CONVERSION_PREDICTABLE_COST
       : PARAM_MAX_RTL_IF_CONVERSION_UNPREDICTABLE_COST);

  /* If the parameter was set explicitly, use its value; otherwise
     estimate a budget from BRANCH_COST.  */
  if (global_options_set.x_param_values[param])
    return PARAM_VALUE (param);
  else
    return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
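
/* As a worked example: with no explicit --param and a target where
   BRANCH_COST (true, false) is 3, an unpredictable edge gets a budget
   of 3 * COSTS_N_INSNS (3) = 36 cost units, i.e. the cost of roughly
   nine typical instructions, since COSTS_N_INSNS (1) is 4.  */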

/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}

/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}

/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}

/* The default implementation of TARGET_EARLY_REMAT_MODES.  */

void
default_select_early_remat_modes (sbitmap)
{
}

/* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}

/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}

/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  return !active;
}
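
/* To make the contract concrete: a query with ACTIVE == false asks
   whether the target supports __builtin_speculation_safe_value at
   all, while a query with ACTIVE == true asks whether an active
   mitigation sequence is available.  speculation_safe_value_not_needed
   therefore claims support for the builtin while declining active
   mitigation, which is the right answer for a target that never
   executes speculatively.  */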

/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
                                rtx result, rtx val,
                                rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
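
/* From the user's side, this hook backs calls such as

     v = __builtin_speculation_safe_value (v, 0);

   The default expansion above simply copies V and emits a speculation
   barrier; FAILVAL (the 0 here) is only used by targets that override
   this hook with something like a conditional-select sequence.  */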

/* Default implementation of TARGET_REMOVE_EXTRA_CALL_PRESERVED_REGS:
   leave the set of call-preserved registers unchanged.  */

void
default_remove_extra_call_preserved_regs (rtx_insn *, HARD_REG_SET *)
{
}

#include "gt-targhooks.h"