1 /* Expand builtin functions.
2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
36 #include "stringpool.h"
38 #include "tree-ssanames.h"
43 #include "diagnostic-core.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
51 #include "tree-object-size.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
65 #include "stringpool.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
77 struct target_builtins default_target_builtins
;
79 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
82 /* Define the names of the builtin function types and codes. */
83 const char *const built_in_class_names
[BUILT_IN_LAST
]
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
86 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
87 const char * built_in_names
[(int) END_BUILTINS
] =
89 #include "builtins.def"
92 /* Setup an array of builtin_info_type, make sure each element decl is
93 initialized to NULL_TREE. */
94 builtin_info_type builtin_info
[(int)END_BUILTINS
];
96 /* Non-zero if __builtin_constant_p should be folded right away. */
97 bool force_folding_builtin_constant_p
;
99 static int target_char_cast (tree
, char *);
100 static rtx
get_memory_rtx (tree
, tree
);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx
result_vector (int, rtx
);
104 static void expand_builtin_prefetch (tree
);
105 static rtx
expand_builtin_apply_args (void);
106 static rtx
expand_builtin_apply_args_1 (void);
107 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
108 static void expand_builtin_return (rtx
);
109 static enum type_class
type_to_class (tree
);
110 static rtx
expand_builtin_classify_type (tree
);
111 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
112 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
113 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
114 static rtx
expand_builtin_sincos (tree
);
115 static rtx
expand_builtin_cexpi (tree
, rtx
);
116 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
117 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
118 static rtx
expand_builtin_next_arg (void);
119 static rtx
expand_builtin_va_start (tree
);
120 static rtx
expand_builtin_va_end (tree
);
121 static rtx
expand_builtin_va_copy (tree
);
122 static rtx
inline_expand_builtin_string_cmp (tree
, rtx
);
123 static rtx
expand_builtin_strcmp (tree
, rtx
);
124 static rtx
expand_builtin_strncmp (tree
, rtx
, machine_mode
);
125 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
126 static rtx
expand_builtin_memchr (tree
, rtx
);
127 static rtx
expand_builtin_memcpy (tree
, rtx
);
128 static rtx
expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
129 rtx target
, tree exp
,
131 static rtx
expand_builtin_memmove (tree
, rtx
);
132 static rtx
expand_builtin_mempcpy (tree
, rtx
);
133 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
, tree
, memop_ret
);
134 static rtx
expand_builtin_strcat (tree
, rtx
);
135 static rtx
expand_builtin_strcpy (tree
, rtx
);
136 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
);
137 static rtx
expand_builtin_stpcpy (tree
, rtx
, machine_mode
);
138 static rtx
expand_builtin_stpncpy (tree
, rtx
);
139 static rtx
expand_builtin_strncat (tree
, rtx
);
140 static rtx
expand_builtin_strncpy (tree
, rtx
);
141 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
142 static rtx
expand_builtin_memset (tree
, rtx
, machine_mode
);
143 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, machine_mode
, tree
);
144 static rtx
expand_builtin_bzero (tree
);
145 static rtx
expand_builtin_strlen (tree
, rtx
, machine_mode
);
146 static rtx
expand_builtin_strnlen (tree
, rtx
, machine_mode
);
147 static rtx
expand_builtin_alloca (tree
);
148 static rtx
expand_builtin_unop (machine_mode
, tree
, rtx
, rtx
, optab
);
149 static rtx
expand_builtin_frame_address (tree
, tree
);
150 static tree
stabilize_va_list_loc (location_t
, tree
, int);
151 static rtx
expand_builtin_expect (tree
, rtx
);
152 static rtx
expand_builtin_expect_with_probability (tree
, rtx
);
153 static tree
fold_builtin_constant_p (tree
);
154 static tree
fold_builtin_classify_type (tree
);
155 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
156 static tree
fold_builtin_inf (location_t
, tree
, int);
157 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
158 static bool validate_arg (const_tree
, enum tree_code code
);
159 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
160 static rtx
expand_builtin_signbit (tree
, rtx
);
161 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
162 static tree
fold_builtin_isascii (location_t
, tree
);
163 static tree
fold_builtin_toascii (location_t
, tree
);
164 static tree
fold_builtin_isdigit (location_t
, tree
);
165 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
166 static tree
fold_builtin_abs (location_t
, tree
, tree
);
167 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
169 static tree
fold_builtin_0 (location_t
, tree
);
170 static tree
fold_builtin_1 (location_t
, tree
, tree
);
171 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
);
172 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
);
173 static tree
fold_builtin_varargs (location_t
, tree
, tree
*, int);
175 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
176 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
177 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
179 static rtx
expand_builtin_object_size (tree
);
180 static rtx
expand_builtin_memory_chk (tree
, rtx
, machine_mode
,
181 enum built_in_function
);
182 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
183 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
184 static void maybe_emit_free_warning (tree
);
185 static tree
fold_builtin_object_size (tree
, tree
);
187 unsigned HOST_WIDE_INT target_newline
;
188 unsigned HOST_WIDE_INT target_percent
;
189 static unsigned HOST_WIDE_INT target_c
;
190 static unsigned HOST_WIDE_INT target_s
;
191 char target_percent_c
[3];
192 char target_percent_s
[3];
193 char target_percent_s_newline
[4];
194 static tree
do_mpfr_remquo (tree
, tree
, tree
);
195 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
196 static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with one of the reserved builtin
   prefixes: "__builtin_", "__sync_" or "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  static const struct { const char *text; size_t len; } prefixes[]
    = { { "__builtin_", 10 }, { "__sync_", 7 }, { "__atomic_", 9 } };

  for (size_t ix = 0; ix < sizeof prefixes / sizeof prefixes[0]; ix++)
    if (strncmp (name, prefixes[ix].text, prefixes[ix].len) == 0)
      return true;

  return false;
}
212 /* Return true if NODE should be considered for inline expansion regardless
213 of the optimization level. This means whenever a function is invoked with
214 its "internal" name, which normally contains the prefix "__builtin". */
217 called_as_built_in (tree node
)
219 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
220 we want the name used to call the function, not the name it
222 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
223 return is_builtin_name (name
);
226 /* Compute values M and N such that M divides (address of EXP - N) and such
227 that N < M. If these numbers can be determined, store M in alignp and N in
228 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
229 *alignp and any bit-offset to *bitposp.
231 Note that the address (and thus the alignment) computed here is based
232 on the address to which a symbol resolves, whereas DECL_ALIGN is based
233 on the address at which an object is actually located. These two
234 addresses are not always the same. For example, on ARM targets,
235 the address &foo of a Thumb function foo() has the lowest bit set,
236 whereas foo() itself starts on an even address.
238 If ADDR_P is true we are taking the address of the memory reference EXP
239 and thus cannot rely on the access taking place. */
242 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
243 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
245 poly_int64 bitsize
, bitpos
;
248 int unsignedp
, reversep
, volatilep
;
249 unsigned int align
= BITS_PER_UNIT
;
250 bool known_alignment
= false;
252 /* Get the innermost object and the constant (bitpos) and possibly
253 variable (offset) offset of the access. */
254 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
255 &unsignedp
, &reversep
, &volatilep
);
257 /* Extract alignment information from the innermost object and
258 possibly adjust bitpos and offset. */
259 if (TREE_CODE (exp
) == FUNCTION_DECL
)
261 /* Function addresses can encode extra information besides their
262 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
263 allows the low bit to be used as a virtual bit, we know
264 that the address itself must be at least 2-byte aligned. */
265 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
266 align
= 2 * BITS_PER_UNIT
;
268 else if (TREE_CODE (exp
) == LABEL_DECL
)
270 else if (TREE_CODE (exp
) == CONST_DECL
)
272 /* The alignment of a CONST_DECL is determined by its initializer. */
273 exp
= DECL_INITIAL (exp
);
274 align
= TYPE_ALIGN (TREE_TYPE (exp
));
275 if (CONSTANT_CLASS_P (exp
))
276 align
= targetm
.constant_alignment (exp
, align
);
278 known_alignment
= true;
280 else if (DECL_P (exp
))
282 align
= DECL_ALIGN (exp
);
283 known_alignment
= true;
285 else if (TREE_CODE (exp
) == INDIRECT_REF
286 || TREE_CODE (exp
) == MEM_REF
287 || TREE_CODE (exp
) == TARGET_MEM_REF
)
289 tree addr
= TREE_OPERAND (exp
, 0);
291 unsigned HOST_WIDE_INT ptr_bitpos
;
292 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
294 /* If the address is explicitely aligned, handle that. */
295 if (TREE_CODE (addr
) == BIT_AND_EXPR
296 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
298 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
299 ptr_bitmask
*= BITS_PER_UNIT
;
300 align
= least_bit_hwi (ptr_bitmask
);
301 addr
= TREE_OPERAND (addr
, 0);
305 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
306 align
= MAX (ptr_align
, align
);
308 /* Re-apply explicit alignment to the bitpos. */
309 ptr_bitpos
&= ptr_bitmask
;
311 /* The alignment of the pointer operand in a TARGET_MEM_REF
312 has to take the variable offset parts into account. */
313 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
317 unsigned HOST_WIDE_INT step
= 1;
319 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
320 align
= MIN (align
, least_bit_hwi (step
) * BITS_PER_UNIT
);
322 if (TMR_INDEX2 (exp
))
323 align
= BITS_PER_UNIT
;
324 known_alignment
= false;
327 /* When EXP is an actual memory reference then we can use
328 TYPE_ALIGN of a pointer indirection to derive alignment.
329 Do so only if get_pointer_alignment_1 did not reveal absolute
330 alignment knowledge and if using that alignment would
331 improve the situation. */
333 if (!addr_p
&& !known_alignment
334 && (talign
= min_align_of_type (TREE_TYPE (exp
)) * BITS_PER_UNIT
)
339 /* Else adjust bitpos accordingly. */
340 bitpos
+= ptr_bitpos
;
341 if (TREE_CODE (exp
) == MEM_REF
342 || TREE_CODE (exp
) == TARGET_MEM_REF
)
343 bitpos
+= mem_ref_offset (exp
).force_shwi () * BITS_PER_UNIT
;
346 else if (TREE_CODE (exp
) == STRING_CST
)
348 /* STRING_CST are the only constant objects we allow to be not
349 wrapped inside a CONST_DECL. */
350 align
= TYPE_ALIGN (TREE_TYPE (exp
));
351 if (CONSTANT_CLASS_P (exp
))
352 align
= targetm
.constant_alignment (exp
, align
);
354 known_alignment
= true;
357 /* If there is a non-constant offset part extract the maximum
358 alignment that can prevail. */
361 unsigned int trailing_zeros
= tree_ctz (offset
);
362 if (trailing_zeros
< HOST_BITS_PER_INT
)
364 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
366 align
= MIN (align
, inner
);
370 /* Account for the alignment of runtime coefficients, so that the constant
371 bitpos is guaranteed to be accurate. */
372 unsigned int alt_align
= ::known_alignment (bitpos
- bitpos
.coeffs
[0]);
373 if (alt_align
!= 0 && alt_align
< align
)
376 known_alignment
= false;
380 *bitposp
= bitpos
.coeffs
[0] & (align
- 1);
381 return known_alignment
;
384 /* For a memory reference expression EXP compute values M and N such that M
385 divides (&EXP - N) and such that N < M. If these numbers can be determined,
386 store M in alignp and N in *BITPOSP and return true. Otherwise return false
387 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
390 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
391 unsigned HOST_WIDE_INT
*bitposp
)
393 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
396 /* Return the alignment in bits of EXP, an object. */
399 get_object_alignment (tree exp
)
401 unsigned HOST_WIDE_INT bitpos
= 0;
404 get_object_alignment_1 (exp
, &align
, &bitpos
);
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
410 align
= least_bit_hwi (bitpos
);
414 /* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
419 If EXP is not a pointer, false is returned too. */
422 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
423 unsigned HOST_WIDE_INT
*bitposp
)
427 if (TREE_CODE (exp
) == ADDR_EXPR
)
428 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
429 alignp
, bitposp
, true);
430 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
433 unsigned HOST_WIDE_INT bitpos
;
434 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
436 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
437 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
440 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
441 if (trailing_zeros
< HOST_BITS_PER_INT
)
443 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
445 align
= MIN (align
, inner
);
449 *bitposp
= bitpos
& (align
- 1);
452 else if (TREE_CODE (exp
) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp
)))
455 unsigned int ptr_align
, ptr_misalign
;
456 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
458 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
460 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
461 *alignp
= ptr_align
* BITS_PER_UNIT
;
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
465 *alignp
= 1u << (HOST_BITS_PER_INT
- 1);
466 /* We cannot really tell whether this result is an approximation. */
472 *alignp
= BITS_PER_UNIT
;
476 else if (TREE_CODE (exp
) == INTEGER_CST
)
478 *alignp
= BIGGEST_ALIGNMENT
;
479 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
480 & (BIGGEST_ALIGNMENT
- 1));
485 *alignp
= BITS_PER_UNIT
;
489 /* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
497 get_pointer_alignment (tree exp
)
499 unsigned HOST_WIDE_INT bitpos
= 0;
502 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
508 align
= least_bit_hwi (bitpos
);
513 /* Return the number of leading non-zero elements in the sequence
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518 string_length (const void *ptr
, unsigned eltsize
, unsigned maxelts
)
520 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
526 /* Optimize the common case of plain char. */
527 for (n
= 0; n
< maxelts
; n
++)
529 const char *elt
= (const char*) ptr
+ n
;
536 for (n
= 0; n
< maxelts
; n
++)
538 const char *elt
= (const char*) ptr
+ n
* eltsize
;
539 if (!memcmp (elt
, "\0\0\0\0", eltsize
))
546 /* For a call at LOC to a function FN that expects a string in the argument
547 ARG, issue a diagnostic due to it being a called with an argument
548 declared at NONSTR that is a character array with no terminating NUL. */
551 warn_string_no_nul (location_t loc
, const char *fn
, tree arg
, tree decl
)
553 if (TREE_NO_WARNING (arg
))
556 loc
= expansion_point_location_if_in_system_header (loc
);
558 if (warning_at (loc
, OPT_Wstringop_overflow_
,
559 "%qs argument missing terminating nul", fn
))
561 inform (DECL_SOURCE_LOCATION (decl
),
562 "referenced argument declared here");
563 TREE_NO_WARNING (arg
) = 1;
567 /* If EXP refers to an unterminated constant character array return
568 the declaration of the object of which the array is a member or
569 element and if SIZE is not null, set *SIZE to the size of
570 the unterminated array and set *EXACT if the size is exact or
571 clear it otherwise. Otherwise return null. */
574 unterminated_array (tree exp
, tree
*size
/* = NULL */, bool *exact
/* = NULL */)
576 /* C_STRLEN will return NULL and set DECL in the info
577 structure if EXP references a unterminated array. */
578 c_strlen_data lendata
= { };
579 tree len
= c_strlen (exp
, 1, &lendata
);
580 if (len
== NULL_TREE
&& lendata
.minlen
&& lendata
.decl
)
584 len
= lendata
.minlen
;
587 /* Constant offsets are already accounted for in LENDATA.MINLEN,
588 but not in a SSA_NAME + CST expression. */
589 if (TREE_CODE (lendata
.off
) == INTEGER_CST
)
591 else if (TREE_CODE (lendata
.off
) == PLUS_EXPR
592 && TREE_CODE (TREE_OPERAND (lendata
.off
, 1)) == INTEGER_CST
)
594 /* Subtract the offset from the size of the array. */
596 tree temp
= TREE_OPERAND (lendata
.off
, 1);
597 temp
= fold_convert (ssizetype
, temp
);
598 len
= fold_build2 (MINUS_EXPR
, ssizetype
, len
, temp
);
614 /* Compute the length of a null-terminated character string or wide
615 character string handling character sizes of 1, 2, and 4 bytes.
616 TREE_STRING_LENGTH is not the right way because it evaluates to
617 the size of the character array in bytes (as opposed to characters)
618 and because it can contain a zero byte in the middle.
620 ONLY_VALUE should be nonzero if the result is not going to be emitted
621 into the instruction stream and zero if it is going to be expanded.
622 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
623 is returned, otherwise NULL, since
624 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
625 evaluate the side-effects.
627 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
628 accesses. Note that this implies the result is not going to be emitted
629 into the instruction stream.
631 Additional information about the string accessed may be recorded
632 in DATA. For example, if ARG references an unterminated string,
633 then the declaration will be stored in the DECL field. If the
634 length of the unterminated string can be determined, it'll be
635 stored in the LEN field. Note this length could well be different
636 than what a C strlen call would return.
638 ELTSIZE is 1 for normal single byte character strings, and 2 or
639 4 for wide characer strings. ELTSIZE is by default 1.
641 The value returned is of type `ssizetype'. */
644 c_strlen (tree arg
, int only_value
, c_strlen_data
*data
, unsigned eltsize
)
646 /* If we were not passed a DATA pointer, then get one to a local
647 structure. That avoids having to check DATA for NULL before
648 each time we want to use it. */
649 c_strlen_data local_strlen_data
= { };
651 data
= &local_strlen_data
;
653 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
655 tree src
= STRIP_NOPS (arg
);
656 if (TREE_CODE (src
) == COND_EXPR
657 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
661 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
662 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
, data
, eltsize
);
663 if (tree_int_cst_equal (len1
, len2
))
667 if (TREE_CODE (src
) == COMPOUND_EXPR
668 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
669 return c_strlen (TREE_OPERAND (src
, 1), only_value
, data
, eltsize
);
671 location_t loc
= EXPR_LOC_OR_LOC (src
, input_location
);
673 /* Offset from the beginning of the string in bytes. */
677 src
= string_constant (src
, &byteoff
, &memsize
, &decl
);
681 /* Determine the size of the string element. */
682 if (eltsize
!= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src
)))))
685 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
686 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
687 in case the latter is less than the size of the array, such as when
688 SRC refers to a short string literal used to initialize a large array.
689 In that case, the elements of the array after the terminating NUL are
691 HOST_WIDE_INT strelts
= TREE_STRING_LENGTH (src
);
692 strelts
= strelts
/ eltsize
;
694 if (!tree_fits_uhwi_p (memsize
))
697 HOST_WIDE_INT maxelts
= tree_to_uhwi (memsize
) / eltsize
;
699 /* PTR can point to the byte representation of any string type, including
700 char* and wchar_t*. */
701 const char *ptr
= TREE_STRING_POINTER (src
);
703 if (byteoff
&& TREE_CODE (byteoff
) != INTEGER_CST
)
705 /* The code below works only for single byte character types. */
709 /* If the string has an internal NUL character followed by any
710 non-NUL characters (e.g., "foo\0bar"), we can't compute
711 the offset to the following NUL if we don't know where to
712 start searching for it. */
713 unsigned len
= string_length (ptr
, eltsize
, strelts
);
715 /* Return when an embedded null character is found or none at all.
716 In the latter case, set the DECL/LEN field in the DATA structure
717 so that callers may examine them. */
718 if (len
+ 1 < strelts
)
720 else if (len
>= maxelts
)
724 data
->minlen
= ssize_int (len
);
728 /* For empty strings the result should be zero. */
730 return ssize_int (0);
732 /* We don't know the starting offset, but we do know that the string
733 has no internal zero bytes. If the offset falls within the bounds
734 of the string subtract the offset from the length of the string,
735 and return that. Otherwise the length is zero. Take care to
736 use SAVE_EXPR in case the OFFSET has side-effects. */
737 tree offsave
= TREE_SIDE_EFFECTS (byteoff
) ? save_expr (byteoff
)
739 offsave
= fold_convert_loc (loc
, sizetype
, offsave
);
740 tree condexp
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, offsave
,
742 tree lenexp
= fold_build2_loc (loc
, MINUS_EXPR
, sizetype
, size_int (len
),
744 lenexp
= fold_convert_loc (loc
, ssizetype
, lenexp
);
745 return fold_build3_loc (loc
, COND_EXPR
, ssizetype
, condexp
, lenexp
,
746 build_zero_cst (ssizetype
));
749 /* Offset from the beginning of the string in elements. */
750 HOST_WIDE_INT eltoff
;
752 /* We have a known offset into the string. Start searching there for
753 a null character if we can represent it as a single HOST_WIDE_INT. */
756 else if (! tree_fits_uhwi_p (byteoff
) || tree_to_uhwi (byteoff
) % eltsize
)
759 eltoff
= tree_to_uhwi (byteoff
) / eltsize
;
761 /* If the offset is known to be out of bounds, warn, and call strlen at
763 if (eltoff
< 0 || eltoff
>= maxelts
)
765 /* Suppress multiple warnings for propagated constant strings. */
767 && !TREE_NO_WARNING (arg
)
768 && warning_at (loc
, OPT_Warray_bounds
,
769 "offset %qwi outside bounds of constant string",
773 inform (DECL_SOURCE_LOCATION (decl
), "%qE declared here", decl
);
774 TREE_NO_WARNING (arg
) = 1;
779 /* If eltoff is larger than strelts but less than maxelts the
780 string length is zero, since the excess memory will be zero. */
781 if (eltoff
> strelts
)
782 return ssize_int (0);
784 /* Use strlen to search for the first zero byte. Since any strings
785 constructed with build_string will have nulls appended, we win even
786 if we get handed something like (char[4])"abcd".
788 Since ELTOFF is our starting index into the string, no further
789 calculation is needed. */
790 unsigned len
= string_length (ptr
+ eltoff
* eltsize
, eltsize
,
793 /* Don't know what to return if there was no zero termination.
794 Ideally this would turn into a gcc_checking_assert over time.
795 Set DECL/LEN so callers can examine them. */
796 if (len
>= maxelts
- eltoff
)
800 data
->minlen
= ssize_int (len
);
804 return ssize_int (len
);
807 /* Return a constant integer corresponding to target reading
808 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
809 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
810 are assumed to be zero, otherwise it reads as many characters
814 c_readstr (const char *str
, scalar_int_mode mode
,
815 bool null_terminated_p
/*=true*/)
819 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
821 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
822 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
823 / HOST_BITS_PER_WIDE_INT
;
825 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
826 for (i
= 0; i
< len
; i
++)
830 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
833 if (WORDS_BIG_ENDIAN
)
834 j
= GET_MODE_SIZE (mode
) - i
- 1;
835 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
836 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
837 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
840 if (ch
|| !null_terminated_p
)
841 ch
= (unsigned char) str
[i
];
842 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
845 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
846 return immed_wide_int_const (c
, mode
);
849 /* Cast a target constant CST to target CHAR and if that value fits into
850 host char type, return zero and put that value into variable pointed to by
854 target_char_cast (tree cst
, char *p
)
856 unsigned HOST_WIDE_INT val
, hostval
;
858 if (TREE_CODE (cst
) != INTEGER_CST
859 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
862 /* Do not care if it fits or not right here. */
863 val
= TREE_INT_CST_LOW (cst
);
865 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
866 val
&= (HOST_WIDE_INT_1U
<< CHAR_TYPE_SIZE
) - 1;
869 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
870 hostval
&= (HOST_WIDE_INT_1U
<< HOST_BITS_PER_CHAR
) - 1;
879 /* Similar to save_expr, but assumes that arbitrary code is not executed
880 in between the multiple evaluations. In particular, we assume that a
881 non-addressable local variable will not be modified. */
884 builtin_save_expr (tree exp
)
886 if (TREE_CODE (exp
) == SSA_NAME
887 || (TREE_ADDRESSABLE (exp
) == 0
888 && (TREE_CODE (exp
) == PARM_DECL
889 || (VAR_P (exp
) && !TREE_STATIC (exp
)))))
892 return save_expr (exp
);
895 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
896 times to get the address of either a higher stack frame, or a return
897 address located within it (depending on FNDECL_CODE). */
900 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
903 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
906 /* For a zero count with __builtin_return_address, we don't care what
907 frame address we return, because target-specific definitions will
908 override us. Therefore frame pointer elimination is OK, and using
909 the soft frame pointer is OK.
911 For a nonzero count, or a zero count with __builtin_frame_address,
912 we require a stable offset from the current frame pointer to the
913 previous one, so we must use the hard frame pointer, and
914 we must disable frame pointer elimination. */
915 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
916 tem
= frame_pointer_rtx
;
919 tem
= hard_frame_pointer_rtx
;
921 /* Tell reload not to eliminate the frame pointer. */
922 crtl
->accesses_prior_frames
= 1;
927 SETUP_FRAME_ADDRESSES ();
929 /* On the SPARC, the return address is not in the frame, it is in a
930 register. There is no way to access it off of the current frame
931 pointer, but it can be accessed off the previous frame pointer by
932 reading the value from the register window save area. */
933 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
936 /* Scan back COUNT frames to the specified frame. */
937 for (i
= 0; i
< count
; i
++)
939 /* Assume the dynamic chain pointer is in the word that the
940 frame address points to, unless otherwise specified. */
941 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
942 tem
= memory_address (Pmode
, tem
);
943 tem
= gen_frame_mem (Pmode
, tem
);
944 tem
= copy_to_reg (tem
);
947 /* For __builtin_frame_address, return what we've got. But, on
948 the SPARC for example, we may have to add a bias. */
949 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
950 return FRAME_ADDR_RTX (tem
);
952 /* For __builtin_return_address, get the return address from that frame. */
953 #ifdef RETURN_ADDR_RTX
954 tem
= RETURN_ADDR_RTX (count
, tem
);
956 tem
= memory_address (Pmode
,
957 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
958 tem
= gen_frame_mem (Pmode
, tem
);
963 /* Alias set used for setjmp buffer. */
964 static alias_set_type setjmp_alias_set
= -1;
966 /* Construct the leading half of a __builtin_setjmp call. Control will
967 return to RECEIVER_LABEL. This is also called directly by the SJLJ
968 exception handling code. */
971 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
973 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
977 if (setjmp_alias_set
== -1)
978 setjmp_alias_set
= new_alias_set ();
980 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
982 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
984 /* We store the frame pointer and the address of receiver_label in
985 the buffer and use the rest of it for the stack save area, which
986 is machine-dependent. */
988 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
989 set_mem_alias_set (mem
, setjmp_alias_set
);
990 emit_move_insn (mem
, hard_frame_pointer_rtx
);
992 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
993 GET_MODE_SIZE (Pmode
))),
994 set_mem_alias_set (mem
, setjmp_alias_set
);
996 emit_move_insn (validize_mem (mem
),
997 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
999 stack_save
= gen_rtx_MEM (sa_mode
,
1000 plus_constant (Pmode
, buf_addr
,
1001 2 * GET_MODE_SIZE (Pmode
)));
1002 set_mem_alias_set (stack_save
, setjmp_alias_set
);
1003 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1005 /* If there is further processing to do, do it. */
1006 if (targetm
.have_builtin_setjmp_setup ())
1007 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
1009 /* We have a nonlocal label. */
1010 cfun
->has_nonlocal_label
= 1;
1013 /* Construct the trailing part of a __builtin_setjmp call. This is
1014 also called directly by the SJLJ exception handling code.
1015 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
1018 expand_builtin_setjmp_receiver (rtx receiver_label
)
1022 /* Mark the FP as used when we get here, so we have to make sure it's
1023 marked as used by this function. */
1024 emit_use (hard_frame_pointer_rtx
);
1026 /* Mark the static chain as clobbered here so life information
1027 doesn't get messed up for it. */
1028 chain
= rtx_for_static_chain (current_function_decl
, true);
1029 if (chain
&& REG_P (chain
))
1030 emit_clobber (chain
);
1032 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
1034 /* If the argument pointer can be eliminated in favor of the
1035 frame pointer, we don't need to restore it. We assume here
1036 that if such an elimination is present, it can always be used.
1037 This is the case on all known machines; if we don't make this
1038 assumption, we do unnecessary saving on many machines. */
1040 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
1042 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
1043 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
1044 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
1047 if (i
== ARRAY_SIZE (elim_regs
))
1049 /* Now restore our arg pointer from the address at which it
1050 was saved in our stack frame. */
1051 emit_move_insn (crtl
->args
.internal_arg_pointer
,
1052 copy_to_reg (get_arg_pointer_save_area ()));
1056 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
1057 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
1058 else if (targetm
.have_nonlocal_goto_receiver ())
1059 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
1063 /* We must not allow the code we just generated to be reordered by
1064 scheduling. Specifically, the update of the frame pointer must
1065 happen immediately, not later. */
1066 emit_insn (gen_blockage ());
1069 /* __builtin_longjmp is passed a pointer to an array of five words (not
1070 all will be used on all machines). It operates similarly to the C
1071 library function of the same name, but is more efficient. Much of
1072 the code below is copied from the handling of non-local gotos. */
1075 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
1078 rtx_insn
*insn
, *last
;
1079 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1081 /* DRAP is needed for stack realign if longjmp is expanded to current
1083 if (SUPPORTS_STACK_ALIGNMENT
)
1084 crtl
->need_drap
= true;
1086 if (setjmp_alias_set
== -1)
1087 setjmp_alias_set
= new_alias_set ();
1089 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1091 buf_addr
= force_reg (Pmode
, buf_addr
);
1093 /* We require that the user must pass a second argument of 1, because
1094 that is what builtin_setjmp will return. */
1095 gcc_assert (value
== const1_rtx
);
1097 last
= get_last_insn ();
1098 if (targetm
.have_builtin_longjmp ())
1099 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
1102 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
1103 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
1104 GET_MODE_SIZE (Pmode
)));
1106 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
1107 2 * GET_MODE_SIZE (Pmode
)));
1108 set_mem_alias_set (fp
, setjmp_alias_set
);
1109 set_mem_alias_set (lab
, setjmp_alias_set
);
1110 set_mem_alias_set (stack
, setjmp_alias_set
);
1112 /* Pick up FP, label, and SP from the block and jump. This code is
1113 from expand_goto in stmt.c; see there for detailed comments. */
1114 if (targetm
.have_nonlocal_goto ())
1115 /* We have to pass a value to the nonlocal_goto pattern that will
1116 get copied into the static_chain pointer, but it does not matter
1117 what that value is, because builtin_setjmp does not use it. */
1118 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1121 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1122 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1124 lab
= copy_to_reg (lab
);
1126 /* Restore the frame pointer and stack pointer. We must use a
1127 temporary since the setjmp buffer may be a local. */
1128 fp
= copy_to_reg (fp
);
1129 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1131 /* Ensure the frame pointer move is not optimized. */
1132 emit_insn (gen_blockage ());
1133 emit_clobber (hard_frame_pointer_rtx
);
1134 emit_clobber (frame_pointer_rtx
);
1135 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1137 emit_use (hard_frame_pointer_rtx
);
1138 emit_use (stack_pointer_rtx
);
1139 emit_indirect_jump (lab
);
1143 /* Search backwards and mark the jump insn as a non-local goto.
1144 Note that this precludes the use of __builtin_longjmp to a
1145 __builtin_setjmp target in the same function. However, we've
1146 already cautioned the user that these functions are for
1147 internal exception handling use only. */
1148 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1150 gcc_assert (insn
!= last
);
1154 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1157 else if (CALL_P (insn
))
1163 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1165 return (iter
->i
< iter
->n
);
1168 /* This function validates the types of a function call argument list
1169 against a specified list of tree_codes. If the last specifier is a 0,
1170 that represents an ellipsis, otherwise the last specifier must be a
1174 validate_arglist (const_tree callexpr
, ...)
1176 enum tree_code code
;
1179 const_call_expr_arg_iterator iter
;
1182 va_start (ap
, callexpr
);
1183 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1185 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1186 tree fn
= CALL_EXPR_FN (callexpr
);
1187 bitmap argmap
= get_nonnull_args (TREE_TYPE (TREE_TYPE (fn
)));
1189 for (unsigned argno
= 1; ; ++argno
)
1191 code
= (enum tree_code
) va_arg (ap
, int);
1196 /* This signifies an ellipses, any further arguments are all ok. */
1200 /* This signifies an endlink, if no arguments remain, return
1201 true, otherwise return false. */
1202 res
= !more_const_call_expr_args_p (&iter
);
1205 /* The actual argument must be nonnull when either the whole
1206 called function has been declared nonnull, or when the formal
1207 argument corresponding to the actual argument has been. */
1209 && (bitmap_empty_p (argmap
) || bitmap_bit_p (argmap
, argno
)))
1211 arg
= next_const_call_expr_arg (&iter
);
1212 if (!validate_arg (arg
, code
) || integer_zerop (arg
))
1218 /* If no parameters remain or the parameter's code does not
1219 match the specified code, return false. Otherwise continue
1220 checking any remaining arguments. */
1221 arg
= next_const_call_expr_arg (&iter
);
1222 if (!validate_arg (arg
, code
))
1228 /* We need gotos here since we can only have one VA_CLOSE in a
1233 BITMAP_FREE (argmap
);
1238 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1239 and the address of the save area. */
1242 expand_builtin_nonlocal_goto (tree exp
)
1244 tree t_label
, t_save_area
;
1245 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1248 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1251 t_label
= CALL_EXPR_ARG (exp
, 0);
1252 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1254 r_label
= expand_normal (t_label
);
1255 r_label
= convert_memory_address (Pmode
, r_label
);
1256 r_save_area
= expand_normal (t_save_area
);
1257 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1258 /* Copy the address of the save location to a register just in case it was
1259 based on the frame pointer. */
1260 r_save_area
= copy_to_reg (r_save_area
);
1261 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1262 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1263 plus_constant (Pmode
, r_save_area
,
1264 GET_MODE_SIZE (Pmode
)));
1266 crtl
->has_nonlocal_goto
= 1;
1268 /* ??? We no longer need to pass the static chain value, afaik. */
1269 if (targetm
.have_nonlocal_goto ())
1270 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1273 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1274 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1276 r_label
= copy_to_reg (r_label
);
1278 /* Restore the frame pointer and stack pointer. We must use a
1279 temporary since the setjmp buffer may be a local. */
1280 r_fp
= copy_to_reg (r_fp
);
1281 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1283 /* Ensure the frame pointer move is not optimized. */
1284 emit_insn (gen_blockage ());
1285 emit_clobber (hard_frame_pointer_rtx
);
1286 emit_clobber (frame_pointer_rtx
);
1287 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1289 /* USE of hard_frame_pointer_rtx added for consistency;
1290 not clear if really needed. */
1291 emit_use (hard_frame_pointer_rtx
);
1292 emit_use (stack_pointer_rtx
);
1294 /* If the architecture is using a GP register, we must
1295 conservatively assume that the target function makes use of it.
1296 The prologue of functions with nonlocal gotos must therefore
1297 initialize the GP register to the appropriate value, and we
1298 must then make sure that this value is live at the point
1299 of the jump. (Note that this doesn't necessarily apply
1300 to targets with a nonlocal_goto pattern; they are free
1301 to implement it in their own way. Note also that this is
1302 a no-op if the GP register is a global invariant.) */
1303 unsigned regnum
= PIC_OFFSET_TABLE_REGNUM
;
1304 if (regnum
!= INVALID_REGNUM
&& fixed_regs
[regnum
])
1305 emit_use (pic_offset_table_rtx
);
1307 emit_indirect_jump (r_label
);
1310 /* Search backwards to the jump insn and mark it as a
1312 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1316 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1319 else if (CALL_P (insn
))
1326 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1327 (not all will be used on all machines) that was passed to __builtin_setjmp.
1328 It updates the stack pointer in that block to the current value. This is
1329 also called directly by the SJLJ exception handling code. */
1332 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1334 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1335 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1337 = gen_rtx_MEM (sa_mode
,
1340 plus_constant (Pmode
, buf_addr
,
1341 2 * GET_MODE_SIZE (Pmode
))));
1343 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1346 /* Expand a call to __builtin_prefetch. For a target that does not support
1347 data prefetch, evaluate the memory address argument in case it has side
1351 expand_builtin_prefetch (tree exp
)
1353 tree arg0
, arg1
, arg2
;
1357 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1360 arg0
= CALL_EXPR_ARG (exp
, 0);
1362 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1363 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1365 nargs
= call_expr_nargs (exp
);
1367 arg1
= CALL_EXPR_ARG (exp
, 1);
1369 arg1
= integer_zero_node
;
1371 arg2
= CALL_EXPR_ARG (exp
, 2);
1373 arg2
= integer_three_node
;
1375 /* Argument 0 is an address. */
1376 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1378 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1379 if (TREE_CODE (arg1
) != INTEGER_CST
)
1381 error ("second argument to %<__builtin_prefetch%> must be a constant");
1382 arg1
= integer_zero_node
;
1384 op1
= expand_normal (arg1
);
1385 /* Argument 1 must be either zero or one. */
1386 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1388 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1393 /* Argument 2 (locality) must be a compile-time constant int. */
1394 if (TREE_CODE (arg2
) != INTEGER_CST
)
1396 error ("third argument to %<__builtin_prefetch%> must be a constant");
1397 arg2
= integer_zero_node
;
1399 op2
= expand_normal (arg2
);
1400 /* Argument 2 must be 0, 1, 2, or 3. */
1401 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1403 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1407 if (targetm
.have_prefetch ())
1409 class expand_operand ops
[3];
1411 create_address_operand (&ops
[0], op0
);
1412 create_integer_operand (&ops
[1], INTVAL (op1
));
1413 create_integer_operand (&ops
[2], INTVAL (op2
));
1414 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1418 /* Don't do anything with direct references to volatile memory, but
1419 generate code to handle other side effects. */
1420 if (!MEM_P (op0
) && side_effects_p (op0
))
1424 /* Get a MEM rtx for expression EXP which is the address of an operand
1425 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1426 the maximum length of the block of memory that might be accessed or
1430 get_memory_rtx (tree exp
, tree len
)
1432 tree orig_exp
= exp
;
1435 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1436 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1437 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1438 exp
= TREE_OPERAND (exp
, 0);
1440 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1441 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1443 /* Get an expression we can use to find the attributes to assign to MEM.
1444 First remove any nops. */
1445 while (CONVERT_EXPR_P (exp
)
1446 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1447 exp
= TREE_OPERAND (exp
, 0);
1449 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1450 (as builtin stringops may alias with anything). */
1451 exp
= fold_build2 (MEM_REF
,
1452 build_array_type (char_type_node
,
1453 build_range_type (sizetype
,
1454 size_one_node
, len
)),
1455 exp
, build_int_cst (ptr_type_node
, 0));
1457 /* If the MEM_REF has no acceptable address, try to get the base object
1458 from the original address we got, and build an all-aliasing
1459 unknown-sized access to that one. */
1460 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1461 set_mem_attributes (mem
, exp
, 0);
1462 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1463 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1466 exp
= build_fold_addr_expr (exp
);
1467 exp
= fold_build2 (MEM_REF
,
1468 build_array_type (char_type_node
,
1469 build_range_type (sizetype
,
1472 exp
, build_int_cst (ptr_type_node
, 0));
1473 set_mem_attributes (mem
, exp
, 0);
1475 set_mem_alias_set (mem
, 0);
/* Built-in functions to perform an untyped call and return.  */

/* Per-target caches of the argument/result register modes computed by
   apply_args_size and apply_result_size below.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1486 /* Return the size required for the block returned by __builtin_apply_args,
1487 and initialize apply_args_mode. */
1490 apply_args_size (void)
1492 static int size
= -1;
1496 /* The values computed by this function never change. */
1499 /* The first value is the incoming arg-pointer. */
1500 size
= GET_MODE_SIZE (Pmode
);
1502 /* The second value is the structure value address unless this is
1503 passed as an "invisible" first argument. */
1504 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1505 size
+= GET_MODE_SIZE (Pmode
);
1507 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1508 if (FUNCTION_ARG_REGNO_P (regno
))
1510 fixed_size_mode mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1512 gcc_assert (mode
!= VOIDmode
);
1514 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1515 if (size
% align
!= 0)
1516 size
= CEIL (size
, align
) * align
;
1517 size
+= GET_MODE_SIZE (mode
);
1518 apply_args_mode
[regno
] = mode
;
1522 apply_args_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1528 /* Return the size required for the block returned by __builtin_apply,
1529 and initialize apply_result_mode. */
1532 apply_result_size (void)
1534 static int size
= -1;
1537 /* The values computed by this function never change. */
1542 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1543 if (targetm
.calls
.function_value_regno_p (regno
))
1545 fixed_size_mode mode
= targetm
.calls
.get_raw_result_mode (regno
);
1547 gcc_assert (mode
!= VOIDmode
);
1549 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1550 if (size
% align
!= 0)
1551 size
= CEIL (size
, align
) * align
;
1552 size
+= GET_MODE_SIZE (mode
);
1553 apply_result_mode
[regno
] = mode
;
1556 apply_result_mode
[regno
] = as_a
<fixed_size_mode
> (VOIDmode
);
1558 /* Allow targets that use untyped_call and untyped_return to override
1559 the size so that machine-specific information can be stored here. */
1560 #ifdef APPLY_RESULT_SIZE
1561 size
= APPLY_RESULT_SIZE
;
1567 /* Create a vector describing the result block RESULT. If SAVEP is true,
1568 the result block is used to save the values; otherwise it is used to
1569 restore the values. */
1572 result_vector (int savep
, rtx result
)
1574 int regno
, size
, align
, nelts
;
1575 fixed_size_mode mode
;
1577 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1580 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1581 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1583 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1584 if (size
% align
!= 0)
1585 size
= CEIL (size
, align
) * align
;
1586 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1587 mem
= adjust_address (result
, mode
, size
);
1588 savevec
[nelts
++] = (savep
1589 ? gen_rtx_SET (mem
, reg
)
1590 : gen_rtx_SET (reg
, mem
));
1591 size
+= GET_MODE_SIZE (mode
);
1593 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1596 /* Save the state required to perform an untyped call with the same
1597 arguments as were passed to the current function. */
1600 expand_builtin_apply_args_1 (void)
1603 int size
, align
, regno
;
1604 fixed_size_mode mode
;
1605 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1607 /* Create a block where the arg-pointer, structure value address,
1608 and argument registers can be saved. */
1609 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1611 /* Walk past the arg-pointer and structure value address. */
1612 size
= GET_MODE_SIZE (Pmode
);
1613 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1614 size
+= GET_MODE_SIZE (Pmode
);
1616 /* Save each register used in calling a function to the block. */
1617 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1618 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1620 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1621 if (size
% align
!= 0)
1622 size
= CEIL (size
, align
) * align
;
1624 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1626 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1627 size
+= GET_MODE_SIZE (mode
);
1630 /* Save the arg pointer to the block. */
1631 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1632 /* We need the pointer as the caller actually passed them to us, not
1633 as we might have pretended they were passed. Make sure it's a valid
1634 operand, as emit_move_insn isn't expected to handle a PLUS. */
1635 if (STACK_GROWS_DOWNWARD
)
1637 = force_operand (plus_constant (Pmode
, tem
,
1638 crtl
->args
.pretend_args_size
),
1640 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1642 size
= GET_MODE_SIZE (Pmode
);
1644 /* Save the structure value address unless this is passed as an
1645 "invisible" first argument. */
1646 if (struct_incoming_value
)
1647 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1648 copy_to_reg (struct_incoming_value
));
1650 /* Return the address of the block. */
1651 return copy_addr_to_reg (XEXP (registers
, 0));
1654 /* __builtin_apply_args returns block of memory allocated on
1655 the stack into which is stored the arg pointer, structure
1656 value address, static chain, and all the registers that might
1657 possibly be used in performing a function call. The code is
1658 moved to the start of the function so the incoming values are
1662 expand_builtin_apply_args (void)
1664 /* Don't do __builtin_apply_args more than once in a function.
1665 Save the result of the first call and reuse it. */
1666 if (apply_args_value
!= 0)
1667 return apply_args_value
;
1669 /* When this function is called, it means that registers must be
1670 saved on entry to this function. So we migrate the
1671 call to the first insn of this function. */
1675 temp
= expand_builtin_apply_args_1 ();
1676 rtx_insn
*seq
= get_insns ();
1679 apply_args_value
= temp
;
1681 /* Put the insns after the NOTE that starts the function.
1682 If this is inside a start_sequence, make the outer-level insn
1683 chain current, so the code is placed at the start of the
1684 function. If internal_arg_pointer is a non-virtual pseudo,
1685 it needs to be placed after the function that initializes
1687 push_topmost_sequence ();
1688 if (REG_P (crtl
->args
.internal_arg_pointer
)
1689 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1690 emit_insn_before (seq
, parm_birth_insn
);
1692 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1693 pop_topmost_sequence ();
1698 /* Perform an untyped call and save the state required to perform an
1699 untyped return of whatever value was returned by the given function. */
1702 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1704 int size
, align
, regno
;
1705 fixed_size_mode mode
;
1706 rtx incoming_args
, result
, reg
, dest
, src
;
1707 rtx_call_insn
*call_insn
;
1708 rtx old_stack_level
= 0;
1709 rtx call_fusage
= 0;
1710 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1712 arguments
= convert_memory_address (Pmode
, arguments
);
1714 /* Create a block where the return registers can be saved. */
1715 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1717 /* Fetch the arg pointer from the ARGUMENTS block. */
1718 incoming_args
= gen_reg_rtx (Pmode
);
1719 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1720 if (!STACK_GROWS_DOWNWARD
)
1721 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1722 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1724 /* Push a new argument block and copy the arguments. Do not allow
1725 the (potential) memcpy call below to interfere with our stack
1727 do_pending_stack_adjust ();
1730 /* Save the stack with nonlocal if available. */
1731 if (targetm
.have_save_stack_nonlocal ())
1732 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1734 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1736 /* Allocate a block of memory onto the stack and copy the memory
1737 arguments to the outgoing arguments address. We can pass TRUE
1738 as the 4th argument because we just saved the stack pointer
1739 and will restore it right after the call. */
1740 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, -1, true);
1742 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1743 may have already set current_function_calls_alloca to true.
1744 current_function_calls_alloca won't be set if argsize is zero,
1745 so we have to guarantee need_drap is true here. */
1746 if (SUPPORTS_STACK_ALIGNMENT
)
1747 crtl
->need_drap
= true;
1749 dest
= virtual_outgoing_args_rtx
;
1750 if (!STACK_GROWS_DOWNWARD
)
1752 if (CONST_INT_P (argsize
))
1753 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1755 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1757 dest
= gen_rtx_MEM (BLKmode
, dest
);
1758 set_mem_align (dest
, PARM_BOUNDARY
);
1759 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1760 set_mem_align (src
, PARM_BOUNDARY
);
1761 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1763 /* Refer to the argument block. */
1765 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1766 set_mem_align (arguments
, PARM_BOUNDARY
);
1768 /* Walk past the arg-pointer and structure value address. */
1769 size
= GET_MODE_SIZE (Pmode
);
1771 size
+= GET_MODE_SIZE (Pmode
);
1773 /* Restore each of the registers previously saved. Make USE insns
1774 for each of these registers for use in making the call. */
1775 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1776 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1778 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1779 if (size
% align
!= 0)
1780 size
= CEIL (size
, align
) * align
;
1781 reg
= gen_rtx_REG (mode
, regno
);
1782 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1783 use_reg (&call_fusage
, reg
);
1784 size
+= GET_MODE_SIZE (mode
);
1787 /* Restore the structure value address unless this is passed as an
1788 "invisible" first argument. */
1789 size
= GET_MODE_SIZE (Pmode
);
1792 rtx value
= gen_reg_rtx (Pmode
);
1793 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1794 emit_move_insn (struct_value
, value
);
1795 if (REG_P (struct_value
))
1796 use_reg (&call_fusage
, struct_value
);
1799 /* All arguments and registers used for the call are set up by now! */
1800 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1802 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1803 and we don't want to load it into a register as an optimization,
1804 because prepare_call_address already did it if it should be done. */
1805 if (GET_CODE (function
) != SYMBOL_REF
)
1806 function
= memory_address (FUNCTION_MODE
, function
);
1808 /* Generate the actual call instruction and save the return value. */
1809 if (targetm
.have_untyped_call ())
1811 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1812 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1813 result_vector (1, result
)));
1815 else if (targetm
.have_call_value ())
1819 /* Locate the unique return register. It is not possible to
1820 express a call that sets more than one return register using
1821 call_value; use untyped_call for that. In fact, untyped_call
1822 only needs to save the return registers in the given block. */
1823 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1824 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1826 gcc_assert (!valreg
); /* have_untyped_call required. */
1828 valreg
= gen_rtx_REG (mode
, regno
);
1831 emit_insn (targetm
.gen_call_value (valreg
,
1832 gen_rtx_MEM (FUNCTION_MODE
, function
),
1833 const0_rtx
, NULL_RTX
, const0_rtx
));
1835 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1840 /* Find the CALL insn we just emitted, and attach the register usage
1842 call_insn
= last_call_insn ();
1843 add_function_usage_to (call_insn
, call_fusage
);
1845 /* Restore the stack. */
1846 if (targetm
.have_save_stack_nonlocal ())
1847 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1849 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1850 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1854 /* Return the address of the result block. */
1855 result
= copy_addr_to_reg (XEXP (result
, 0));
1856 return convert_memory_address (ptr_mode
, result
);
1859 /* Perform an untyped return. */
1862 expand_builtin_return (rtx result
)
1864 int size
, align
, regno
;
1865 fixed_size_mode mode
;
1867 rtx_insn
*call_fusage
= 0;
1869 result
= convert_memory_address (Pmode
, result
);
1871 apply_result_size ();
1872 result
= gen_rtx_MEM (BLKmode
, result
);
1874 if (targetm
.have_untyped_return ())
1876 rtx vector
= result_vector (0, result
);
1877 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1882 /* Restore the return value and note that each value is used. */
1884 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1885 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1887 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1888 if (size
% align
!= 0)
1889 size
= CEIL (size
, align
) * align
;
1890 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1891 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1893 push_to_sequence (call_fusage
);
1895 call_fusage
= get_insns ();
1897 size
+= GET_MODE_SIZE (mode
);
1900 /* Put the USE insns before the return. */
1901 emit_insn (call_fusage
);
1903 /* Return whatever values was restored by jumping directly to the end
1905 expand_naked_return ();
1908 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1910 static enum type_class
1911 type_to_class (tree type
)
1913 switch (TREE_CODE (type
))
1915 case VOID_TYPE
: return void_type_class
;
1916 case INTEGER_TYPE
: return integer_type_class
;
1917 case ENUMERAL_TYPE
: return enumeral_type_class
;
1918 case BOOLEAN_TYPE
: return boolean_type_class
;
1919 case POINTER_TYPE
: return pointer_type_class
;
1920 case REFERENCE_TYPE
: return reference_type_class
;
1921 case OFFSET_TYPE
: return offset_type_class
;
1922 case REAL_TYPE
: return real_type_class
;
1923 case COMPLEX_TYPE
: return complex_type_class
;
1924 case FUNCTION_TYPE
: return function_type_class
;
1925 case METHOD_TYPE
: return method_type_class
;
1926 case RECORD_TYPE
: return record_type_class
;
1928 case QUAL_UNION_TYPE
: return union_type_class
;
1929 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1930 ? string_type_class
: array_type_class
);
1931 case LANG_TYPE
: return lang_type_class
;
1932 default: return no_type_class
;
1936 /* Expand a call EXP to __builtin_classify_type. */
1939 expand_builtin_classify_type (tree exp
)
1941 if (call_expr_nargs (exp
))
1942 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1943 return GEN_INT (no_type_class
);
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   variants.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
1974 /* Return a function equivalent to FN but operating on floating-point
1975 values of type TYPE, or END_BUILTINS if no such function exists.
1976 This is purely an operation on function codes; it does not guarantee
1977 that the target actually has an implementation of the function. */
/* NOTE(review): this region is a mangled extraction.  Statements are split
   across lines and the embedded original line numbers jump (e.g. 1990->2002,
   2078->2084), so interior lines were dropped -- including the opening brace,
   the "switch (fn)" header, many CASE_MATHFN entries, and the per-type return
   statements.  The code below is left byte-for-byte untouched; restore the
   missing lines from the upstream file before building.  */
1979 static built_in_function
1980 mathfn_built_in_2 (tree type
, combined_fn fn
)
/* Per-type function codes filled in by the CASE_MATHFN* macros; the
   _Float<N>/_Float<N>X slots default to END_BUILTINS for families that
   have no such variants.  */
1983 built_in_function fcode
, fcodef
, fcodel
;
1984 built_in_function fcodef16
= END_BUILTINS
;
1985 built_in_function fcodef32
= END_BUILTINS
;
1986 built_in_function fcodef64
= END_BUILTINS
;
1987 built_in_function fcodef128
= END_BUILTINS
;
1988 built_in_function fcodef32x
= END_BUILTINS
;
1989 built_in_function fcodef64x
= END_BUILTINS
;
1990 built_in_function fcodef128x
= END_BUILTINS
;
/* NOTE(review): the case list below is incomplete -- the numbering gaps show
   entries were dropped by the extraction (e.g. everything before CEIL).  */
2002 CASE_MATHFN_FLOATN (CEIL
)
2004 CASE_MATHFN_FLOATN (COPYSIGN
)
2016 CASE_MATHFN_FLOATN (FLOOR
)
2017 CASE_MATHFN_FLOATN (FMA
)
2018 CASE_MATHFN_FLOATN (FMAX
)
2019 CASE_MATHFN_FLOATN (FMIN
)
2023 CASE_MATHFN_REENT (GAMMA
) /* GAMMA_R */
2024 CASE_MATHFN (HUGE_VAL
)
2028 CASE_MATHFN (IFLOOR
)
2031 CASE_MATHFN (IROUND
)
2038 CASE_MATHFN (LFLOOR
)
2039 CASE_MATHFN (LGAMMA
)
2040 CASE_MATHFN_REENT (LGAMMA
) /* LGAMMA_R */
2041 CASE_MATHFN (LLCEIL
)
2042 CASE_MATHFN (LLFLOOR
)
2043 CASE_MATHFN (LLRINT
)
2044 CASE_MATHFN (LLROUND
)
2051 CASE_MATHFN (LROUND
)
2055 CASE_MATHFN_FLOATN (NEARBYINT
)
2056 CASE_MATHFN (NEXTAFTER
)
2057 CASE_MATHFN (NEXTTOWARD
)
2061 CASE_MATHFN (REMAINDER
)
2062 CASE_MATHFN (REMQUO
)
2063 CASE_MATHFN_FLOATN (RINT
)
2064 CASE_MATHFN_FLOATN (ROUND
)
2065 CASE_MATHFN_FLOATN (ROUNDEVEN
)
2067 CASE_MATHFN (SCALBLN
)
2068 CASE_MATHFN (SCALBN
)
2069 CASE_MATHFN (SIGNBIT
)
2070 CASE_MATHFN (SIGNIFICAND
)
2072 CASE_MATHFN (SINCOS
)
2074 CASE_MATHFN_FLOATN (SQRT
)
2077 CASE_MATHFN (TGAMMA
)
2078 CASE_MATHFN_FLOATN (TRUNC
)
/* Unrecognized combined function: no per-type mapping exists.  */
2084 return END_BUILTINS
;
/* Select the recorded code matching TYPE's main variant; presumably each
   comparison returned the corresponding fcode* (the return statements were
   dropped by the extraction) -- TODO confirm against upstream.  */
2087 mtype
= TYPE_MAIN_VARIANT (type
);
2088 if (mtype
== double_type_node
)
2090 else if (mtype
== float_type_node
)
2092 else if (mtype
== long_double_type_node
)
2094 else if (mtype
== float16_type_node
)
2096 else if (mtype
== float32_type_node
)
2098 else if (mtype
== float64_type_node
)
2100 else if (mtype
== float128_type_node
)
2102 else if (mtype
== float32x_type_node
)
2104 else if (mtype
== float64x_type_node
)
2106 else if (mtype
== float128x_type_node
)
/* TYPE is not one of the supported floating-point types.  */
2109 return END_BUILTINS
;
2112 /* Return mathematic function equivalent to FN but operating directly on TYPE,
2113 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2114 otherwise use the explicit declaration. If we can't do the conversion,
2118 mathfn_built_in_1 (tree type
, combined_fn fn
, bool implicit_p
)
2120 built_in_function fcode2
= mathfn_built_in_2 (type
, fn
);
2121 if (fcode2
== END_BUILTINS
)
2124 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
2127 return builtin_decl_explicit (fcode2
);
2130 /* Like mathfn_built_in_1, but always use the implicit array. */
2133 mathfn_built_in (tree type
, combined_fn fn
)
2135 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
2138 /* Like mathfn_built_in_1, but take a built_in_function and
2139 always use the implicit array. */
2142 mathfn_built_in (tree type
, enum built_in_function fn
)
2144 return mathfn_built_in_1 (type
, as_combined_fn (fn
), /*implicit=*/ 1);
2147 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2148 return its code, otherwise return IFN_LAST. Note that this function
2149 only tests whether the function is defined in internals.def, not whether
2150 it is actually available on the target. */
2153 associated_internal_fn (tree fndecl
)
2155 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
);
2156 tree return_type
= TREE_TYPE (TREE_TYPE (fndecl
));
2157 switch (DECL_FUNCTION_CODE (fndecl
))
2159 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2162 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2164 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2165 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2166 #include "internal-fn.def"
2168 CASE_FLT_FN (BUILT_IN_POW10
):
2171 CASE_FLT_FN (BUILT_IN_DREM
):
2172 return IFN_REMAINDER
;
2174 CASE_FLT_FN (BUILT_IN_SCALBN
):
2175 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (return_type
))->b
== 2)
2185 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2186 on the current target by a call to an internal function, return the
2187 code of that internal function, otherwise return IFN_LAST. The caller
2188 is responsible for ensuring that any side-effects of the built-in
2189 call are dealt with correctly. E.g. if CALL sets errno, the caller
2190 must decide that the errno result isn't needed or make it available
2191 in some other way. */
2194 replacement_internal_fn (gcall
*call
)
2196 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2198 internal_fn ifn
= associated_internal_fn (gimple_call_fndecl (call
));
2199 if (ifn
!= IFN_LAST
)
2201 tree_pair types
= direct_internal_fn_types (ifn
, call
);
2202 optimization_type opt_type
= bb_optimization_type (gimple_bb (call
));
2203 if (direct_internal_fn_supported_p (ifn
, types
, opt_type
))
2210 /* Expand a call to the builtin trinary math functions (fma).
2211 Return NULL_RTX if a normal call should be emitted rather than expanding the
2212 function in-line. EXP is the expression that is a call to the builtin
2213 function; if convenient, the result should be placed in TARGET.
2214 SUBTARGET may be used as the target for computing one of EXP's
2218 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2220 optab builtin_optab
;
2221 rtx op0
, op1
, op2
, result
;
2223 tree fndecl
= get_callee_fndecl (exp
);
2224 tree arg0
, arg1
, arg2
;
2227 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2230 arg0
= CALL_EXPR_ARG (exp
, 0);
2231 arg1
= CALL_EXPR_ARG (exp
, 1);
2232 arg2
= CALL_EXPR_ARG (exp
, 2);
2234 switch (DECL_FUNCTION_CODE (fndecl
))
2236 CASE_FLT_FN (BUILT_IN_FMA
):
2237 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
2238 builtin_optab
= fma_optab
; break;
2243 /* Make a suitable register to place result in. */
2244 mode
= TYPE_MODE (TREE_TYPE (exp
));
2246 /* Before working hard, check whether the instruction is available. */
2247 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2250 result
= gen_reg_rtx (mode
);
2252 /* Always stabilize the argument list. */
2253 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2254 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2255 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2257 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2258 op1
= expand_normal (arg1
);
2259 op2
= expand_normal (arg2
);
2263 /* Compute into RESULT.
2264 Set RESULT to wherever the result comes back. */
2265 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2268 /* If we were unable to expand via the builtin, stop the sequence
2269 (without outputting the insns) and call to the library function
2270 with the stabilized argument list. */
2274 return expand_call (exp
, target
, target
== const0_rtx
);
2277 /* Output the entire sequence. */
2278 insns
= get_insns ();
2285 /* Expand a call to the builtin sin and cos math functions.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
2288 function; if convenient, the result should be placed in TARGET.
2289 SUBTARGET may be used as the target for computing one of EXP's
2293 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2295 optab builtin_optab
;
2298 tree fndecl
= get_callee_fndecl (exp
);
2302 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2305 arg
= CALL_EXPR_ARG (exp
, 0);
2307 switch (DECL_FUNCTION_CODE (fndecl
))
2309 CASE_FLT_FN (BUILT_IN_SIN
):
2310 CASE_FLT_FN (BUILT_IN_COS
):
2311 builtin_optab
= sincos_optab
; break;
2316 /* Make a suitable register to place result in. */
2317 mode
= TYPE_MODE (TREE_TYPE (exp
));
2319 /* Check if sincos insn is available, otherwise fallback
2320 to sin or cos insn. */
2321 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2322 switch (DECL_FUNCTION_CODE (fndecl
))
2324 CASE_FLT_FN (BUILT_IN_SIN
):
2325 builtin_optab
= sin_optab
; break;
2326 CASE_FLT_FN (BUILT_IN_COS
):
2327 builtin_optab
= cos_optab
; break;
2332 /* Before working hard, check whether the instruction is available. */
2333 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2335 rtx result
= gen_reg_rtx (mode
);
2337 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2338 need to expand the argument again. This way, we will not perform
2339 side-effects more the once. */
2340 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2342 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2346 /* Compute into RESULT.
2347 Set RESULT to wherever the result comes back. */
2348 if (builtin_optab
== sincos_optab
)
2352 switch (DECL_FUNCTION_CODE (fndecl
))
2354 CASE_FLT_FN (BUILT_IN_SIN
):
2355 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2357 CASE_FLT_FN (BUILT_IN_COS
):
2358 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2366 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2370 /* Output the entire sequence. */
2371 insns
= get_insns ();
2377 /* If we were unable to expand via the builtin, stop the sequence
2378 (without outputting the insns) and call to the library function
2379 with the stabilized argument list. */
2383 return expand_call (exp
, target
, target
== const0_rtx
);
2386 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2387 return an RTL instruction code that implements the functionality.
2388 If that isn't possible or available return CODE_FOR_nothing. */
2390 static enum insn_code
2391 interclass_mathfn_icode (tree arg
, tree fndecl
)
2393 bool errno_set
= false;
2394 optab builtin_optab
= unknown_optab
;
2397 switch (DECL_FUNCTION_CODE (fndecl
))
2399 CASE_FLT_FN (BUILT_IN_ILOGB
):
2400 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2401 CASE_FLT_FN (BUILT_IN_ISINF
):
2402 builtin_optab
= isinf_optab
; break;
2403 case BUILT_IN_ISNORMAL
:
2404 case BUILT_IN_ISFINITE
:
2405 CASE_FLT_FN (BUILT_IN_FINITE
):
2406 case BUILT_IN_FINITED32
:
2407 case BUILT_IN_FINITED64
:
2408 case BUILT_IN_FINITED128
:
2409 case BUILT_IN_ISINFD32
:
2410 case BUILT_IN_ISINFD64
:
2411 case BUILT_IN_ISINFD128
:
2412 /* These builtins have no optabs (yet). */
2418 /* There's no easy way to detect the case we need to set EDOM. */
2419 if (flag_errno_math
&& errno_set
)
2420 return CODE_FOR_nothing
;
2422 /* Optab mode depends on the mode of the input argument. */
2423 mode
= TYPE_MODE (TREE_TYPE (arg
));
2426 return optab_handler (builtin_optab
, mode
);
2427 return CODE_FOR_nothing
;
2430 /* Expand a call to one of the builtin math functions that operate on
2431 floating point argument and output an integer result (ilogb, isinf,
2433 Return 0 if a normal call should be emitted rather than expanding the
2434 function in-line. EXP is the expression that is a call to the builtin
2435 function; if convenient, the result should be placed in TARGET. */
2438 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2440 enum insn_code icode
= CODE_FOR_nothing
;
2442 tree fndecl
= get_callee_fndecl (exp
);
2446 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2449 arg
= CALL_EXPR_ARG (exp
, 0);
2450 icode
= interclass_mathfn_icode (arg
, fndecl
);
2451 mode
= TYPE_MODE (TREE_TYPE (arg
));
2453 if (icode
!= CODE_FOR_nothing
)
2455 class expand_operand ops
[1];
2456 rtx_insn
*last
= get_last_insn ();
2457 tree orig_arg
= arg
;
2459 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2460 need to expand the argument again. This way, we will not perform
2461 side-effects more the once. */
2462 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2464 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2466 if (mode
!= GET_MODE (op0
))
2467 op0
= convert_to_mode (mode
, op0
, 0);
2469 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2470 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2471 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2472 return ops
[0].value
;
2474 delete_insns_since (last
);
2475 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2481 /* Expand a call to the builtin sincos math function.
2482 Return NULL_RTX if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2487 expand_builtin_sincos (tree exp
)
2489 rtx op0
, op1
, op2
, target1
, target2
;
2491 tree arg
, sinp
, cosp
;
2493 location_t loc
= EXPR_LOCATION (exp
);
2494 tree alias_type
, alias_off
;
2496 if (!validate_arglist (exp
, REAL_TYPE
,
2497 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2500 arg
= CALL_EXPR_ARG (exp
, 0);
2501 sinp
= CALL_EXPR_ARG (exp
, 1);
2502 cosp
= CALL_EXPR_ARG (exp
, 2);
2504 /* Make a suitable register to place result in. */
2505 mode
= TYPE_MODE (TREE_TYPE (arg
));
2507 /* Check if sincos insn is available, otherwise emit the call. */
2508 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2511 target1
= gen_reg_rtx (mode
);
2512 target2
= gen_reg_rtx (mode
);
2514 op0
= expand_normal (arg
);
2515 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2516 alias_off
= build_int_cst (alias_type
, 0);
2517 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2519 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2522 /* Compute into target1 and target2.
2523 Set TARGET to wherever the result comes back. */
2524 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2525 gcc_assert (result
);
2527 /* Move target1 and target2 to the memory locations indicated
2529 emit_move_insn (op1
, target1
);
2530 emit_move_insn (op2
, target2
);
2535 /* Expand a call to the internal cexpi builtin to the sincos math function.
2536 EXP is the expression that is a call to the builtin function; if convenient,
2537 the result should be placed in TARGET. */
2540 expand_builtin_cexpi (tree exp
, rtx target
)
2542 tree fndecl
= get_callee_fndecl (exp
);
2546 location_t loc
= EXPR_LOCATION (exp
);
2548 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2551 arg
= CALL_EXPR_ARG (exp
, 0);
2552 type
= TREE_TYPE (arg
);
2553 mode
= TYPE_MODE (TREE_TYPE (arg
));
2555 /* Try expanding via a sincos optab, fall back to emitting a libcall
2556 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2557 is only generated from sincos, cexp or if we have either of them. */
2558 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2560 op1
= gen_reg_rtx (mode
);
2561 op2
= gen_reg_rtx (mode
);
2563 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2565 /* Compute into op1 and op2. */
2566 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2568 else if (targetm
.libc_has_function (function_sincos
))
2570 tree call
, fn
= NULL_TREE
;
2574 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2575 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2576 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2577 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2578 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2579 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2583 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2584 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2585 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2586 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2587 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2588 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2590 /* Make sure not to fold the sincos call again. */
2591 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2592 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2593 call
, 3, arg
, top1
, top2
));
2597 tree call
, fn
= NULL_TREE
, narg
;
2598 tree ctype
= build_complex_type (type
);
2600 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2601 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2602 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2603 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2604 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2605 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2609 /* If we don't have a decl for cexp create one. This is the
2610 friendliest fallback if the user calls __builtin_cexpi
2611 without full target C99 function support. */
2612 if (fn
== NULL_TREE
)
2615 const char *name
= NULL
;
2617 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2619 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2621 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2624 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2625 fn
= build_fn_decl (name
, fntype
);
2628 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2629 build_real (type
, dconst0
), arg
);
2631 /* Make sure not to fold the cexp call again. */
2632 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2633 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2634 target
, VOIDmode
, EXPAND_NORMAL
);
2637 /* Now build the proper return type. */
2638 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2639 make_tree (TREE_TYPE (arg
), op2
),
2640 make_tree (TREE_TYPE (arg
), op1
)),
2641 target
, VOIDmode
, EXPAND_NORMAL
);
2644 /* Conveniently construct a function call expression. FNDECL names the
2645 function to be called, N is the number of arguments, and the "..."
2646 parameters are the argument expressions. Unlike build_call_exr
2647 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2650 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2653 tree fntype
= TREE_TYPE (fndecl
);
2654 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2657 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2659 SET_EXPR_LOCATION (fn
, loc
);
2663 /* Expand a call to one of the builtin rounding functions gcc defines
2664 as an extension (lfloor and lceil). As these are gcc extensions we
2665 do not need to worry about setting errno to EDOM.
2666 If expanding via optab fails, lower expression to (int)(floor(x)).
2667 EXP is the expression that is a call to the builtin function;
2668 if convenient, the result should be placed in TARGET. */
2671 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2673 convert_optab builtin_optab
;
2676 tree fndecl
= get_callee_fndecl (exp
);
2677 enum built_in_function fallback_fn
;
2678 tree fallback_fndecl
;
2682 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2685 arg
= CALL_EXPR_ARG (exp
, 0);
2687 switch (DECL_FUNCTION_CODE (fndecl
))
2689 CASE_FLT_FN (BUILT_IN_ICEIL
):
2690 CASE_FLT_FN (BUILT_IN_LCEIL
):
2691 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2692 builtin_optab
= lceil_optab
;
2693 fallback_fn
= BUILT_IN_CEIL
;
2696 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2697 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2698 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2699 builtin_optab
= lfloor_optab
;
2700 fallback_fn
= BUILT_IN_FLOOR
;
2707 /* Make a suitable register to place result in. */
2708 mode
= TYPE_MODE (TREE_TYPE (exp
));
2710 target
= gen_reg_rtx (mode
);
2712 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2713 need to expand the argument again. This way, we will not perform
2714 side-effects more the once. */
2715 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2717 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2721 /* Compute into TARGET. */
2722 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2724 /* Output the entire sequence. */
2725 insns
= get_insns ();
2731 /* If we were unable to expand via the builtin, stop the sequence
2732 (without outputting the insns). */
2735 /* Fall back to floating point rounding optab. */
2736 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2738 /* For non-C99 targets we may end up without a fallback fndecl here
2739 if the user called __builtin_lfloor directly. In this case emit
2740 a call to the floor/ceil variants nevertheless. This should result
2741 in the best user experience for not full C99 targets. */
2742 if (fallback_fndecl
== NULL_TREE
)
2745 const char *name
= NULL
;
2747 switch (DECL_FUNCTION_CODE (fndecl
))
2749 case BUILT_IN_ICEIL
:
2750 case BUILT_IN_LCEIL
:
2751 case BUILT_IN_LLCEIL
:
2754 case BUILT_IN_ICEILF
:
2755 case BUILT_IN_LCEILF
:
2756 case BUILT_IN_LLCEILF
:
2759 case BUILT_IN_ICEILL
:
2760 case BUILT_IN_LCEILL
:
2761 case BUILT_IN_LLCEILL
:
2764 case BUILT_IN_IFLOOR
:
2765 case BUILT_IN_LFLOOR
:
2766 case BUILT_IN_LLFLOOR
:
2769 case BUILT_IN_IFLOORF
:
2770 case BUILT_IN_LFLOORF
:
2771 case BUILT_IN_LLFLOORF
:
2774 case BUILT_IN_IFLOORL
:
2775 case BUILT_IN_LFLOORL
:
2776 case BUILT_IN_LLFLOORL
:
2783 fntype
= build_function_type_list (TREE_TYPE (arg
),
2784 TREE_TYPE (arg
), NULL_TREE
);
2785 fallback_fndecl
= build_fn_decl (name
, fntype
);
2788 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2790 tmp
= expand_normal (exp
);
2791 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2793 /* Truncate the result of floating point optab to integer
2794 via expand_fix (). */
2795 target
= gen_reg_rtx (mode
);
2796 expand_fix (target
, tmp
, 0);
2801 /* Expand a call to one of the builtin math functions doing integer
2803 Return 0 if a normal call should be emitted rather than expanding the
2804 function in-line. EXP is the expression that is a call to the builtin
2805 function; if convenient, the result should be placed in TARGET. */
2808 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2810 convert_optab builtin_optab
;
2813 tree fndecl
= get_callee_fndecl (exp
);
2816 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2818 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2821 arg
= CALL_EXPR_ARG (exp
, 0);
2823 switch (DECL_FUNCTION_CODE (fndecl
))
2825 CASE_FLT_FN (BUILT_IN_IRINT
):
2826 fallback_fn
= BUILT_IN_LRINT
;
2828 CASE_FLT_FN (BUILT_IN_LRINT
):
2829 CASE_FLT_FN (BUILT_IN_LLRINT
):
2830 builtin_optab
= lrint_optab
;
2833 CASE_FLT_FN (BUILT_IN_IROUND
):
2834 fallback_fn
= BUILT_IN_LROUND
;
2836 CASE_FLT_FN (BUILT_IN_LROUND
):
2837 CASE_FLT_FN (BUILT_IN_LLROUND
):
2838 builtin_optab
= lround_optab
;
2845 /* There's no easy way to detect the case we need to set EDOM. */
2846 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2849 /* Make a suitable register to place result in. */
2850 mode
= TYPE_MODE (TREE_TYPE (exp
));
2852 /* There's no easy way to detect the case we need to set EDOM. */
2853 if (!flag_errno_math
)
2855 rtx result
= gen_reg_rtx (mode
);
2857 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2858 need to expand the argument again. This way, we will not perform
2859 side-effects more the once. */
2860 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2862 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2866 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2868 /* Output the entire sequence. */
2869 insns
= get_insns ();
2875 /* If we were unable to expand via the builtin, stop the sequence
2876 (without outputting the insns) and call to the library function
2877 with the stabilized argument list. */
2881 if (fallback_fn
!= BUILT_IN_NONE
)
2883 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2884 targets, (int) round (x) should never be transformed into
2885 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2886 a call to lround in the hope that the target provides at least some
2887 C99 functions. This should result in the best user experience for
2888 not full C99 targets. */
2889 tree fallback_fndecl
= mathfn_built_in_1
2890 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2892 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2893 fallback_fndecl
, 1, arg
);
2895 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2896 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2897 return convert_to_mode (mode
, target
, 0);
2900 return expand_call (exp
, target
, target
== const0_rtx
);
2903 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2904 a normal call should be emitted rather than expanding the function
2905 in-line. EXP is the expression that is a call to the builtin
2906 function; if convenient, the result should be placed in TARGET. */
2909 expand_builtin_powi (tree exp
, rtx target
)
2916 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2919 arg0
= CALL_EXPR_ARG (exp
, 0);
2920 arg1
= CALL_EXPR_ARG (exp
, 1);
2921 mode
= TYPE_MODE (TREE_TYPE (exp
));
2923 /* Emit a libcall to libgcc. */
2925 /* Mode of the 2nd argument must match that of an int. */
2926 mode2
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
2928 if (target
== NULL_RTX
)
2929 target
= gen_reg_rtx (mode
);
2931 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2932 if (GET_MODE (op0
) != mode
)
2933 op0
= convert_to_mode (mode
, op0
, 0);
2934 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2935 if (GET_MODE (op1
) != mode2
)
2936 op1
= convert_to_mode (mode2
, op1
, 0);
2938 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2939 target
, LCT_CONST
, mode
,
2940 op0
, mode
, op1
, mode2
);
2945 /* Expand expression EXP which is a call to the strlen builtin. Return
2946 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2947 try to get the result in TARGET, if convenient. */
2950 expand_builtin_strlen (tree exp
, rtx target
,
2951 machine_mode target_mode
)
2953 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2956 class expand_operand ops
[4];
2959 tree src
= CALL_EXPR_ARG (exp
, 0);
2961 rtx_insn
*before_strlen
;
2962 machine_mode insn_mode
;
2963 enum insn_code icode
= CODE_FOR_nothing
;
2966 /* If the length can be computed at compile-time, return it. */
2967 len
= c_strlen (src
, 0);
2969 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2971 /* If the length can be computed at compile-time and is constant
2972 integer, but there are side-effects in src, evaluate
2973 src for side-effects, then return len.
2974 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2975 can be optimized into: i++; x = 3; */
2976 len
= c_strlen (src
, 1);
2977 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2979 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2980 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2983 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2985 /* If SRC is not a pointer type, don't do this operation inline. */
2989 /* Bail out if we can't compute strlen in the right mode. */
2990 FOR_EACH_MODE_FROM (insn_mode
, target_mode
)
2992 icode
= optab_handler (strlen_optab
, insn_mode
);
2993 if (icode
!= CODE_FOR_nothing
)
2996 if (insn_mode
== VOIDmode
)
2999 /* Make a place to hold the source address. We will not expand
3000 the actual source until we are sure that the expansion will
3001 not fail -- there are trees that cannot be expanded twice. */
3002 src_reg
= gen_reg_rtx (Pmode
);
3004 /* Mark the beginning of the strlen sequence so we can emit the
3005 source operand later. */
3006 before_strlen
= get_last_insn ();
3008 create_output_operand (&ops
[0], target
, insn_mode
);
3009 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3010 create_integer_operand (&ops
[2], 0);
3011 create_integer_operand (&ops
[3], align
);
3012 if (!maybe_expand_insn (icode
, 4, ops
))
3015 /* Check to see if the argument was declared attribute nonstring
3016 and if so, issue a warning since at this point it's not known
3017 to be nul-terminated. */
3018 maybe_warn_nonstring_arg (get_callee_fndecl (exp
), exp
);
3020 /* Now that we are assured of success, expand the source. */
3022 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3025 #ifdef POINTERS_EXTEND_UNSIGNED
3026 if (GET_MODE (pat
) != Pmode
)
3027 pat
= convert_to_mode (Pmode
, pat
,
3028 POINTERS_EXTEND_UNSIGNED
);
3030 emit_move_insn (src_reg
, pat
);
3036 emit_insn_after (pat
, before_strlen
);
3038 emit_insn_before (pat
, get_insns ());
3040 /* Return the value in the proper mode for this function. */
3041 if (GET_MODE (ops
[0].value
) == target_mode
)
3042 target
= ops
[0].value
;
3043 else if (target
!= 0)
3044 convert_move (target
, ops
[0].value
, 0);
3046 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3051 /* Expand call EXP to the strnlen built-in, returning the result
3052 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
/* NOTE(review): mangled extraction -- statements are split across lines and
   the embedded original numbering jumps (e.g. 3098->3104, 3121->3129), so
   guard conditions, early returns, and several declarations (the return type,
   `bool exact', `wide_int min, max', `location_t warnloc') were dropped.
   Code left byte-for-byte untouched; restore from upstream before building.  */
3055 expand_builtin_strnlen (tree exp
, rtx target
, machine_mode target_mode
)
3057 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3060 tree src
= CALL_EXPR_ARG (exp
, 0);
3061 tree bound
= CALL_EXPR_ARG (exp
, 1);
3066 location_t loc
= UNKNOWN_LOCATION
;
3067 if (EXPR_HAS_LOCATION (exp
))
3068 loc
= EXPR_LOCATION (exp
);
3070 tree maxobjsize
= max_object_size ();
3071 tree func
= get_callee_fndecl (exp
);
3073 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3074 so these conversions aren't necessary. */
3075 c_strlen_data lendata
= { };
3076 tree len
= c_strlen (src
, 0, &lendata
, 1);
3078 len
= fold_convert_loc (loc
, TREE_TYPE (bound
), len
);
/* Constant bound: diagnose bounds larger than the maximum object size,
   then try to fold the call to a constant or MIN (len, bound).  */
3080 if (TREE_CODE (bound
) == INTEGER_CST
)
3082 if (!TREE_NO_WARNING (exp
)
3083 && tree_int_cst_lt (maxobjsize
, bound
)
3084 && warning_at (loc
, OPT_Wstringop_overflow_
,
3085 "%K%qD specified bound %E "
3086 "exceeds maximum object size %E",
3087 exp
, func
, bound
, maxobjsize
))
3088 TREE_NO_WARNING (exp
) = true;
3091 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
3093 /* Clear EXACT if LEN may be less than SRC suggests,
3095 strnlen (&a[i], sizeof a)
3096 where the value of i is unknown. Unless i's value is
3097 zero, the call is unsafe because the bound is greater. */
3098 lendata
.decl
= unterminated_array (src
, &len
, &exact
);
3104 && !TREE_NO_WARNING (exp
)
3105 && ((tree_int_cst_lt (len
, bound
))
3109 = expansion_point_location_if_in_system_header (loc
);
3111 if (warning_at (warnloc
, OPT_Wstringop_overflow_
,
3113 ? G_("%K%qD specified bound %E exceeds the size %E "
3114 "of unterminated array")
3115 : G_("%K%qD specified bound %E may exceed the size "
3116 "of at most %E of unterminated array"),
3117 exp
, func
, bound
, len
))
3119 inform (DECL_SOURCE_LOCATION (lendata
.decl
),
3120 "referenced argument declared here");
3121 TREE_NO_WARNING (exp
) = true;
3129 len
= fold_build2_loc (loc
, MIN_EXPR
, size_type_node
, len
, bound
);
3130 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
/* Non-constant bound: only SSA names with a known value range are
   handled below.  */
3133 if (TREE_CODE (bound
) != SSA_NAME
)
3137 enum value_range_kind rng
= get_range_info (bound
, &min
, &max
);
3138 if (rng
!= VR_RANGE
)
3141 if (!TREE_NO_WARNING (exp
)
3142 && wi::ltu_p (wi::to_wide (maxobjsize
, min
.get_precision ()), min
)
3143 && warning_at (loc
, OPT_Wstringop_overflow_
,
3144 "%K%qD specified bound [%wu, %wu] "
3145 "exceeds maximum object size %E",
3146 exp
, func
, min
.to_uhwi (), max
.to_uhwi (), maxobjsize
))
3147 TREE_NO_WARNING (exp
) = true;
3150 if (!len
|| TREE_CODE (len
) != INTEGER_CST
)
3152 lendata
.decl
= unterminated_array (src
, &len
, &exact
);
3158 && !TREE_NO_WARNING (exp
)
3159 && (wi::ltu_p (wi::to_wide (len
), min
)
3163 = expansion_point_location_if_in_system_header (loc
);
3165 if (warning_at (warnloc
, OPT_Wstringop_overflow_
,
3167 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3168 "the size %E of unterminated array")
3169 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3170 "the size of at most %E of unterminated array"),
3171 exp
, func
, min
.to_uhwi (), max
.to_uhwi (), len
))
3173 inform (DECL_SOURCE_LOCATION (lendata
.decl
),
3174 "referenced argument declared here");
3175 TREE_NO_WARNING (exp
) = true;
/* If the bound is provably greater than the string length, the result
   is the constant length itself.  */
3182 if (wi::gtu_p (min
, wi::to_wide (len
)))
3183 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3185 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, bound
);
3186 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3189 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3190 bytes from constant string DATA + OFFSET and return it as target
3194 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3195 scalar_int_mode mode
)
3197 const char *str
= (const char *) data
;
3199 gcc_assert (offset
>= 0
3200 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3201 <= strlen (str
) + 1));
3203 return c_readstr (str
+ offset
, mode
);
3206 /* LEN specify length of the block of memcpy/memset operation.
3207 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3208 In some cases we can make very likely guess on max size, then we
3209 set it into PROBABLE_MAX_SIZE. */
3212 determine_block_size (tree len
, rtx len_rtx
,
3213 unsigned HOST_WIDE_INT
*min_size
,
3214 unsigned HOST_WIDE_INT
*max_size
,
3215 unsigned HOST_WIDE_INT
*probable_max_size
)
3217 if (CONST_INT_P (len_rtx
))
3219 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3225 enum value_range_kind range_type
= VR_UNDEFINED
;
3227 /* Determine bounds from the type. */
3228 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3229 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3232 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3233 *probable_max_size
= *max_size
3234 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3236 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3238 if (TREE_CODE (len
) == SSA_NAME
)
3239 range_type
= get_range_info (len
, &min
, &max
);
3240 if (range_type
== VR_RANGE
)
3242 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3243 *min_size
= min
.to_uhwi ();
3244 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3245 *probable_max_size
= *max_size
= max
.to_uhwi ();
3247 else if (range_type
== VR_ANTI_RANGE
)
3249 /* Anti range 0...N lets us to determine minimal size to N+1. */
3252 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3253 *min_size
= max
.to_uhwi () + 1;
3261 Produce anti range allowing negative values of N. We still
3262 can use the information and make a guess that N is not negative.
3264 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3265 *probable_max_size
= min
.to_uhwi () - 1;
3268 gcc_checking_assert (*max_size
<=
3269 (unsigned HOST_WIDE_INT
)
3270 GET_MODE_MASK (GET_MODE (len_rtx
)));
3273 /* Try to verify that the sizes and lengths of the arguments to a string
3274 manipulation function given by EXP are within valid bounds and that
3275 the operation does not lead to buffer overflow or read past the end.
3276 Arguments other than EXP may be null. When non-null, the arguments
3277 have the following meaning:
3278 DST is the destination of a copy call or NULL otherwise.
3279 SRC is the source of a copy call or NULL otherwise.
3280 DSTWRITE is the number of bytes written into the destination obtained
3281 from the user-supplied size argument to the function (such as in
3282 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3283 MAXREAD is the user-supplied bound on the length of the source sequence
3284 (such as in strncat(d, s, N). It specifies the upper limit on the number
3285 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3286 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3287 expression EXP is a string function call (as opposed to a memory call
3288 like memcpy). As an exception, SRCSTR can also be an integer denoting
3289 the precomputed size of the source string or object (for functions like
3291 DSTSIZE is the size of the destination object specified by the last
3292 argument to the _chk builtins, typically resulting from the expansion
3293 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3296 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3299 If the call is successfully verified as safe return true, otherwise
3303 check_access (tree exp
, tree
, tree
, tree dstwrite
,
3304 tree maxread
, tree srcstr
, tree dstsize
)
3306 int opt
= OPT_Wstringop_overflow_
;
3308 /* The size of the largest object is half the address space, or
3309 PTRDIFF_MAX. (This is way too permissive.) */
3310 tree maxobjsize
= max_object_size ();
3312 /* Either the length of the source string for string functions or
3313 the size of the source object for raw memory functions. */
3314 tree slen
= NULL_TREE
;
3316 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3318 /* Set to true when the exact number of bytes written by a string
3319 function like strcpy is not known and the only thing that is
3320 known is that it must be at least one (for the terminating nul). */
3321 bool at_least_one
= false;
3324 /* SRCSTR is normally a pointer to string but as a special case
3325 it can be an integer denoting the length of a string. */
3326 if (POINTER_TYPE_P (TREE_TYPE (srcstr
)))
3328 /* Try to determine the range of lengths the source string
3329 refers to. If it can be determined and is less than
3330 the upper bound given by MAXREAD add one to it for
3331 the terminating nul. Otherwise, set it to one for
3332 the same reason, or to MAXREAD as appropriate. */
3333 c_strlen_data lendata
= { };
3334 get_range_strlen (srcstr
, &lendata
, /* eltsize = */ 1);
3335 range
[0] = lendata
.minlen
;
3336 range
[1] = lendata
.maxbound
;
3337 if (range
[0] && (!maxread
|| TREE_CODE (maxread
) == INTEGER_CST
))
3339 if (maxread
&& tree_int_cst_le (maxread
, range
[0]))
3340 range
[0] = range
[1] = maxread
;
3342 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3343 range
[0], size_one_node
);
3345 if (maxread
&& tree_int_cst_le (maxread
, range
[1]))
3347 else if (!integer_all_onesp (range
[1]))
3348 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3349 range
[1], size_one_node
);
3355 at_least_one
= true;
3356 slen
= size_one_node
;
3363 if (!dstwrite
&& !maxread
)
3365 /* When the only available piece of data is the object size
3366 there is nothing to do. */
3370 /* Otherwise, when the length of the source sequence is known
3371 (as with strlen), set DSTWRITE to it. */
3377 dstsize
= maxobjsize
;
3380 get_size_range (dstwrite
, range
);
3382 tree func
= get_callee_fndecl (exp
);
3384 /* First check the number of bytes to be written against the maximum
3387 && TREE_CODE (range
[0]) == INTEGER_CST
3388 && tree_int_cst_lt (maxobjsize
, range
[0]))
3390 if (TREE_NO_WARNING (exp
))
3393 location_t loc
= tree_nonartificial_location (exp
);
3394 loc
= expansion_point_location_if_in_system_header (loc
);
3397 if (range
[0] == range
[1])
3398 warned
= warning_at (loc
, opt
,
3399 "%K%qD specified size %E "
3400 "exceeds maximum object size %E",
3401 exp
, func
, range
[0], maxobjsize
);
3403 warned
= warning_at (loc
, opt
,
3404 "%K%qD specified size between %E and %E "
3405 "exceeds maximum object size %E",
3407 range
[0], range
[1], maxobjsize
);
3409 TREE_NO_WARNING (exp
) = true;
3414 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3415 constant, and in range of unsigned HOST_WIDE_INT. */
3416 bool exactwrite
= dstwrite
&& tree_fits_uhwi_p (dstwrite
);
3418 /* Next check the number of bytes to be written against the destination
3420 if (range
[0] || !exactwrite
|| integer_all_onesp (dstwrite
))
3423 && TREE_CODE (range
[0]) == INTEGER_CST
3424 && ((tree_fits_uhwi_p (dstsize
)
3425 && tree_int_cst_lt (dstsize
, range
[0]))
3427 && tree_fits_uhwi_p (dstwrite
)
3428 && tree_int_cst_lt (dstwrite
, range
[0]))))
3430 if (TREE_NO_WARNING (exp
))
3433 location_t loc
= tree_nonartificial_location (exp
);
3434 loc
= expansion_point_location_if_in_system_header (loc
);
3436 if (dstwrite
== slen
&& at_least_one
)
3438 /* This is a call to strcpy with a destination of 0 size
3439 and a source of unknown length. The call will write
3440 at least one byte past the end of the destination. */
3441 warning_at (loc
, opt
,
3442 "%K%qD writing %E or more bytes into a region "
3443 "of size %E overflows the destination",
3444 exp
, func
, range
[0], dstsize
);
3446 else if (tree_int_cst_equal (range
[0], range
[1]))
3447 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3448 "%K%qD writing %E byte into a region "
3449 "of size %E overflows the destination",
3450 "%K%qD writing %E bytes into a region "
3451 "of size %E overflows the destination",
3452 exp
, func
, range
[0], dstsize
);
3453 else if (tree_int_cst_sign_bit (range
[1]))
3455 /* Avoid printing the upper bound if it's invalid. */
3456 warning_at (loc
, opt
,
3457 "%K%qD writing %E or more bytes into a region "
3458 "of size %E overflows the destination",
3459 exp
, func
, range
[0], dstsize
);
3462 warning_at (loc
, opt
,
3463 "%K%qD writing between %E and %E bytes into "
3464 "a region of size %E overflows the destination",
3465 exp
, func
, range
[0], range
[1],
3468 /* Return error when an overflow has been detected. */
3473 /* Check the maximum length of the source sequence against the size
3474 of the destination object if known, or against the maximum size
3478 get_size_range (maxread
, range
);
3479 if (range
[0] && dstsize
&& tree_fits_uhwi_p (dstsize
))
3481 location_t loc
= tree_nonartificial_location (exp
);
3482 loc
= expansion_point_location_if_in_system_header (loc
);
3484 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3486 if (TREE_NO_WARNING (exp
))
3489 /* Warn about crazy big sizes first since that's more
3490 likely to be meaningful than saying that the bound
3491 is greater than the object size if both are big. */
3492 if (range
[0] == range
[1])
3493 warning_at (loc
, opt
,
3494 "%K%qD specified bound %E "
3495 "exceeds maximum object size %E",
3497 range
[0], maxobjsize
);
3499 warning_at (loc
, opt
,
3500 "%K%qD specified bound between %E and %E "
3501 "exceeds maximum object size %E",
3503 range
[0], range
[1], maxobjsize
);
3508 if (dstsize
!= maxobjsize
&& tree_int_cst_lt (dstsize
, range
[0]))
3510 if (TREE_NO_WARNING (exp
))
3513 if (tree_int_cst_equal (range
[0], range
[1]))
3514 warning_at (loc
, opt
,
3515 "%K%qD specified bound %E "
3516 "exceeds destination size %E",
3520 warning_at (loc
, opt
,
3521 "%K%qD specified bound between %E and %E "
3522 "exceeds destination size %E",
3524 range
[0], range
[1], dstsize
);
3530 /* Check for reading past the end of SRC. */
3533 && dstwrite
&& range
[0]
3534 && tree_int_cst_lt (slen
, range
[0]))
3536 if (TREE_NO_WARNING (exp
))
3539 location_t loc
= tree_nonartificial_location (exp
);
3541 if (tree_int_cst_equal (range
[0], range
[1]))
3542 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3543 "%K%qD reading %E byte from a region of size %E",
3544 "%K%qD reading %E bytes from a region of size %E",
3545 exp
, func
, range
[0], slen
);
3546 else if (tree_int_cst_sign_bit (range
[1]))
3548 /* Avoid printing the upper bound if it's invalid. */
3549 warning_at (loc
, opt
,
3550 "%K%qD reading %E or more bytes from a region "
3552 exp
, func
, range
[0], slen
);
3555 warning_at (loc
, opt
,
3556 "%K%qD reading between %E and %E bytes from a region "
3558 exp
, func
, range
[0], range
[1], slen
);
3565 /* Helper to compute the size of the object referenced by the DEST
3566 expression which must have pointer type, using Object Size type
3567 OSTYPE (only the least significant 2 bits are used). Return
3568 an estimate of the size of the object if successful or NULL when
3569 the size cannot be determined. When the referenced object involves
3570 a non-constant offset in some range the returned value represents
3571 the largest size given the smallest non-negative offset in the
3572 range. If nonnull, set *PDECL to the decl of the referenced
3573 subobject if it can be determined, or to null otherwise.
3574 The function is intended for diagnostics and should not be used
3575 to influence code generation or optimization. */
3578 compute_objsize (tree dest
, int ostype
, tree
*pdecl
/* = NULL */)
3580 tree dummy
= NULL_TREE
;
3584 unsigned HOST_WIDE_INT size
;
3586 /* Only the two least significant bits are meaningful. */
3589 if (compute_builtin_object_size (dest
, ostype
, &size
))
3590 return build_int_cst (sizetype
, size
);
3592 if (TREE_CODE (dest
) == SSA_NAME
)
3594 gimple
*stmt
= SSA_NAME_DEF_STMT (dest
);
3595 if (!is_gimple_assign (stmt
))
3598 dest
= gimple_assign_rhs1 (stmt
);
3600 tree_code code
= gimple_assign_rhs_code (stmt
);
3601 if (code
== POINTER_PLUS_EXPR
)
3603 /* compute_builtin_object_size fails for addresses with
3604 non-constant offsets. Try to determine the range of
3605 such an offset here and use it to adjust the constant
3607 tree off
= gimple_assign_rhs2 (stmt
);
3608 if (TREE_CODE (off
) == INTEGER_CST
)
3610 if (tree size
= compute_objsize (dest
, ostype
, pdecl
))
3612 wide_int wioff
= wi::to_wide (off
);
3613 wide_int wisiz
= wi::to_wide (size
);
3615 /* Ignore negative offsets for now. For others,
3616 use the lower bound as the most optimistic
3617 estimate of the (remaining) size. */
3618 if (wi::sign_mask (wioff
))
3620 else if (wi::ltu_p (wioff
, wisiz
))
3621 return wide_int_to_tree (TREE_TYPE (size
),
3622 wi::sub (wisiz
, wioff
));
3624 return size_zero_node
;
3627 else if (TREE_CODE (off
) == SSA_NAME
3628 && INTEGRAL_TYPE_P (TREE_TYPE (off
)))
3631 enum value_range_kind rng
= get_range_info (off
, &min
, &max
);
3633 if (rng
== VR_RANGE
)
3635 if (tree size
= compute_objsize (dest
, ostype
, pdecl
))
3637 wide_int wisiz
= wi::to_wide (size
);
3639 /* Ignore negative offsets for now. For others,
3640 use the lower bound as the most optimistic
3641 estimate of the (remaining)size. */
3642 if (wi::sign_mask (min
)
3643 || wi::sign_mask (max
))
3645 else if (wi::ltu_p (min
, wisiz
))
3646 return wide_int_to_tree (TREE_TYPE (size
),
3647 wi::sub (wisiz
, min
));
3649 return size_zero_node
;
3654 else if (code
!= ADDR_EXPR
)
3658 /* Unless computing the largest size (for memcpy and other raw memory
3659 functions), try to determine the size of the object from its type. */
3663 if (TREE_CODE (dest
) == ARRAY_REF
3664 || TREE_CODE (dest
) == MEM_REF
)
3666 tree ref
= TREE_OPERAND (dest
, 0);
3667 tree off
= TREE_OPERAND (dest
, 1);
3668 if (tree size
= compute_objsize (ref
, ostype
, pdecl
))
3670 /* If the declaration of the destination object is known
3671 to have zero size, return zero. */
3672 if (integer_zerop (size
))
3673 return integer_zero_node
;
3675 if (TREE_CODE (off
) != INTEGER_CST
3676 || TREE_CODE (size
) != INTEGER_CST
)
3679 if (TREE_CODE (dest
) == ARRAY_REF
)
3681 tree eltype
= TREE_TYPE (dest
);
3682 if (tree tpsize
= TYPE_SIZE_UNIT (eltype
))
3683 off
= fold_build2 (MULT_EXPR
, size_type_node
, off
, tpsize
);
3688 if (tree_int_cst_lt (off
, size
))
3689 return fold_build2 (MINUS_EXPR
, size_type_node
, size
, off
);
3690 return integer_zero_node
;
3696 if (TREE_CODE (dest
) == COMPONENT_REF
)
3698 *pdecl
= TREE_OPERAND (dest
, 1);
3699 return component_ref_size (dest
);
3702 if (TREE_CODE (dest
) != ADDR_EXPR
)
3705 tree ref
= TREE_OPERAND (dest
, 0);
3709 return DECL_SIZE_UNIT (ref
);
3712 tree type
= TREE_TYPE (dest
);
3713 if (TREE_CODE (type
) == POINTER_TYPE
)
3714 type
= TREE_TYPE (type
);
3716 type
= TYPE_MAIN_VARIANT (type
);
3718 if (TREE_CODE (type
) == ARRAY_TYPE
3719 && !array_at_struct_end_p (ref
))
3721 if (tree size
= TYPE_SIZE_UNIT (type
))
3722 return TREE_CODE (size
) == INTEGER_CST
? size
: NULL_TREE
;
3728 /* Helper to determine and check the sizes of the source and the destination
3729 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3730 call expression, DEST is the destination argument, SRC is the source
3731 argument or null, and LEN is the number of bytes. Use Object Size type-0
3732 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3733 (no overflow or invalid sizes), false otherwise. */
3736 check_memop_access (tree exp
, tree dest
, tree src
, tree size
)
3738 /* For functions like memset and memcpy that operate on raw memory
3739 try to determine the size of the largest source and destination
3740 object using type-0 Object Size regardless of the object size
3741 type specified by the option. */
3742 tree srcsize
= src
? compute_objsize (src
, 0) : NULL_TREE
;
3743 tree dstsize
= compute_objsize (dest
, 0);
3745 return check_access (exp
, dest
, src
, size
, /*maxread=*/NULL_TREE
,
3749 /* Validate memchr arguments without performing any expansion.
3753 expand_builtin_memchr (tree exp
, rtx
)
3755 if (!validate_arglist (exp
,
3756 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3759 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3760 tree len
= CALL_EXPR_ARG (exp
, 2);
3762 /* Diagnose calls where the specified length exceeds the size
3764 if (warn_stringop_overflow
)
3766 tree size
= compute_objsize (arg1
, 0);
3767 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
3768 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
);
3774 /* Expand a call EXP to the memcpy builtin.
3775 Return NULL_RTX if we failed, the caller should emit a normal call,
3776 otherwise try to get the result in TARGET, if convenient (and in
3777 mode MODE if that's convenient). */
3780 expand_builtin_memcpy (tree exp
, rtx target
)
3782 if (!validate_arglist (exp
,
3783 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3786 tree dest
= CALL_EXPR_ARG (exp
, 0);
3787 tree src
= CALL_EXPR_ARG (exp
, 1);
3788 tree len
= CALL_EXPR_ARG (exp
, 2);
3790 check_memop_access (exp
, dest
, src
, len
);
3792 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3793 /*retmode=*/ RETURN_BEGIN
);
3796 /* Check a call EXP to the memmove built-in for validity.
3797 Return NULL_RTX on both success and failure. */
3800 expand_builtin_memmove (tree exp
, rtx
)
3802 if (!validate_arglist (exp
,
3803 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3806 tree dest
= CALL_EXPR_ARG (exp
, 0);
3807 tree src
= CALL_EXPR_ARG (exp
, 1);
3808 tree len
= CALL_EXPR_ARG (exp
, 2);
3810 check_memop_access (exp
, dest
, src
, len
);
3815 /* Expand a call EXP to the mempcpy builtin.
3816 Return NULL_RTX if we failed; the caller should emit a normal call,
3817 otherwise try to get the result in TARGET, if convenient (and in
3818 mode MODE if that's convenient). */
3821 expand_builtin_mempcpy (tree exp
, rtx target
)
3823 if (!validate_arglist (exp
,
3824 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3827 tree dest
= CALL_EXPR_ARG (exp
, 0);
3828 tree src
= CALL_EXPR_ARG (exp
, 1);
3829 tree len
= CALL_EXPR_ARG (exp
, 2);
3831 /* Policy does not generally allow using compute_objsize (which
3832 is used internally by check_memop_size) to change code generation
3833 or drive optimization decisions.
3835 In this instance it is safe because the code we generate has
3836 the same semantics regardless of the return value of
3837 check_memop_sizes. Exactly the same amount of data is copied
3838 and the return value is exactly the same in both cases.
3840 Furthermore, check_memop_size always uses mode 0 for the call to
3841 compute_objsize, so the imprecise nature of compute_objsize is
3844 /* Avoid expanding mempcpy into memcpy when the call is determined
3845 to overflow the buffer. This also prevents the same overflow
3846 from being diagnosed again when expanding memcpy. */
3847 if (!check_memop_access (exp
, dest
, src
, len
))
3850 return expand_builtin_mempcpy_args (dest
, src
, len
,
3851 target
, exp
, /*retmode=*/ RETURN_END
);
3854 /* Helper function to do the actual work for expand of memory copy family
3855 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3856 of memory from SRC to DEST and assign to TARGET if convenient. Return
3857 value is based on RETMODE argument. */
3860 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
3861 rtx target
, tree exp
, memop_ret retmode
)
3863 const char *src_str
;
3864 unsigned int src_align
= get_pointer_alignment (src
);
3865 unsigned int dest_align
= get_pointer_alignment (dest
);
3866 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3867 HOST_WIDE_INT expected_size
= -1;
3868 unsigned int expected_align
= 0;
3869 unsigned HOST_WIDE_INT min_size
;
3870 unsigned HOST_WIDE_INT max_size
;
3871 unsigned HOST_WIDE_INT probable_max_size
;
3875 /* If DEST is not a pointer type, call the normal function. */
3876 if (dest_align
== 0)
3879 /* If either SRC is not a pointer type, don't do this
3880 operation in-line. */
3884 if (currently_expanding_gimple_stmt
)
3885 stringop_block_profile (currently_expanding_gimple_stmt
,
3886 &expected_align
, &expected_size
);
3888 if (expected_align
< dest_align
)
3889 expected_align
= dest_align
;
3890 dest_mem
= get_memory_rtx (dest
, len
);
3891 set_mem_align (dest_mem
, dest_align
);
3892 len_rtx
= expand_normal (len
);
3893 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3894 &probable_max_size
);
3895 src_str
= c_getstr (src
);
3897 /* If SRC is a string constant and block move would be done
3898 by pieces, we can avoid loading the string from memory
3899 and only stored the computed constants. */
3901 && CONST_INT_P (len_rtx
)
3902 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3903 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3904 CONST_CAST (char *, src_str
),
3907 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3908 builtin_memcpy_read_str
,
3909 CONST_CAST (char *, src_str
),
3910 dest_align
, false, retmode
);
3911 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3912 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3916 src_mem
= get_memory_rtx (src
, len
);
3917 set_mem_align (src_mem
, src_align
);
3919 /* Copy word part most expediently. */
3920 enum block_op_methods method
= BLOCK_OP_NORMAL
;
3921 if (CALL_EXPR_TAILCALL (exp
)
3922 && (retmode
== RETURN_BEGIN
|| target
== const0_rtx
))
3923 method
= BLOCK_OP_TAILCALL
;
3924 bool use_mempcpy_call
= (targetm
.libc_has_fast_function (BUILT_IN_MEMPCPY
)
3925 && retmode
== RETURN_END
3926 && target
!= const0_rtx
);
3927 if (use_mempcpy_call
)
3928 method
= BLOCK_OP_NO_LIBCALL_RET
;
3929 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
, method
,
3930 expected_align
, expected_size
,
3931 min_size
, max_size
, probable_max_size
,
3932 use_mempcpy_call
, &is_move_done
);
3934 /* Bail out when a mempcpy call would be expanded as libcall and when
3935 we have a target that provides a fast implementation
3936 of mempcpy routine. */
3940 if (dest_addr
== pc_rtx
)
3945 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3946 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3949 if (retmode
!= RETURN_BEGIN
&& target
!= const0_rtx
)
3951 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3952 /* stpcpy pointer to last byte. */
3953 if (retmode
== RETURN_END_MINUS_ONE
)
3954 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
3961 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3962 rtx target
, tree orig_exp
, memop_ret retmode
)
3964 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, orig_exp
,
3968 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3969 we failed, the caller should emit a normal call, otherwise try to
3970 get the result in TARGET, if convenient.
3971 Return value is based on RETMODE argument. */
3974 expand_movstr (tree dest
, tree src
, rtx target
, memop_ret retmode
)
3976 class expand_operand ops
[3];
3980 if (!targetm
.have_movstr ())
3983 dest_mem
= get_memory_rtx (dest
, NULL
);
3984 src_mem
= get_memory_rtx (src
, NULL
);
3985 if (retmode
== RETURN_BEGIN
)
3987 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3988 dest_mem
= replace_equiv_address (dest_mem
, target
);
3991 create_output_operand (&ops
[0],
3992 retmode
!= RETURN_BEGIN
? target
: NULL_RTX
, Pmode
);
3993 create_fixed_operand (&ops
[1], dest_mem
);
3994 create_fixed_operand (&ops
[2], src_mem
);
3995 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3998 if (retmode
!= RETURN_BEGIN
&& target
!= const0_rtx
)
4000 target
= ops
[0].value
;
4001 /* movstr is supposed to set end to the address of the NUL
4002 terminator. If the caller requested a mempcpy-like return value,
4004 if (retmode
== RETURN_END
)
4006 rtx tem
= plus_constant (GET_MODE (target
),
4007 gen_lowpart (GET_MODE (target
), target
), 1);
4008 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
4014 /* Do some very basic size validation of a call to the strcpy builtin
4015 given by EXP. Return NULL_RTX to have the built-in expand to a call
4016 to the library function. */
4019 expand_builtin_strcat (tree exp
, rtx
)
4021 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
)
4022 || !warn_stringop_overflow
)
4025 tree dest
= CALL_EXPR_ARG (exp
, 0);
4026 tree src
= CALL_EXPR_ARG (exp
, 1);
4028 /* There is no way here to determine the length of the string in
4029 the destination to which the SRC string is being appended so
4030 just diagnose cases when the souce string is longer than
4031 the destination object. */
4033 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
4035 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
, src
,
4041 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4042 NULL_RTX if we failed the caller should emit a normal call, otherwise
4043 try to get the result in TARGET, if convenient (and in mode MODE if that's
4047 expand_builtin_strcpy (tree exp
, rtx target
)
4049 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4052 tree dest
= CALL_EXPR_ARG (exp
, 0);
4053 tree src
= CALL_EXPR_ARG (exp
, 1);
4055 if (warn_stringop_overflow
)
4057 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
4058 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
4062 if (rtx ret
= expand_builtin_strcpy_args (exp
, dest
, src
, target
))
4064 /* Check to see if the argument was declared attribute nonstring
4065 and if so, issue a warning since at this point it's not known
4066 to be nul-terminated. */
4067 tree fndecl
= get_callee_fndecl (exp
);
4068 maybe_warn_nonstring_arg (fndecl
, exp
);
4075 /* Helper function to do the actual work for expand_builtin_strcpy. The
4076 arguments to the builtin_strcpy call DEST and SRC are broken out
4077 so that this can also be called without constructing an actual CALL_EXPR.
4078 The other arguments and return value are the same as for
4079 expand_builtin_strcpy. */
4082 expand_builtin_strcpy_args (tree exp
, tree dest
, tree src
, rtx target
)
4084 /* Detect strcpy calls with unterminated arrays.. */
4085 if (tree nonstr
= unterminated_array (src
))
4087 /* NONSTR refers to the non-nul terminated constant array. */
4088 if (!TREE_NO_WARNING (exp
))
4089 warn_string_no_nul (EXPR_LOCATION (exp
), "strcpy", src
, nonstr
);
4093 return expand_movstr (dest
, src
, target
, /*retmode=*/ RETURN_BEGIN
);
4096 /* Expand a call EXP to the stpcpy builtin.
4097 Return NULL_RTX if we failed the caller should emit a normal call,
4098 otherwise try to get the result in TARGET, if convenient (and in
4099 mode MODE if that's convenient). */
4102 expand_builtin_stpcpy_1 (tree exp
, rtx target
, machine_mode mode
)
4105 location_t loc
= EXPR_LOCATION (exp
);
4107 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4110 dst
= CALL_EXPR_ARG (exp
, 0);
4111 src
= CALL_EXPR_ARG (exp
, 1);
4113 if (warn_stringop_overflow
)
4115 tree destsize
= compute_objsize (dst
, warn_stringop_overflow
- 1);
4116 check_access (exp
, dst
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
4120 /* If return value is ignored, transform stpcpy into strcpy. */
4121 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
4123 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
4124 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
4125 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4132 /* Ensure we get an actual string whose length can be evaluated at
4133 compile-time, not an expression containing a string. This is
4134 because the latter will potentially produce pessimized code
4135 when used to produce the return value. */
4136 c_strlen_data lendata
= { };
4137 if (!c_getstr (src
, NULL
)
4138 || !(len
= c_strlen (src
, 0, &lendata
, 1)))
4139 return expand_movstr (dst
, src
, target
,
4140 /*retmode=*/ RETURN_END_MINUS_ONE
);
4142 if (lendata
.decl
&& !TREE_NO_WARNING (exp
))
4143 warn_string_no_nul (EXPR_LOCATION (exp
), "stpcpy", src
, lendata
.decl
);
4145 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
4146 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
4148 /*retmode=*/ RETURN_END_MINUS_ONE
);
4153 if (TREE_CODE (len
) == INTEGER_CST
)
4155 rtx len_rtx
= expand_normal (len
);
4157 if (CONST_INT_P (len_rtx
))
4159 ret
= expand_builtin_strcpy_args (exp
, dst
, src
, target
);
4165 if (mode
!= VOIDmode
)
4166 target
= gen_reg_rtx (mode
);
4168 target
= gen_reg_rtx (GET_MODE (ret
));
4170 if (GET_MODE (target
) != GET_MODE (ret
))
4171 ret
= gen_lowpart (GET_MODE (target
), ret
);
4173 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
4174 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
4182 return expand_movstr (dst
, src
, target
,
4183 /*retmode=*/ RETURN_END_MINUS_ONE
);
4187 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4188 arguments while being careful to avoid duplicate warnings (which could
4189 be issued if the expander were to expand the call, resulting in it
4190 being emitted in expand_call(). */
4193 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
4195 if (rtx ret
= expand_builtin_stpcpy_1 (exp
, target
, mode
))
4197 /* The call has been successfully expanded. Check for nonstring
4198 arguments and issue warnings as appropriate. */
4199 maybe_warn_nonstring_arg (get_callee_fndecl (exp
), exp
);
4206 /* Check a call EXP to the stpncpy built-in for validity.
4207 Return NULL_RTX on both success and failure. */
4210 expand_builtin_stpncpy (tree exp
, rtx
)
4212 if (!validate_arglist (exp
,
4213 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4214 || !warn_stringop_overflow
)
4217 /* The source and destination of the call. */
4218 tree dest
= CALL_EXPR_ARG (exp
, 0);
4219 tree src
= CALL_EXPR_ARG (exp
, 1);
4221 /* The exact number of bytes to write (not the maximum). */
4222 tree len
= CALL_EXPR_ARG (exp
, 2);
4224 /* The size of the destination object. */
4225 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
4227 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
, destsize
);
4232 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4233 bytes from constant string DATA + OFFSET and return it as target
4237 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
4238 scalar_int_mode mode
)
4240 const char *str
= (const char *) data
;
4242 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
4245 return c_readstr (str
+ offset
, mode
);
4248 /* Helper to check the sizes of sequences and the destination of calls
4249 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4250 success (no overflow or invalid sizes), false otherwise. */
4253 check_strncat_sizes (tree exp
, tree objsize
)
4255 tree dest
= CALL_EXPR_ARG (exp
, 0);
4256 tree src
= CALL_EXPR_ARG (exp
, 1);
4257 tree maxread
= CALL_EXPR_ARG (exp
, 2);
4259 /* Try to determine the range of lengths that the source expression
4261 c_strlen_data lendata
= { };
4262 get_range_strlen (src
, &lendata
, /* eltsize = */ 1);
4264 /* Try to verify that the destination is big enough for the shortest
4267 if (!objsize
&& warn_stringop_overflow
)
4269 /* If it hasn't been provided by __strncat_chk, try to determine
4270 the size of the destination object into which the source is
4272 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
4275 /* Add one for the terminating nul. */
4276 tree srclen
= (lendata
.minlen
4277 ? fold_build2 (PLUS_EXPR
, size_type_node
, lendata
.minlen
,
4281 /* The strncat function copies at most MAXREAD bytes and always appends
4282 the terminating nul so the specified upper bound should never be equal
4283 to (or greater than) the size of the destination. */
4284 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (objsize
)
4285 && tree_int_cst_equal (objsize
, maxread
))
4287 location_t loc
= tree_nonartificial_location (exp
);
4288 loc
= expansion_point_location_if_in_system_header (loc
);
4290 warning_at (loc
, OPT_Wstringop_overflow_
,
4291 "%K%qD specified bound %E equals destination size",
4292 exp
, get_callee_fndecl (exp
), maxread
);
4298 || (maxread
&& tree_fits_uhwi_p (maxread
)
4299 && tree_fits_uhwi_p (srclen
)
4300 && tree_int_cst_lt (maxread
, srclen
)))
4303 /* The number of bytes to write is LEN but check_access will also
4304 check SRCLEN if LEN's value isn't known. */
4305 return check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, maxread
, srclen
,
4309 /* Similar to expand_builtin_strcat, do some very basic size validation
4310 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
4311 the built-in expand to a call to the library function. */
4314 expand_builtin_strncat (tree exp
, rtx
)
4316 if (!validate_arglist (exp
,
4317 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4318 || !warn_stringop_overflow
)
4321 tree dest
= CALL_EXPR_ARG (exp
, 0);
4322 tree src
= CALL_EXPR_ARG (exp
, 1);
4323 /* The upper bound on the number of bytes to write. */
4324 tree maxread
= CALL_EXPR_ARG (exp
, 2);
4325 /* The length of the source sequence. */
4326 tree slen
= c_strlen (src
, 1);
4328 /* Try to determine the range of lengths that the source expression
4329 refers to. Since the lengths are only used for warning and not
4330 for code generation disable strict mode below. */
4334 c_strlen_data lendata
= { };
4335 get_range_strlen (src
, &lendata
, /* eltsize = */ 1);
4336 maxlen
= lendata
.maxbound
;
4339 /* Try to verify that the destination is big enough for the shortest
4340 string. First try to determine the size of the destination object
4341 into which the source is being copied. */
4342 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
4344 /* Add one for the terminating nul. */
4345 tree srclen
= (maxlen
4346 ? fold_build2 (PLUS_EXPR
, size_type_node
, maxlen
,
4350 /* The strncat function copies at most MAXREAD bytes and always appends
4351 the terminating nul so the specified upper bound should never be equal
4352 to (or greater than) the size of the destination. */
4353 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (destsize
)
4354 && tree_int_cst_equal (destsize
, maxread
))
4356 location_t loc
= tree_nonartificial_location (exp
);
4357 loc
= expansion_point_location_if_in_system_header (loc
);
4359 warning_at (loc
, OPT_Wstringop_overflow_
,
4360 "%K%qD specified bound %E equals destination size",
4361 exp
, get_callee_fndecl (exp
), maxread
);
4367 || (maxread
&& tree_fits_uhwi_p (maxread
)
4368 && tree_fits_uhwi_p (srclen
)
4369 && tree_int_cst_lt (maxread
, srclen
)))
4372 /* The number of bytes to write is SRCLEN. */
4373 check_access (exp
, dest
, src
, NULL_TREE
, maxread
, srclen
, destsize
);
4378 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4379 NULL_RTX if we failed the caller should emit a normal call. */
4382 expand_builtin_strncpy (tree exp
, rtx target
)
4384 location_t loc
= EXPR_LOCATION (exp
);
4386 if (validate_arglist (exp
,
4387 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4389 tree dest
= CALL_EXPR_ARG (exp
, 0);
4390 tree src
= CALL_EXPR_ARG (exp
, 1);
4391 /* The number of bytes to write (not the maximum). */
4392 tree len
= CALL_EXPR_ARG (exp
, 2);
4393 /* The length of the source sequence. */
4394 tree slen
= c_strlen (src
, 1);
4396 if (warn_stringop_overflow
)
4398 tree destsize
= compute_objsize (dest
,
4399 warn_stringop_overflow
- 1);
4401 /* The number of bytes to write is LEN but check_access will also
4402 check SLEN if LEN's value isn't known. */
4403 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
,
4407 /* We must be passed a constant len and src parameter. */
4408 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
4411 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
4413 /* We're required to pad with trailing zeros if the requested
4414 len is greater than strlen(s2)+1. In that case try to
4415 use store_by_pieces, if it fails, punt. */
4416 if (tree_int_cst_lt (slen
, len
))
4418 unsigned int dest_align
= get_pointer_alignment (dest
);
4419 const char *p
= c_getstr (src
);
4422 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
4423 || !can_store_by_pieces (tree_to_uhwi (len
),
4424 builtin_strncpy_read_str
,
4425 CONST_CAST (char *, p
),
4429 dest_mem
= get_memory_rtx (dest
, len
);
4430 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4431 builtin_strncpy_read_str
,
4432 CONST_CAST (char *, p
), dest_align
, false,
4434 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
4435 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4442 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4443 bytes from constant string DATA + OFFSET and return it as target
4447 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4448 scalar_int_mode mode
)
4450 const char *c
= (const char *) data
;
4451 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
4453 memset (p
, *c
, GET_MODE_SIZE (mode
));
4455 return c_readstr (p
, mode
);
4458 /* Callback routine for store_by_pieces. Return the RTL of a register
4459 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4460 char value given in the RTL register data. For example, if mode is
4461 4 bytes wide, return the RTL for 0x01010101*data. */
4464 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4465 scalar_int_mode mode
)
4471 size
= GET_MODE_SIZE (mode
);
4475 p
= XALLOCAVEC (char, size
);
4476 memset (p
, 1, size
);
4477 coeff
= c_readstr (p
, mode
);
4479 target
= convert_to_mode (mode
, (rtx
) data
, 1);
4480 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
4481 return force_reg (mode
, target
);
4484 /* Expand expression EXP, which is a call to the memset builtin. Return
4485 NULL_RTX if we failed the caller should emit a normal call, otherwise
4486 try to get the result in TARGET, if convenient (and in mode MODE if that's
4490 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
4492 if (!validate_arglist (exp
,
4493 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4496 tree dest
= CALL_EXPR_ARG (exp
, 0);
4497 tree val
= CALL_EXPR_ARG (exp
, 1);
4498 tree len
= CALL_EXPR_ARG (exp
, 2);
4500 check_memop_access (exp
, dest
, NULL_TREE
, len
);
4502 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
4505 /* Helper function to do the actual work for expand_builtin_memset. The
4506 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4507 so that this can also be called without constructing an actual CALL_EXPR.
4508 The other arguments and return value are the same as for
4509 expand_builtin_memset. */
4512 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4513 rtx target
, machine_mode mode
, tree orig_exp
)
4516 enum built_in_function fcode
;
4517 machine_mode val_mode
;
4519 unsigned int dest_align
;
4520 rtx dest_mem
, dest_addr
, len_rtx
;
4521 HOST_WIDE_INT expected_size
= -1;
4522 unsigned int expected_align
= 0;
4523 unsigned HOST_WIDE_INT min_size
;
4524 unsigned HOST_WIDE_INT max_size
;
4525 unsigned HOST_WIDE_INT probable_max_size
;
4527 dest_align
= get_pointer_alignment (dest
);
4529 /* If DEST is not a pointer type, don't do this operation in-line. */
4530 if (dest_align
== 0)
4533 if (currently_expanding_gimple_stmt
)
4534 stringop_block_profile (currently_expanding_gimple_stmt
,
4535 &expected_align
, &expected_size
);
4537 if (expected_align
< dest_align
)
4538 expected_align
= dest_align
;
4540 /* If the LEN parameter is zero, return DEST. */
4541 if (integer_zerop (len
))
4543 /* Evaluate and ignore VAL in case it has side-effects. */
4544 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4545 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4548 /* Stabilize the arguments in case we fail. */
4549 dest
= builtin_save_expr (dest
);
4550 val
= builtin_save_expr (val
);
4551 len
= builtin_save_expr (len
);
4553 len_rtx
= expand_normal (len
);
4554 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4555 &probable_max_size
);
4556 dest_mem
= get_memory_rtx (dest
, len
);
4557 val_mode
= TYPE_MODE (unsigned_char_type_node
);
4559 if (TREE_CODE (val
) != INTEGER_CST
)
4563 val_rtx
= expand_normal (val
);
4564 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4566 /* Assume that we can memset by pieces if we can store
4567 * the coefficients by pieces (in the required modes).
4568 * We can't pass builtin_memset_gen_str as that emits RTL. */
4570 if (tree_fits_uhwi_p (len
)
4571 && can_store_by_pieces (tree_to_uhwi (len
),
4572 builtin_memset_read_str
, &c
, dest_align
,
4575 val_rtx
= force_reg (val_mode
, val_rtx
);
4576 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4577 builtin_memset_gen_str
, val_rtx
, dest_align
,
4578 true, RETURN_BEGIN
);
4580 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4581 dest_align
, expected_align
,
4582 expected_size
, min_size
, max_size
,
4586 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4587 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4591 if (target_char_cast (val
, &c
))
4596 if (tree_fits_uhwi_p (len
)
4597 && can_store_by_pieces (tree_to_uhwi (len
),
4598 builtin_memset_read_str
, &c
, dest_align
,
4600 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4601 builtin_memset_read_str
, &c
, dest_align
, true,
4603 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4604 gen_int_mode (c
, val_mode
),
4605 dest_align
, expected_align
,
4606 expected_size
, min_size
, max_size
,
4610 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4611 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4615 set_mem_align (dest_mem
, dest_align
);
4616 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4617 CALL_EXPR_TAILCALL (orig_exp
)
4618 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4619 expected_align
, expected_size
,
4625 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4626 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4632 fndecl
= get_callee_fndecl (orig_exp
);
4633 fcode
= DECL_FUNCTION_CODE (fndecl
);
4634 if (fcode
== BUILT_IN_MEMSET
)
4635 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4637 else if (fcode
== BUILT_IN_BZERO
)
4638 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4642 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4643 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4644 return expand_call (fn
, target
, target
== const0_rtx
);
4647 /* Expand expression EXP, which is a call to the bzero builtin. Return
4648 NULL_RTX if we failed the caller should emit a normal call. */
4651 expand_builtin_bzero (tree exp
)
4653 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4656 tree dest
= CALL_EXPR_ARG (exp
, 0);
4657 tree size
= CALL_EXPR_ARG (exp
, 1);
4659 check_memop_access (exp
, dest
, NULL_TREE
, size
);
4661 /* New argument list transforming bzero(ptr x, int y) to
4662 memset(ptr x, int 0, size_t y). This is done this way
4663 so that if it isn't expanded inline, we fallback to
4664 calling bzero instead of memset. */
4666 location_t loc
= EXPR_LOCATION (exp
);
4668 return expand_builtin_memset_args (dest
, integer_zero_node
,
4669 fold_convert_loc (loc
,
4670 size_type_node
, size
),
4671 const0_rtx
, VOIDmode
, exp
);
4674 /* Try to expand cmpstr operation ICODE with the given operands.
4675 Return the result rtx on success, otherwise return null. */
4678 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
4679 HOST_WIDE_INT align
)
4681 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
4683 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
4686 class expand_operand ops
[4];
4687 create_output_operand (&ops
[0], target
, insn_mode
);
4688 create_fixed_operand (&ops
[1], arg1_rtx
);
4689 create_fixed_operand (&ops
[2], arg2_rtx
);
4690 create_integer_operand (&ops
[3], align
);
4691 if (maybe_expand_insn (icode
, 4, ops
))
4692 return ops
[0].value
;
4696 /* Expand expression EXP, which is a call to the memcmp built-in function.
4697 Return NULL_RTX if we failed and the caller should emit a normal call,
4698 otherwise try to get the result in TARGET, if convenient.
4699 RESULT_EQ is true if we can relax the returned value to be either zero
4700 or nonzero, without caring about the sign. */
4703 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4705 if (!validate_arglist (exp
,
4706 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4709 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4710 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4711 tree len
= CALL_EXPR_ARG (exp
, 2);
4712 enum built_in_function fcode
= DECL_FUNCTION_CODE (get_callee_fndecl (exp
));
4713 bool no_overflow
= true;
4715 /* Diagnose calls where the specified length exceeds the size of either
4717 tree size
= compute_objsize (arg1
, 0);
4718 no_overflow
= check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
,
4719 len
, /*maxread=*/NULL_TREE
, size
,
4720 /*objsize=*/NULL_TREE
);
4723 size
= compute_objsize (arg2
, 0);
4724 no_overflow
= check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
,
4725 len
, /*maxread=*/NULL_TREE
, size
,
4726 /*objsize=*/NULL_TREE
);
4729 /* If the specified length exceeds the size of either object,
4730 call the function. */
4734 /* Due to the performance benefit, always inline the calls first
4735 when result_eq is false. */
4736 rtx result
= NULL_RTX
;
4738 if (!result_eq
&& fcode
!= BUILT_IN_BCMP
)
4740 result
= inline_expand_builtin_string_cmp (exp
, target
);
4745 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4746 location_t loc
= EXPR_LOCATION (exp
);
4748 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4749 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4751 /* If we don't have POINTER_TYPE, call the function. */
4752 if (arg1_align
== 0 || arg2_align
== 0)
4755 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4756 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4757 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4759 /* Set MEM_SIZE as appropriate. */
4760 if (CONST_INT_P (len_rtx
))
4762 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4763 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4766 by_pieces_constfn constfn
= NULL
;
4768 const char *src_str
= c_getstr (arg2
);
4769 if (result_eq
&& src_str
== NULL
)
4771 src_str
= c_getstr (arg1
);
4772 if (src_str
!= NULL
)
4773 std::swap (arg1_rtx
, arg2_rtx
);
4776 /* If SRC is a string constant and block move would be done
4777 by pieces, we can avoid loading the string from memory
4778 and only stored the computed constants. */
4780 && CONST_INT_P (len_rtx
)
4781 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1)
4782 constfn
= builtin_memcpy_read_str
;
4784 result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4785 TREE_TYPE (len
), target
,
4787 CONST_CAST (char *, src_str
));
4791 /* Return the value in the proper mode for this function. */
4792 if (GET_MODE (result
) == mode
)
4797 convert_move (target
, result
, 0);
4801 return convert_to_mode (mode
, result
, 0);
4807 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4808 if we failed the caller should emit a normal call, otherwise try to get
4809 the result in TARGET, if convenient. */
4812 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4814 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4817 /* Due to the performance benefit, always inline the calls first. */
4818 rtx result
= NULL_RTX
;
4819 result
= inline_expand_builtin_string_cmp (exp
, target
);
4823 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4824 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4825 if (cmpstr_icode
== CODE_FOR_nothing
&& cmpstrn_icode
== CODE_FOR_nothing
)
4828 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4829 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4831 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4832 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4834 /* If we don't have POINTER_TYPE, call the function. */
4835 if (arg1_align
== 0 || arg2_align
== 0)
4838 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4839 arg1
= builtin_save_expr (arg1
);
4840 arg2
= builtin_save_expr (arg2
);
4842 rtx arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4843 rtx arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4845 /* Try to call cmpstrsi. */
4846 if (cmpstr_icode
!= CODE_FOR_nothing
)
4847 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4848 MIN (arg1_align
, arg2_align
));
4850 /* Try to determine at least one length and call cmpstrnsi. */
4851 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4856 tree len1
= c_strlen (arg1
, 1);
4857 tree len2
= c_strlen (arg2
, 1);
4860 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4862 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4864 /* If we don't have a constant length for the first, use the length
4865 of the second, if we know it. We don't require a constant for
4866 this case; some cost analysis could be done if both are available
4867 but neither is constant. For now, assume they're equally cheap,
4868 unless one has side effects. If both strings have constant lengths,
4875 else if (TREE_SIDE_EFFECTS (len1
))
4877 else if (TREE_SIDE_EFFECTS (len2
))
4879 else if (TREE_CODE (len1
) != INTEGER_CST
)
4881 else if (TREE_CODE (len2
) != INTEGER_CST
)
4883 else if (tree_int_cst_lt (len1
, len2
))
4888 /* If both arguments have side effects, we cannot optimize. */
4889 if (len
&& !TREE_SIDE_EFFECTS (len
))
4891 arg3_rtx
= expand_normal (len
);
4892 result
= expand_cmpstrn_or_cmpmem
4893 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4894 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4898 tree fndecl
= get_callee_fndecl (exp
);
4901 /* Check to see if the argument was declared attribute nonstring
4902 and if so, issue a warning since at this point it's not known
4903 to be nul-terminated. */
4904 maybe_warn_nonstring_arg (fndecl
, exp
);
4906 /* Return the value in the proper mode for this function. */
4907 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4908 if (GET_MODE (result
) == mode
)
4911 return convert_to_mode (mode
, result
, 0);
4912 convert_move (target
, result
, 0);
4916 /* Expand the library call ourselves using a stabilized argument
4917 list to avoid re-evaluating the function's arguments twice. */
4918 tree fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4919 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4920 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4921 return expand_call (fn
, target
, target
== const0_rtx
);
4924 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4925 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4926 the result in TARGET, if convenient. */
4929 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4930 ATTRIBUTE_UNUSED machine_mode mode
)
4932 if (!validate_arglist (exp
,
4933 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4936 /* Due to the performance benefit, always inline the calls first. */
4937 rtx result
= NULL_RTX
;
4938 result
= inline_expand_builtin_string_cmp (exp
, target
);
4942 /* If c_strlen can determine an expression for one of the string
4943 lengths, and it doesn't have side effects, then emit cmpstrnsi
4944 using length MIN(strlen(string)+1, arg3). */
4945 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4946 if (cmpstrn_icode
== CODE_FOR_nothing
)
4951 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4952 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4953 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4955 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4956 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4958 tree len1
= c_strlen (arg1
, 1);
4959 tree len2
= c_strlen (arg2
, 1);
4961 location_t loc
= EXPR_LOCATION (exp
);
4964 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4966 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4968 tree len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4970 /* If we don't have a constant length for the first, use the length
4971 of the second, if we know it. If neither string is constant length,
4972 use the given length argument. We don't require a constant for
4973 this case; some cost analysis could be done if both are available
4974 but neither is constant. For now, assume they're equally cheap,
4975 unless one has side effects. If both strings have constant lengths,
4984 else if (TREE_SIDE_EFFECTS (len1
))
4986 else if (TREE_SIDE_EFFECTS (len2
))
4988 else if (TREE_CODE (len1
) != INTEGER_CST
)
4990 else if (TREE_CODE (len2
) != INTEGER_CST
)
4992 else if (tree_int_cst_lt (len1
, len2
))
4997 /* If we are not using the given length, we must incorporate it here.
4998 The actual new length parameter will be MIN(len,arg3) in this case. */
5001 len
= fold_convert_loc (loc
, sizetype
, len
);
5002 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
5004 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
5005 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
5006 rtx arg3_rtx
= expand_normal (len
);
5007 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
5008 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
5009 MIN (arg1_align
, arg2_align
));
5011 tree fndecl
= get_callee_fndecl (exp
);
5014 /* Check to see if the argument was declared attribute nonstring
5015 and if so, issue a warning since at this point it's not known
5016 to be nul-terminated. */
5017 maybe_warn_nonstring_arg (fndecl
, exp
);
5019 /* Return the value in the proper mode for this function. */
5020 mode
= TYPE_MODE (TREE_TYPE (exp
));
5021 if (GET_MODE (result
) == mode
)
5024 return convert_to_mode (mode
, result
, 0);
5025 convert_move (target
, result
, 0);
5029 /* Expand the library call ourselves using a stabilized argument
5030 list to avoid re-evaluating the function's arguments twice. */
5031 tree fn
= build_call_nofold_loc (loc
, fndecl
, 3, arg1
, arg2
, len
);
5032 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
5033 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5034 return expand_call (fn
, target
, target
== const0_rtx
);
5037 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5038 if that's convenient. */
5041 expand_builtin_saveregs (void)
5046 /* Don't do __builtin_saveregs more than once in a function.
5047 Save the result of the first call and reuse it. */
5048 if (saveregs_value
!= 0)
5049 return saveregs_value
;
5051 /* When this function is called, it means that registers must be
5052 saved on entry to this function. So we migrate the call to the
5053 first insn of this function. */
5057 /* Do whatever the machine needs done in this case. */
5058 val
= targetm
.calls
.expand_builtin_saveregs ();
5063 saveregs_value
= val
;
5065 /* Put the insns after the NOTE that starts the function. If this
5066 is inside a start_sequence, make the outer-level insn chain current, so
5067 the code is placed at the start of the function. */
5068 push_topmost_sequence ();
5069 emit_insn_after (seq
, entry_of_function ());
5070 pop_topmost_sequence ();
5075 /* Expand a call to __builtin_next_arg. */
5078 expand_builtin_next_arg (void)
5080 /* Checking arguments is already done in fold_builtin_next_arg
5081 that must be called before this function. */
5082 return expand_binop (ptr_mode
, add_optab
,
5083 crtl
->args
.internal_arg_pointer
,
5084 crtl
->args
.arg_offset_rtx
,
5085 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
5088 /* Make it easier for the backends by protecting the valist argument
5089 from multiple evaluations. */
5092 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
5094 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
5096 /* The current way of determining the type of valist is completely
5097 bogus. We should have the information on the va builtin instead. */
5099 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
5101 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
5103 if (TREE_SIDE_EFFECTS (valist
))
5104 valist
= save_expr (valist
);
5106 /* For this case, the backends will be expecting a pointer to
5107 vatype, but it's possible we've actually been given an array
5108 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5110 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
5112 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
5113 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
5118 tree pt
= build_pointer_type (vatype
);
5122 if (! TREE_SIDE_EFFECTS (valist
))
5125 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
5126 TREE_SIDE_EFFECTS (valist
) = 1;
5129 if (TREE_SIDE_EFFECTS (valist
))
5130 valist
= save_expr (valist
);
5131 valist
= fold_build2_loc (loc
, MEM_REF
,
5132 vatype
, valist
, build_int_cst (pt
, 0));
5138 /* The "standard" definition of va_list is void*. */
5141 std_build_builtin_va_list (void)
5143 return ptr_type_node
;
5146 /* The "standard" abi va_list is va_list_type_node. */
5149 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
5151 return va_list_type_node
;
5154 /* The "standard" type of va_list is va_list_type_node. */
5157 std_canonical_va_list_type (tree type
)
5161 wtype
= va_list_type_node
;
5164 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
5166 /* If va_list is an array type, the argument may have decayed
5167 to a pointer type, e.g. by being passed to another function.
5168 In that case, unwrap both types so that we can compare the
5169 underlying records. */
5170 if (TREE_CODE (htype
) == ARRAY_TYPE
5171 || POINTER_TYPE_P (htype
))
5173 wtype
= TREE_TYPE (wtype
);
5174 htype
= TREE_TYPE (htype
);
5177 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
5178 return va_list_type_node
;
5183 /* The "standard" implementation of va_start: just assign `nextarg' to
5187 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
5189 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5190 convert_move (va_r
, nextarg
, 0);
5193 /* Expand EXP, a call to __builtin_va_start. */
5196 expand_builtin_va_start (tree exp
)
5200 location_t loc
= EXPR_LOCATION (exp
);
5202 if (call_expr_nargs (exp
) < 2)
5204 error_at (loc
, "too few arguments to function %<va_start%>");
5208 if (fold_builtin_next_arg (exp
, true))
5211 nextarg
= expand_builtin_next_arg ();
5212 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
5214 if (targetm
.expand_builtin_va_start
)
5215 targetm
.expand_builtin_va_start (valist
, nextarg
);
5217 std_expand_builtin_va_start (valist
, nextarg
);
5222 /* Expand EXP, a call to __builtin_va_end. */
5225 expand_builtin_va_end (tree exp
)
5227 tree valist
= CALL_EXPR_ARG (exp
, 0);
5229 /* Evaluate for side effects, if needed. I hate macros that don't
5231 if (TREE_SIDE_EFFECTS (valist
))
5232 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5237 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5238 builtin rather than just as an assignment in stdarg.h because of the
5239 nastiness of array-type va_list types. */
5242 expand_builtin_va_copy (tree exp
)
5245 location_t loc
= EXPR_LOCATION (exp
);
5247 dst
= CALL_EXPR_ARG (exp
, 0);
5248 src
= CALL_EXPR_ARG (exp
, 1);
5250 dst
= stabilize_va_list_loc (loc
, dst
, 1);
5251 src
= stabilize_va_list_loc (loc
, src
, 0);
5253 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
5255 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
5257 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
5258 TREE_SIDE_EFFECTS (t
) = 1;
5259 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5263 rtx dstb
, srcb
, size
;
5265 /* Evaluate to pointers. */
5266 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5267 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5268 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
5269 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
5271 dstb
= convert_memory_address (Pmode
, dstb
);
5272 srcb
= convert_memory_address (Pmode
, srcb
);
5274 /* "Dereference" to BLKmode memories. */
5275 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
5276 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
5277 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5278 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
5279 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
5280 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
5283 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
5289 /* Expand a call to one of the builtin functions __builtin_frame_address or
5290 __builtin_return_address. */
5293 expand_builtin_frame_address (tree fndecl
, tree exp
)
5295 /* The argument must be a nonnegative integer constant.
5296 It counts the number of frames to scan up the stack.
5297 The value is either the frame pointer value or the return
5298 address saved in that frame. */
5299 if (call_expr_nargs (exp
) == 0)
5300 /* Warning about missing arg was already issued. */
5302 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
5304 error ("invalid argument to %qD", fndecl
);
5309 /* Number of frames to scan up the stack. */
5310 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
5312 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
5314 /* Some ports cannot access arbitrary stack frames. */
5317 warning (0, "unsupported argument to %qD", fndecl
);
5323 /* Warn since no effort is made to ensure that any frame
5324 beyond the current one exists or can be safely reached. */
5325 warning (OPT_Wframe_address
, "calling %qD with "
5326 "a nonzero argument is unsafe", fndecl
);
5329 /* For __builtin_frame_address, return what we've got. */
5330 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5334 && ! CONSTANT_P (tem
))
5335 tem
= copy_addr_to_reg (tem
);
5340 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5341 failed and the caller should emit a normal call. */
5344 expand_builtin_alloca (tree exp
)
5349 tree fndecl
= get_callee_fndecl (exp
);
5350 HOST_WIDE_INT max_size
;
5351 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5352 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
5354 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5355 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
5357 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
5358 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5359 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
5365 && warn_vla_limit
>= HOST_WIDE_INT_MAX
5366 && warn_alloc_size_limit
< warn_vla_limit
)
5368 && warn_alloca_limit
>= HOST_WIDE_INT_MAX
5369 && warn_alloc_size_limit
< warn_alloca_limit
5372 /* -Walloca-larger-than and -Wvla-larger-than settings of
5373 less than HOST_WIDE_INT_MAX override the more general
5374 -Walloc-size-larger-than so unless either of the former
5375 options is smaller than the last one (wchich would imply
5376 that the call was already checked), check the alloca
5377 arguments for overflow. */
5378 tree args
[] = { CALL_EXPR_ARG (exp
, 0), NULL_TREE
};
5379 int idx
[] = { 0, -1 };
5380 maybe_warn_alloc_args_overflow (fndecl
, exp
, args
, idx
);
5383 /* Compute the argument. */
5384 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5386 /* Compute the alignment. */
5387 align
= (fcode
== BUILT_IN_ALLOCA
5389 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
5391 /* Compute the maximum size. */
5392 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5393 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
5396 /* Allocate the desired space. If the allocation stems from the declaration
5397 of a variable-sized object, it cannot accumulate. */
5399 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
5400 result
= convert_memory_address (ptr_mode
, result
);
5405 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5406 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5407 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5408 handle_builtin_stack_restore function. */
5411 expand_asan_emit_allocas_unpoison (tree exp
)
5413 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5414 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5415 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5416 rtx bot
= expand_expr (arg1
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5417 rtx off
= expand_simple_binop (Pmode
, MINUS
, virtual_stack_dynamic_rtx
,
5418 stack_pointer_rtx
, NULL_RTX
, 0,
5420 off
= convert_modes (ptr_mode
, Pmode
, off
, 0);
5421 bot
= expand_simple_binop (ptr_mode
, PLUS
, bot
, off
, NULL_RTX
, 0,
5423 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
5424 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
5425 top
, ptr_mode
, bot
, ptr_mode
);
5429 /* Expand a call to bswap builtin in EXP.
5430 Return NULL_RTX if a normal call should be emitted rather than expanding the
5431 function in-line. If convenient, the result should be placed in TARGET.
5432 SUBTARGET may be used as the target for computing one of EXP's operands. */
5435 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
5441 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5444 arg
= CALL_EXPR_ARG (exp
, 0);
5445 op0
= expand_expr (arg
,
5446 subtarget
&& GET_MODE (subtarget
) == target_mode
5447 ? subtarget
: NULL_RTX
,
5448 target_mode
, EXPAND_NORMAL
);
5449 if (GET_MODE (op0
) != target_mode
)
5450 op0
= convert_to_mode (target_mode
, op0
, 1);
5452 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
5454 gcc_assert (target
);
5456 return convert_to_mode (target_mode
, target
, 1);
5459 /* Expand a call to a unary builtin in EXP.
5460 Return NULL_RTX if a normal call should be emitted rather than expanding the
5461 function in-line. If convenient, the result should be placed in TARGET.
5462 SUBTARGET may be used as the target for computing one of EXP's operands. */
5465 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
5466 rtx subtarget
, optab op_optab
)
5470 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5473 /* Compute the argument. */
5474 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
5476 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
5477 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
5478 VOIDmode
, EXPAND_NORMAL
);
5479 /* Compute op, into TARGET if possible.
5480 Set TARGET to wherever the result comes back. */
5481 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5482 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
5483 gcc_assert (target
);
5485 return convert_to_mode (target_mode
, target
, 0);
5488 /* Expand a call to __builtin_expect. We just return our argument
5489 as the builtin_expect semantic should've been already executed by
5490 tree branch prediction pass. */
5493 expand_builtin_expect (tree exp
, rtx target
)
5497 if (call_expr_nargs (exp
) < 2)
5499 arg
= CALL_EXPR_ARG (exp
, 0);
5501 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5502 /* When guessing was done, the hints should be already stripped away. */
5503 gcc_assert (!flag_guess_branch_prob
5504 || optimize
== 0 || seen_error ());
5508 /* Expand a call to __builtin_expect_with_probability. We just return our
5509 argument as the builtin_expect semantic should've been already executed by
5510 tree branch prediction pass. */
5513 expand_builtin_expect_with_probability (tree exp
, rtx target
)
5517 if (call_expr_nargs (exp
) < 3)
5519 arg
= CALL_EXPR_ARG (exp
, 0);
5521 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5522 /* When guessing was done, the hints should be already stripped away. */
5523 gcc_assert (!flag_guess_branch_prob
5524 || optimize
== 0 || seen_error ());
5529 /* Expand a call to __builtin_assume_aligned. We just return our first
5530 argument as the builtin_assume_aligned semantic should've been already
5534 expand_builtin_assume_aligned (tree exp
, rtx target
)
5536 if (call_expr_nargs (exp
) < 2)
5538 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
5540 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
5541 && (call_expr_nargs (exp
) < 3
5542 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
5547 expand_builtin_trap (void)
5549 if (targetm
.have_trap ())
5551 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
5552 /* For trap insns when not accumulating outgoing args force
5553 REG_ARGS_SIZE note to prevent crossjumping of calls with
5554 different args sizes. */
5555 if (!ACCUMULATE_OUTGOING_ARGS
)
5556 add_args_size_note (insn
, stack_pointer_delta
);
5560 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
5561 tree call_expr
= build_call_expr (fn
, 0);
5562 expand_call (call_expr
, NULL_RTX
, false);
5568 /* Expand a call to __builtin_unreachable. We do nothing except emit
5569 a barrier saying that control flow will not pass here.
5571 It is the responsibility of the program being compiled to ensure
5572 that control flow does never reach __builtin_unreachable. */
5574 expand_builtin_unreachable (void)
5579 /* Expand EXP, a call to fabs, fabsf or fabsl.
5580 Return NULL_RTX if a normal call should be emitted rather than expanding
5581 the function inline. If convenient, the result should be placed
5582 in TARGET. SUBTARGET may be used as the target for computing
5586 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5592 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5595 arg
= CALL_EXPR_ARG (exp
, 0);
5596 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5597 mode
= TYPE_MODE (TREE_TYPE (arg
));
5598 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5599 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5602 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5603 Return NULL is a normal call should be emitted rather than expanding the
5604 function inline. If convenient, the result should be placed in TARGET.
5605 SUBTARGET may be used as the target for computing the operand. */
5608 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5613 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5616 arg
= CALL_EXPR_ARG (exp
, 0);
5617 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5619 arg
= CALL_EXPR_ARG (exp
, 1);
5620 op1
= expand_normal (arg
);
5622 return expand_copysign (op0
, op1
, target
);
5625 /* Expand a call to __builtin___clear_cache. */
5628 expand_builtin___clear_cache (tree exp
)
5630 if (!targetm
.code_for_clear_cache
)
5632 #ifdef CLEAR_INSN_CACHE
5633 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5634 does something. Just do the default expansion to a call to
5638 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5639 does nothing. There is no need to call it. Do nothing. */
5641 #endif /* CLEAR_INSN_CACHE */
5644 /* We have a "clear_cache" insn, and it will handle everything. */
5646 rtx begin_rtx
, end_rtx
;
5648 /* We must not expand to a library call. If we did, any
5649 fallback library function in libgcc that might contain a call to
5650 __builtin___clear_cache() would recurse infinitely. */
5651 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5653 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5657 if (targetm
.have_clear_cache ())
5659 class expand_operand ops
[2];
5661 begin
= CALL_EXPR_ARG (exp
, 0);
5662 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5664 end
= CALL_EXPR_ARG (exp
, 1);
5665 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5667 create_address_operand (&ops
[0], begin_rtx
);
5668 create_address_operand (&ops
[1], end_rtx
);
5669 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
5675 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5678 round_trampoline_addr (rtx tramp
)
5680 rtx temp
, addend
, mask
;
5682 /* If we don't need too much alignment, we'll have been guaranteed
5683 proper alignment by get_trampoline_type. */
5684 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5687 /* Round address up to desired boundary. */
5688 temp
= gen_reg_rtx (Pmode
);
5689 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
5690 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
5692 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5693 temp
, 0, OPTAB_LIB_WIDEN
);
5694 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5695 temp
, 0, OPTAB_LIB_WIDEN
);
5701 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5703 tree t_tramp
, t_func
, t_chain
;
5704 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5706 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5707 POINTER_TYPE
, VOID_TYPE
))
5710 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5711 t_func
= CALL_EXPR_ARG (exp
, 1);
5712 t_chain
= CALL_EXPR_ARG (exp
, 2);
5714 r_tramp
= expand_normal (t_tramp
);
5715 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5716 MEM_NOTRAP_P (m_tramp
) = 1;
5718 /* If ONSTACK, the TRAMP argument should be the address of a field
5719 within the local function's FRAME decl. Either way, let's see if
5720 we can fill in the MEM_ATTRs for this memory. */
5721 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5722 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5724 /* Creator of a heap trampoline is responsible for making sure the
5725 address is aligned to at least STACK_BOUNDARY. Normally malloc
5726 will ensure this anyhow. */
5727 tmp
= round_trampoline_addr (r_tramp
);
5730 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5731 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5732 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5735 /* The FUNC argument should be the address of the nested function.
5736 Extract the actual function decl to pass to the hook. */
5737 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5738 t_func
= TREE_OPERAND (t_func
, 0);
5739 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5741 r_chain
= expand_normal (t_chain
);
5743 /* Generate insns to initialize the trampoline. */
5744 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5748 trampolines_created
= 1;
5750 if (targetm
.calls
.custom_function_descriptors
!= 0)
5751 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5752 "trampoline generated for nested function %qD", t_func
);
5759 expand_builtin_adjust_trampoline (tree exp
)
5763 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5766 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5767 tramp
= round_trampoline_addr (tramp
);
5768 if (targetm
.calls
.trampoline_adjust_address
)
5769 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5774 /* Expand a call to the builtin descriptor initialization routine.
5775 A descriptor is made up of a couple of pointers to the static
5776 chain and the code entry in this order. */
5779 expand_builtin_init_descriptor (tree exp
)
5781 tree t_descr
, t_func
, t_chain
;
5782 rtx m_descr
, r_descr
, r_func
, r_chain
;
5784 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5788 t_descr
= CALL_EXPR_ARG (exp
, 0);
5789 t_func
= CALL_EXPR_ARG (exp
, 1);
5790 t_chain
= CALL_EXPR_ARG (exp
, 2);
5792 r_descr
= expand_normal (t_descr
);
5793 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5794 MEM_NOTRAP_P (m_descr
) = 1;
5795 set_mem_align (m_descr
, GET_MODE_ALIGNMENT (ptr_mode
));
5797 r_func
= expand_normal (t_func
);
5798 r_chain
= expand_normal (t_chain
);
5800 /* Generate insns to initialize the descriptor. */
5801 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5802 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5803 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
5808 /* Expand a call to the builtin descriptor adjustment routine. */
5811 expand_builtin_adjust_descriptor (tree exp
)
5815 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5818 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5820 /* Unalign the descriptor to allow runtime identification. */
5821 tramp
= plus_constant (ptr_mode
, tramp
,
5822 targetm
.calls
.custom_function_descriptors
);
5824 return force_operand (tramp
, NULL_RTX
);
5827 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5828 function. The function first checks whether the back end provides
5829 an insn to implement signbit for the respective mode. If not, it
5830 checks whether the floating point format of the value is such that
5831 the sign bit can be extracted. If that is not the case, error out.
5832 EXP is the expression that is a call to the builtin function; if
5833 convenient, the result should be placed in TARGET. */
5835 expand_builtin_signbit (tree exp
, rtx target
)
5837 const struct real_format
*fmt
;
5838 scalar_float_mode fmode
;
5839 scalar_int_mode rmode
, imode
;
5842 enum insn_code icode
;
5844 location_t loc
= EXPR_LOCATION (exp
);
5846 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5849 arg
= CALL_EXPR_ARG (exp
, 0);
5850 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5851 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5852 fmt
= REAL_MODE_FORMAT (fmode
);
5854 arg
= builtin_save_expr (arg
);
5856 /* Expand the argument yielding a RTX expression. */
5857 temp
= expand_normal (arg
);
5859 /* Check if the back end provides an insn that handles signbit for the
5861 icode
= optab_handler (signbit_optab
, fmode
);
5862 if (icode
!= CODE_FOR_nothing
)
5864 rtx_insn
*last
= get_last_insn ();
5865 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5866 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5868 delete_insns_since (last
);
5871 /* For floating point formats without a sign bit, implement signbit
5873 bitpos
= fmt
->signbit_ro
;
5876 /* But we can't do this if the format supports signed zero. */
5877 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5879 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5880 build_real (TREE_TYPE (arg
), dconst0
));
5881 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5884 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5886 imode
= int_mode_for_mode (fmode
).require ();
5887 temp
= gen_lowpart (imode
, temp
);
5892 /* Handle targets with different FP word orders. */
5893 if (FLOAT_WORDS_BIG_ENDIAN
)
5894 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5896 word
= bitpos
/ BITS_PER_WORD
;
5897 temp
= operand_subword_force (temp
, word
, fmode
);
5898 bitpos
= bitpos
% BITS_PER_WORD
;
5901 /* Force the intermediate word_mode (or narrower) result into a
5902 register. This avoids attempting to create paradoxical SUBREGs
5903 of floating point modes below. */
5904 temp
= force_reg (imode
, temp
);
5906 /* If the bitpos is within the "result mode" lowpart, the operation
5907 can be implement with a single bitwise AND. Otherwise, we need
5908 a right shift and an AND. */
5910 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5912 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5914 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5915 temp
= gen_lowpart (rmode
, temp
);
5916 temp
= expand_binop (rmode
, and_optab
, temp
,
5917 immed_wide_int_const (mask
, rmode
),
5918 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5922 /* Perform a logical right shift to place the signbit in the least
5923 significant bit, then truncate the result to the desired mode
5924 and mask just this bit. */
5925 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5926 temp
= gen_lowpart (rmode
, temp
);
5927 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5928 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5934 /* Expand fork or exec calls. TARGET is the desired target of the
5935 call. EXP is the call. FN is the
5936 identificator of the actual function. IGNORE is nonzero if the
5937 value is to be ignored. */
5940 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5945 /* If we are not profiling, just call the function. */
5946 if (!profile_arc_flag
)
5949 /* Otherwise call the wrapper. This should be equivalent for the rest of
5950 compiler, so the code does not diverge, and the wrapper may run the
5951 code necessary for keeping the profiling sane. */
5953 switch (DECL_FUNCTION_CODE (fn
))
5956 id
= get_identifier ("__gcov_fork");
5959 case BUILT_IN_EXECL
:
5960 id
= get_identifier ("__gcov_execl");
5963 case BUILT_IN_EXECV
:
5964 id
= get_identifier ("__gcov_execv");
5967 case BUILT_IN_EXECLP
:
5968 id
= get_identifier ("__gcov_execlp");
5971 case BUILT_IN_EXECLE
:
5972 id
= get_identifier ("__gcov_execle");
5975 case BUILT_IN_EXECVP
:
5976 id
= get_identifier ("__gcov_execvp");
5979 case BUILT_IN_EXECVE
:
5980 id
= get_identifier ("__gcov_execve");
5987 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5988 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5989 DECL_EXTERNAL (decl
) = 1;
5990 TREE_PUBLIC (decl
) = 1;
5991 DECL_ARTIFICIAL (decl
) = 1;
5992 TREE_NOTHROW (decl
) = 1;
5993 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5994 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5995 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5996 return expand_call (call
, target
, ignore
);
6001 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6002 the pointer in these functions is void*, the tree optimizers may remove
6003 casts. The mode computed in expand_builtin isn't reliable either, due
6004 to __sync_bool_compare_and_swap.
6006 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6007 group of builtins. This gives us log2 of the mode size. */
6009 static inline machine_mode
6010 get_builtin_sync_mode (int fcode_diff
)
6012 /* The size is not negotiable, so ask not to get BLKmode in return
6013 if the target indicates that a smaller size would be better. */
6014 return int_mode_for_size (BITS_PER_UNIT
<< fcode_diff
, 0).require ();
6017 /* Expand the memory expression LOC and return the appropriate memory operand
6018 for the builtin_sync operations. */
6021 get_builtin_sync_mem (tree loc
, machine_mode mode
)
6024 int addr_space
= TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc
))
6025 ? TREE_TYPE (TREE_TYPE (loc
))
6027 scalar_int_mode addr_mode
= targetm
.addr_space
.address_mode (addr_space
);
6029 addr
= expand_expr (loc
, NULL_RTX
, addr_mode
, EXPAND_SUM
);
6030 addr
= convert_memory_address (addr_mode
, addr
);
6032 /* Note that we explicitly do not want any alias information for this
6033 memory, so that we kill all other live memories. Otherwise we don't
6034 satisfy the full barrier semantics of the intrinsic. */
6035 mem
= gen_rtx_MEM (mode
, addr
);
6037 set_mem_addr_space (mem
, addr_space
);
6039 mem
= validize_mem (mem
);
6041 /* The alignment needs to be at least according to that of the mode. */
6042 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
6043 get_pointer_alignment (loc
)));
6044 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
6045 MEM_VOLATILE_P (mem
) = 1;
6050 /* Make sure an argument is in the right mode.
6051 EXP is the tree argument.
6052 MODE is the mode it should be in. */
6055 expand_expr_force_mode (tree exp
, machine_mode mode
)
6058 machine_mode old_mode
;
6060 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
6061 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6062 of CONST_INTs, where we know the old_mode only from the call argument. */
6064 old_mode
= GET_MODE (val
);
6065 if (old_mode
== VOIDmode
)
6066 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
6067 val
= convert_modes (mode
, old_mode
, val
, 1);
6072 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6073 EXP is the CALL_EXPR. CODE is the rtx code
6074 that corresponds to the arithmetic or logical operation from the name;
6075 an exception here is that NOT actually means NAND. TARGET is an optional
6076 place for us to store the results; AFTER is true if this is the
6077 fetch_and_xxx form. */
6080 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
6081 enum rtx_code code
, bool after
,
6085 location_t loc
= EXPR_LOCATION (exp
);
6087 if (code
== NOT
&& warn_sync_nand
)
6089 tree fndecl
= get_callee_fndecl (exp
);
6090 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6092 static bool warned_f_a_n
, warned_n_a_f
;
6096 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6097 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6098 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6099 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6100 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6104 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
6105 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6106 warned_f_a_n
= true;
6109 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6110 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6111 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6112 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6113 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6117 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
6118 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
6119 warned_n_a_f
= true;
6127 /* Expand the operands. */
6128 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6129 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6131 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
6135 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6136 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6137 true if this is the boolean form. TARGET is a place for us to store the
6138 results; this is NOT optional if IS_BOOL is true. */
6141 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
6142 bool is_bool
, rtx target
)
6144 rtx old_val
, new_val
, mem
;
6147 /* Expand the operands. */
6148 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6149 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6150 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6152 pbool
= poval
= NULL
;
6153 if (target
!= const0_rtx
)
6160 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
6161 false, MEMMODEL_SYNC_SEQ_CST
,
6162 MEMMODEL_SYNC_SEQ_CST
))
6168 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6169 general form is actually an atomic exchange, and some targets only
6170 support a reduced form with the second argument being a constant 1.
6171 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6175 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
6180 /* Expand the operands. */
6181 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6182 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6184 return expand_sync_lock_test_and_set (target
, mem
, val
);
6187 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6190 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
6194 /* Expand the operands. */
6195 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6197 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
6200 /* Given an integer representing an ``enum memmodel'', verify its
6201 correctness and return the memory model enum. */
6203 static enum memmodel
6204 get_memmodel (tree exp
)
6207 unsigned HOST_WIDE_INT val
;
6209 = expansion_point_location_if_in_system_header (input_location
);
6211 /* If the parameter is not a constant, it's a run time value so we'll just
6212 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6213 if (TREE_CODE (exp
) != INTEGER_CST
)
6214 return MEMMODEL_SEQ_CST
;
6216 op
= expand_normal (exp
);
6219 if (targetm
.memmodel_check
)
6220 val
= targetm
.memmodel_check (val
);
6221 else if (val
& ~MEMMODEL_MASK
)
6223 warning_at (loc
, OPT_Winvalid_memory_model
,
6224 "unknown architecture specifier in memory model to builtin");
6225 return MEMMODEL_SEQ_CST
;
6228 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6229 if (memmodel_base (val
) >= MEMMODEL_LAST
)
6231 warning_at (loc
, OPT_Winvalid_memory_model
,
6232 "invalid memory model argument to builtin");
6233 return MEMMODEL_SEQ_CST
;
6236 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6237 be conservative and promote consume to acquire. */
6238 if (val
== MEMMODEL_CONSUME
)
6239 val
= MEMMODEL_ACQUIRE
;
6241 return (enum memmodel
) val
;
6244 /* Expand the __atomic_exchange intrinsic:
6245 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6246 EXP is the CALL_EXPR.
6247 TARGET is an optional place for us to store the results. */
6250 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
6253 enum memmodel model
;
6255 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6257 if (!flag_inline_atomics
)
6260 /* Expand the operands. */
6261 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6262 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6264 return expand_atomic_exchange (target
, mem
, val
, model
);
6267 /* Expand the __atomic_compare_exchange intrinsic:
6268 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6269 TYPE desired, BOOL weak,
6270 enum memmodel success,
6271 enum memmodel failure)
6272 EXP is the CALL_EXPR.
6273 TARGET is an optional place for us to store the results. */
6276 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
6279 rtx expect
, desired
, mem
, oldval
;
6280 rtx_code_label
*label
;
6281 enum memmodel success
, failure
;
6285 = expansion_point_location_if_in_system_header (input_location
);
6287 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
6288 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
6290 if (failure
> success
)
6292 warning_at (loc
, OPT_Winvalid_memory_model
,
6293 "failure memory model cannot be stronger than success "
6294 "memory model for %<__atomic_compare_exchange%>");
6295 success
= MEMMODEL_SEQ_CST
;
6298 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6300 warning_at (loc
, OPT_Winvalid_memory_model
,
6301 "invalid failure memory model for "
6302 "%<__atomic_compare_exchange%>");
6303 failure
= MEMMODEL_SEQ_CST
;
6304 success
= MEMMODEL_SEQ_CST
;
6308 if (!flag_inline_atomics
)
6311 /* Expand the operands. */
6312 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6314 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6315 expect
= convert_memory_address (Pmode
, expect
);
6316 expect
= gen_rtx_MEM (mode
, expect
);
6317 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
6319 weak
= CALL_EXPR_ARG (exp
, 3);
6321 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
6324 if (target
== const0_rtx
)
6327 /* Lest the rtl backend create a race condition with an imporoper store
6328 to memory, always create a new pseudo for OLDVAL. */
6331 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
6332 is_weak
, success
, failure
))
6335 /* Conditionally store back to EXPECT, lest we create a race condition
6336 with an improper store to memory. */
6337 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6338 the normal case where EXPECT is totally private, i.e. a register. At
6339 which point the store can be unconditional. */
6340 label
= gen_label_rtx ();
6341 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
6342 GET_MODE (target
), 1, label
);
6343 emit_move_insn (expect
, oldval
);
6349 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6350 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6351 call. The weak parameter must be dropped to match the expected parameter
6352 list and the expected argument changed from value to pointer to memory
6356 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
6359 vec
<tree
, va_gc
> *vec
;
6362 vec
->quick_push (gimple_call_arg (call
, 0));
6363 tree expected
= gimple_call_arg (call
, 1);
6364 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
6365 TREE_TYPE (expected
));
6366 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
6368 emit_move_insn (x
, expd
);
6369 tree v
= make_tree (TREE_TYPE (expected
), x
);
6370 vec
->quick_push (build1 (ADDR_EXPR
,
6371 build_pointer_type (TREE_TYPE (expected
)), v
));
6372 vec
->quick_push (gimple_call_arg (call
, 2));
6373 /* Skip the boolean weak parameter. */
6374 for (z
= 4; z
< 6; z
++)
6375 vec
->quick_push (gimple_call_arg (call
, z
));
6376 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6377 unsigned int bytes_log2
= exact_log2 (GET_MODE_SIZE (mode
).to_constant ());
6378 gcc_assert (bytes_log2
< 5);
6379 built_in_function fncode
6380 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6382 tree fndecl
= builtin_decl_explicit (fncode
);
6383 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
6385 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
6386 tree lhs
= gimple_call_lhs (call
);
6387 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
6390 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6391 if (GET_MODE (boolret
) != mode
)
6392 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6393 x
= force_reg (mode
, x
);
6394 write_complex_part (target
, boolret
, true);
6395 write_complex_part (target
, x
, false);
6399 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6402 expand_ifn_atomic_compare_exchange (gcall
*call
)
6404 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
6405 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6406 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
6407 rtx expect
, desired
, mem
, oldval
, boolret
;
6408 enum memmodel success
, failure
;
6412 = expansion_point_location_if_in_system_header (gimple_location (call
));
6414 success
= get_memmodel (gimple_call_arg (call
, 4));
6415 failure
= get_memmodel (gimple_call_arg (call
, 5));
6417 if (failure
> success
)
6419 warning_at (loc
, OPT_Winvalid_memory_model
,
6420 "failure memory model cannot be stronger than success "
6421 "memory model for %<__atomic_compare_exchange%>");
6422 success
= MEMMODEL_SEQ_CST
;
6425 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6427 warning_at (loc
, OPT_Winvalid_memory_model
,
6428 "invalid failure memory model for "
6429 "%<__atomic_compare_exchange%>");
6430 failure
= MEMMODEL_SEQ_CST
;
6431 success
= MEMMODEL_SEQ_CST
;
6434 if (!flag_inline_atomics
)
6436 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6440 /* Expand the operands. */
6441 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
6443 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
6444 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
6446 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
6451 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
6452 is_weak
, success
, failure
))
6454 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6458 lhs
= gimple_call_lhs (call
);
6461 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6462 if (GET_MODE (boolret
) != mode
)
6463 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6464 write_complex_part (target
, boolret
, true);
6465 write_complex_part (target
, oldval
, false);
6469 /* Expand the __atomic_load intrinsic:
6470 TYPE __atomic_load (TYPE *object, enum memmodel)
6471 EXP is the CALL_EXPR.
6472 TARGET is an optional place for us to store the results. */
6475 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
6478 enum memmodel model
;
6480 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6481 if (is_mm_release (model
) || is_mm_acq_rel (model
))
6484 = expansion_point_location_if_in_system_header (input_location
);
6485 warning_at (loc
, OPT_Winvalid_memory_model
,
6486 "invalid memory model for %<__atomic_load%>");
6487 model
= MEMMODEL_SEQ_CST
;
6490 if (!flag_inline_atomics
)
6493 /* Expand the operand. */
6494 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6496 return expand_atomic_load (target
, mem
, model
);
6500 /* Expand the __atomic_store intrinsic:
6501 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6502 EXP is the CALL_EXPR.
6503 TARGET is an optional place for us to store the results. */
6506 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
6509 enum memmodel model
;
6511 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6512 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
6513 || is_mm_release (model
)))
6516 = expansion_point_location_if_in_system_header (input_location
);
6517 warning_at (loc
, OPT_Winvalid_memory_model
,
6518 "invalid memory model for %<__atomic_store%>");
6519 model
= MEMMODEL_SEQ_CST
;
6522 if (!flag_inline_atomics
)
6525 /* Expand the operands. */
6526 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6527 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6529 return expand_atomic_store (mem
, val
, model
, false);
6532 /* Expand the __atomic_fetch_XXX intrinsic:
6533 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6534 EXP is the CALL_EXPR.
6535 TARGET is an optional place for us to store the results.
6536 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6537 FETCH_AFTER is true if returning the result of the operation.
6538 FETCH_AFTER is false if returning the value before the operation.
6539 IGNORE is true if the result is not used.
6540 EXT_CALL is the correct builtin for an external call if this cannot be
6541 resolved to an instruction sequence. */
6544 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
6545 enum rtx_code code
, bool fetch_after
,
6546 bool ignore
, enum built_in_function ext_call
)
6549 enum memmodel model
;
6553 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6555 /* Expand the operands. */
6556 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6557 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6559 /* Only try generating instructions if inlining is turned on. */
6560 if (flag_inline_atomics
)
6562 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
6567 /* Return if a different routine isn't needed for the library call. */
6568 if (ext_call
== BUILT_IN_NONE
)
6571 /* Change the call to the specified function. */
6572 fndecl
= get_callee_fndecl (exp
);
6573 addr
= CALL_EXPR_FN (exp
);
6576 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
6577 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
6579 /* If we will emit code after the call, the call cannot be a tail call.
6580 If it is emitted as a tail call, a barrier is emitted after it, and
6581 then all trailing code is removed. */
6583 CALL_EXPR_TAILCALL (exp
) = 0;
6585 /* Expand the call here so we can emit trailing code. */
6586 ret
= expand_call (exp
, target
, ignore
);
6588 /* Replace the original function just in case it matters. */
6589 TREE_OPERAND (addr
, 0) = fndecl
;
6591 /* Then issue the arithmetic correction to return the right result. */
6596 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
6598 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
6601 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
6607 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6610 expand_ifn_atomic_bit_test_and (gcall
*call
)
6612 tree ptr
= gimple_call_arg (call
, 0);
6613 tree bit
= gimple_call_arg (call
, 1);
6614 tree flag
= gimple_call_arg (call
, 2);
6615 tree lhs
= gimple_call_lhs (call
);
6616 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6617 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
6620 class expand_operand ops
[5];
6622 gcc_assert (flag_inline_atomics
);
6624 if (gimple_call_num_args (call
) == 4)
6625 model
= get_memmodel (gimple_call_arg (call
, 3));
6627 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6628 rtx val
= expand_expr_force_mode (bit
, mode
);
6630 switch (gimple_call_internal_fn (call
))
6632 case IFN_ATOMIC_BIT_TEST_AND_SET
:
6634 optab
= atomic_bit_test_and_set_optab
;
6636 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
6638 optab
= atomic_bit_test_and_complement_optab
;
6640 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
6642 optab
= atomic_bit_test_and_reset_optab
;
6648 if (lhs
== NULL_TREE
)
6650 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6651 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6653 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6654 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
6658 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6659 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6660 gcc_assert (icode
!= CODE_FOR_nothing
);
6661 create_output_operand (&ops
[0], target
, mode
);
6662 create_fixed_operand (&ops
[1], mem
);
6663 create_convert_operand_to (&ops
[2], val
, mode
, true);
6664 create_integer_operand (&ops
[3], model
);
6665 create_integer_operand (&ops
[4], integer_onep (flag
));
6666 if (maybe_expand_insn (icode
, 5, ops
))
6670 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6671 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6674 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6675 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
6676 code
, model
, false);
6677 if (integer_onep (flag
))
6679 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
6680 NULL_RTX
, true, OPTAB_DIRECT
);
6681 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
6682 true, OPTAB_DIRECT
);
6685 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
6687 if (result
!= target
)
6688 emit_move_insn (target
, result
);
6691 /* Expand an atomic clear operation.
6692 void _atomic_clear (BOOL *obj, enum memmodel)
6693 EXP is the call expression. */
6696 expand_builtin_atomic_clear (tree exp
)
6700 enum memmodel model
;
6702 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6703 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6704 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6706 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
6709 = expansion_point_location_if_in_system_header (input_location
);
6710 warning_at (loc
, OPT_Winvalid_memory_model
,
6711 "invalid memory model for %<__atomic_store%>");
6712 model
= MEMMODEL_SEQ_CST
;
6715 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6716 Failing that, a store is issued by __atomic_store. The only way this can
6717 fail is if the bool type is larger than a word size. Unlikely, but
6718 handle it anyway for completeness. Assume a single threaded model since
6719 there is no atomic support in this case, and no barriers are required. */
6720 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
6722 emit_move_insn (mem
, const0_rtx
);
6726 /* Expand an atomic test_and_set operation.
6727 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6728 EXP is the call expression. */
6731 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
6734 enum memmodel model
;
6737 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6738 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6739 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6741 return expand_atomic_test_and_set (target
, mem
, model
);
6745 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6746 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6749 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
6753 unsigned int mode_align
, type_align
;
6755 if (TREE_CODE (arg0
) != INTEGER_CST
)
6758 /* We need a corresponding integer mode for the access to be lock-free. */
6759 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
6760 if (!int_mode_for_size (size
, 0).exists (&mode
))
6761 return boolean_false_node
;
6763 mode_align
= GET_MODE_ALIGNMENT (mode
);
6765 if (TREE_CODE (arg1
) == INTEGER_CST
)
6767 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
6769 /* Either this argument is null, or it's a fake pointer encoding
6770 the alignment of the object. */
6771 val
= least_bit_hwi (val
);
6772 val
*= BITS_PER_UNIT
;
6774 if (val
== 0 || mode_align
< val
)
6775 type_align
= mode_align
;
6781 tree ttype
= TREE_TYPE (arg1
);
6783 /* This function is usually invoked and folded immediately by the front
6784 end before anything else has a chance to look at it. The pointer
6785 parameter at this point is usually cast to a void *, so check for that
6786 and look past the cast. */
6787 if (CONVERT_EXPR_P (arg1
)
6788 && POINTER_TYPE_P (ttype
)
6789 && VOID_TYPE_P (TREE_TYPE (ttype
))
6790 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
6791 arg1
= TREE_OPERAND (arg1
, 0);
6793 ttype
= TREE_TYPE (arg1
);
6794 gcc_assert (POINTER_TYPE_P (ttype
));
6796 /* Get the underlying type of the object. */
6797 ttype
= TREE_TYPE (ttype
);
6798 type_align
= TYPE_ALIGN (ttype
);
6801 /* If the object has smaller alignment, the lock free routines cannot
6803 if (type_align
< mode_align
)
6804 return boolean_false_node
;
6806 /* Check if a compare_and_swap pattern exists for the mode which represents
6807 the required size. The pattern is not allowed to fail, so the existence
6808 of the pattern indicates support is present. Also require that an
6809 atomic load exists for the required size. */
6810 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
6811 return boolean_true_node
;
6813 return boolean_false_node
;
6816 /* Return true if the parameters to call EXP represent an object which will
6817 always generate lock free instructions. The first argument represents the
6818 size of the object, and the second parameter is a pointer to the object
6819 itself. If NULL is passed for the object, then the result is based on
6820 typical alignment for an object of the specified size. Otherwise return
6824 expand_builtin_atomic_always_lock_free (tree exp
)
6827 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6828 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6830 if (TREE_CODE (arg0
) != INTEGER_CST
)
6832 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6836 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
6837 if (size
== boolean_true_node
)
6842 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6843 is lock free on this architecture. */
6846 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
6848 if (!flag_inline_atomics
)
6851 /* If it isn't always lock free, don't generate a result. */
6852 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
6853 return boolean_true_node
;
6858 /* Return true if the parameters to call EXP represent an object which will
6859 always generate lock free instructions. The first argument represents the
6860 size of the object, and the second parameter is a pointer to the object
6861 itself. If NULL is passed for the object, then the result is based on
6862 typical alignment for an object of the specified size. Otherwise return
6866 expand_builtin_atomic_is_lock_free (tree exp
)
6869 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6870 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6872 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
6874 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6878 if (!flag_inline_atomics
)
6881 /* If the value is known at compile time, return the RTX for it. */
6882 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
6883 if (size
== boolean_true_node
)
6889 /* Expand the __atomic_thread_fence intrinsic:
6890 void __atomic_thread_fence (enum memmodel)
6891 EXP is the CALL_EXPR. */
6894 expand_builtin_atomic_thread_fence (tree exp
)
6896 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6897 expand_mem_thread_fence (model
);
6900 /* Expand the __atomic_signal_fence intrinsic:
6901 void __atomic_signal_fence (enum memmodel)
6902 EXP is the CALL_EXPR. */
6905 expand_builtin_atomic_signal_fence (tree exp
)
6907 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6908 expand_mem_signal_fence (model
);
6911 /* Expand the __sync_synchronize intrinsic. */
6914 expand_builtin_sync_synchronize (void)
6916 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
6920 expand_builtin_thread_pointer (tree exp
, rtx target
)
6922 enum insn_code icode
;
6923 if (!validate_arglist (exp
, VOID_TYPE
))
6925 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
6926 if (icode
!= CODE_FOR_nothing
)
6928 class expand_operand op
;
6929 /* If the target is not sutitable then create a new target. */
6930 if (target
== NULL_RTX
6932 || GET_MODE (target
) != Pmode
)
6933 target
= gen_reg_rtx (Pmode
);
6934 create_output_operand (&op
, target
, Pmode
);
6935 expand_insn (icode
, 1, &op
);
6938 error ("%<__builtin_thread_pointer%> is not supported on this target");
6943 expand_builtin_set_thread_pointer (tree exp
)
6945 enum insn_code icode
;
6946 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6948 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
6949 if (icode
!= CODE_FOR_nothing
)
6951 class expand_operand op
;
6952 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
6953 Pmode
, EXPAND_NORMAL
);
6954 create_input_operand (&op
, val
, Pmode
);
6955 expand_insn (icode
, 1, &op
);
6958 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6962 /* Emit code to restore the current value of stack. */
6965 expand_stack_restore (tree var
)
6968 rtx sa
= expand_normal (var
);
6970 sa
= convert_memory_address (Pmode
, sa
);
6972 prev
= get_last_insn ();
6973 emit_stack_restore (SAVE_BLOCK
, sa
);
6975 record_new_stack_level ();
6977 fixup_args_size_notes (prev
, get_last_insn (), 0);
6980 /* Emit code to save the current value of stack. */
6983 expand_stack_save (void)
6987 emit_stack_save (SAVE_BLOCK
, &ret
);
6991 /* Emit code to get the openacc gang, worker or vector id or size. */
6994 expand_builtin_goacc_parlevel_id_size (tree exp
, rtx target
, int ignore
)
6997 rtx fallback_retval
;
6998 rtx_insn
*(*gen_fn
) (rtx
, rtx
);
6999 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp
)))
7001 case BUILT_IN_GOACC_PARLEVEL_ID
:
7002 name
= "__builtin_goacc_parlevel_id";
7003 fallback_retval
= const0_rtx
;
7004 gen_fn
= targetm
.gen_oacc_dim_pos
;
7006 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
7007 name
= "__builtin_goacc_parlevel_size";
7008 fallback_retval
= const1_rtx
;
7009 gen_fn
= targetm
.gen_oacc_dim_size
;
7015 if (oacc_get_fn_attrib (current_function_decl
) == NULL_TREE
)
7017 error ("%qs only supported in OpenACC code", name
);
7021 tree arg
= CALL_EXPR_ARG (exp
, 0);
7022 if (TREE_CODE (arg
) != INTEGER_CST
)
7024 error ("non-constant argument 0 to %qs", name
);
7028 int dim
= TREE_INT_CST_LOW (arg
);
7032 case GOMP_DIM_WORKER
:
7033 case GOMP_DIM_VECTOR
:
7036 error ("illegal argument 0 to %qs", name
);
7043 if (target
== NULL_RTX
)
7044 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
7046 if (!targetm
.have_oacc_dim_size ())
7048 emit_move_insn (target
, fallback_retval
);
7052 rtx reg
= MEM_P (target
) ? gen_reg_rtx (GET_MODE (target
)) : target
;
7053 emit_insn (gen_fn (reg
, GEN_INT (dim
)));
7055 emit_move_insn (target
, reg
);
7060 /* Expand a string compare operation using a sequence of char comparison
7061 to get rid of the calling overhead, with result going to TARGET if
7064 VAR_STR is the variable string source;
7065 CONST_STR is the constant string source;
7066 LENGTH is the number of chars to compare;
7067 CONST_STR_N indicates which source string is the constant string;
7068 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7070 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7072 target = (int) (unsigned char) var_str[0]
7073 - (int) (unsigned char) const_str[0];
7077 target = (int) (unsigned char) var_str[length - 2]
7078 - (int) (unsigned char) const_str[length - 2];
7081 target = (int) (unsigned char) var_str[length - 1]
7082 - (int) (unsigned char) const_str[length - 1];
7087 inline_string_cmp (rtx target
, tree var_str
, const char *const_str
,
7088 unsigned HOST_WIDE_INT length
,
7089 int const_str_n
, machine_mode mode
)
7091 HOST_WIDE_INT offset
= 0;
7093 = get_memory_rtx (var_str
, build_int_cst (unsigned_type_node
,length
));
7094 rtx var_rtx
= NULL_RTX
;
7095 rtx const_rtx
= NULL_RTX
;
7096 rtx result
= target
? target
: gen_reg_rtx (mode
);
7097 rtx_code_label
*ne_label
= gen_label_rtx ();
7098 tree unit_type_node
= unsigned_char_type_node
;
7099 scalar_int_mode unit_mode
7100 = as_a
<scalar_int_mode
> TYPE_MODE (unit_type_node
);
7104 for (unsigned HOST_WIDE_INT i
= 0; i
< length
; i
++)
7107 = adjust_address (var_rtx_array
, TYPE_MODE (unit_type_node
), offset
);
7108 const_rtx
= c_readstr (const_str
+ offset
, unit_mode
);
7109 rtx op0
= (const_str_n
== 1) ? const_rtx
: var_rtx
;
7110 rtx op1
= (const_str_n
== 1) ? var_rtx
: const_rtx
;
7112 op0
= convert_modes (mode
, unit_mode
, op0
, 1);
7113 op1
= convert_modes (mode
, unit_mode
, op1
, 1);
7114 result
= expand_simple_binop (mode
, MINUS
, op0
, op1
,
7115 result
, 1, OPTAB_WIDEN
);
7117 emit_cmp_and_jump_insns (result
, CONST0_RTX (mode
), NE
, NULL_RTX
,
7118 mode
, true, ne_label
);
7119 offset
+= GET_MODE_SIZE (unit_mode
);
7122 emit_label (ne_label
);
7123 rtx_insn
*insns
= get_insns ();
7130 /* Inline expansion a call to str(n)cmp, with result going to
7131 TARGET if that's convenient.
7132 If the call is not been inlined, return NULL_RTX. */
7134 inline_expand_builtin_string_cmp (tree exp
, rtx target
)
7136 tree fndecl
= get_callee_fndecl (exp
);
7137 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7138 unsigned HOST_WIDE_INT length
= 0;
7139 bool is_ncmp
= (fcode
== BUILT_IN_STRNCMP
|| fcode
== BUILT_IN_MEMCMP
);
7141 /* Do NOT apply this inlining expansion when optimizing for size or
7142 optimization level below 2. */
7143 if (optimize
< 2 || optimize_insn_for_size_p ())
7146 gcc_checking_assert (fcode
== BUILT_IN_STRCMP
7147 || fcode
== BUILT_IN_STRNCMP
7148 || fcode
== BUILT_IN_MEMCMP
);
7150 /* On a target where the type of the call (int) has same or narrower presicion
7151 than unsigned char, give up the inlining expansion. */
7152 if (TYPE_PRECISION (unsigned_char_type_node
)
7153 >= TYPE_PRECISION (TREE_TYPE (exp
)))
7156 tree arg1
= CALL_EXPR_ARG (exp
, 0);
7157 tree arg2
= CALL_EXPR_ARG (exp
, 1);
7158 tree len3_tree
= is_ncmp
? CALL_EXPR_ARG (exp
, 2) : NULL_TREE
;
7160 unsigned HOST_WIDE_INT len1
= 0;
7161 unsigned HOST_WIDE_INT len2
= 0;
7162 unsigned HOST_WIDE_INT len3
= 0;
7164 const char *src_str1
= c_getstr (arg1
, &len1
);
7165 const char *src_str2
= c_getstr (arg2
, &len2
);
7167 /* If neither strings is constant string, the call is not qualify. */
7168 if (!src_str1
&& !src_str2
)
7171 /* For strncmp, if the length is not a const, not qualify. */
7174 if (!tree_fits_uhwi_p (len3_tree
))
7177 len3
= tree_to_uhwi (len3_tree
);
7180 if (src_str1
!= NULL
)
7181 len1
= strnlen (src_str1
, len1
) + 1;
7183 if (src_str2
!= NULL
)
7184 len2
= strnlen (src_str2
, len2
) + 1;
7186 int const_str_n
= 0;
7191 else if (len2
> len1
)
7196 gcc_checking_assert (const_str_n
> 0);
7197 length
= (const_str_n
== 1) ? len1
: len2
;
7199 if (is_ncmp
&& len3
< length
)
7202 /* If the length of the comparision is larger than the threshold,
7204 if (length
> (unsigned HOST_WIDE_INT
)
7205 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH
))
7208 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
7210 /* Now, start inline expansion the call. */
7211 return inline_string_cmp (target
, (const_str_n
== 1) ? arg2
: arg1
,
7212 (const_str_n
== 1) ? src_str1
: src_str2
, length
,
7216 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7217 represents the size of the first argument to that call, or VOIDmode
7218 if the argument is a pointer. IGNORE will be true if the result
7221 expand_speculation_safe_value (machine_mode mode
, tree exp
, rtx target
,
7225 unsigned nargs
= call_expr_nargs (exp
);
7227 tree arg0
= CALL_EXPR_ARG (exp
, 0);
7229 if (mode
== VOIDmode
)
7231 mode
= TYPE_MODE (TREE_TYPE (arg0
));
7232 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
7235 val
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7237 /* An optional second argument can be used as a failsafe value on
7238 some machines. If it isn't present, then the failsafe value is
7242 tree arg1
= CALL_EXPR_ARG (exp
, 1);
7243 failsafe
= expand_expr (arg1
, NULL_RTX
, mode
, EXPAND_NORMAL
);
7246 failsafe
= const0_rtx
;
7248 /* If the result isn't used, the behavior is undefined. It would be
7249 nice to emit a warning here, but path splitting means this might
7250 happen with legitimate code. So simply drop the builtin
7251 expansion in that case; we've handled any side-effects above. */
7255 /* If we don't have a suitable target, create one to hold the result. */
7256 if (target
== NULL
|| GET_MODE (target
) != mode
)
7257 target
= gen_reg_rtx (mode
);
7259 if (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
)
7260 val
= convert_modes (mode
, VOIDmode
, val
, false);
7262 return targetm
.speculation_safe_value (mode
, target
, val
, failsafe
);
7265 /* Expand an expression EXP that calls a built-in function,
7266 with result going to TARGET if that's convenient
7267 (and in mode MODE if that's convenient).
7268 SUBTARGET may be used as the target for computing one of EXP's operands.
7269 IGNORE is nonzero if the value is to be ignored. */
7272 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
7275 tree fndecl
= get_callee_fndecl (exp
);
7276 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
7279 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7280 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7282 /* When ASan is enabled, we don't want to expand some memory/string
7283 builtins and rely on libsanitizer's hooks. This allows us to avoid
7284 redundant checks and be sure, that possible overflow will be detected
7287 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7288 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
7289 return expand_call (exp
, target
, ignore
);
7291 /* When not optimizing, generate calls to library functions for a certain
7294 && !called_as_built_in (fndecl
)
7295 && fcode
!= BUILT_IN_FORK
7296 && fcode
!= BUILT_IN_EXECL
7297 && fcode
!= BUILT_IN_EXECV
7298 && fcode
!= BUILT_IN_EXECLP
7299 && fcode
!= BUILT_IN_EXECLE
7300 && fcode
!= BUILT_IN_EXECVP
7301 && fcode
!= BUILT_IN_EXECVE
7302 && !ALLOCA_FUNCTION_CODE_P (fcode
)
7303 && fcode
!= BUILT_IN_FREE
)
7304 return expand_call (exp
, target
, ignore
);
7306 /* The built-in function expanders test for target == const0_rtx
7307 to determine whether the function's result will be ignored. */
7309 target
= const0_rtx
;
7311 /* If the result of a pure or const built-in function is ignored, and
7312 none of its arguments are volatile, we can avoid expanding the
7313 built-in call and just evaluate the arguments for side-effects. */
7314 if (target
== const0_rtx
7315 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
7316 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
7318 bool volatilep
= false;
7320 call_expr_arg_iterator iter
;
7322 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7323 if (TREE_THIS_VOLATILE (arg
))
7331 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
7332 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7339 CASE_FLT_FN (BUILT_IN_FABS
):
7340 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
7341 case BUILT_IN_FABSD32
:
7342 case BUILT_IN_FABSD64
:
7343 case BUILT_IN_FABSD128
:
7344 target
= expand_builtin_fabs (exp
, target
, subtarget
);
7349 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
7350 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
7351 target
= expand_builtin_copysign (exp
, target
, subtarget
);
7356 /* Just do a normal library call if we were unable to fold
7358 CASE_FLT_FN (BUILT_IN_CABS
):
7361 CASE_FLT_FN (BUILT_IN_FMA
):
7362 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
7363 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
7368 CASE_FLT_FN (BUILT_IN_ILOGB
):
7369 if (! flag_unsafe_math_optimizations
)
7372 CASE_FLT_FN (BUILT_IN_ISINF
):
7373 CASE_FLT_FN (BUILT_IN_FINITE
):
7374 case BUILT_IN_ISFINITE
:
7375 case BUILT_IN_ISNORMAL
:
7376 target
= expand_builtin_interclass_mathfn (exp
, target
);
7381 CASE_FLT_FN (BUILT_IN_ICEIL
):
7382 CASE_FLT_FN (BUILT_IN_LCEIL
):
7383 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7384 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7385 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7386 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7387 target
= expand_builtin_int_roundingfn (exp
, target
);
7392 CASE_FLT_FN (BUILT_IN_IRINT
):
7393 CASE_FLT_FN (BUILT_IN_LRINT
):
7394 CASE_FLT_FN (BUILT_IN_LLRINT
):
7395 CASE_FLT_FN (BUILT_IN_IROUND
):
7396 CASE_FLT_FN (BUILT_IN_LROUND
):
7397 CASE_FLT_FN (BUILT_IN_LLROUND
):
7398 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
7403 CASE_FLT_FN (BUILT_IN_POWI
):
7404 target
= expand_builtin_powi (exp
, target
);
7409 CASE_FLT_FN (BUILT_IN_CEXPI
):
7410 target
= expand_builtin_cexpi (exp
, target
);
7411 gcc_assert (target
);
7414 CASE_FLT_FN (BUILT_IN_SIN
):
7415 CASE_FLT_FN (BUILT_IN_COS
):
7416 if (! flag_unsafe_math_optimizations
)
7418 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
7423 CASE_FLT_FN (BUILT_IN_SINCOS
):
7424 if (! flag_unsafe_math_optimizations
)
7426 target
= expand_builtin_sincos (exp
);
7431 case BUILT_IN_APPLY_ARGS
:
7432 return expand_builtin_apply_args ();
7434 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7435 FUNCTION with a copy of the parameters described by
7436 ARGUMENTS, and ARGSIZE. It returns a block of memory
7437 allocated on the stack into which is stored all the registers
7438 that might possibly be used for returning the result of a
7439 function. ARGUMENTS is the value returned by
7440 __builtin_apply_args. ARGSIZE is the number of bytes of
7441 arguments that must be copied. ??? How should this value be
7442 computed? We'll also need a safe worst case value for varargs
7444 case BUILT_IN_APPLY
:
7445 if (!validate_arglist (exp
, POINTER_TYPE
,
7446 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
7447 && !validate_arglist (exp
, REFERENCE_TYPE
,
7448 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7454 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
7455 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
7456 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
7458 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
7461 /* __builtin_return (RESULT) causes the function to return the
7462 value described by RESULT. RESULT is address of the block of
7463 memory returned by __builtin_apply. */
7464 case BUILT_IN_RETURN
:
7465 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7466 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
7469 case BUILT_IN_SAVEREGS
:
7470 return expand_builtin_saveregs ();
7472 case BUILT_IN_VA_ARG_PACK
:
7473 /* All valid uses of __builtin_va_arg_pack () are removed during
7475 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
7478 case BUILT_IN_VA_ARG_PACK_LEN
:
7479 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7481 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
7484 /* Return the address of the first anonymous stack arg. */
7485 case BUILT_IN_NEXT_ARG
:
7486 if (fold_builtin_next_arg (exp
, false))
7488 return expand_builtin_next_arg ();
7490 case BUILT_IN_CLEAR_CACHE
:
7491 target
= expand_builtin___clear_cache (exp
);
7496 case BUILT_IN_CLASSIFY_TYPE
:
7497 return expand_builtin_classify_type (exp
);
7499 case BUILT_IN_CONSTANT_P
:
7502 case BUILT_IN_FRAME_ADDRESS
:
7503 case BUILT_IN_RETURN_ADDRESS
:
7504 return expand_builtin_frame_address (fndecl
, exp
);
7506 /* Returns the address of the area where the structure is returned.
7508 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
7509 if (call_expr_nargs (exp
) != 0
7510 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
7511 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
7514 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
7516 CASE_BUILT_IN_ALLOCA
:
7517 target
= expand_builtin_alloca (exp
);
7522 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
7523 return expand_asan_emit_allocas_unpoison (exp
);
7525 case BUILT_IN_STACK_SAVE
:
7526 return expand_stack_save ();
7528 case BUILT_IN_STACK_RESTORE
:
7529 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
7532 case BUILT_IN_BSWAP16
:
7533 case BUILT_IN_BSWAP32
:
7534 case BUILT_IN_BSWAP64
:
7535 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
7540 CASE_INT_FN (BUILT_IN_FFS
):
7541 target
= expand_builtin_unop (target_mode
, exp
, target
,
7542 subtarget
, ffs_optab
);
7547 CASE_INT_FN (BUILT_IN_CLZ
):
7548 target
= expand_builtin_unop (target_mode
, exp
, target
,
7549 subtarget
, clz_optab
);
7554 CASE_INT_FN (BUILT_IN_CTZ
):
7555 target
= expand_builtin_unop (target_mode
, exp
, target
,
7556 subtarget
, ctz_optab
);
7561 CASE_INT_FN (BUILT_IN_CLRSB
):
7562 target
= expand_builtin_unop (target_mode
, exp
, target
,
7563 subtarget
, clrsb_optab
);
7568 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7569 target
= expand_builtin_unop (target_mode
, exp
, target
,
7570 subtarget
, popcount_optab
);
7575 CASE_INT_FN (BUILT_IN_PARITY
):
7576 target
= expand_builtin_unop (target_mode
, exp
, target
,
7577 subtarget
, parity_optab
);
7582 case BUILT_IN_STRLEN
:
7583 target
= expand_builtin_strlen (exp
, target
, target_mode
);
7588 case BUILT_IN_STRNLEN
:
7589 target
= expand_builtin_strnlen (exp
, target
, target_mode
);
7594 case BUILT_IN_STRCAT
:
7595 target
= expand_builtin_strcat (exp
, target
);
7600 case BUILT_IN_STRCPY
:
7601 target
= expand_builtin_strcpy (exp
, target
);
7606 case BUILT_IN_STRNCAT
:
7607 target
= expand_builtin_strncat (exp
, target
);
7612 case BUILT_IN_STRNCPY
:
7613 target
= expand_builtin_strncpy (exp
, target
);
7618 case BUILT_IN_STPCPY
:
7619 target
= expand_builtin_stpcpy (exp
, target
, mode
);
7624 case BUILT_IN_STPNCPY
:
7625 target
= expand_builtin_stpncpy (exp
, target
);
7630 case BUILT_IN_MEMCHR
:
7631 target
= expand_builtin_memchr (exp
, target
);
7636 case BUILT_IN_MEMCPY
:
7637 target
= expand_builtin_memcpy (exp
, target
);
7642 case BUILT_IN_MEMMOVE
:
7643 target
= expand_builtin_memmove (exp
, target
);
7648 case BUILT_IN_MEMPCPY
:
7649 target
= expand_builtin_mempcpy (exp
, target
);
7654 case BUILT_IN_MEMSET
:
7655 target
= expand_builtin_memset (exp
, target
, mode
);
7660 case BUILT_IN_BZERO
:
7661 target
= expand_builtin_bzero (exp
);
7666 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7667 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7668 when changing it to a strcmp call. */
7669 case BUILT_IN_STRCMP_EQ
:
7670 target
= expand_builtin_memcmp (exp
, target
, true);
7674 /* Change this call back to a BUILT_IN_STRCMP. */
7675 TREE_OPERAND (exp
, 1)
7676 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP
));
7678 /* Delete the last parameter. */
7680 vec
<tree
, va_gc
> *arg_vec
;
7681 vec_alloc (arg_vec
, 2);
7682 for (i
= 0; i
< 2; i
++)
7683 arg_vec
->quick_push (CALL_EXPR_ARG (exp
, i
));
7684 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), arg_vec
);
7687 case BUILT_IN_STRCMP
:
7688 target
= expand_builtin_strcmp (exp
, target
);
7693 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7694 back to a BUILT_IN_STRNCMP. */
7695 case BUILT_IN_STRNCMP_EQ
:
7696 target
= expand_builtin_memcmp (exp
, target
, true);
7700 /* Change it back to a BUILT_IN_STRNCMP. */
7701 TREE_OPERAND (exp
, 1)
7702 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP
));
7705 case BUILT_IN_STRNCMP
:
7706 target
= expand_builtin_strncmp (exp
, target
, mode
);
7712 case BUILT_IN_MEMCMP
:
7713 case BUILT_IN_MEMCMP_EQ
:
7714 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
7717 if (fcode
== BUILT_IN_MEMCMP_EQ
)
7719 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
7720 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
7724 case BUILT_IN_SETJMP
:
7725 /* This should have been lowered to the builtins below. */
7728 case BUILT_IN_SETJMP_SETUP
:
7729 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7730 and the receiver label. */
7731 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
7733 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7734 VOIDmode
, EXPAND_NORMAL
);
7735 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
7736 rtx_insn
*label_r
= label_rtx (label
);
7738 /* This is copied from the handling of non-local gotos. */
7739 expand_builtin_setjmp_setup (buf_addr
, label_r
);
7740 nonlocal_goto_handler_labels
7741 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
7742 nonlocal_goto_handler_labels
);
7743 /* ??? Do not let expand_label treat us as such since we would
7744 not want to be both on the list of non-local labels and on
7745 the list of forced labels. */
7746 FORCED_LABEL (label
) = 0;
7751 case BUILT_IN_SETJMP_RECEIVER
:
7752 /* __builtin_setjmp_receiver is passed the receiver label. */
7753 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7755 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
7756 rtx_insn
*label_r
= label_rtx (label
);
7758 expand_builtin_setjmp_receiver (label_r
);
7763 /* __builtin_longjmp is passed a pointer to an array of five words.
7764 It's similar to the C library longjmp function but works with
7765 __builtin_setjmp above. */
7766 case BUILT_IN_LONGJMP
:
7767 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7769 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7770 VOIDmode
, EXPAND_NORMAL
);
7771 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
7773 if (value
!= const1_rtx
)
7775 error ("%<__builtin_longjmp%> second argument must be 1");
7779 expand_builtin_longjmp (buf_addr
, value
);
7784 case BUILT_IN_NONLOCAL_GOTO
:
7785 target
= expand_builtin_nonlocal_goto (exp
);
7790 /* This updates the setjmp buffer that is its argument with the value
7791 of the current stack pointer. */
7792 case BUILT_IN_UPDATE_SETJMP_BUF
:
7793 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7796 = expand_normal (CALL_EXPR_ARG (exp
, 0));
7798 expand_builtin_update_setjmp_buf (buf_addr
);
7804 expand_builtin_trap ();
7807 case BUILT_IN_UNREACHABLE
:
7808 expand_builtin_unreachable ();
7811 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
7812 case BUILT_IN_SIGNBITD32
:
7813 case BUILT_IN_SIGNBITD64
:
7814 case BUILT_IN_SIGNBITD128
:
7815 target
= expand_builtin_signbit (exp
, target
);
7820 /* Various hooks for the DWARF 2 __throw routine. */
7821 case BUILT_IN_UNWIND_INIT
:
7822 expand_builtin_unwind_init ();
7824 case BUILT_IN_DWARF_CFA
:
7825 return virtual_cfa_rtx
;
7826 #ifdef DWARF2_UNWIND_INFO
7827 case BUILT_IN_DWARF_SP_COLUMN
:
7828 return expand_builtin_dwarf_sp_column ();
7829 case BUILT_IN_INIT_DWARF_REG_SIZES
:
7830 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
7833 case BUILT_IN_FROB_RETURN_ADDR
:
7834 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
7835 case BUILT_IN_EXTRACT_RETURN_ADDR
:
7836 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
7837 case BUILT_IN_EH_RETURN
:
7838 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
7839 CALL_EXPR_ARG (exp
, 1));
7841 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7842 return expand_builtin_eh_return_data_regno (exp
);
7843 case BUILT_IN_EXTEND_POINTER
:
7844 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7845 case BUILT_IN_EH_POINTER
:
7846 return expand_builtin_eh_pointer (exp
);
7847 case BUILT_IN_EH_FILTER
:
7848 return expand_builtin_eh_filter (exp
);
7849 case BUILT_IN_EH_COPY_VALUES
:
7850 return expand_builtin_eh_copy_values (exp
);
7852 case BUILT_IN_VA_START
:
7853 return expand_builtin_va_start (exp
);
7854 case BUILT_IN_VA_END
:
7855 return expand_builtin_va_end (exp
);
7856 case BUILT_IN_VA_COPY
:
7857 return expand_builtin_va_copy (exp
);
7858 case BUILT_IN_EXPECT
:
7859 return expand_builtin_expect (exp
, target
);
7860 case BUILT_IN_EXPECT_WITH_PROBABILITY
:
7861 return expand_builtin_expect_with_probability (exp
, target
);
7862 case BUILT_IN_ASSUME_ALIGNED
:
7863 return expand_builtin_assume_aligned (exp
, target
);
7864 case BUILT_IN_PREFETCH
:
7865 expand_builtin_prefetch (exp
);
7868 case BUILT_IN_INIT_TRAMPOLINE
:
7869 return expand_builtin_init_trampoline (exp
, true);
7870 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
7871 return expand_builtin_init_trampoline (exp
, false);
7872 case BUILT_IN_ADJUST_TRAMPOLINE
:
7873 return expand_builtin_adjust_trampoline (exp
);
7875 case BUILT_IN_INIT_DESCRIPTOR
:
7876 return expand_builtin_init_descriptor (exp
);
7877 case BUILT_IN_ADJUST_DESCRIPTOR
:
7878 return expand_builtin_adjust_descriptor (exp
);
7881 case BUILT_IN_EXECL
:
7882 case BUILT_IN_EXECV
:
7883 case BUILT_IN_EXECLP
:
7884 case BUILT_IN_EXECLE
:
7885 case BUILT_IN_EXECVP
:
7886 case BUILT_IN_EXECVE
:
7887 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
7892 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
7893 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
7894 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
7895 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
7896 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
7897 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
7898 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
7903 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
7904 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
7905 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
7906 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
7907 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
7908 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
7909 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
7914 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
7915 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
7916 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
7917 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
7918 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
7919 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
7920 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
7925 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
7926 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
7927 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
7928 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
7929 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
7930 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
7931 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
7936 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
7937 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
7938 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
7939 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
7940 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
7941 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
7942 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
7947 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
7948 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
7949 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
7950 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
7951 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
7952 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
7953 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
7958 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
7959 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
7960 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
7961 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
7962 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
7963 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
7964 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
7969 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
7970 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
7971 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
7972 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
7973 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
7974 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
7975 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
7980 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
7981 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
7982 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
7983 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
7984 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
7985 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
7986 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
7991 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
7992 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
7993 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
7994 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
7995 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
7996 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
7997 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
8002 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
8003 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
8004 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
8005 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
8006 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
8007 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
8008 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
8013 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
8014 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
8015 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
8016 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
8017 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
8018 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
8019 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
8024 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
8025 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
8026 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
8027 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
8028 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
8029 if (mode
== VOIDmode
)
8030 mode
= TYPE_MODE (boolean_type_node
);
8031 if (!target
|| !register_operand (target
, mode
))
8032 target
= gen_reg_rtx (mode
);
8034 mode
= get_builtin_sync_mode
8035 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
8036 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
8041 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
8042 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
8043 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
8044 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
8045 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
8046 mode
= get_builtin_sync_mode
8047 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
8048 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
8053 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
8054 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
8055 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
8056 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
8057 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
8058 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
8059 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
8064 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
8065 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
8066 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
8067 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
8068 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
8069 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
8070 expand_builtin_sync_lock_release (mode
, exp
);
8073 case BUILT_IN_SYNC_SYNCHRONIZE
:
8074 expand_builtin_sync_synchronize ();
8077 case BUILT_IN_ATOMIC_EXCHANGE_1
:
8078 case BUILT_IN_ATOMIC_EXCHANGE_2
:
8079 case BUILT_IN_ATOMIC_EXCHANGE_4
:
8080 case BUILT_IN_ATOMIC_EXCHANGE_8
:
8081 case BUILT_IN_ATOMIC_EXCHANGE_16
:
8082 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
8083 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
8088 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
8089 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
8090 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
8091 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
8092 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
8094 unsigned int nargs
, z
;
8095 vec
<tree
, va_gc
> *vec
;
8098 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
8099 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
8103 /* If this is turned into an external library call, the weak parameter
8104 must be dropped to match the expected parameter list. */
8105 nargs
= call_expr_nargs (exp
);
8106 vec_alloc (vec
, nargs
- 1);
8107 for (z
= 0; z
< 3; z
++)
8108 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
8109 /* Skip the boolean weak parameter. */
8110 for (z
= 4; z
< 6; z
++)
8111 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
8112 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
8116 case BUILT_IN_ATOMIC_LOAD_1
:
8117 case BUILT_IN_ATOMIC_LOAD_2
:
8118 case BUILT_IN_ATOMIC_LOAD_4
:
8119 case BUILT_IN_ATOMIC_LOAD_8
:
8120 case BUILT_IN_ATOMIC_LOAD_16
:
8121 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
8122 target
= expand_builtin_atomic_load (mode
, exp
, target
);
8127 case BUILT_IN_ATOMIC_STORE_1
:
8128 case BUILT_IN_ATOMIC_STORE_2
:
8129 case BUILT_IN_ATOMIC_STORE_4
:
8130 case BUILT_IN_ATOMIC_STORE_8
:
8131 case BUILT_IN_ATOMIC_STORE_16
:
8132 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
8133 target
= expand_builtin_atomic_store (mode
, exp
);
8138 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
8139 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
8140 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
8141 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
8142 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
8144 enum built_in_function lib
;
8145 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
8146 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
8147 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
8148 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
8154 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
8155 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
8156 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
8157 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
8158 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
8160 enum built_in_function lib
;
8161 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
8162 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
8163 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
8164 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
8170 case BUILT_IN_ATOMIC_AND_FETCH_1
:
8171 case BUILT_IN_ATOMIC_AND_FETCH_2
:
8172 case BUILT_IN_ATOMIC_AND_FETCH_4
:
8173 case BUILT_IN_ATOMIC_AND_FETCH_8
:
8174 case BUILT_IN_ATOMIC_AND_FETCH_16
:
8176 enum built_in_function lib
;
8177 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
8178 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
8179 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
8180 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
8186 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
8187 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
8188 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
8189 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
8190 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
8192 enum built_in_function lib
;
8193 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
8194 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
8195 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
8196 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
8202 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
8203 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
8204 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
8205 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
8206 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
8208 enum built_in_function lib
;
8209 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
8210 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
8211 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
8212 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
8218 case BUILT_IN_ATOMIC_OR_FETCH_1
:
8219 case BUILT_IN_ATOMIC_OR_FETCH_2
:
8220 case BUILT_IN_ATOMIC_OR_FETCH_4
:
8221 case BUILT_IN_ATOMIC_OR_FETCH_8
:
8222 case BUILT_IN_ATOMIC_OR_FETCH_16
:
8224 enum built_in_function lib
;
8225 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
8226 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
8227 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
8228 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
8234 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
8235 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
8236 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
8237 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
8238 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
8239 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
8240 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
8241 ignore
, BUILT_IN_NONE
);
8246 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
8247 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
8248 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
8249 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
8250 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
8251 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
8252 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
8253 ignore
, BUILT_IN_NONE
);
8258 case BUILT_IN_ATOMIC_FETCH_AND_1
:
8259 case BUILT_IN_ATOMIC_FETCH_AND_2
:
8260 case BUILT_IN_ATOMIC_FETCH_AND_4
:
8261 case BUILT_IN_ATOMIC_FETCH_AND_8
:
8262 case BUILT_IN_ATOMIC_FETCH_AND_16
:
8263 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
8264 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
8265 ignore
, BUILT_IN_NONE
);
8270 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
8271 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
8272 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
8273 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
8274 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
8275 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
8276 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
8277 ignore
, BUILT_IN_NONE
);
8282 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
8283 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
8284 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
8285 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
8286 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
8287 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
8288 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
8289 ignore
, BUILT_IN_NONE
);
8294 case BUILT_IN_ATOMIC_FETCH_OR_1
:
8295 case BUILT_IN_ATOMIC_FETCH_OR_2
:
8296 case BUILT_IN_ATOMIC_FETCH_OR_4
:
8297 case BUILT_IN_ATOMIC_FETCH_OR_8
:
8298 case BUILT_IN_ATOMIC_FETCH_OR_16
:
8299 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
8300 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
8301 ignore
, BUILT_IN_NONE
);
8306 case BUILT_IN_ATOMIC_TEST_AND_SET
:
8307 return expand_builtin_atomic_test_and_set (exp
, target
);
8309 case BUILT_IN_ATOMIC_CLEAR
:
8310 return expand_builtin_atomic_clear (exp
);
8312 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
8313 return expand_builtin_atomic_always_lock_free (exp
);
8315 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
8316 target
= expand_builtin_atomic_is_lock_free (exp
);
8321 case BUILT_IN_ATOMIC_THREAD_FENCE
:
8322 expand_builtin_atomic_thread_fence (exp
);
8325 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
8326 expand_builtin_atomic_signal_fence (exp
);
8329 case BUILT_IN_OBJECT_SIZE
:
8330 return expand_builtin_object_size (exp
);
8332 case BUILT_IN_MEMCPY_CHK
:
8333 case BUILT_IN_MEMPCPY_CHK
:
8334 case BUILT_IN_MEMMOVE_CHK
:
8335 case BUILT_IN_MEMSET_CHK
:
8336 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
8341 case BUILT_IN_STRCPY_CHK
:
8342 case BUILT_IN_STPCPY_CHK
:
8343 case BUILT_IN_STRNCPY_CHK
:
8344 case BUILT_IN_STPNCPY_CHK
:
8345 case BUILT_IN_STRCAT_CHK
:
8346 case BUILT_IN_STRNCAT_CHK
:
8347 case BUILT_IN_SNPRINTF_CHK
:
8348 case BUILT_IN_VSNPRINTF_CHK
:
8349 maybe_emit_chk_warning (exp
, fcode
);
8352 case BUILT_IN_SPRINTF_CHK
:
8353 case BUILT_IN_VSPRINTF_CHK
:
8354 maybe_emit_sprintf_chk_warning (exp
, fcode
);
8358 if (warn_free_nonheap_object
)
8359 maybe_emit_free_warning (exp
);
8362 case BUILT_IN_THREAD_POINTER
:
8363 return expand_builtin_thread_pointer (exp
, target
);
8365 case BUILT_IN_SET_THREAD_POINTER
:
8366 expand_builtin_set_thread_pointer (exp
);
8369 case BUILT_IN_ACC_ON_DEVICE
:
8370 /* Do library call, if we failed to expand the builtin when
8374 case BUILT_IN_GOACC_PARLEVEL_ID
:
8375 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
8376 return expand_builtin_goacc_parlevel_id_size (exp
, target
, ignore
);
8378 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR
:
8379 return expand_speculation_safe_value (VOIDmode
, exp
, target
, ignore
);
8381 case BUILT_IN_SPECULATION_SAFE_VALUE_1
:
8382 case BUILT_IN_SPECULATION_SAFE_VALUE_2
:
8383 case BUILT_IN_SPECULATION_SAFE_VALUE_4
:
8384 case BUILT_IN_SPECULATION_SAFE_VALUE_8
:
8385 case BUILT_IN_SPECULATION_SAFE_VALUE_16
:
8386 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SPECULATION_SAFE_VALUE_1
);
8387 return expand_speculation_safe_value (mode
, exp
, target
, ignore
);
8389 default: /* just do library call, if unknown builtin */
8393 /* The switch statement above can drop through to cause the function
8394 to be called normally. */
8395 return expand_call (exp
, target
, ignore
);
8398 /* Determine whether a tree node represents a call to a built-in
8399 function. If the tree T is a call to a built-in function with
8400 the right number of arguments of the appropriate types, return
8401 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8402 Otherwise the return value is END_BUILTINS. */
8404 enum built_in_function
8405 builtin_mathfn_code (const_tree t
)
8407 const_tree fndecl
, arg
, parmlist
;
8408 const_tree argtype
, parmtype
;
8409 const_call_expr_arg_iterator iter
;
8411 if (TREE_CODE (t
) != CALL_EXPR
)
8412 return END_BUILTINS
;
8414 fndecl
= get_callee_fndecl (t
);
8415 if (fndecl
== NULL_TREE
|| !fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
8416 return END_BUILTINS
;
8418 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
8419 init_const_call_expr_arg_iterator (t
, &iter
);
8420 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
8422 /* If a function doesn't take a variable number of arguments,
8423 the last element in the list will have type `void'. */
8424 parmtype
= TREE_VALUE (parmlist
);
8425 if (VOID_TYPE_P (parmtype
))
8427 if (more_const_call_expr_args_p (&iter
))
8428 return END_BUILTINS
;
8429 return DECL_FUNCTION_CODE (fndecl
);
8432 if (! more_const_call_expr_args_p (&iter
))
8433 return END_BUILTINS
;
8435 arg
= next_const_call_expr_arg (&iter
);
8436 argtype
= TREE_TYPE (arg
);
8438 if (SCALAR_FLOAT_TYPE_P (parmtype
))
8440 if (! SCALAR_FLOAT_TYPE_P (argtype
))
8441 return END_BUILTINS
;
8443 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
8445 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
8446 return END_BUILTINS
;
8448 else if (POINTER_TYPE_P (parmtype
))
8450 if (! POINTER_TYPE_P (argtype
))
8451 return END_BUILTINS
;
8453 else if (INTEGRAL_TYPE_P (parmtype
))
8455 if (! INTEGRAL_TYPE_P (argtype
))
8456 return END_BUILTINS
;
8459 return END_BUILTINS
;
8462 /* Variable-length argument list. */
8463 return DECL_FUNCTION_CODE (fndecl
);
8466 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8467 evaluate to a constant. */
8470 fold_builtin_constant_p (tree arg
)
8472 /* We return 1 for a numeric type that's known to be a constant
8473 value at compile-time or for an aggregate type that's a
8474 literal constant. */
8477 /* If we know this is a constant, emit the constant of one. */
8478 if (CONSTANT_CLASS_P (arg
)
8479 || (TREE_CODE (arg
) == CONSTRUCTOR
8480 && TREE_CONSTANT (arg
)))
8481 return integer_one_node
;
8482 if (TREE_CODE (arg
) == ADDR_EXPR
)
8484 tree op
= TREE_OPERAND (arg
, 0);
8485 if (TREE_CODE (op
) == STRING_CST
8486 || (TREE_CODE (op
) == ARRAY_REF
8487 && integer_zerop (TREE_OPERAND (op
, 1))
8488 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
8489 return integer_one_node
;
8492 /* If this expression has side effects, show we don't know it to be a
8493 constant. Likewise if it's a pointer or aggregate type since in
8494 those case we only want literals, since those are only optimized
8495 when generating RTL, not later.
8496 And finally, if we are compiling an initializer, not code, we
8497 need to return a definite result now; there's not going to be any
8498 more optimization done. */
8499 if (TREE_SIDE_EFFECTS (arg
)
8500 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
8501 || POINTER_TYPE_P (TREE_TYPE (arg
))
8503 || folding_initializer
8504 || force_folding_builtin_constant_p
)
8505 return integer_zero_node
;
8510 /* Create builtin_expect or builtin_expect_with_probability
8511 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8512 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8513 builtin_expect_with_probability instead uses third argument as PROBABILITY
8517 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
8518 tree predictor
, tree probability
)
8520 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
8522 fn
= builtin_decl_explicit (probability
== NULL_TREE
? BUILT_IN_EXPECT
8523 : BUILT_IN_EXPECT_WITH_PROBABILITY
);
8524 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
8525 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
8526 pred_type
= TREE_VALUE (arg_types
);
8527 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
8529 pred
= fold_convert_loc (loc
, pred_type
, pred
);
8530 expected
= fold_convert_loc (loc
, expected_type
, expected
);
8533 call_expr
= build_call_expr_loc (loc
, fn
, 3, pred
, expected
, probability
);
8535 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
8538 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
8539 build_int_cst (ret_type
, 0));
8542 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8543 NULL_TREE if no simplification is possible. */
8546 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
,
8549 tree inner
, fndecl
, inner_arg0
;
8550 enum tree_code code
;
8552 /* Distribute the expected value over short-circuiting operators.
8553 See through the cast from truthvalue_type_node to long. */
8555 while (CONVERT_EXPR_P (inner_arg0
)
8556 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
8557 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
8558 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
8560 /* If this is a builtin_expect within a builtin_expect keep the
8561 inner one. See through a comparison against a constant. It
8562 might have been added to create a thruthvalue. */
8565 if (COMPARISON_CLASS_P (inner
)
8566 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
8567 inner
= TREE_OPERAND (inner
, 0);
8569 if (TREE_CODE (inner
) == CALL_EXPR
8570 && (fndecl
= get_callee_fndecl (inner
))
8571 && (fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
8572 || fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT_WITH_PROBABILITY
)))
8576 code
= TREE_CODE (inner
);
8577 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
8579 tree op0
= TREE_OPERAND (inner
, 0);
8580 tree op1
= TREE_OPERAND (inner
, 1);
8581 arg1
= save_expr (arg1
);
8583 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
, arg3
);
8584 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
, arg3
);
8585 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
8587 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
8590 /* If the argument isn't invariant then there's nothing else we can do. */
8591 if (!TREE_CONSTANT (inner_arg0
))
8594 /* If we expect that a comparison against the argument will fold to
8595 a constant return the constant. In practice, this means a true
8596 constant or the address of a non-weak symbol. */
8599 if (TREE_CODE (inner
) == ADDR_EXPR
)
8603 inner
= TREE_OPERAND (inner
, 0);
8605 while (TREE_CODE (inner
) == COMPONENT_REF
8606 || TREE_CODE (inner
) == ARRAY_REF
);
8607 if (VAR_OR_FUNCTION_DECL_P (inner
) && DECL_WEAK (inner
))
8611 /* Otherwise, ARG0 already has the proper type for the return value. */
8615 /* Fold a call to __builtin_classify_type with argument ARG. */
8618 fold_builtin_classify_type (tree arg
)
8621 return build_int_cst (integer_type_node
, no_type_class
);
8623 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
8626 /* Fold a call to __builtin_strlen with argument ARG. */
8629 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
8631 if (!validate_arg (arg
, POINTER_TYPE
))
8635 c_strlen_data lendata
= { };
8636 tree len
= c_strlen (arg
, 0, &lendata
);
8639 return fold_convert_loc (loc
, type
, len
);
8642 c_strlen (arg
, 1, &lendata
);
8646 if (EXPR_HAS_LOCATION (arg
))
8647 loc
= EXPR_LOCATION (arg
);
8648 else if (loc
== UNKNOWN_LOCATION
)
8649 loc
= input_location
;
8650 warn_string_no_nul (loc
, "strlen", arg
, lendata
.decl
);
8657 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8660 fold_builtin_inf (location_t loc
, tree type
, int warn
)
8662 REAL_VALUE_TYPE real
;
8664 /* __builtin_inff is intended to be usable to define INFINITY on all
8665 targets. If an infinity is not available, INFINITY expands "to a
8666 positive constant of type float that overflows at translation
8667 time", footnote "In this case, using INFINITY will violate the
8668 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8669 Thus we pedwarn to ensure this constraint violation is
8671 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
8672 pedwarn (loc
, 0, "target format does not support infinity");
8675 return build_real (type
, real
);
8678 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8679 NULL_TREE if no simplification can be made. */
8682 fold_builtin_sincos (location_t loc
,
8683 tree arg0
, tree arg1
, tree arg2
)
8686 tree fndecl
, call
= NULL_TREE
;
8688 if (!validate_arg (arg0
, REAL_TYPE
)
8689 || !validate_arg (arg1
, POINTER_TYPE
)
8690 || !validate_arg (arg2
, POINTER_TYPE
))
8693 type
= TREE_TYPE (arg0
);
8695 /* Calculate the result when the argument is a constant. */
8696 built_in_function fn
= mathfn_built_in_2 (type
, CFN_BUILT_IN_CEXPI
);
8697 if (fn
== END_BUILTINS
)
8700 /* Canonicalize sincos to cexpi. */
8701 if (TREE_CODE (arg0
) == REAL_CST
)
8703 tree complex_type
= build_complex_type (type
);
8704 call
= fold_const_call (as_combined_fn (fn
), complex_type
, arg0
);
8708 if (!targetm
.libc_has_function (function_c99_math_complex
)
8709 || !builtin_decl_implicit_p (fn
))
8711 fndecl
= builtin_decl_explicit (fn
);
8712 call
= build_call_expr_loc (loc
, fndecl
, 1, arg0
);
8713 call
= builtin_save_expr (call
);
8716 tree ptype
= build_pointer_type (type
);
8717 arg1
= fold_convert (ptype
, arg1
);
8718 arg2
= fold_convert (ptype
, arg2
);
8719 return build2 (COMPOUND_EXPR
, void_type_node
,
8720 build2 (MODIFY_EXPR
, void_type_node
,
8721 build_fold_indirect_ref_loc (loc
, arg1
),
8722 fold_build1_loc (loc
, IMAGPART_EXPR
, type
, call
)),
8723 build2 (MODIFY_EXPR
, void_type_node
,
8724 build_fold_indirect_ref_loc (loc
, arg2
),
8725 fold_build1_loc (loc
, REALPART_EXPR
, type
, call
)));
8728 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8729 Return NULL_TREE if no simplification can be made. */
8732 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8734 if (!validate_arg (arg1
, POINTER_TYPE
)
8735 || !validate_arg (arg2
, POINTER_TYPE
)
8736 || !validate_arg (len
, INTEGER_TYPE
))
8739 /* If the LEN parameter is zero, return zero. */
8740 if (integer_zerop (len
))
8741 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8744 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8745 if (operand_equal_p (arg1
, arg2
, 0))
8746 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8748 /* If len parameter is one, return an expression corresponding to
8749 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8750 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8752 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8753 tree cst_uchar_ptr_node
8754 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8757 = fold_convert_loc (loc
, integer_type_node
,
8758 build1 (INDIRECT_REF
, cst_uchar_node
,
8759 fold_convert_loc (loc
,
8763 = fold_convert_loc (loc
, integer_type_node
,
8764 build1 (INDIRECT_REF
, cst_uchar_node
,
8765 fold_convert_loc (loc
,
8768 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8774 /* Fold a call to builtin isascii with argument ARG. */
8777 fold_builtin_isascii (location_t loc
, tree arg
)
8779 if (!validate_arg (arg
, INTEGER_TYPE
))
8783 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8784 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
8785 build_int_cst (integer_type_node
,
8786 ~ (unsigned HOST_WIDE_INT
) 0x7f));
8787 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
8788 arg
, integer_zero_node
);
8792 /* Fold a call to builtin toascii with argument ARG. */
8795 fold_builtin_toascii (location_t loc
, tree arg
)
8797 if (!validate_arg (arg
, INTEGER_TYPE
))
8800 /* Transform toascii(c) -> (c & 0x7f). */
8801 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
8802 build_int_cst (integer_type_node
, 0x7f));
8805 /* Fold a call to builtin isdigit with argument ARG. */
8808 fold_builtin_isdigit (location_t loc
, tree arg
)
8810 if (!validate_arg (arg
, INTEGER_TYPE
))
8814 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8815 /* According to the C standard, isdigit is unaffected by locale.
8816 However, it definitely is affected by the target character set. */
8817 unsigned HOST_WIDE_INT target_digit0
8818 = lang_hooks
.to_target_charset ('0');
8820 if (target_digit0
== 0)
8823 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
8824 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
8825 build_int_cst (unsigned_type_node
, target_digit0
));
8826 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
8827 build_int_cst (unsigned_type_node
, 9));
8831 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8834 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
8836 if (!validate_arg (arg
, REAL_TYPE
))
8839 arg
= fold_convert_loc (loc
, type
, arg
);
8840 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8843 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8846 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
8848 if (!validate_arg (arg
, INTEGER_TYPE
))
8851 arg
= fold_convert_loc (loc
, type
, arg
);
8852 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8855 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8858 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
8860 if (validate_arg (arg
, COMPLEX_TYPE
)
8861 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
8863 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
8867 tree new_arg
= builtin_save_expr (arg
);
8868 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
8869 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
8870 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
8877 /* Fold a call to builtin frexp, we can assume the base is 2. */
8880 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8882 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8887 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8890 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8892 /* Proceed if a valid pointer type was passed in. */
8893 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
8895 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8901 /* For +-0, return (*exp = 0, +-0). */
8902 exp
= integer_zero_node
;
8907 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8908 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
8911 /* Since the frexp function always expects base 2, and in
8912 GCC normalized significands are already in the range
8913 [0.5, 1.0), we have exactly what frexp wants. */
8914 REAL_VALUE_TYPE frac_rvt
= *value
;
8915 SET_REAL_EXP (&frac_rvt
, 0);
8916 frac
= build_real (rettype
, frac_rvt
);
8917 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
8924 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8925 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
8926 TREE_SIDE_EFFECTS (arg1
) = 1;
8927 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
8933 /* Fold a call to builtin modf. */
8936 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8938 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8943 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8946 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8948 /* Proceed if a valid pointer type was passed in. */
8949 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
8951 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8952 REAL_VALUE_TYPE trunc
, frac
;
8958 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8959 trunc
= frac
= *value
;
8962 /* For +-Inf, return (*arg1 = arg0, +-0). */
8964 frac
.sign
= value
->sign
;
8968 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8969 real_trunc (&trunc
, VOIDmode
, value
);
8970 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
8971 /* If the original number was negative and already
8972 integral, then the fractional part is -0.0. */
8973 if (value
->sign
&& frac
.cl
== rvc_zero
)
8974 frac
.sign
= value
->sign
;
8978 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8979 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
8980 build_real (rettype
, trunc
));
8981 TREE_SIDE_EFFECTS (arg1
) = 1;
8982 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
8983 build_real (rettype
, frac
));
8989 /* Given a location LOC, an interclass builtin function decl FNDECL
8990 and its single argument ARG, return an folded expression computing
8991 the same, or NULL_TREE if we either couldn't or didn't want to fold
8992 (the latter happen if there's an RTL instruction available). */
8995 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8999 if (!validate_arg (arg
, REAL_TYPE
))
9002 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
9005 mode
= TYPE_MODE (TREE_TYPE (arg
));
9007 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
9009 /* If there is no optab, try generic code. */
9010 switch (DECL_FUNCTION_CODE (fndecl
))
9014 CASE_FLT_FN (BUILT_IN_ISINF
):
9016 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9017 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9018 tree type
= TREE_TYPE (arg
);
9022 if (is_ibm_extended
)
9024 /* NaN and Inf are encoded in the high-order double value
9025 only. The low-order value is not significant. */
9026 type
= double_type_node
;
9028 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9030 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9031 real_from_string (&r
, buf
);
9032 result
= build_call_expr (isgr_fn
, 2,
9033 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9034 build_real (type
, r
));
9037 CASE_FLT_FN (BUILT_IN_FINITE
):
9038 case BUILT_IN_ISFINITE
:
9040 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9041 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9042 tree type
= TREE_TYPE (arg
);
9046 if (is_ibm_extended
)
9048 /* NaN and Inf are encoded in the high-order double value
9049 only. The low-order value is not significant. */
9050 type
= double_type_node
;
9052 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9054 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9055 real_from_string (&r
, buf
);
9056 result
= build_call_expr (isle_fn
, 2,
9057 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9058 build_real (type
, r
));
9059 /*result = fold_build2_loc (loc, UNGT_EXPR,
9060 TREE_TYPE (TREE_TYPE (fndecl)),
9061 fold_build1_loc (loc, ABS_EXPR, type, arg),
9062 build_real (type, r));
9063 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9064 TREE_TYPE (TREE_TYPE (fndecl)),
9068 case BUILT_IN_ISNORMAL
:
9070 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9071 islessequal(fabs(x),DBL_MAX). */
9072 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9073 tree type
= TREE_TYPE (arg
);
9074 tree orig_arg
, max_exp
, min_exp
;
9075 machine_mode orig_mode
= mode
;
9076 REAL_VALUE_TYPE rmax
, rmin
;
9079 orig_arg
= arg
= builtin_save_expr (arg
);
9080 if (is_ibm_extended
)
9082 /* Use double to test the normal range of IBM extended
9083 precision. Emin for IBM extended precision is
9084 different to emin for IEEE double, being 53 higher
9085 since the low double exponent is at least 53 lower
9086 than the high double exponent. */
9087 type
= double_type_node
;
9089 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
9091 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
9093 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9094 real_from_string (&rmax
, buf
);
9095 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
9096 real_from_string (&rmin
, buf
);
9097 max_exp
= build_real (type
, rmax
);
9098 min_exp
= build_real (type
, rmin
);
9100 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
9101 if (is_ibm_extended
)
9103 /* Testing the high end of the range is done just using
9104 the high double, using the same test as isfinite().
9105 For the subnormal end of the range we first test the
9106 high double, then if its magnitude is equal to the
9107 limit of 0x1p-969, we test whether the low double is
9108 non-zero and opposite sign to the high double. */
9109 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
9110 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9111 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
9112 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
9114 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
9115 complex_double_type_node
, orig_arg
);
9116 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
9117 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
9118 tree zero
= build_real (type
, dconst0
);
9119 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
9120 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
9121 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
9122 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
9123 fold_build3 (COND_EXPR
,
9126 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
9128 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
9134 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9135 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
9137 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
9148 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9149 ARG is the argument for the call. */
9152 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9154 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9156 if (!validate_arg (arg
, REAL_TYPE
))
9159 switch (builtin_index
)
9161 case BUILT_IN_ISINF
:
9162 if (!HONOR_INFINITIES (arg
))
9163 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9167 case BUILT_IN_ISINF_SIGN
:
9169 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9170 /* In a boolean context, GCC will fold the inner COND_EXPR to
9171 1. So e.g. "if (isinf_sign(x))" would be folded to just
9172 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9173 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
9174 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9175 tree tmp
= NULL_TREE
;
9177 arg
= builtin_save_expr (arg
);
9179 if (signbit_fn
&& isinf_fn
)
9181 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9182 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9184 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9185 signbit_call
, integer_zero_node
);
9186 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9187 isinf_call
, integer_zero_node
);
9189 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9190 integer_minus_one_node
, integer_one_node
);
9191 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9199 case BUILT_IN_ISFINITE
:
9200 if (!HONOR_NANS (arg
)
9201 && !HONOR_INFINITIES (arg
))
9202 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9206 case BUILT_IN_ISNAN
:
9207 if (!HONOR_NANS (arg
))
9208 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9211 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
9212 if (is_ibm_extended
)
9214 /* NaN and Inf are encoded in the high-order double value
9215 only. The low-order value is not significant. */
9216 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
9219 arg
= builtin_save_expr (arg
);
9220 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9227 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9228 This builtin will generate code to return the appropriate floating
9229 point classification depending on the value of the floating point
9230 number passed in. The possible return values must be supplied as
9231 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9232 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9233 one floating point argument which is "type generic". */
9236 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9238 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9239 arg
, type
, res
, tmp
;
9244 /* Verify the required arguments in the original call. */
9246 || !validate_arg (args
[0], INTEGER_TYPE
)
9247 || !validate_arg (args
[1], INTEGER_TYPE
)
9248 || !validate_arg (args
[2], INTEGER_TYPE
)
9249 || !validate_arg (args
[3], INTEGER_TYPE
)
9250 || !validate_arg (args
[4], INTEGER_TYPE
)
9251 || !validate_arg (args
[5], REAL_TYPE
))
9255 fp_infinite
= args
[1];
9256 fp_normal
= args
[2];
9257 fp_subnormal
= args
[3];
9260 type
= TREE_TYPE (arg
);
9261 mode
= TYPE_MODE (type
);
9262 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9266 (fabs(x) == Inf ? FP_INFINITE :
9267 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9268 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9270 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9271 build_real (type
, dconst0
));
9272 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9273 tmp
, fp_zero
, fp_subnormal
);
9275 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9276 real_from_string (&r
, buf
);
9277 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9278 arg
, build_real (type
, r
));
9279 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9281 if (HONOR_INFINITIES (mode
))
9284 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9285 build_real (type
, r
));
9286 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9290 if (HONOR_NANS (mode
))
9292 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9293 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9299 /* Fold a call to an unordered comparison function such as
9300 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9301 being called and ARG0 and ARG1 are the arguments for the call.
9302 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9303 the opposite of the desired result. UNORDERED_CODE is used
9304 for modes that can hold NaNs and ORDERED_CODE is used for
9308 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9309 enum tree_code unordered_code
,
9310 enum tree_code ordered_code
)
9312 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9313 enum tree_code code
;
9315 enum tree_code code0
, code1
;
9316 tree cmp_type
= NULL_TREE
;
9318 type0
= TREE_TYPE (arg0
);
9319 type1
= TREE_TYPE (arg1
);
9321 code0
= TREE_CODE (type0
);
9322 code1
= TREE_CODE (type1
);
9324 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9325 /* Choose the wider of two real types. */
9326 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9328 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9330 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9333 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9334 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9336 if (unordered_code
== UNORDERED_EXPR
)
9338 if (!HONOR_NANS (arg0
))
9339 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9340 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9343 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9344 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9345 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9348 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9349 arithmetics if it can never overflow, or into internal functions that
9350 return both result of arithmetics and overflowed boolean flag in
9351 a complex integer result, or some other check for overflow.
9352 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9353 checking part of that. */
9356 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9357 tree arg0
, tree arg1
, tree arg2
)
9359 enum internal_fn ifn
= IFN_LAST
;
9360 /* The code of the expression corresponding to the built-in. */
9361 enum tree_code opcode
= ERROR_MARK
;
9362 bool ovf_only
= false;
9366 case BUILT_IN_ADD_OVERFLOW_P
:
9369 case BUILT_IN_ADD_OVERFLOW
:
9370 case BUILT_IN_SADD_OVERFLOW
:
9371 case BUILT_IN_SADDL_OVERFLOW
:
9372 case BUILT_IN_SADDLL_OVERFLOW
:
9373 case BUILT_IN_UADD_OVERFLOW
:
9374 case BUILT_IN_UADDL_OVERFLOW
:
9375 case BUILT_IN_UADDLL_OVERFLOW
:
9377 ifn
= IFN_ADD_OVERFLOW
;
9379 case BUILT_IN_SUB_OVERFLOW_P
:
9382 case BUILT_IN_SUB_OVERFLOW
:
9383 case BUILT_IN_SSUB_OVERFLOW
:
9384 case BUILT_IN_SSUBL_OVERFLOW
:
9385 case BUILT_IN_SSUBLL_OVERFLOW
:
9386 case BUILT_IN_USUB_OVERFLOW
:
9387 case BUILT_IN_USUBL_OVERFLOW
:
9388 case BUILT_IN_USUBLL_OVERFLOW
:
9389 opcode
= MINUS_EXPR
;
9390 ifn
= IFN_SUB_OVERFLOW
;
9392 case BUILT_IN_MUL_OVERFLOW_P
:
9395 case BUILT_IN_MUL_OVERFLOW
:
9396 case BUILT_IN_SMUL_OVERFLOW
:
9397 case BUILT_IN_SMULL_OVERFLOW
:
9398 case BUILT_IN_SMULLL_OVERFLOW
:
9399 case BUILT_IN_UMUL_OVERFLOW
:
9400 case BUILT_IN_UMULL_OVERFLOW
:
9401 case BUILT_IN_UMULLL_OVERFLOW
:
9403 ifn
= IFN_MUL_OVERFLOW
;
9409 /* For the "generic" overloads, the first two arguments can have different
9410 types and the last argument determines the target type to use to check
9411 for overflow. The arguments of the other overloads all have the same
9413 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
9415 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9416 arguments are constant, attempt to fold the built-in call into a constant
9417 expression indicating whether or not it detected an overflow. */
9419 && TREE_CODE (arg0
) == INTEGER_CST
9420 && TREE_CODE (arg1
) == INTEGER_CST
)
9421 /* Perform the computation in the target type and check for overflow. */
9422 return omit_one_operand_loc (loc
, boolean_type_node
,
9423 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
9424 ? boolean_true_node
: boolean_false_node
,
9427 tree intres
, ovfres
;
9428 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9430 intres
= fold_binary_loc (loc
, opcode
, type
,
9431 fold_convert_loc (loc
, type
, arg0
),
9432 fold_convert_loc (loc
, type
, arg1
));
9433 if (TREE_OVERFLOW (intres
))
9434 intres
= drop_tree_overflow (intres
);
9435 ovfres
= (arith_overflowed_p (opcode
, type
, arg0
, arg1
)
9436 ? boolean_true_node
: boolean_false_node
);
9440 tree ctype
= build_complex_type (type
);
9441 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
, 2,
9443 tree tgt
= save_expr (call
);
9444 intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9445 ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9446 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9450 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
9452 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9454 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9455 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
9458 /* Fold a call to __builtin_FILE to a constant string. */
9461 fold_builtin_FILE (location_t loc
)
9463 if (const char *fname
= LOCATION_FILE (loc
))
9465 /* The documentation says this builtin is equivalent to the preprocessor
9466 __FILE__ macro so it appears appropriate to use the same file prefix
9468 fname
= remap_macro_filename (fname
);
9469 return build_string_literal (strlen (fname
) + 1, fname
);
9472 return build_string_literal (1, "");
9475 /* Fold a call to __builtin_FUNCTION to a constant string. */
9478 fold_builtin_FUNCTION ()
9480 const char *name
= "";
9482 if (current_function_decl
)
9483 name
= lang_hooks
.decl_printable_name (current_function_decl
, 0);
9485 return build_string_literal (strlen (name
) + 1, name
);
9488 /* Fold a call to __builtin_LINE to an integer constant. */
9491 fold_builtin_LINE (location_t loc
, tree type
)
9493 return build_int_cst (type
, LOCATION_LINE (loc
));
9496 /* Fold a call to built-in function FNDECL with 0 arguments.
9497 This function returns NULL_TREE if no simplification was possible. */
9500 fold_builtin_0 (location_t loc
, tree fndecl
)
9502 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9503 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9507 return fold_builtin_FILE (loc
);
9509 case BUILT_IN_FUNCTION
:
9510 return fold_builtin_FUNCTION ();
9513 return fold_builtin_LINE (loc
, type
);
9515 CASE_FLT_FN (BUILT_IN_INF
):
9516 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
9517 case BUILT_IN_INFD32
:
9518 case BUILT_IN_INFD64
:
9519 case BUILT_IN_INFD128
:
9520 return fold_builtin_inf (loc
, type
, true);
9522 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9523 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
9524 return fold_builtin_inf (loc
, type
, false);
9526 case BUILT_IN_CLASSIFY_TYPE
:
9527 return fold_builtin_classify_type (NULL_TREE
);
9535 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9536 This function returns NULL_TREE if no simplification was possible. */
9539 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9541 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9542 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9544 if (TREE_CODE (arg0
) == ERROR_MARK
)
9547 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
9552 case BUILT_IN_CONSTANT_P
:
9554 tree val
= fold_builtin_constant_p (arg0
);
9556 /* Gimplification will pull the CALL_EXPR for the builtin out of
9557 an if condition. When not optimizing, we'll not CSE it back.
9558 To avoid link error types of regressions, return false now. */
9559 if (!val
&& !optimize
)
9560 val
= integer_zero_node
;
9565 case BUILT_IN_CLASSIFY_TYPE
:
9566 return fold_builtin_classify_type (arg0
);
9568 case BUILT_IN_STRLEN
:
9569 return fold_builtin_strlen (loc
, type
, arg0
);
9571 CASE_FLT_FN (BUILT_IN_FABS
):
9572 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
9573 case BUILT_IN_FABSD32
:
9574 case BUILT_IN_FABSD64
:
9575 case BUILT_IN_FABSD128
:
9576 return fold_builtin_fabs (loc
, arg0
, type
);
9580 case BUILT_IN_LLABS
:
9581 case BUILT_IN_IMAXABS
:
9582 return fold_builtin_abs (loc
, arg0
, type
);
9584 CASE_FLT_FN (BUILT_IN_CONJ
):
9585 if (validate_arg (arg0
, COMPLEX_TYPE
)
9586 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9587 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9590 CASE_FLT_FN (BUILT_IN_CREAL
):
9591 if (validate_arg (arg0
, COMPLEX_TYPE
)
9592 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9593 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9596 CASE_FLT_FN (BUILT_IN_CIMAG
):
9597 if (validate_arg (arg0
, COMPLEX_TYPE
)
9598 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9599 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9602 CASE_FLT_FN (BUILT_IN_CARG
):
9603 return fold_builtin_carg (loc
, arg0
, type
);
9605 case BUILT_IN_ISASCII
:
9606 return fold_builtin_isascii (loc
, arg0
);
9608 case BUILT_IN_TOASCII
:
9609 return fold_builtin_toascii (loc
, arg0
);
9611 case BUILT_IN_ISDIGIT
:
9612 return fold_builtin_isdigit (loc
, arg0
);
9614 CASE_FLT_FN (BUILT_IN_FINITE
):
9615 case BUILT_IN_FINITED32
:
9616 case BUILT_IN_FINITED64
:
9617 case BUILT_IN_FINITED128
:
9618 case BUILT_IN_ISFINITE
:
9620 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
9623 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9626 CASE_FLT_FN (BUILT_IN_ISINF
):
9627 case BUILT_IN_ISINFD32
:
9628 case BUILT_IN_ISINFD64
:
9629 case BUILT_IN_ISINFD128
:
9631 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9634 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9637 case BUILT_IN_ISNORMAL
:
9638 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9640 case BUILT_IN_ISINF_SIGN
:
9641 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9643 CASE_FLT_FN (BUILT_IN_ISNAN
):
9644 case BUILT_IN_ISNAND32
:
9645 case BUILT_IN_ISNAND64
:
9646 case BUILT_IN_ISNAND128
:
9647 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9650 if (integer_zerop (arg0
))
9651 return build_empty_stmt (loc
);
9662 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9663 This function returns NULL_TREE if no simplification was possible. */
9666 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
9668 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9669 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9671 if (TREE_CODE (arg0
) == ERROR_MARK
9672 || TREE_CODE (arg1
) == ERROR_MARK
)
9675 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
9680 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9681 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9682 if (validate_arg (arg0
, REAL_TYPE
)
9683 && validate_arg (arg1
, POINTER_TYPE
))
9684 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9687 CASE_FLT_FN (BUILT_IN_FREXP
):
9688 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9690 CASE_FLT_FN (BUILT_IN_MODF
):
9691 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9693 case BUILT_IN_STRSPN
:
9694 return fold_builtin_strspn (loc
, arg0
, arg1
);
9696 case BUILT_IN_STRCSPN
:
9697 return fold_builtin_strcspn (loc
, arg0
, arg1
);
9699 case BUILT_IN_STRPBRK
:
9700 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
9702 case BUILT_IN_EXPECT
:
9703 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, NULL_TREE
);
9705 case BUILT_IN_ISGREATER
:
9706 return fold_builtin_unordered_cmp (loc
, fndecl
,
9707 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9708 case BUILT_IN_ISGREATEREQUAL
:
9709 return fold_builtin_unordered_cmp (loc
, fndecl
,
9710 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9711 case BUILT_IN_ISLESS
:
9712 return fold_builtin_unordered_cmp (loc
, fndecl
,
9713 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9714 case BUILT_IN_ISLESSEQUAL
:
9715 return fold_builtin_unordered_cmp (loc
, fndecl
,
9716 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9717 case BUILT_IN_ISLESSGREATER
:
9718 return fold_builtin_unordered_cmp (loc
, fndecl
,
9719 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9720 case BUILT_IN_ISUNORDERED
:
9721 return fold_builtin_unordered_cmp (loc
, fndecl
,
9722 arg0
, arg1
, UNORDERED_EXPR
,
9725 /* We do the folding for va_start in the expander. */
9726 case BUILT_IN_VA_START
:
9729 case BUILT_IN_OBJECT_SIZE
:
9730 return fold_builtin_object_size (arg0
, arg1
);
9732 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9733 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9735 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9736 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
9744 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9746 This function returns NULL_TREE if no simplification was possible. */
9749 fold_builtin_3 (location_t loc
, tree fndecl
,
9750 tree arg0
, tree arg1
, tree arg2
)
9752 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9753 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9755 if (TREE_CODE (arg0
) == ERROR_MARK
9756 || TREE_CODE (arg1
) == ERROR_MARK
9757 || TREE_CODE (arg2
) == ERROR_MARK
)
9760 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
9767 CASE_FLT_FN (BUILT_IN_SINCOS
):
9768 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9770 CASE_FLT_FN (BUILT_IN_REMQUO
):
9771 if (validate_arg (arg0
, REAL_TYPE
)
9772 && validate_arg (arg1
, REAL_TYPE
)
9773 && validate_arg (arg2
, POINTER_TYPE
))
9774 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9777 case BUILT_IN_MEMCMP
:
9778 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);
9780 case BUILT_IN_EXPECT
:
9781 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
, NULL_TREE
);
9783 case BUILT_IN_EXPECT_WITH_PROBABILITY
:
9784 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
, arg2
);
9786 case BUILT_IN_ADD_OVERFLOW
:
9787 case BUILT_IN_SUB_OVERFLOW
:
9788 case BUILT_IN_MUL_OVERFLOW
:
9789 case BUILT_IN_ADD_OVERFLOW_P
:
9790 case BUILT_IN_SUB_OVERFLOW_P
:
9791 case BUILT_IN_MUL_OVERFLOW_P
:
9792 case BUILT_IN_SADD_OVERFLOW
:
9793 case BUILT_IN_SADDL_OVERFLOW
:
9794 case BUILT_IN_SADDLL_OVERFLOW
:
9795 case BUILT_IN_SSUB_OVERFLOW
:
9796 case BUILT_IN_SSUBL_OVERFLOW
:
9797 case BUILT_IN_SSUBLL_OVERFLOW
:
9798 case BUILT_IN_SMUL_OVERFLOW
:
9799 case BUILT_IN_SMULL_OVERFLOW
:
9800 case BUILT_IN_SMULLL_OVERFLOW
:
9801 case BUILT_IN_UADD_OVERFLOW
:
9802 case BUILT_IN_UADDL_OVERFLOW
:
9803 case BUILT_IN_UADDLL_OVERFLOW
:
9804 case BUILT_IN_USUB_OVERFLOW
:
9805 case BUILT_IN_USUBL_OVERFLOW
:
9806 case BUILT_IN_USUBLL_OVERFLOW
:
9807 case BUILT_IN_UMUL_OVERFLOW
:
9808 case BUILT_IN_UMULL_OVERFLOW
:
9809 case BUILT_IN_UMULLL_OVERFLOW
:
9810 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
9818 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9819 arguments. IGNORE is true if the result of the
9820 function call is ignored. This function returns NULL_TREE if no
9821 simplification was possible. */
9824 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
9826 tree ret
= NULL_TREE
;
9831 ret
= fold_builtin_0 (loc
, fndecl
);
9834 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
9837 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
9840 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
9843 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
9848 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9849 SET_EXPR_LOCATION (ret
, loc
);
9855 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9856 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9857 of arguments in ARGS to be omitted. OLDNARGS is the number of
9858 elements in ARGS. */
9861 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
9862 int skip
, tree fndecl
, int n
, va_list newargs
)
9864 int nargs
= oldnargs
- skip
+ n
;
9871 buffer
= XALLOCAVEC (tree
, nargs
);
9872 for (i
= 0; i
< n
; i
++)
9873 buffer
[i
] = va_arg (newargs
, tree
);
9874 for (j
= skip
; j
< oldnargs
; j
++, i
++)
9875 buffer
[i
] = args
[j
];
9878 buffer
= args
+ skip
;
9880 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
9883 /* Return true if FNDECL shouldn't be folded right now.
9884 If a built-in function has an inline attribute always_inline
9885 wrapper, defer folding it after always_inline functions have
9886 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9887 might not be performed. */
9890 avoid_folding_inline_builtin (tree fndecl
)
9892 return (DECL_DECLARED_INLINE_P (fndecl
)
9893 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
9895 && !cfun
->always_inline_functions_inlined
9896 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
9899 /* A wrapper function for builtin folding that prevents warnings for
9900 "statement without effect" and the like, caused by removing the
9901 call node earlier than the warning is generated. */
9904 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9906 tree ret
= NULL_TREE
;
9907 tree fndecl
= get_callee_fndecl (exp
);
9908 if (fndecl
&& fndecl_built_in_p (fndecl
)
9909 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9910 yet. Defer folding until we see all the arguments
9911 (after inlining). */
9912 && !CALL_EXPR_VA_ARG_PACK (exp
))
9914 int nargs
= call_expr_nargs (exp
);
9916 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9917 instead last argument is __builtin_va_arg_pack (). Defer folding
9918 even in that case, until arguments are finalized. */
9919 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9921 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9922 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
9926 if (avoid_folding_inline_builtin (fndecl
))
9929 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9930 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9931 CALL_EXPR_ARGP (exp
), ignore
);
9934 tree
*args
= CALL_EXPR_ARGP (exp
);
9935 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9943 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9944 N arguments are passed in the array ARGARRAY. Return a folded
9945 expression or NULL_TREE if no simplification was possible. */
9948 fold_builtin_call_array (location_t loc
, tree
,
9953 if (TREE_CODE (fn
) != ADDR_EXPR
)
9956 tree fndecl
= TREE_OPERAND (fn
, 0);
9957 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9958 && fndecl_built_in_p (fndecl
))
9960 /* If last argument is __builtin_va_arg_pack (), arguments to this
9961 function are not finalized yet. Defer folding until they are. */
9962 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
9964 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
9965 if (fndecl2
&& fndecl_built_in_p (fndecl2
, BUILT_IN_VA_ARG_PACK
))
9968 if (avoid_folding_inline_builtin (fndecl
))
9970 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9971 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
9973 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
9979 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9980 along with N new arguments specified as the "..." parameters. SKIP
9981 is the number of arguments in EXP to be omitted. This function is used
9982 to do varargs-to-varargs transformations. */
9985 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
9991 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
9992 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
9998 /* Validate a single argument ARG against a tree code CODE representing
9999 a type. Return true when argument is valid. */
10002 validate_arg (const_tree arg
, enum tree_code code
)
10006 else if (code
== POINTER_TYPE
)
10007 return POINTER_TYPE_P (TREE_TYPE (arg
));
10008 else if (code
== INTEGER_TYPE
)
10009 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10010 return code
== TREE_CODE (TREE_TYPE (arg
));
10013 /* This function validates the types of a function call argument list
10014 against a specified list of tree_codes. If the last specifier is a 0,
10015 that represents an ellipses, otherwise the last specifier must be a
10018 This is the GIMPLE version of validate_arglist. Eventually we want to
10019 completely convert builtins.c to work from GIMPLEs and the tree based
10020 validate_arglist will then be removed. */
10023 validate_gimple_arglist (const gcall
*call
, ...)
10025 enum tree_code code
;
10031 va_start (ap
, call
);
10036 code
= (enum tree_code
) va_arg (ap
, int);
10040 /* This signifies an ellipses, any further arguments are all ok. */
10044 /* This signifies an endlink, if no arguments remain, return
10045 true, otherwise return false. */
10046 res
= (i
== gimple_call_num_args (call
));
10049 /* If no parameters remain or the parameter's code does not
10050 match the specified code, return false. Otherwise continue
10051 checking any remaining arguments. */
10052 arg
= gimple_call_arg (call
, i
++);
10053 if (!validate_arg (arg
, code
))
10060 /* We need gotos here since we can only have one VA_CLOSE in a
10068 /* Default target-specific builtin expander that does nothing. */
10071 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10072 rtx target ATTRIBUTE_UNUSED
,
10073 rtx subtarget ATTRIBUTE_UNUSED
,
10074 machine_mode mode ATTRIBUTE_UNUSED
,
10075 int ignore ATTRIBUTE_UNUSED
)
10080 /* Returns true is EXP represents data that would potentially reside
10081 in a readonly section. */
10084 readonly_data_expr (tree exp
)
10088 if (TREE_CODE (exp
) != ADDR_EXPR
)
10091 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10095 /* Make sure we call decl_readonly_section only for trees it
10096 can handle (since it returns true for everything it doesn't
10098 if (TREE_CODE (exp
) == STRING_CST
10099 || TREE_CODE (exp
) == CONSTRUCTOR
10100 || (VAR_P (exp
) && TREE_STATIC (exp
)))
10101 return decl_readonly_section (exp
, 0);
10106 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10107 to the call, and TYPE is its return type.
10109 Return NULL_TREE if no simplification was possible, otherwise return the
10110 simplified form of the call as a tree.
10112 The simplified form may be a constant or other expression which
10113 computes the same value, but in a more efficient manner (including
10114 calls to other builtin functions).
10116 The call may contain arguments which need to be evaluated, but
10117 which are not useful to determine the result of the call. In
10118 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10119 COMPOUND_EXPR will be an argument which must be evaluated.
10120 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10121 COMPOUND_EXPR in the chain will contain the tree for the simplified
10122 form of the builtin function call. */
10125 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
10127 if (!validate_arg (s1
, POINTER_TYPE
)
10128 || !validate_arg (s2
, POINTER_TYPE
))
10133 const char *p1
, *p2
;
10135 p2
= c_getstr (s2
);
10139 p1
= c_getstr (s1
);
10142 const char *r
= strpbrk (p1
, p2
);
10146 return build_int_cst (TREE_TYPE (s1
), 0);
10148 /* Return an offset into the constant string argument. */
10149 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10150 return fold_convert_loc (loc
, type
, tem
);
10154 /* strpbrk(x, "") == NULL.
10155 Evaluate and ignore s1 in case it had side-effects. */
10156 return omit_one_operand_loc (loc
, type
, integer_zero_node
, s1
);
10159 return NULL_TREE
; /* Really call strpbrk. */
10161 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10165 /* New argument list transforming strpbrk(s1, s2) to
10166 strchr(s1, s2[0]). */
10167 return build_call_expr_loc (loc
, fn
, 2, s1
,
10168 build_int_cst (integer_type_node
, p2
[0]));
10172 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10175 Return NULL_TREE if no simplification was possible, otherwise return the
10176 simplified form of the call as a tree.
10178 The simplified form may be a constant or other expression which
10179 computes the same value, but in a more efficient manner (including
10180 calls to other builtin functions).
10182 The call may contain arguments which need to be evaluated, but
10183 which are not useful to determine the result of the call. In
10184 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10185 COMPOUND_EXPR will be an argument which must be evaluated.
10186 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10187 COMPOUND_EXPR in the chain will contain the tree for the simplified
10188 form of the builtin function call. */
10191 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
10193 if (!validate_arg (s1
, POINTER_TYPE
)
10194 || !validate_arg (s2
, POINTER_TYPE
))
10198 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10200 /* If either argument is "", return NULL_TREE. */
10201 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10202 /* Evaluate and ignore both arguments in case either one has
10204 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
10210 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10213 Return NULL_TREE if no simplification was possible, otherwise return the
10214 simplified form of the call as a tree.
10216 The simplified form may be a constant or other expression which
10217 computes the same value, but in a more efficient manner (including
10218 calls to other builtin functions).
10220 The call may contain arguments which need to be evaluated, but
10221 which are not useful to determine the result of the call. In
10222 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10223 COMPOUND_EXPR will be an argument which must be evaluated.
10224 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10225 COMPOUND_EXPR in the chain will contain the tree for the simplified
10226 form of the builtin function call. */
10229 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
10231 if (!validate_arg (s1
, POINTER_TYPE
)
10232 || !validate_arg (s2
, POINTER_TYPE
))
10236 /* If the first argument is "", return NULL_TREE. */
10237 const char *p1
= c_getstr (s1
);
10238 if (p1
&& *p1
== '\0')
10240 /* Evaluate and ignore argument s2 in case it has
10242 return omit_one_operand_loc (loc
, size_type_node
,
10243 size_zero_node
, s2
);
10246 /* If the second argument is "", return __builtin_strlen(s1). */
10247 const char *p2
= c_getstr (s2
);
10248 if (p2
&& *p2
== '\0')
10250 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10252 /* If the replacement _DECL isn't initialized, don't do the
10257 return build_call_expr_loc (loc
, fn
, 1, s1
);
10263 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10264 produced. False otherwise. This is done so that we don't output the error
10265 or warning twice or three times. */
10268 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10270 tree fntype
= TREE_TYPE (current_function_decl
);
10271 int nargs
= call_expr_nargs (exp
);
10273 /* There is good chance the current input_location points inside the
10274 definition of the va_start macro (perhaps on the token for
10275 builtin) in a system header, so warnings will not be emitted.
10276 Use the location in real source code. */
10277 location_t current_location
=
10278 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10281 if (!stdarg_p (fntype
))
10283 error ("%<va_start%> used in function with fixed arguments");
10289 if (va_start_p
&& (nargs
!= 2))
10291 error ("wrong number of arguments to function %<va_start%>");
10294 arg
= CALL_EXPR_ARG (exp
, 1);
10296 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10297 when we checked the arguments and if needed issued a warning. */
10302 /* Evidently an out of date version of <stdarg.h>; can't validate
10303 va_start's second argument, but can still work as intended. */
10304 warning_at (current_location
,
10306 "%<__builtin_next_arg%> called without an argument");
10309 else if (nargs
> 1)
10311 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10314 arg
= CALL_EXPR_ARG (exp
, 0);
10317 if (TREE_CODE (arg
) == SSA_NAME
)
10318 arg
= SSA_NAME_VAR (arg
);
10320 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10321 or __builtin_next_arg (0) the first time we see it, after checking
10322 the arguments and if needed issuing a warning. */
10323 if (!integer_zerop (arg
))
10325 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
10327 /* Strip off all nops for the sake of the comparison. This
10328 is not quite the same as STRIP_NOPS. It does more.
10329 We must also strip off INDIRECT_EXPR for C++ reference
10331 while (CONVERT_EXPR_P (arg
)
10332 || TREE_CODE (arg
) == INDIRECT_REF
)
10333 arg
= TREE_OPERAND (arg
, 0);
10334 if (arg
!= last_parm
)
10336 /* FIXME: Sometimes with the tree optimizers we can get the
10337 not the last argument even though the user used the last
10338 argument. We just warn and set the arg to be the last
10339 argument so that we will get wrong-code because of
10341 warning_at (current_location
,
10343 "second parameter of %<va_start%> not last named argument");
10346 /* Undefined by C99 7.15.1.4p4 (va_start):
10347 "If the parameter parmN is declared with the register storage
10348 class, with a function or array type, or with a type that is
10349 not compatible with the type that results after application of
10350 the default argument promotions, the behavior is undefined."
10352 else if (DECL_REGISTER (arg
))
10354 warning_at (current_location
,
10356 "undefined behavior when second parameter of "
10357 "%<va_start%> is declared with %<register%> storage");
10360 /* We want to verify the second parameter just once before the tree
10361 optimizers are run and then avoid keeping it in the tree,
10362 as otherwise we could warn even for correct code like:
10363 void foo (int i, ...)
10364 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10366 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
10368 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
10374 /* Expand a call EXP to __builtin_object_size. */
10377 expand_builtin_object_size (tree exp
)
10380 int object_size_type
;
10381 tree fndecl
= get_callee_fndecl (exp
);
10383 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10385 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10387 expand_builtin_trap ();
10391 ost
= CALL_EXPR_ARG (exp
, 1);
10394 if (TREE_CODE (ost
) != INTEGER_CST
10395 || tree_int_cst_sgn (ost
) < 0
10396 || compare_tree_int (ost
, 3) > 0)
10398 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10400 expand_builtin_trap ();
10404 object_size_type
= tree_to_shwi (ost
);
10406 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
10409 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10410 FCODE is the BUILT_IN_* to use.
10411 Return NULL_RTX if we failed; the caller should emit a normal call,
10412 otherwise try to get the result in TARGET, if convenient (and in
10413 mode MODE if that's convenient). */
10416 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
10417 enum built_in_function fcode
)
10419 if (!validate_arglist (exp
,
10421 fcode
== BUILT_IN_MEMSET_CHK
10422 ? INTEGER_TYPE
: POINTER_TYPE
,
10423 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10426 tree dest
= CALL_EXPR_ARG (exp
, 0);
10427 tree src
= CALL_EXPR_ARG (exp
, 1);
10428 tree len
= CALL_EXPR_ARG (exp
, 2);
10429 tree size
= CALL_EXPR_ARG (exp
, 3);
10431 bool sizes_ok
= check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
,
10432 /*str=*/NULL_TREE
, size
);
10434 if (!tree_fits_uhwi_p (size
))
10437 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
10439 /* Avoid transforming the checking call to an ordinary one when
10440 an overflow has been detected or when the call couldn't be
10441 validated because the size is not constant. */
10442 if (!sizes_ok
&& !integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
10445 tree fn
= NULL_TREE
;
10446 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10447 mem{cpy,pcpy,move,set} is available. */
10450 case BUILT_IN_MEMCPY_CHK
:
10451 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
10453 case BUILT_IN_MEMPCPY_CHK
:
10454 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
10456 case BUILT_IN_MEMMOVE_CHK
:
10457 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
10459 case BUILT_IN_MEMSET_CHK
:
10460 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
10469 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
10470 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10471 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10472 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10474 else if (fcode
== BUILT_IN_MEMSET_CHK
)
10478 unsigned int dest_align
= get_pointer_alignment (dest
);
10480 /* If DEST is not a pointer type, call the normal function. */
10481 if (dest_align
== 0)
10484 /* If SRC and DEST are the same (and not volatile), do nothing. */
10485 if (operand_equal_p (src
, dest
, 0))
10489 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
10491 /* Evaluate and ignore LEN in case it has side-effects. */
10492 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10493 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
10496 expr
= fold_build_pointer_plus (dest
, len
);
10497 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
10500 /* __memmove_chk special case. */
10501 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
10503 unsigned int src_align
= get_pointer_alignment (src
);
10505 if (src_align
== 0)
10508 /* If src is categorized for a readonly section we can use
10509 normal __memcpy_chk. */
10510 if (readonly_data_expr (src
))
10512 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
10515 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
10516 dest
, src
, len
, size
);
10517 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10518 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10519 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10526 /* Emit warning if a buffer overflow is detected at compile time. */
10529 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
10531 /* The source string. */
10532 tree srcstr
= NULL_TREE
;
10533 /* The size of the destination object. */
10534 tree objsize
= NULL_TREE
;
10535 /* The string that is being concatenated with (as in __strcat_chk)
10536 or null if it isn't. */
10537 tree catstr
= NULL_TREE
;
10538 /* The maximum length of the source sequence in a bounded operation
10539 (such as __strncat_chk) or null if the operation isn't bounded
10540 (such as __strcat_chk). */
10541 tree maxread
= NULL_TREE
;
10542 /* The exact size of the access (such as in __strncpy_chk). */
10543 tree size
= NULL_TREE
;
10547 case BUILT_IN_STRCPY_CHK
:
10548 case BUILT_IN_STPCPY_CHK
:
10549 srcstr
= CALL_EXPR_ARG (exp
, 1);
10550 objsize
= CALL_EXPR_ARG (exp
, 2);
10553 case BUILT_IN_STRCAT_CHK
:
10554 /* For __strcat_chk the warning will be emitted only if overflowing
10555 by at least strlen (dest) + 1 bytes. */
10556 catstr
= CALL_EXPR_ARG (exp
, 0);
10557 srcstr
= CALL_EXPR_ARG (exp
, 1);
10558 objsize
= CALL_EXPR_ARG (exp
, 2);
10561 case BUILT_IN_STRNCAT_CHK
:
10562 catstr
= CALL_EXPR_ARG (exp
, 0);
10563 srcstr
= CALL_EXPR_ARG (exp
, 1);
10564 maxread
= CALL_EXPR_ARG (exp
, 2);
10565 objsize
= CALL_EXPR_ARG (exp
, 3);
10568 case BUILT_IN_STRNCPY_CHK
:
10569 case BUILT_IN_STPNCPY_CHK
:
10570 srcstr
= CALL_EXPR_ARG (exp
, 1);
10571 size
= CALL_EXPR_ARG (exp
, 2);
10572 objsize
= CALL_EXPR_ARG (exp
, 3);
10575 case BUILT_IN_SNPRINTF_CHK
:
10576 case BUILT_IN_VSNPRINTF_CHK
:
10577 maxread
= CALL_EXPR_ARG (exp
, 1);
10578 objsize
= CALL_EXPR_ARG (exp
, 3);
10581 gcc_unreachable ();
10584 if (catstr
&& maxread
)
10586 /* Check __strncat_chk. There is no way to determine the length
10587 of the string to which the source string is being appended so
10588 just warn when the length of the source string is not known. */
10589 check_strncat_sizes (exp
, objsize
);
10593 /* The destination argument is the first one for all built-ins above. */
10594 tree dst
= CALL_EXPR_ARG (exp
, 0);
10596 check_access (exp
, dst
, srcstr
, size
, maxread
, srcstr
, objsize
);
10599 /* Emit warning if a buffer overflow is detected at compile time
10600 in __sprintf_chk/__vsprintf_chk calls. */
10603 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
10605 tree size
, len
, fmt
;
10606 const char *fmt_str
;
10607 int nargs
= call_expr_nargs (exp
);
10609 /* Verify the required arguments in the original call. */
10613 size
= CALL_EXPR_ARG (exp
, 2);
10614 fmt
= CALL_EXPR_ARG (exp
, 3);
10616 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
10619 /* Check whether the format is a literal string constant. */
10620 fmt_str
= c_getstr (fmt
);
10621 if (fmt_str
== NULL
)
10624 if (!init_target_chars ())
10627 /* If the format doesn't contain % args or %%, we know its size. */
10628 if (strchr (fmt_str
, target_percent
) == 0)
10629 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
10630 /* If the format is "%s" and first ... argument is a string literal,
10632 else if (fcode
== BUILT_IN_SPRINTF_CHK
10633 && strcmp (fmt_str
, target_percent_s
) == 0)
10639 arg
= CALL_EXPR_ARG (exp
, 4);
10640 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
10643 len
= c_strlen (arg
, 1);
10644 if (!len
|| ! tree_fits_uhwi_p (len
))
10650 /* Add one for the terminating nul. */
10651 len
= fold_build2 (PLUS_EXPR
, TREE_TYPE (len
), len
, size_one_node
);
10653 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, /*size=*/NULL_TREE
,
10654 /*maxread=*/NULL_TREE
, len
, size
);
10657 /* Emit warning if a free is called with address of a variable. */
10660 maybe_emit_free_warning (tree exp
)
10662 if (call_expr_nargs (exp
) != 1)
10665 tree arg
= CALL_EXPR_ARG (exp
, 0);
10668 if (TREE_CODE (arg
) != ADDR_EXPR
)
10671 arg
= get_base_address (TREE_OPERAND (arg
, 0));
10672 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
10675 if (SSA_VAR_P (arg
))
10676 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
10677 "%Kattempt to free a non-heap object %qD", exp
, arg
);
10679 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
10680 "%Kattempt to free a non-heap object", exp
);
10683 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10687 fold_builtin_object_size (tree ptr
, tree ost
)
10689 unsigned HOST_WIDE_INT bytes
;
10690 int object_size_type
;
10692 if (!validate_arg (ptr
, POINTER_TYPE
)
10693 || !validate_arg (ost
, INTEGER_TYPE
))
10698 if (TREE_CODE (ost
) != INTEGER_CST
10699 || tree_int_cst_sgn (ost
) < 0
10700 || compare_tree_int (ost
, 3) > 0)
10703 object_size_type
= tree_to_shwi (ost
);
10705 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10706 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10707 and (size_t) 0 for types 2 and 3. */
10708 if (TREE_SIDE_EFFECTS (ptr
))
10709 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
10711 if (TREE_CODE (ptr
) == ADDR_EXPR
)
10713 compute_builtin_object_size (ptr
, object_size_type
, &bytes
);
10714 if (wi::fits_to_tree_p (bytes
, size_type_node
))
10715 return build_int_cstu (size_type_node
, bytes
);
10717 else if (TREE_CODE (ptr
) == SSA_NAME
)
10719 /* If object size is not known yet, delay folding until
10720 later. Maybe subsequent passes will help determining
10722 if (compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
10723 && wi::fits_to_tree_p (bytes
, size_type_node
))
10724 return build_int_cstu (size_type_node
, bytes
);
10730 /* Builtins with folding operations that operate on "..." arguments
10731 need special handling; we need to store the arguments in a convenient
10732 data structure before attempting any folding. Fortunately there are
10733 only a few builtins that fall into this category. FNDECL is the
10734 function, EXP is the CALL_EXPR for the call. */
10737 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
10739 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10740 tree ret
= NULL_TREE
;
10744 case BUILT_IN_FPCLASSIFY
:
10745 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
10753 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10754 SET_EXPR_LOCATION (ret
, loc
);
10755 TREE_NO_WARNING (ret
) = 1;
10761 /* Initialize format string characters in the target charset. */
10764 init_target_chars (void)
10769 target_newline
= lang_hooks
.to_target_charset ('\n');
10770 target_percent
= lang_hooks
.to_target_charset ('%');
10771 target_c
= lang_hooks
.to_target_charset ('c');
10772 target_s
= lang_hooks
.to_target_charset ('s');
10773 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
10777 target_percent_c
[0] = target_percent
;
10778 target_percent_c
[1] = target_c
;
10779 target_percent_c
[2] = '\0';
10781 target_percent_s
[0] = target_percent
;
10782 target_percent_s
[1] = target_s
;
10783 target_percent_s
[2] = '\0';
10785 target_percent_s_newline
[0] = target_percent
;
10786 target_percent_s_newline
[1] = target_s
;
10787 target_percent_s_newline
[2] = target_newline
;
10788 target_percent_s_newline
[3] = '\0';
10795 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10796 and no overflow/underflow occurred. INEXACT is true if M was not
10797 exactly calculated. TYPE is the tree type for the result. This
10798 function assumes that you cleared the MPFR flags and then
10799 calculated M to see if anything subsequently set a flag prior to
10800 entering this function. Return NULL_TREE if any checks fail. */
10803 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
10805 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10806 overflow/underflow occurred. If -frounding-math, proceed iff the
10807 result of calling FUNC was exact. */
10808 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10809 && (!flag_rounding_math
|| !inexact
))
10811 REAL_VALUE_TYPE rr
;
10813 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
10814 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10815 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10816 but the mpft_t is not, then we underflowed in the
10818 if (real_isfinite (&rr
)
10819 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
10821 REAL_VALUE_TYPE rmode
;
10823 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
10824 /* Proceed iff the specified mode can hold the value. */
10825 if (real_identical (&rmode
, &rr
))
10826 return build_real (type
, rmode
);
10832 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10833 number and no overflow/underflow occurred. INEXACT is true if M
10834 was not exactly calculated. TYPE is the tree type for the result.
10835 This function assumes that you cleared the MPFR flags and then
10836 calculated M to see if anything subsequently set a flag prior to
10837 entering this function. Return NULL_TREE if any checks fail, if
10838 FORCE_CONVERT is true, then bypass the checks. */
10841 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
10843 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10844 overflow/underflow occurred. If -frounding-math, proceed iff the
10845 result of calling FUNC was exact. */
10847 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
10848 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10849 && (!flag_rounding_math
|| !inexact
)))
10851 REAL_VALUE_TYPE re
, im
;
10853 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
10854 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
10855 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10856 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10857 but the mpft_t is not, then we underflowed in the
10860 || (real_isfinite (&re
) && real_isfinite (&im
)
10861 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
10862 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
10864 REAL_VALUE_TYPE re_mode
, im_mode
;
10866 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
10867 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
10868 /* Proceed iff the specified mode can hold the value. */
10870 || (real_identical (&re_mode
, &re
)
10871 && real_identical (&im_mode
, &im
)))
10872 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
10873 build_real (TREE_TYPE (type
), im_mode
));
10879 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10880 the pointer *(ARG_QUO) and return the result. The type is taken
10881 from the type of ARG0 and is used for setting the precision of the
10882 calculation and results. */
10885 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
10887 tree
const type
= TREE_TYPE (arg0
);
10888 tree result
= NULL_TREE
;
10893 /* To proceed, MPFR must exactly represent the target floating point
10894 format, which only happens when the target base equals two. */
10895 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
10896 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
10897 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
10899 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
10900 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
10902 if (real_isfinite (ra0
) && real_isfinite (ra1
))
10904 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
10905 const int prec
= fmt
->p
;
10906 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
10911 mpfr_inits2 (prec
, m0
, m1
, NULL
);
10912 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
10913 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
10914 mpfr_clear_flags ();
10915 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
10916 /* Remquo is independent of the rounding mode, so pass
10917 inexact=0 to do_mpfr_ckconv(). */
10918 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
10919 mpfr_clears (m0
, m1
, NULL
);
10922 /* MPFR calculates quo in the host's long so it may
10923 return more bits in quo than the target int can hold
10924 if sizeof(host long) > sizeof(target int). This can
10925 happen even for native compilers in LP64 mode. In
10926 these cases, modulo the quo value with the largest
10927 number that the target int can hold while leaving one
10928 bit for the sign. */
10929 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
10930 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
10932 /* Dereference the quo pointer argument. */
10933 arg_quo
= build_fold_indirect_ref (arg_quo
);
10934 /* Proceed iff a valid pointer type was passed in. */
10935 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
10937 /* Set the value. */
10939 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
10940 build_int_cst (TREE_TYPE (arg_quo
),
10942 TREE_SIDE_EFFECTS (result_quo
) = 1;
10943 /* Combine the quo assignment with the rem. */
10944 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
10945 result_quo
, result_rem
));
10953 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10954 resulting value as a tree with type TYPE. The mpfr precision is
10955 set to the precision of TYPE. We assume that this mpfr function
10956 returns zero if the result could be calculated exactly within the
10957 requested precision. In addition, the integer pointer represented
10958 by ARG_SG will be dereferenced and set to the appropriate signgam
10962 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
10964 tree result
= NULL_TREE
;
10968 /* To proceed, MPFR must exactly represent the target floating point
10969 format, which only happens when the target base equals two. Also
10970 verify ARG is a constant and that ARG_SG is an int pointer. */
10971 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
10972 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
10973 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
10974 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
10976 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
10978 /* In addition to NaN and Inf, the argument cannot be zero or a
10979 negative integer. */
10980 if (real_isfinite (ra
)
10981 && ra
->cl
!= rvc_zero
10982 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
10984 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
10985 const int prec
= fmt
->p
;
10986 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
10991 mpfr_init2 (m
, prec
);
10992 mpfr_from_real (m
, ra
, GMP_RNDN
);
10993 mpfr_clear_flags ();
10994 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
10995 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
11001 /* Dereference the arg_sg pointer argument. */
11002 arg_sg
= build_fold_indirect_ref (arg_sg
);
11003 /* Assign the signgam value into *arg_sg. */
11004 result_sg
= fold_build2 (MODIFY_EXPR
,
11005 TREE_TYPE (arg_sg
), arg_sg
,
11006 build_int_cst (TREE_TYPE (arg_sg
), sg
));
11007 TREE_SIDE_EFFECTS (result_sg
) = 1;
11008 /* Combine the signgam assignment with the lgamma result. */
11009 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11010 result_sg
, result_lg
));
11018 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11019 mpc function FUNC on it and return the resulting value as a tree
11020 with type TYPE. The mpfr precision is set to the precision of
11021 TYPE. We assume that function FUNC returns zero if the result
11022 could be calculated exactly within the requested precision. If
11023 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11024 in the arguments and/or results. */
11027 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
11028 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
11030 tree result
= NULL_TREE
;
11035 /* To proceed, MPFR must exactly represent the target floating point
11036 format, which only happens when the target base equals two. */
11037 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
11038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
11039 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
11040 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
11041 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
11043 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
11044 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
11045 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
11046 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
11049 || (real_isfinite (re0
) && real_isfinite (im0
)
11050 && real_isfinite (re1
) && real_isfinite (im1
)))
11052 const struct real_format
*const fmt
=
11053 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11054 const int prec
= fmt
->p
;
11055 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11056 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11060 mpc_init2 (m0
, prec
);
11061 mpc_init2 (m1
, prec
);
11062 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
11063 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
11064 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
11065 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
11066 mpfr_clear_flags ();
11067 inexact
= func (m0
, m0
, m1
, crnd
);
11068 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
11077 /* A wrapper function for builtin folding that prevents warnings for
11078 "statement without effect" and the like, caused by removing the
11079 call node earlier than the warning is generated. */
11082 fold_call_stmt (gcall
*stmt
, bool ignore
)
11084 tree ret
= NULL_TREE
;
11085 tree fndecl
= gimple_call_fndecl (stmt
);
11086 location_t loc
= gimple_location (stmt
);
11087 if (fndecl
&& fndecl_built_in_p (fndecl
)
11088 && !gimple_call_va_arg_pack_p (stmt
))
11090 int nargs
= gimple_call_num_args (stmt
);
11091 tree
*args
= (nargs
> 0
11092 ? gimple_call_arg_ptr (stmt
, 0)
11093 : &error_mark_node
);
11095 if (avoid_folding_inline_builtin (fndecl
))
11097 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11099 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
11103 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11106 /* Propagate location information from original call to
11107 expansion of builtin. Otherwise things like
11108 maybe_emit_chk_warning, that operate on the expansion
11109 of a builtin, will use the wrong location information. */
11110 if (gimple_has_location (stmt
))
11112 tree realret
= ret
;
11113 if (TREE_CODE (ret
) == NOP_EXPR
)
11114 realret
= TREE_OPERAND (ret
, 0);
11115 if (CAN_HAVE_LOCATION_P (realret
)
11116 && !EXPR_HAS_LOCATION (realret
))
11117 SET_EXPR_LOCATION (realret
, loc
);
11127 /* Look up the function in builtin_decl that corresponds to DECL
11128 and set ASMSPEC as its user assembler name. DECL must be a
11129 function decl that declares a builtin. */
11132 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
11134 gcc_assert (fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
11137 tree builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
11138 set_user_assembler_name (builtin
, asmspec
);
11140 if (DECL_FUNCTION_CODE (decl
) == BUILT_IN_FFS
11141 && INT_TYPE_SIZE
< BITS_PER_WORD
)
11143 scalar_int_mode mode
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
11144 set_user_assembler_libfunc ("ffs", asmspec
);
11145 set_optab_libfunc (ffs_optab
, mode
, "ffs");
11149 /* Return true if DECL is a builtin that expands to a constant or similarly
11152 is_simple_builtin (tree decl
)
11154 if (decl
&& fndecl_built_in_p (decl
, BUILT_IN_NORMAL
))
11155 switch (DECL_FUNCTION_CODE (decl
))
11157 /* Builtins that expand to constants. */
11158 case BUILT_IN_CONSTANT_P
:
11159 case BUILT_IN_EXPECT
:
11160 case BUILT_IN_OBJECT_SIZE
:
11161 case BUILT_IN_UNREACHABLE
:
11162 /* Simple register moves or loads from stack. */
11163 case BUILT_IN_ASSUME_ALIGNED
:
11164 case BUILT_IN_RETURN_ADDRESS
:
11165 case BUILT_IN_EXTRACT_RETURN_ADDR
:
11166 case BUILT_IN_FROB_RETURN_ADDR
:
11167 case BUILT_IN_RETURN
:
11168 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
11169 case BUILT_IN_FRAME_ADDRESS
:
11170 case BUILT_IN_VA_END
:
11171 case BUILT_IN_STACK_SAVE
:
11172 case BUILT_IN_STACK_RESTORE
:
11173 /* Exception state returns or moves registers around. */
11174 case BUILT_IN_EH_FILTER
:
11175 case BUILT_IN_EH_POINTER
:
11176 case BUILT_IN_EH_COPY_VALUES
:
11186 /* Return true if DECL is a builtin that is not expensive, i.e., they are
11187 most probably expanded inline into reasonably simple code. This is a
11188 superset of is_simple_builtin. */
11190 is_inexpensive_builtin (tree decl
)
11194 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
11196 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11197 switch (DECL_FUNCTION_CODE (decl
))
11200 CASE_BUILT_IN_ALLOCA
:
11201 case BUILT_IN_BSWAP16
:
11202 case BUILT_IN_BSWAP32
:
11203 case BUILT_IN_BSWAP64
:
11205 case BUILT_IN_CLZIMAX
:
11206 case BUILT_IN_CLZL
:
11207 case BUILT_IN_CLZLL
:
11209 case BUILT_IN_CTZIMAX
:
11210 case BUILT_IN_CTZL
:
11211 case BUILT_IN_CTZLL
:
11213 case BUILT_IN_FFSIMAX
:
11214 case BUILT_IN_FFSL
:
11215 case BUILT_IN_FFSLL
:
11216 case BUILT_IN_IMAXABS
:
11217 case BUILT_IN_FINITE
:
11218 case BUILT_IN_FINITEF
:
11219 case BUILT_IN_FINITEL
:
11220 case BUILT_IN_FINITED32
:
11221 case BUILT_IN_FINITED64
:
11222 case BUILT_IN_FINITED128
:
11223 case BUILT_IN_FPCLASSIFY
:
11224 case BUILT_IN_ISFINITE
:
11225 case BUILT_IN_ISINF_SIGN
:
11226 case BUILT_IN_ISINF
:
11227 case BUILT_IN_ISINFF
:
11228 case BUILT_IN_ISINFL
:
11229 case BUILT_IN_ISINFD32
:
11230 case BUILT_IN_ISINFD64
:
11231 case BUILT_IN_ISINFD128
:
11232 case BUILT_IN_ISNAN
:
11233 case BUILT_IN_ISNANF
:
11234 case BUILT_IN_ISNANL
:
11235 case BUILT_IN_ISNAND32
:
11236 case BUILT_IN_ISNAND64
:
11237 case BUILT_IN_ISNAND128
:
11238 case BUILT_IN_ISNORMAL
:
11239 case BUILT_IN_ISGREATER
:
11240 case BUILT_IN_ISGREATEREQUAL
:
11241 case BUILT_IN_ISLESS
:
11242 case BUILT_IN_ISLESSEQUAL
:
11243 case BUILT_IN_ISLESSGREATER
:
11244 case BUILT_IN_ISUNORDERED
:
11245 case BUILT_IN_VA_ARG_PACK
:
11246 case BUILT_IN_VA_ARG_PACK_LEN
:
11247 case BUILT_IN_VA_COPY
:
11248 case BUILT_IN_TRAP
:
11249 case BUILT_IN_SAVEREGS
:
11250 case BUILT_IN_POPCOUNTL
:
11251 case BUILT_IN_POPCOUNTLL
:
11252 case BUILT_IN_POPCOUNTIMAX
:
11253 case BUILT_IN_POPCOUNT
:
11254 case BUILT_IN_PARITYL
:
11255 case BUILT_IN_PARITYLL
:
11256 case BUILT_IN_PARITYIMAX
:
11257 case BUILT_IN_PARITY
:
11258 case BUILT_IN_LABS
:
11259 case BUILT_IN_LLABS
:
11260 case BUILT_IN_PREFETCH
:
11261 case BUILT_IN_ACC_ON_DEVICE
:
11265 return is_simple_builtin (decl
);
11271 /* Return true if T is a constant and the value cast to a target char
11272 can be represented by a host char.
11273 Store the casted char constant in *P if so. */
11276 target_char_cst_p (tree t
, char *p
)
11278 if (!tree_fits_uhwi_p (t
) || CHAR_TYPE_SIZE
!= HOST_BITS_PER_CHAR
)
11281 *p
= (char)tree_to_uhwi (t
);
11285 /* Return true if the builtin DECL is implemented in a standard library.
11286 Otherwise returns false which doesn't guarantee it is not (thus the list of
11287 handled builtins below may be incomplete). */
11290 builtin_with_linkage_p (tree decl
)
11292 if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11293 switch (DECL_FUNCTION_CODE (decl
))
11295 CASE_FLT_FN (BUILT_IN_ACOS
):
11296 CASE_FLT_FN (BUILT_IN_ACOSH
):
11297 CASE_FLT_FN (BUILT_IN_ASIN
):
11298 CASE_FLT_FN (BUILT_IN_ASINH
):
11299 CASE_FLT_FN (BUILT_IN_ATAN
):
11300 CASE_FLT_FN (BUILT_IN_ATANH
):
11301 CASE_FLT_FN (BUILT_IN_ATAN2
):
11302 CASE_FLT_FN (BUILT_IN_CBRT
):
11303 CASE_FLT_FN (BUILT_IN_CEIL
):
11304 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL
):
11305 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
11306 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
11307 CASE_FLT_FN (BUILT_IN_COS
):
11308 CASE_FLT_FN (BUILT_IN_COSH
):
11309 CASE_FLT_FN (BUILT_IN_ERF
):
11310 CASE_FLT_FN (BUILT_IN_ERFC
):
11311 CASE_FLT_FN (BUILT_IN_EXP
):
11312 CASE_FLT_FN (BUILT_IN_EXP2
):
11313 CASE_FLT_FN (BUILT_IN_EXPM1
):
11314 CASE_FLT_FN (BUILT_IN_FABS
):
11315 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
11316 CASE_FLT_FN (BUILT_IN_FDIM
):
11317 CASE_FLT_FN (BUILT_IN_FLOOR
):
11318 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR
):
11319 CASE_FLT_FN (BUILT_IN_FMA
):
11320 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
11321 CASE_FLT_FN (BUILT_IN_FMAX
):
11322 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX
):
11323 CASE_FLT_FN (BUILT_IN_FMIN
):
11324 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN
):
11325 CASE_FLT_FN (BUILT_IN_FMOD
):
11326 CASE_FLT_FN (BUILT_IN_FREXP
):
11327 CASE_FLT_FN (BUILT_IN_HYPOT
):
11328 CASE_FLT_FN (BUILT_IN_ILOGB
):
11329 CASE_FLT_FN (BUILT_IN_LDEXP
):
11330 CASE_FLT_FN (BUILT_IN_LGAMMA
):
11331 CASE_FLT_FN (BUILT_IN_LLRINT
):
11332 CASE_FLT_FN (BUILT_IN_LLROUND
):
11333 CASE_FLT_FN (BUILT_IN_LOG
):
11334 CASE_FLT_FN (BUILT_IN_LOG10
):
11335 CASE_FLT_FN (BUILT_IN_LOG1P
):
11336 CASE_FLT_FN (BUILT_IN_LOG2
):
11337 CASE_FLT_FN (BUILT_IN_LOGB
):
11338 CASE_FLT_FN (BUILT_IN_LRINT
):
11339 CASE_FLT_FN (BUILT_IN_LROUND
):
11340 CASE_FLT_FN (BUILT_IN_MODF
):
11341 CASE_FLT_FN (BUILT_IN_NAN
):
11342 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
11343 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT
):
11344 CASE_FLT_FN (BUILT_IN_NEXTAFTER
):
11345 CASE_FLT_FN (BUILT_IN_NEXTTOWARD
):
11346 CASE_FLT_FN (BUILT_IN_POW
):
11347 CASE_FLT_FN (BUILT_IN_REMAINDER
):
11348 CASE_FLT_FN (BUILT_IN_REMQUO
):
11349 CASE_FLT_FN (BUILT_IN_RINT
):
11350 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT
):
11351 CASE_FLT_FN (BUILT_IN_ROUND
):
11352 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND
):
11353 CASE_FLT_FN (BUILT_IN_SCALBLN
):
11354 CASE_FLT_FN (BUILT_IN_SCALBN
):
11355 CASE_FLT_FN (BUILT_IN_SIN
):
11356 CASE_FLT_FN (BUILT_IN_SINH
):
11357 CASE_FLT_FN (BUILT_IN_SINCOS
):
11358 CASE_FLT_FN (BUILT_IN_SQRT
):
11359 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT
):
11360 CASE_FLT_FN (BUILT_IN_TAN
):
11361 CASE_FLT_FN (BUILT_IN_TANH
):
11362 CASE_FLT_FN (BUILT_IN_TGAMMA
):
11363 CASE_FLT_FN (BUILT_IN_TRUNC
):
11364 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC
):