1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
35 #include "stringpool.h"
37 #include "tree-ssanames.h"
42 #include "diagnostic-core.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
50 #include "tree-object-size.h"
60 #include "typeclass.h"
61 #include "langhooks.h"
62 #include "value-prof.h"
64 #include "stringpool.h"
67 #include "tree-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
74 struct target_builtins default_target_builtins
;
76 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names
[BUILT_IN_LAST
]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names
[(int) END_BUILTINS
] =
86 #include "builtins.def"
89 /* Set up an array of builtin_info_type, make sure each element decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info
[(int)END_BUILTINS
];
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p
;
96 static rtx
c_readstr (const char *, scalar_int_mode
);
97 static int target_char_cast (tree
, char *);
98 static rtx
get_memory_rtx (tree
, tree
);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx
result_vector (int, rtx
);
102 static void expand_builtin_prefetch (tree
);
103 static rtx
expand_builtin_apply_args (void);
104 static rtx
expand_builtin_apply_args_1 (void);
105 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
106 static void expand_builtin_return (rtx
);
107 static enum type_class
type_to_class (tree
);
108 static rtx
expand_builtin_classify_type (tree
);
109 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
110 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
111 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
112 static rtx
expand_builtin_sincos (tree
);
113 static rtx
expand_builtin_cexpi (tree
, rtx
);
114 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
115 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
116 static rtx
expand_builtin_next_arg (void);
117 static rtx
expand_builtin_va_start (tree
);
118 static rtx
expand_builtin_va_end (tree
);
119 static rtx
expand_builtin_va_copy (tree
);
120 static rtx
expand_builtin_strcmp (tree
, rtx
);
121 static rtx
expand_builtin_strncmp (tree
, rtx
, machine_mode
);
122 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
123 static rtx
expand_builtin_memchr (tree
, rtx
);
124 static rtx
expand_builtin_memcpy (tree
, rtx
);
125 static rtx
expand_builtin_memcpy_with_bounds (tree
, rtx
);
126 static rtx
expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
127 rtx target
, tree exp
, int endp
);
128 static rtx
expand_builtin_memmove (tree
, rtx
);
129 static rtx
expand_builtin_mempcpy (tree
, rtx
);
130 static rtx
expand_builtin_mempcpy_with_bounds (tree
, rtx
);
131 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
, tree
, int);
132 static rtx
expand_builtin_strcat (tree
, rtx
);
133 static rtx
expand_builtin_strcpy (tree
, rtx
);
134 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
135 static rtx
expand_builtin_stpcpy (tree
, rtx
, machine_mode
);
136 static rtx
expand_builtin_stpncpy (tree
, rtx
);
137 static rtx
expand_builtin_strncat (tree
, rtx
);
138 static rtx
expand_builtin_strncpy (tree
, rtx
);
139 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, scalar_int_mode
);
140 static rtx
expand_builtin_memset (tree
, rtx
, machine_mode
);
141 static rtx
expand_builtin_memset_with_bounds (tree
, rtx
, machine_mode
);
142 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, machine_mode
, tree
);
143 static rtx
expand_builtin_bzero (tree
);
144 static rtx
expand_builtin_strlen (tree
, rtx
, machine_mode
);
145 static rtx
expand_builtin_alloca (tree
);
146 static rtx
expand_builtin_unop (machine_mode
, tree
, rtx
, rtx
, optab
);
147 static rtx
expand_builtin_frame_address (tree
, tree
);
148 static tree
stabilize_va_list_loc (location_t
, tree
, int);
149 static rtx
expand_builtin_expect (tree
, rtx
);
150 static tree
fold_builtin_constant_p (tree
);
151 static tree
fold_builtin_classify_type (tree
);
152 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
153 static tree
fold_builtin_inf (location_t
, tree
, int);
154 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
155 static bool validate_arg (const_tree
, enum tree_code code
);
156 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
157 static rtx
expand_builtin_signbit (tree
, rtx
);
158 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
159 static tree
fold_builtin_isascii (location_t
, tree
);
160 static tree
fold_builtin_toascii (location_t
, tree
);
161 static tree
fold_builtin_isdigit (location_t
, tree
);
162 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
163 static tree
fold_builtin_abs (location_t
, tree
, tree
);
164 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
166 static tree
fold_builtin_0 (location_t
, tree
);
167 static tree
fold_builtin_1 (location_t
, tree
, tree
);
168 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
);
169 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
);
170 static tree
fold_builtin_varargs (location_t
, tree
, tree
*, int);
172 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
173 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
174 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
176 static rtx
expand_builtin_object_size (tree
);
177 static rtx
expand_builtin_memory_chk (tree
, rtx
, machine_mode
,
178 enum built_in_function
);
179 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
180 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
181 static void maybe_emit_free_warning (tree
);
182 static tree
fold_builtin_object_size (tree
, tree
);
184 unsigned HOST_WIDE_INT target_newline
;
185 unsigned HOST_WIDE_INT target_percent
;
186 static unsigned HOST_WIDE_INT target_c
;
187 static unsigned HOST_WIDE_INT target_s
;
188 char target_percent_c
[3];
189 char target_percent_s
[3];
190 char target_percent_s_newline
[4];
191 static tree
do_mpfr_remquo (tree
, tree
, tree
);
192 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
193 static void expand_builtin_sync_synchronize (void);
195 /* Return true if NAME starts with __builtin_ or __sync_. */
198 is_builtin_name (const char *name
)
200 if (strncmp (name
, "__builtin_", 10) == 0)
202 if (strncmp (name
, "__sync_", 7) == 0)
204 if (strncmp (name
, "__atomic_", 9) == 0)
210 /* Return true if DECL is a function symbol representing a built-in. */
213 is_builtin_fn (tree decl
)
215 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
);
218 /* Return true if NODE should be considered for inline expansion regardless
219 of the optimization level. This means whenever a function is invoked with
220 its "internal" name, which normally contains the prefix "__builtin". */
223 called_as_built_in (tree node
)
225 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
226 we want the name used to call the function, not the name it
228 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
229 return is_builtin_name (name
);
232 /* Compute values M and N such that M divides (address of EXP - N) and such
233 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
234 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
235 *alignp and any bit-offset to *bitposp.
237 Note that the address (and thus the alignment) computed here is based
238 on the address to which a symbol resolves, whereas DECL_ALIGN is based
239 on the address at which an object is actually located. These two
240 addresses are not always the same. For example, on ARM targets,
241 the address &foo of a Thumb function foo() has the lowest bit set,
242 whereas foo() itself starts on an even address.
244 If ADDR_P is true we are taking the address of the memory reference EXP
245 and thus cannot rely on the access taking place. */
248 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
249 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
251 HOST_WIDE_INT bitsize
, bitpos
;
254 int unsignedp
, reversep
, volatilep
;
255 unsigned int align
= BITS_PER_UNIT
;
256 bool known_alignment
= false;
258 /* Get the innermost object and the constant (bitpos) and possibly
259 variable (offset) offset of the access. */
260 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
261 &unsignedp
, &reversep
, &volatilep
);
263 /* Extract alignment information from the innermost object and
264 possibly adjust bitpos and offset. */
265 if (TREE_CODE (exp
) == FUNCTION_DECL
)
267 /* Function addresses can encode extra information besides their
268 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
269 allows the low bit to be used as a virtual bit, we know
270 that the address itself must be at least 2-byte aligned. */
271 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
272 align
= 2 * BITS_PER_UNIT
;
274 else if (TREE_CODE (exp
) == LABEL_DECL
)
276 else if (TREE_CODE (exp
) == CONST_DECL
)
278 /* The alignment of a CONST_DECL is determined by its initializer. */
279 exp
= DECL_INITIAL (exp
);
280 align
= TYPE_ALIGN (TREE_TYPE (exp
));
281 if (CONSTANT_CLASS_P (exp
))
282 align
= targetm
.constant_alignment (exp
, align
);
284 known_alignment
= true;
286 else if (DECL_P (exp
))
288 align
= DECL_ALIGN (exp
);
289 known_alignment
= true;
291 else if (TREE_CODE (exp
) == INDIRECT_REF
292 || TREE_CODE (exp
) == MEM_REF
293 || TREE_CODE (exp
) == TARGET_MEM_REF
)
295 tree addr
= TREE_OPERAND (exp
, 0);
297 unsigned HOST_WIDE_INT ptr_bitpos
;
298 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
300 /* If the address is explicitly aligned, handle that. */
301 if (TREE_CODE (addr
) == BIT_AND_EXPR
302 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
304 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
305 ptr_bitmask
*= BITS_PER_UNIT
;
306 align
= least_bit_hwi (ptr_bitmask
);
307 addr
= TREE_OPERAND (addr
, 0);
311 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
312 align
= MAX (ptr_align
, align
);
314 /* Re-apply explicit alignment to the bitpos. */
315 ptr_bitpos
&= ptr_bitmask
;
317 /* The alignment of the pointer operand in a TARGET_MEM_REF
318 has to take the variable offset parts into account. */
319 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
323 unsigned HOST_WIDE_INT step
= 1;
325 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
326 align
= MIN (align
, least_bit_hwi (step
) * BITS_PER_UNIT
);
328 if (TMR_INDEX2 (exp
))
329 align
= BITS_PER_UNIT
;
330 known_alignment
= false;
333 /* When EXP is an actual memory reference then we can use
334 TYPE_ALIGN of a pointer indirection to derive alignment.
335 Do so only if get_pointer_alignment_1 did not reveal absolute
336 alignment knowledge and if using that alignment would
337 improve the situation. */
339 if (!addr_p
&& !known_alignment
340 && (talign
= min_align_of_type (TREE_TYPE (exp
)) * BITS_PER_UNIT
)
345 /* Else adjust bitpos accordingly. */
346 bitpos
+= ptr_bitpos
;
347 if (TREE_CODE (exp
) == MEM_REF
348 || TREE_CODE (exp
) == TARGET_MEM_REF
)
349 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
352 else if (TREE_CODE (exp
) == STRING_CST
)
354 /* STRING_CST are the only constant objects we allow to be not
355 wrapped inside a CONST_DECL. */
356 align
= TYPE_ALIGN (TREE_TYPE (exp
));
357 if (CONSTANT_CLASS_P (exp
))
358 align
= targetm
.constant_alignment (exp
, align
);
360 known_alignment
= true;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
367 unsigned int trailing_zeros
= tree_ctz (offset
);
368 if (trailing_zeros
< HOST_BITS_PER_INT
)
370 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
372 align
= MIN (align
, inner
);
377 *bitposp
= bitpos
& (*alignp
- 1);
378 return known_alignment
;
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
387 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
388 unsigned HOST_WIDE_INT
*bitposp
)
390 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
393 /* Return the alignment in bits of EXP, an object. */
396 get_object_alignment (tree exp
)
398 unsigned HOST_WIDE_INT bitpos
= 0;
401 get_object_alignment_1 (exp
, &align
, &bitpos
);
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
407 align
= least_bit_hwi (bitpos
);
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in alignp and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
416 If EXP is not a pointer, false is returned too. */
419 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
420 unsigned HOST_WIDE_INT
*bitposp
)
424 if (TREE_CODE (exp
) == ADDR_EXPR
)
425 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
426 alignp
, bitposp
, true);
427 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
430 unsigned HOST_WIDE_INT bitpos
;
431 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
433 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
434 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
437 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
438 if (trailing_zeros
< HOST_BITS_PER_INT
)
440 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
442 align
= MIN (align
, inner
);
446 *bitposp
= bitpos
& (align
- 1);
449 else if (TREE_CODE (exp
) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp
)))
452 unsigned int ptr_align
, ptr_misalign
;
453 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
455 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
457 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
458 *alignp
= ptr_align
* BITS_PER_UNIT
;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
462 *alignp
= 1u << (HOST_BITS_PER_INT
- 1);
463 /* We cannot really tell whether this result is an approximation. */
469 *alignp
= BITS_PER_UNIT
;
473 else if (TREE_CODE (exp
) == INTEGER_CST
)
475 *alignp
= BIGGEST_ALIGNMENT
;
476 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
477 & (BIGGEST_ALIGNMENT
- 1));
482 *alignp
= BITS_PER_UNIT
;
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
494 get_pointer_alignment (tree exp
)
496 unsigned HOST_WIDE_INT bitpos
= 0;
499 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
505 align
= least_bit_hwi (bitpos
);
510 /* Return the number of non-zero elements in the sequence
511 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
512 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515 string_length (const void *ptr
, unsigned eltsize
, unsigned maxelts
)
517 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
523 /* Optimize the common case of plain char. */
524 for (n
= 0; n
< maxelts
; n
++)
526 const char *elt
= (const char*) ptr
+ n
;
533 for (n
= 0; n
< maxelts
; n
++)
535 const char *elt
= (const char*) ptr
+ n
* eltsize
;
536 if (!memcmp (elt
, "\0\0\0\0", eltsize
))
543 /* Compute the length of a null-terminated character string or wide
544 character string handling character sizes of 1, 2, and 4 bytes.
545 TREE_STRING_LENGTH is not the right way because it evaluates to
546 the size of the character array in bytes (as opposed to characters)
547 and because it can contain a zero byte in the middle.
549 ONLY_VALUE should be nonzero if the result is not going to be emitted
550 into the instruction stream and zero if it is going to be expanded.
551 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
552 is returned, otherwise NULL, since
553 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
554 evaluate the side-effects.
556 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
557 accesses. Note that this implies the result is not going to be emitted
558 into the instruction stream.
560 The value returned is of type `ssizetype'.
562 Unfortunately, string_constant can't access the values of const char
563 arrays with initializers, so neither can we do so here. */
566 c_strlen (tree src
, int only_value
)
569 if (TREE_CODE (src
) == COND_EXPR
570 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
574 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
575 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
576 if (tree_int_cst_equal (len1
, len2
))
580 if (TREE_CODE (src
) == COMPOUND_EXPR
581 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
582 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
584 location_t loc
= EXPR_LOC_OR_LOC (src
, input_location
);
586 /* Offset from the beginning of the string in bytes. */
588 src
= string_constant (src
, &byteoff
);
592 /* Determine the size of the string element. */
594 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src
))));
596 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
598 unsigned maxelts
= TREE_STRING_LENGTH (src
) / eltsize
- 1;
600 /* PTR can point to the byte representation of any string type, including
601 char* and wchar_t*. */
602 const char *ptr
= TREE_STRING_POINTER (src
);
604 if (byteoff
&& TREE_CODE (byteoff
) != INTEGER_CST
)
606 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
607 compute the offset to the following null if we don't know where to
608 start searching for it. */
609 if (string_length (ptr
, eltsize
, maxelts
) < maxelts
)
611 /* Return when an embedded null character is found. */
615 /* We don't know the starting offset, but we do know that the string
616 has no internal zero bytes. We can assume that the offset falls
617 within the bounds of the string; otherwise, the programmer deserves
618 what he gets. Subtract the offset from the length of the string,
619 and return that. This would perhaps not be valid if we were dealing
620 with named arrays in addition to literal string constants. */
622 return size_diffop_loc (loc
, size_int (maxelts
* eltsize
), byteoff
);
625 /* Offset from the beginning of the string in elements. */
626 HOST_WIDE_INT eltoff
;
628 /* We have a known offset into the string. Start searching there for
629 a null character if we can represent it as a single HOST_WIDE_INT. */
632 else if (! tree_fits_shwi_p (byteoff
))
635 eltoff
= tree_to_shwi (byteoff
) / eltsize
;
637 /* If the offset is known to be out of bounds, warn, and call strlen at
639 if (eltoff
< 0 || eltoff
> maxelts
)
641 /* Suppress multiple warnings for propagated constant strings. */
643 && !TREE_NO_WARNING (src
))
645 warning_at (loc
, 0, "offset %qwi outside bounds of constant string",
647 TREE_NO_WARNING (src
) = 1;
652 /* Use strlen to search for the first zero byte. Since any strings
653 constructed with build_string will have nulls appended, we win even
654 if we get handed something like (char[4])"abcd".
656 Since ELTOFF is our starting index into the string, no further
657 calculation is needed. */
658 unsigned len
= string_length (ptr
+ eltoff
* eltsize
, eltsize
,
661 return ssize_int (len
);
664 /* Return a constant integer corresponding to target reading
665 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
668 c_readstr (const char *str
, scalar_int_mode mode
)
672 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
674 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
675 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
676 / HOST_BITS_PER_WIDE_INT
;
678 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
679 for (i
= 0; i
< len
; i
++)
683 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
686 if (WORDS_BIG_ENDIAN
)
687 j
= GET_MODE_SIZE (mode
) - i
- 1;
688 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
689 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
690 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
694 ch
= (unsigned char) str
[i
];
695 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
698 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
699 return immed_wide_int_const (c
, mode
);
702 /* Cast a target constant CST to target CHAR and if that value fits into
703 host char type, return zero and put that value into variable pointed to by
707 target_char_cast (tree cst
, char *p
)
709 unsigned HOST_WIDE_INT val
, hostval
;
711 if (TREE_CODE (cst
) != INTEGER_CST
712 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
715 /* Do not care if it fits or not right here. */
716 val
= TREE_INT_CST_LOW (cst
);
718 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
719 val
&= (HOST_WIDE_INT_1U
<< CHAR_TYPE_SIZE
) - 1;
722 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
723 hostval
&= (HOST_WIDE_INT_1U
<< HOST_BITS_PER_CHAR
) - 1;
732 /* Similar to save_expr, but assumes that arbitrary code is not executed
733 in between the multiple evaluations. In particular, we assume that a
734 non-addressable local variable will not be modified. */
737 builtin_save_expr (tree exp
)
739 if (TREE_CODE (exp
) == SSA_NAME
740 || (TREE_ADDRESSABLE (exp
) == 0
741 && (TREE_CODE (exp
) == PARM_DECL
742 || (VAR_P (exp
) && !TREE_STATIC (exp
)))))
745 return save_expr (exp
);
748 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
749 times to get the address of either a higher stack frame, or a return
750 address located within it (depending on FNDECL_CODE). */
753 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
756 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
759 /* For a zero count with __builtin_return_address, we don't care what
760 frame address we return, because target-specific definitions will
761 override us. Therefore frame pointer elimination is OK, and using
762 the soft frame pointer is OK.
764 For a nonzero count, or a zero count with __builtin_frame_address,
765 we require a stable offset from the current frame pointer to the
766 previous one, so we must use the hard frame pointer, and
767 we must disable frame pointer elimination. */
768 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
769 tem
= frame_pointer_rtx
;
772 tem
= hard_frame_pointer_rtx
;
774 /* Tell reload not to eliminate the frame pointer. */
775 crtl
->accesses_prior_frames
= 1;
780 SETUP_FRAME_ADDRESSES ();
782 /* On the SPARC, the return address is not in the frame, it is in a
783 register. There is no way to access it off of the current frame
784 pointer, but it can be accessed off the previous frame pointer by
785 reading the value from the register window save area. */
786 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
789 /* Scan back COUNT frames to the specified frame. */
790 for (i
= 0; i
< count
; i
++)
792 /* Assume the dynamic chain pointer is in the word that the
793 frame address points to, unless otherwise specified. */
794 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
795 tem
= memory_address (Pmode
, tem
);
796 tem
= gen_frame_mem (Pmode
, tem
);
797 tem
= copy_to_reg (tem
);
800 /* For __builtin_frame_address, return what we've got. But, on
801 the SPARC for example, we may have to add a bias. */
802 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
803 return FRAME_ADDR_RTX (tem
);
805 /* For __builtin_return_address, get the return address from that frame. */
806 #ifdef RETURN_ADDR_RTX
807 tem
= RETURN_ADDR_RTX (count
, tem
);
809 tem
= memory_address (Pmode
,
810 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
811 tem
= gen_frame_mem (Pmode
, tem
);
816 /* Alias set used for setjmp buffer. */
817 static alias_set_type setjmp_alias_set
= -1;
819 /* Construct the leading half of a __builtin_setjmp call. Control will
820 return to RECEIVER_LABEL. This is also called directly by the SJLJ
821 exception handling code. */
824 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
826 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
830 if (setjmp_alias_set
== -1)
831 setjmp_alias_set
= new_alias_set ();
833 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
835 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
837 /* We store the frame pointer and the address of receiver_label in
838 the buffer and use the rest of it for the stack save area, which
839 is machine-dependent. */
841 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
842 set_mem_alias_set (mem
, setjmp_alias_set
);
843 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
845 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
846 GET_MODE_SIZE (Pmode
))),
847 set_mem_alias_set (mem
, setjmp_alias_set
);
849 emit_move_insn (validize_mem (mem
),
850 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
852 stack_save
= gen_rtx_MEM (sa_mode
,
853 plus_constant (Pmode
, buf_addr
,
854 2 * GET_MODE_SIZE (Pmode
)));
855 set_mem_alias_set (stack_save
, setjmp_alias_set
);
856 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
858 /* If there is further processing to do, do it. */
859 if (targetm
.have_builtin_setjmp_setup ())
860 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
862 /* We have a nonlocal label. */
863 cfun
->has_nonlocal_label
= 1;
866 /* Construct the trailing part of a __builtin_setjmp call. This is
867 also called directly by the SJLJ exception handling code.
868 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
871 expand_builtin_setjmp_receiver (rtx receiver_label
)
875 /* Mark the FP as used when we get here, so we have to make sure it's
876 marked as used by this function. */
877 emit_use (hard_frame_pointer_rtx
);
879 /* Mark the static chain as clobbered here so life information
880 doesn't get messed up for it. */
881 chain
= rtx_for_static_chain (current_function_decl
, true);
882 if (chain
&& REG_P (chain
))
883 emit_clobber (chain
);
885 /* Now put in the code to restore the frame pointer, and argument
886 pointer, if needed. */
887 if (! targetm
.have_nonlocal_goto ())
889 /* First adjust our frame pointer to its actual value. It was
890 previously set to the start of the virtual area corresponding to
891 the stacked variables when we branched here and now needs to be
892 adjusted to the actual hardware fp value.
894 Assignments to virtual registers are converted by
895 instantiate_virtual_regs into the corresponding assignment
896 to the underlying register (fp in this case) that makes
897 the original assignment true.
898 So the following insn will actually be decrementing fp by
899 TARGET_STARTING_FRAME_OFFSET. */
900 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
902 /* Restoring the frame pointer also modifies the hard frame pointer.
903 Mark it used (so that the previous assignment remains live once
904 the frame pointer is eliminated) and clobbered (to represent the
905 implicit update from the assignment). */
906 emit_use (hard_frame_pointer_rtx
);
907 emit_clobber (hard_frame_pointer_rtx
);
910 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
912 /* If the argument pointer can be eliminated in favor of the
913 frame pointer, we don't need to restore it. We assume here
914 that if such an elimination is present, it can always be used.
915 This is the case on all known machines; if we don't make this
916 assumption, we do unnecessary saving on many machines. */
918 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
920 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
921 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
922 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
925 if (i
== ARRAY_SIZE (elim_regs
))
927 /* Now restore our arg pointer from the address at which it
928 was saved in our stack frame. */
929 emit_move_insn (crtl
->args
.internal_arg_pointer
,
930 copy_to_reg (get_arg_pointer_save_area ()));
934 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
935 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
936 else if (targetm
.have_nonlocal_goto_receiver ())
937 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
941 /* We must not allow the code we just generated to be reordered by
942 scheduling. Specifically, the update of the frame pointer must
943 happen immediately, not later. */
944 emit_insn (gen_blockage ());
947 /* __builtin_longjmp is passed a pointer to an array of five words (not
948 all will be used on all machines). It operates similarly to the C
949 library function of the same name, but is more efficient. Much of
950 the code below is copied from the handling of non-local gotos. */
953 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
956 rtx_insn
*insn
, *last
;
957 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
959 /* DRAP is needed for stack realign if longjmp is expanded to current
961 if (SUPPORTS_STACK_ALIGNMENT
)
962 crtl
->need_drap
= true;
964 if (setjmp_alias_set
== -1)
965 setjmp_alias_set
= new_alias_set ();
967 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
969 buf_addr
= force_reg (Pmode
, buf_addr
);
971 /* We require that the user must pass a second argument of 1, because
972 that is what builtin_setjmp will return. */
973 gcc_assert (value
== const1_rtx
);
975 last
= get_last_insn ();
976 if (targetm
.have_builtin_longjmp ())
977 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
980 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
981 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
982 GET_MODE_SIZE (Pmode
)));
984 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
985 2 * GET_MODE_SIZE (Pmode
)));
986 set_mem_alias_set (fp
, setjmp_alias_set
);
987 set_mem_alias_set (lab
, setjmp_alias_set
);
988 set_mem_alias_set (stack
, setjmp_alias_set
);
990 /* Pick up FP, label, and SP from the block and jump. This code is
991 from expand_goto in stmt.c; see there for detailed comments. */
992 if (targetm
.have_nonlocal_goto ())
993 /* We have to pass a value to the nonlocal_goto pattern that will
994 get copied into the static_chain pointer, but it does not matter
995 what that value is, because builtin_setjmp does not use it. */
996 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
999 lab
= copy_to_reg (lab
);
1001 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1002 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1004 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1005 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1007 emit_use (hard_frame_pointer_rtx
);
1008 emit_use (stack_pointer_rtx
);
1009 emit_indirect_jump (lab
);
1013 /* Search backwards and mark the jump insn as a non-local goto.
1014 Note that this precludes the use of __builtin_longjmp to a
1015 __builtin_setjmp target in the same function. However, we've
1016 already cautioned the user that these functions are for
1017 internal exception handling use only. */
1018 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1020 gcc_assert (insn
!= last
);
1024 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1027 else if (CALL_P (insn
))
1033 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1035 return (iter
->i
< iter
->n
);
1038 /* This function validates the types of a function call argument list
1039 against a specified list of tree_codes. If the last specifier is a 0,
1040 that represents an ellipsis, otherwise the last specifier must be a
1044 validate_arglist (const_tree callexpr
, ...)
1046 enum tree_code code
;
1049 const_call_expr_arg_iterator iter
;
1052 va_start (ap
, callexpr
);
1053 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1055 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1056 tree fn
= CALL_EXPR_FN (callexpr
);
1057 bitmap argmap
= get_nonnull_args (TREE_TYPE (TREE_TYPE (fn
)));
1059 for (unsigned argno
= 1; ; ++argno
)
1061 code
= (enum tree_code
) va_arg (ap
, int);
1066 /* This signifies an ellipses, any further arguments are all ok. */
1070 /* This signifies an endlink, if no arguments remain, return
1071 true, otherwise return false. */
1072 res
= !more_const_call_expr_args_p (&iter
);
1075 /* The actual argument must be nonnull when either the whole
1076 called function has been declared nonnull, or when the formal
1077 argument corresponding to the actual argument has been. */
1079 && (bitmap_empty_p (argmap
) || bitmap_bit_p (argmap
, argno
)))
1081 arg
= next_const_call_expr_arg (&iter
);
1082 if (!validate_arg (arg
, code
) || integer_zerop (arg
))
1088 /* If no parameters remain or the parameter's code does not
1089 match the specified code, return false. Otherwise continue
1090 checking any remaining arguments. */
1091 arg
= next_const_call_expr_arg (&iter
);
1092 if (!validate_arg (arg
, code
))
1098 /* We need gotos here since we can only have one VA_CLOSE in a
1103 BITMAP_FREE (argmap
);
1108 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1109 and the address of the save area. */
1112 expand_builtin_nonlocal_goto (tree exp
)
1114 tree t_label
, t_save_area
;
1115 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1118 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1121 t_label
= CALL_EXPR_ARG (exp
, 0);
1122 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1124 r_label
= expand_normal (t_label
);
1125 r_label
= convert_memory_address (Pmode
, r_label
);
1126 r_save_area
= expand_normal (t_save_area
);
1127 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1128 /* Copy the address of the save location to a register just in case it was
1129 based on the frame pointer. */
1130 r_save_area
= copy_to_reg (r_save_area
);
1131 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1132 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1133 plus_constant (Pmode
, r_save_area
,
1134 GET_MODE_SIZE (Pmode
)));
1136 crtl
->has_nonlocal_goto
= 1;
1138 /* ??? We no longer need to pass the static chain value, afaik. */
1139 if (targetm
.have_nonlocal_goto ())
1140 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1143 r_label
= copy_to_reg (r_label
);
1145 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1146 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1148 /* Restore frame pointer for containing function. */
1149 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1150 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1152 /* USE of hard_frame_pointer_rtx added for consistency;
1153 not clear if really needed. */
1154 emit_use (hard_frame_pointer_rtx
);
1155 emit_use (stack_pointer_rtx
);
1157 /* If the architecture is using a GP register, we must
1158 conservatively assume that the target function makes use of it.
1159 The prologue of functions with nonlocal gotos must therefore
1160 initialize the GP register to the appropriate value, and we
1161 must then make sure that this value is live at the point
1162 of the jump. (Note that this doesn't necessarily apply
1163 to targets with a nonlocal_goto pattern; they are free
1164 to implement it in their own way. Note also that this is
1165 a no-op if the GP register is a global invariant.) */
1166 unsigned regnum
= PIC_OFFSET_TABLE_REGNUM
;
1167 if (regnum
!= INVALID_REGNUM
&& fixed_regs
[regnum
])
1168 emit_use (pic_offset_table_rtx
);
1170 emit_indirect_jump (r_label
);
1173 /* Search backwards to the jump insn and mark it as a
1175 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1179 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1182 else if (CALL_P (insn
))
1189 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1190 (not all will be used on all machines) that was passed to __builtin_setjmp.
1191 It updates the stack pointer in that block to the current value. This is
1192 also called directly by the SJLJ exception handling code. */
1195 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1197 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1198 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
1200 = gen_rtx_MEM (sa_mode
,
1203 plus_constant (Pmode
, buf_addr
,
1204 2 * GET_MODE_SIZE (Pmode
))));
1206 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1209 /* Expand a call to __builtin_prefetch. For a target that does not support
1210 data prefetch, evaluate the memory address argument in case it has side
1214 expand_builtin_prefetch (tree exp
)
1216 tree arg0
, arg1
, arg2
;
1220 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1223 arg0
= CALL_EXPR_ARG (exp
, 0);
1225 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1226 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1228 nargs
= call_expr_nargs (exp
);
1230 arg1
= CALL_EXPR_ARG (exp
, 1);
1232 arg1
= integer_zero_node
;
1234 arg2
= CALL_EXPR_ARG (exp
, 2);
1236 arg2
= integer_three_node
;
1238 /* Argument 0 is an address. */
1239 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1241 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1242 if (TREE_CODE (arg1
) != INTEGER_CST
)
1244 error ("second argument to %<__builtin_prefetch%> must be a constant");
1245 arg1
= integer_zero_node
;
1247 op1
= expand_normal (arg1
);
1248 /* Argument 1 must be either zero or one. */
1249 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1251 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1256 /* Argument 2 (locality) must be a compile-time constant int. */
1257 if (TREE_CODE (arg2
) != INTEGER_CST
)
1259 error ("third argument to %<__builtin_prefetch%> must be a constant");
1260 arg2
= integer_zero_node
;
1262 op2
= expand_normal (arg2
);
1263 /* Argument 2 must be 0, 1, 2, or 3. */
1264 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1266 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1270 if (targetm
.have_prefetch ())
1272 struct expand_operand ops
[3];
1274 create_address_operand (&ops
[0], op0
);
1275 create_integer_operand (&ops
[1], INTVAL (op1
));
1276 create_integer_operand (&ops
[2], INTVAL (op2
));
1277 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1281 /* Don't do anything with direct references to volatile memory, but
1282 generate code to handle other side effects. */
1283 if (!MEM_P (op0
) && side_effects_p (op0
))
1287 /* Get a MEM rtx for expression EXP which is the address of an operand
1288 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1289 the maximum length of the block of memory that might be accessed or
1293 get_memory_rtx (tree exp
, tree len
)
1295 tree orig_exp
= exp
;
1298 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1299 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1300 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1301 exp
= TREE_OPERAND (exp
, 0);
1303 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1304 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1306 /* Get an expression we can use to find the attributes to assign to MEM.
1307 First remove any nops. */
1308 while (CONVERT_EXPR_P (exp
)
1309 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1310 exp
= TREE_OPERAND (exp
, 0);
1312 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1313 (as builtin stringops may alias with anything). */
1314 exp
= fold_build2 (MEM_REF
,
1315 build_array_type (char_type_node
,
1316 build_range_type (sizetype
,
1317 size_one_node
, len
)),
1318 exp
, build_int_cst (ptr_type_node
, 0));
1320 /* If the MEM_REF has no acceptable address, try to get the base object
1321 from the original address we got, and build an all-aliasing
1322 unknown-sized access to that one. */
1323 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1324 set_mem_attributes (mem
, exp
, 0);
1325 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1326 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1329 exp
= build_fold_addr_expr (exp
);
1330 exp
= fold_build2 (MEM_REF
,
1331 build_array_type (char_type_node
,
1332 build_range_type (sizetype
,
1335 exp
, build_int_cst (ptr_type_node
, 0));
1336 set_mem_attributes (mem
, exp
, 0);
1338 set_mem_alias_set (mem
, 0);
1342 /* Built-in functions to perform an untyped call and return. */
1344 #define apply_args_mode \
1345 (this_target_builtins->x_apply_args_mode)
1346 #define apply_result_mode \
1347 (this_target_builtins->x_apply_result_mode)
1349 /* Return the size required for the block returned by __builtin_apply_args,
1350 and initialize apply_args_mode. */
1353 apply_args_size (void)
1355 static int size
= -1;
1360 /* The values computed by this function never change. */
1363 /* The first value is the incoming arg-pointer. */
1364 size
= GET_MODE_SIZE (Pmode
);
1366 /* The second value is the structure value address unless this is
1367 passed as an "invisible" first argument. */
1368 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1369 size
+= GET_MODE_SIZE (Pmode
);
1371 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1372 if (FUNCTION_ARG_REGNO_P (regno
))
1374 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1376 gcc_assert (mode
!= VOIDmode
);
1378 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1379 if (size
% align
!= 0)
1380 size
= CEIL (size
, align
) * align
;
1381 size
+= GET_MODE_SIZE (mode
);
1382 apply_args_mode
[regno
] = mode
;
1386 apply_args_mode
[regno
] = VOIDmode
;
1392 /* Return the size required for the block returned by __builtin_apply,
1393 and initialize apply_result_mode. */
1396 apply_result_size (void)
1398 static int size
= -1;
1402 /* The values computed by this function never change. */
1407 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1408 if (targetm
.calls
.function_value_regno_p (regno
))
1410 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1412 gcc_assert (mode
!= VOIDmode
);
1414 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1415 if (size
% align
!= 0)
1416 size
= CEIL (size
, align
) * align
;
1417 size
+= GET_MODE_SIZE (mode
);
1418 apply_result_mode
[regno
] = mode
;
1421 apply_result_mode
[regno
] = VOIDmode
;
1423 /* Allow targets that use untyped_call and untyped_return to override
1424 the size so that machine-specific information can be stored here. */
1425 #ifdef APPLY_RESULT_SIZE
1426 size
= APPLY_RESULT_SIZE
;
1432 /* Create a vector describing the result block RESULT. If SAVEP is true,
1433 the result block is used to save the values; otherwise it is used to
1434 restore the values. */
1437 result_vector (int savep
, rtx result
)
1439 int regno
, size
, align
, nelts
;
1442 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1445 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1446 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1448 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1449 if (size
% align
!= 0)
1450 size
= CEIL (size
, align
) * align
;
1451 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1452 mem
= adjust_address (result
, mode
, size
);
1453 savevec
[nelts
++] = (savep
1454 ? gen_rtx_SET (mem
, reg
)
1455 : gen_rtx_SET (reg
, mem
));
1456 size
+= GET_MODE_SIZE (mode
);
1458 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1461 /* Save the state required to perform an untyped call with the same
1462 arguments as were passed to the current function. */
1465 expand_builtin_apply_args_1 (void)
1468 int size
, align
, regno
;
1470 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1472 /* Create a block where the arg-pointer, structure value address,
1473 and argument registers can be saved. */
1474 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1476 /* Walk past the arg-pointer and structure value address. */
1477 size
= GET_MODE_SIZE (Pmode
);
1478 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1479 size
+= GET_MODE_SIZE (Pmode
);
1481 /* Save each register used in calling a function to the block. */
1482 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1483 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1485 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1486 if (size
% align
!= 0)
1487 size
= CEIL (size
, align
) * align
;
1489 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1491 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1492 size
+= GET_MODE_SIZE (mode
);
1495 /* Save the arg pointer to the block. */
1496 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1497 /* We need the pointer as the caller actually passed them to us, not
1498 as we might have pretended they were passed. Make sure it's a valid
1499 operand, as emit_move_insn isn't expected to handle a PLUS. */
1500 if (STACK_GROWS_DOWNWARD
)
1502 = force_operand (plus_constant (Pmode
, tem
,
1503 crtl
->args
.pretend_args_size
),
1505 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1507 size
= GET_MODE_SIZE (Pmode
);
1509 /* Save the structure value address unless this is passed as an
1510 "invisible" first argument. */
1511 if (struct_incoming_value
)
1513 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1514 copy_to_reg (struct_incoming_value
));
1515 size
+= GET_MODE_SIZE (Pmode
);
1518 /* Return the address of the block. */
1519 return copy_addr_to_reg (XEXP (registers
, 0));
1522 /* __builtin_apply_args returns block of memory allocated on
1523 the stack into which is stored the arg pointer, structure
1524 value address, static chain, and all the registers that might
1525 possibly be used in performing a function call. The code is
1526 moved to the start of the function so the incoming values are
1530 expand_builtin_apply_args (void)
1532 /* Don't do __builtin_apply_args more than once in a function.
1533 Save the result of the first call and reuse it. */
1534 if (apply_args_value
!= 0)
1535 return apply_args_value
;
1537 /* When this function is called, it means that registers must be
1538 saved on entry to this function. So we migrate the
1539 call to the first insn of this function. */
1543 temp
= expand_builtin_apply_args_1 ();
1544 rtx_insn
*seq
= get_insns ();
1547 apply_args_value
= temp
;
1549 /* Put the insns after the NOTE that starts the function.
1550 If this is inside a start_sequence, make the outer-level insn
1551 chain current, so the code is placed at the start of the
1552 function. If internal_arg_pointer is a non-virtual pseudo,
1553 it needs to be placed after the function that initializes
1555 push_topmost_sequence ();
1556 if (REG_P (crtl
->args
.internal_arg_pointer
)
1557 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1558 emit_insn_before (seq
, parm_birth_insn
);
1560 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1561 pop_topmost_sequence ();
1566 /* Perform an untyped call and save the state required to perform an
1567 untyped return of whatever value was returned by the given function. */
1570 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1572 int size
, align
, regno
;
1574 rtx incoming_args
, result
, reg
, dest
, src
;
1575 rtx_call_insn
*call_insn
;
1576 rtx old_stack_level
= 0;
1577 rtx call_fusage
= 0;
1578 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1580 arguments
= convert_memory_address (Pmode
, arguments
);
1582 /* Create a block where the return registers can be saved. */
1583 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1585 /* Fetch the arg pointer from the ARGUMENTS block. */
1586 incoming_args
= gen_reg_rtx (Pmode
);
1587 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1588 if (!STACK_GROWS_DOWNWARD
)
1589 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1590 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1592 /* Push a new argument block and copy the arguments. Do not allow
1593 the (potential) memcpy call below to interfere with our stack
1595 do_pending_stack_adjust ();
1598 /* Save the stack with nonlocal if available. */
1599 if (targetm
.have_save_stack_nonlocal ())
1600 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1602 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1604 /* Allocate a block of memory onto the stack and copy the memory
1605 arguments to the outgoing arguments address. We can pass TRUE
1606 as the 4th argument because we just saved the stack pointer
1607 and will restore it right after the call. */
1608 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, -1, true);
1610 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1611 may have already set current_function_calls_alloca to true.
1612 current_function_calls_alloca won't be set if argsize is zero,
1613 so we have to guarantee need_drap is true here. */
1614 if (SUPPORTS_STACK_ALIGNMENT
)
1615 crtl
->need_drap
= true;
1617 dest
= virtual_outgoing_args_rtx
;
1618 if (!STACK_GROWS_DOWNWARD
)
1620 if (CONST_INT_P (argsize
))
1621 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1623 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1625 dest
= gen_rtx_MEM (BLKmode
, dest
);
1626 set_mem_align (dest
, PARM_BOUNDARY
);
1627 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1628 set_mem_align (src
, PARM_BOUNDARY
);
1629 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1631 /* Refer to the argument block. */
1633 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1634 set_mem_align (arguments
, PARM_BOUNDARY
);
1636 /* Walk past the arg-pointer and structure value address. */
1637 size
= GET_MODE_SIZE (Pmode
);
1639 size
+= GET_MODE_SIZE (Pmode
);
1641 /* Restore each of the registers previously saved. Make USE insns
1642 for each of these registers for use in making the call. */
1643 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1644 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1646 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1647 if (size
% align
!= 0)
1648 size
= CEIL (size
, align
) * align
;
1649 reg
= gen_rtx_REG (mode
, regno
);
1650 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1651 use_reg (&call_fusage
, reg
);
1652 size
+= GET_MODE_SIZE (mode
);
1655 /* Restore the structure value address unless this is passed as an
1656 "invisible" first argument. */
1657 size
= GET_MODE_SIZE (Pmode
);
1660 rtx value
= gen_reg_rtx (Pmode
);
1661 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1662 emit_move_insn (struct_value
, value
);
1663 if (REG_P (struct_value
))
1664 use_reg (&call_fusage
, struct_value
);
1665 size
+= GET_MODE_SIZE (Pmode
);
1668 /* All arguments and registers used for the call are set up by now! */
1669 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1671 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1672 and we don't want to load it into a register as an optimization,
1673 because prepare_call_address already did it if it should be done. */
1674 if (GET_CODE (function
) != SYMBOL_REF
)
1675 function
= memory_address (FUNCTION_MODE
, function
);
1677 /* Generate the actual call instruction and save the return value. */
1678 if (targetm
.have_untyped_call ())
1680 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1681 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1682 result_vector (1, result
)));
1684 else if (targetm
.have_call_value ())
1688 /* Locate the unique return register. It is not possible to
1689 express a call that sets more than one return register using
1690 call_value; use untyped_call for that. In fact, untyped_call
1691 only needs to save the return registers in the given block. */
1692 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1693 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1695 gcc_assert (!valreg
); /* have_untyped_call required. */
1697 valreg
= gen_rtx_REG (mode
, regno
);
1700 emit_insn (targetm
.gen_call_value (valreg
,
1701 gen_rtx_MEM (FUNCTION_MODE
, function
),
1702 const0_rtx
, NULL_RTX
, const0_rtx
));
1704 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1709 /* Find the CALL insn we just emitted, and attach the register usage
1711 call_insn
= last_call_insn ();
1712 add_function_usage_to (call_insn
, call_fusage
);
1714 /* Restore the stack. */
1715 if (targetm
.have_save_stack_nonlocal ())
1716 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1718 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1719 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1723 /* Return the address of the result block. */
1724 result
= copy_addr_to_reg (XEXP (result
, 0));
1725 return convert_memory_address (ptr_mode
, result
);
1728 /* Perform an untyped return. */
1731 expand_builtin_return (rtx result
)
1733 int size
, align
, regno
;
1736 rtx_insn
*call_fusage
= 0;
1738 result
= convert_memory_address (Pmode
, result
);
1740 apply_result_size ();
1741 result
= gen_rtx_MEM (BLKmode
, result
);
1743 if (targetm
.have_untyped_return ())
1745 rtx vector
= result_vector (0, result
);
1746 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1751 /* Restore the return value and note that each value is used. */
1753 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1754 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1756 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1757 if (size
% align
!= 0)
1758 size
= CEIL (size
, align
) * align
;
1759 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1760 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1762 push_to_sequence (call_fusage
);
1764 call_fusage
= get_insns ();
1766 size
+= GET_MODE_SIZE (mode
);
1769 /* Put the USE insns before the return. */
1770 emit_insn (call_fusage
);
1772 /* Return whatever values was restored by jumping directly to the end
1774 expand_naked_return ();
1777 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1779 static enum type_class
1780 type_to_class (tree type
)
1782 switch (TREE_CODE (type
))
1784 case VOID_TYPE
: return void_type_class
;
1785 case INTEGER_TYPE
: return integer_type_class
;
1786 case ENUMERAL_TYPE
: return enumeral_type_class
;
1787 case BOOLEAN_TYPE
: return boolean_type_class
;
1788 case POINTER_TYPE
: return pointer_type_class
;
1789 case REFERENCE_TYPE
: return reference_type_class
;
1790 case OFFSET_TYPE
: return offset_type_class
;
1791 case REAL_TYPE
: return real_type_class
;
1792 case COMPLEX_TYPE
: return complex_type_class
;
1793 case FUNCTION_TYPE
: return function_type_class
;
1794 case METHOD_TYPE
: return method_type_class
;
1795 case RECORD_TYPE
: return record_type_class
;
1797 case QUAL_UNION_TYPE
: return union_type_class
;
1798 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1799 ? string_type_class
: array_type_class
);
1800 case LANG_TYPE
: return lang_type_class
;
1801 default: return no_type_class
;
1805 /* Expand a call EXP to __builtin_classify_type. */
1808 expand_builtin_classify_type (tree exp
)
1810 if (call_expr_nargs (exp
))
1811 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1812 return GEN_INT (no_type_class
);
1815 /* This helper macro, meant to be used in mathfn_built_in below, determines
1816 which among a set of builtin math functions is appropriate for a given type
1817 mode. The `F' (float) and `L' (long double) are automatically generated
1818 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1819 types, there are additional types that are considered with 'F32', 'F64',
1820 'F128', etc. suffixes. */
1821 #define CASE_MATHFN(MATHFN) \
1822 CASE_CFN_##MATHFN: \
1823 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1824 fcodel = BUILT_IN_##MATHFN##L ; break;
1825 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1827 #define CASE_MATHFN_FLOATN(MATHFN) \
1828 CASE_CFN_##MATHFN: \
1829 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1830 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1831 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1832 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1833 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1835 /* Similar to above, but appends _R after any F/L suffix. */
1836 #define CASE_MATHFN_REENT(MATHFN) \
1837 case CFN_BUILT_IN_##MATHFN##_R: \
1838 case CFN_BUILT_IN_##MATHFN##F_R: \
1839 case CFN_BUILT_IN_##MATHFN##L_R: \
1840 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1841 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1843 /* Return a function equivalent to FN but operating on floating-point
1844 values of type TYPE, or END_BUILTINS if no such function exists.
1845 This is purely an operation on function codes; it does not guarantee
1846 that the target actually has an implementation of the function. */
/* Map combined function FN onto the built_in_function for TYPE's mode.
   NOTE(review): the extraction that produced this text dropped most of
   the CASE_MATHFN table (original lines ~1860-1951) and the assignments
   in the if/else ladder below — compare against the pristine file before
   relying on this copy.  */
1848 static built_in_function
1849 mathfn_built_in_2 (tree type
, combined_fn fn
)
/* Per-suffix candidates; the FLOATN ones stay END_BUILTINS unless a
   CASE_MATHFN_FLOATN entry fills them in.  */
1852 built_in_function fcode
, fcodef
, fcodel
;
1853 built_in_function fcodef16
= END_BUILTINS
;
1854 built_in_function fcodef32
= END_BUILTINS
;
1855 built_in_function fcodef64
= END_BUILTINS
;
1856 built_in_function fcodef128
= END_BUILTINS
;
1857 built_in_function fcodef32x
= END_BUILTINS
;
1858 built_in_function fcodef64x
= END_BUILTINS
;
1859 built_in_function fcodef128x
= END_BUILTINS
;
/* Dispatch on FN: each CASE_MATHFN* macro expands to the case labels for
   one math builtin family and records its per-suffix function codes.  */
1873 CASE_MATHFN_FLOATN (COPYSIGN
)
1886 CASE_MATHFN_FLOATN (FMA
)
1887 CASE_MATHFN_FLOATN (FMAX
)
1888 CASE_MATHFN_FLOATN (FMIN
)
1892 CASE_MATHFN_REENT (GAMMA
) /* GAMMA_R */
1893 CASE_MATHFN (HUGE_VAL
)
1897 CASE_MATHFN (IFLOOR
)
1900 CASE_MATHFN (IROUND
)
1907 CASE_MATHFN (LFLOOR
)
1908 CASE_MATHFN (LGAMMA
)
1909 CASE_MATHFN_REENT (LGAMMA
) /* LGAMMA_R */
1910 CASE_MATHFN (LLCEIL
)
1911 CASE_MATHFN (LLFLOOR
)
1912 CASE_MATHFN (LLRINT
)
1913 CASE_MATHFN (LLROUND
)
1920 CASE_MATHFN (LROUND
)
1924 CASE_MATHFN (NEARBYINT
)
1925 CASE_MATHFN (NEXTAFTER
)
1926 CASE_MATHFN (NEXTTOWARD
)
1930 CASE_MATHFN (REMAINDER
)
1931 CASE_MATHFN (REMQUO
)
1935 CASE_MATHFN (SCALBLN
)
1936 CASE_MATHFN (SCALBN
)
1937 CASE_MATHFN (SIGNBIT
)
1938 CASE_MATHFN (SIGNIFICAND
)
1940 CASE_MATHFN (SINCOS
)
1942 CASE_MATHFN_FLOATN (SQRT
)
1945 CASE_MATHFN (TGAMMA
)
/* FN is not a recognized math builtin.  */
1952 return END_BUILTINS
;
/* Select the candidate matching TYPE's main variant; the returns in each
   arm were dropped by the extraction (presumably return fcode / fcodef /
   fcodel / fcodefN per arm — confirm against the pristine file).  */
1955 mtype
= TYPE_MAIN_VARIANT (type
);
1956 if (mtype
== double_type_node
)
1958 else if (mtype
== float_type_node
)
1960 else if (mtype
== long_double_type_node
)
1962 else if (mtype
== float16_type_node
)
1964 else if (mtype
== float32_type_node
)
1966 else if (mtype
== float64_type_node
)
1968 else if (mtype
== float128_type_node
)
1970 else if (mtype
== float32x_type_node
)
1972 else if (mtype
== float64x_type_node
)
1974 else if (mtype
== float128x_type_node
)
/* TYPE is not a supported floating-point type.  */
1977 return END_BUILTINS
;
1980 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1981 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1982 otherwise use the explicit declaration. If we can't do the conversion,
1986 mathfn_built_in_1 (tree type
, combined_fn fn
, bool implicit_p
)
1988 built_in_function fcode2
= mathfn_built_in_2 (type
, fn
);
1989 if (fcode2
== END_BUILTINS
)
1992 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1995 return builtin_decl_explicit (fcode2
);
1998 /* Like mathfn_built_in_1, but always use the implicit array. */
2001 mathfn_built_in (tree type
, combined_fn fn
)
2003 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
2006 /* Like mathfn_built_in_1, but take a built_in_function and
2007 always use the implicit array. */
2010 mathfn_built_in (tree type
, enum built_in_function fn
)
2012 return mathfn_built_in_1 (type
, as_combined_fn (fn
), /*implicit=*/ 1);
2015 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2016 return its code, otherwise return IFN_LAST. Note that this function
2017 only tests whether the function is defined in internals.def, not whether
2018 it is actually available on the target. */
2021 associated_internal_fn (tree fndecl
)
2023 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
);
2024 tree return_type
= TREE_TYPE (TREE_TYPE (fndecl
));
2025 switch (DECL_FUNCTION_CODE (fndecl
))
2027 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2028 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2029 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2030 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2031 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2032 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2033 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2034 #include "internal-fn.def"
2036 CASE_FLT_FN (BUILT_IN_POW10
):
2039 CASE_FLT_FN (BUILT_IN_DREM
):
2040 return IFN_REMAINDER
;
2042 CASE_FLT_FN (BUILT_IN_SCALBN
):
2043 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2044 if (REAL_MODE_FORMAT (TYPE_MODE (return_type
))->b
== 2)
2053 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2054 on the current target by a call to an internal function, return the
2055 code of that internal function, otherwise return IFN_LAST. The caller
2056 is responsible for ensuring that any side-effects of the built-in
2057 call are dealt with correctly. E.g. if CALL sets errno, the caller
2058 must decide that the errno result isn't needed or make it available
2059 in some other way. */
2062 replacement_internal_fn (gcall
*call
)
2064 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2066 internal_fn ifn
= associated_internal_fn (gimple_call_fndecl (call
));
2067 if (ifn
!= IFN_LAST
)
2069 tree_pair types
= direct_internal_fn_types (ifn
, call
);
2070 optimization_type opt_type
= bb_optimization_type (gimple_bb (call
));
2071 if (direct_internal_fn_supported_p (ifn
, types
, opt_type
))
2078 /* Expand a call to the builtin trinary math functions (fma).
2079 Return NULL_RTX if a normal call should be emitted rather than expanding the
2080 function in-line. EXP is the expression that is a call to the builtin
2081 function; if convenient, the result should be placed in TARGET.
2082 SUBTARGET may be used as the target for computing one of EXP's
2086 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
/* NOTE(review): lossy extraction -- braces, the default switch case and
   the end-of-sequence handling lines are missing; code left byte-identical.  */
2088 optab builtin_optab
;
2089 rtx op0
, op1
, op2
, result
;
2091 tree fndecl
= get_callee_fndecl (exp
);
2092 tree arg0
, arg1
, arg2
;
/* Require exactly three floating-point arguments.  */
2095 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2098 arg0
= CALL_EXPR_ARG (exp
, 0);
2099 arg1
= CALL_EXPR_ARG (exp
, 1);
2100 arg2
= CALL_EXPR_ARG (exp
, 2);
2102 switch (DECL_FUNCTION_CODE (fndecl
))
2104 CASE_FLT_FN (BUILT_IN_FMA
):
2105 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
2106 builtin_optab
= fma_optab
; break;
2111 /* Make a suitable register to place result in. */
2112 mode
= TYPE_MODE (TREE_TYPE (exp
));
2114 /* Before working hard, check whether the instruction is available. */
2115 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2118 result
= gen_reg_rtx (mode
);
2120 /* Always stabilize the argument list, so re-expansion on the fallback
2121 path does not repeat argument side-effects. */
2121 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2122 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2123 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2125 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2126 op1
= expand_normal (arg1
);
2127 op2
= expand_normal (arg2
);
2131 /* Compute into RESULT.
2132 Set RESULT to wherever the result comes back. */
2133 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2136 /* If we were unable to expand via the builtin, stop the sequence
2137 (without outputting the insns) and call to the library function
2138 with the stabilized argument list. */
2142 return expand_call (exp
, target
, target
== const0_rtx
)
;
2145 /* Output the entire sequence. */
2146 insns
= get_insns ();
2153 /* Expand a call to the builtin sin and cos math functions.
2154 Return NULL_RTX if a normal call should be emitted rather than expanding the
2155 function in-line. EXP is the expression that is a call to the builtin
2156 function; if convenient, the result should be placed in TARGET.
2157 SUBTARGET may be used as the target for computing one of EXP's
2161 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
/* NOTE(review): lossy extraction -- braces, default cases and some
   end-of-sequence lines are missing; code left byte-identical.  */
2163 optab builtin_optab
;
2166 tree fndecl
= get_callee_fndecl (exp
);
/* Require exactly one floating-point argument.  */
2170 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2173 arg
= CALL_EXPR_ARG (exp
, 0);
/* Prefer the combined sincos optab for either function.  */
2175 switch (DECL_FUNCTION_CODE (fndecl
))
2177 CASE_FLT_FN (BUILT_IN_SIN
):
2178 CASE_FLT_FN (BUILT_IN_COS
):
2179 builtin_optab
= sincos_optab
; break;
2184 /* Make a suitable register to place result in. */
2185 mode
= TYPE_MODE (TREE_TYPE (exp
));
2187 /* Check if sincos insn is available, otherwise fallback
2188 to sin or cos insn. */
2189 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2190 switch (DECL_FUNCTION_CODE (fndecl
))
2192 CASE_FLT_FN (BUILT_IN_SIN
):
2193 builtin_optab
= sin_optab
; break;
2194 CASE_FLT_FN (BUILT_IN_COS
):
2195 builtin_optab
= cos_optab
; break;
2200 /* Before working hard, check whether the instruction is available. */
2201 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2203 rtx result
= gen_reg_rtx (mode
);
2205 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2206 need to expand the argument again. This way, we will not perform
2207 side-effects more than once. */
2208 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2210 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2214 /* Compute into RESULT.
2215 Set RESULT to wherever the result comes back. */
2216 if (builtin_optab
== sincos_optab
)
/* sincos computes two values at once; request only the one we need
   (second output for sin, first for cos).  */
2220 switch (DECL_FUNCTION_CODE (fndecl
))
2222 CASE_FLT_FN (BUILT_IN_SIN
):
2223 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2225 CASE_FLT_FN (BUILT_IN_COS
):
2226 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2234 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2238 /* Output the entire sequence. */
2239 insns
= get_insns ();
2245 /* If we were unable to expand via the builtin, stop the sequence
2246 (without outputting the insns) and call to the library function
2247 with the stabilized argument list. */
2251 return expand_call (exp
, target
, target
== const0_rtx
)
;
2254 /* Given an interclass math builtin decl FNDECL and its argument ARG
2255 return an RTL instruction code that implements the functionality.
2256 If that isn't possible or available return CODE_FOR_nothing. */
2258 static enum insn_code
2259 interclass_mathfn_icode (tree arg
, tree fndecl
)
/* NOTE(review): lossy extraction -- braces and some guard lines are
   missing; code left byte-identical.  */
2261 bool errno_set
= false;
2262 optab builtin_optab
= unknown_optab
;
/* Select the optab for the builtin; ilogb additionally sets errno.  */
2265 switch (DECL_FUNCTION_CODE (fndecl
))
2267 CASE_FLT_FN (BUILT_IN_ILOGB
):
2268 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2269 CASE_FLT_FN (BUILT_IN_ISINF
):
2270 builtin_optab
= isinf_optab
; break;
2271 case BUILT_IN_ISNORMAL
:
2272 case BUILT_IN_ISFINITE
:
2273 CASE_FLT_FN (BUILT_IN_FINITE
):
2274 case BUILT_IN_FINITED32
:
2275 case BUILT_IN_FINITED64
:
2276 case BUILT_IN_FINITED128
:
2277 case BUILT_IN_ISINFD32
:
2278 case BUILT_IN_ISINFD64
:
2279 case BUILT_IN_ISINFD128
:
2280 /* These builtins have no optabs (yet). */
2286 /* There's no easy way to detect the case we need to set EDOM. */
2287 if (flag_errno_math
&& errno_set
)
2288 return CODE_FOR_nothing
;
2290 /* Optab mode depends on the mode of the input argument. */
2291 mode
= TYPE_MODE (TREE_TYPE (arg
));
2294 return optab_handler (builtin_optab
, mode
)
;
2295 return CODE_FOR_nothing
;
2298 /* Expand a call to one of the builtin math functions that operate on
2299 floating point argument and output an integer result (ilogb, isinf,
2301 Return 0 if a normal call should be emitted rather than expanding the
2302 function in-line. EXP is the expression that is a call to the builtin
2303 function; if convenient, the result should be placed in TARGET. */
2306 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
/* NOTE(review): lossy extraction -- braces and the final fallback
   return are missing; code left byte-identical.  */
2308 enum insn_code icode
= CODE_FOR_nothing
;
2310 tree fndecl
= get_callee_fndecl (exp
);
/* Require exactly one floating-point argument.  */
2314 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2317 arg
= CALL_EXPR_ARG (exp
, 0);
2318 icode
= interclass_mathfn_icode (arg
, fndecl
);
2319 mode
= TYPE_MODE (TREE_TYPE (arg
));
2321 if (icode
!= CODE_FOR_nothing
)
2323 struct expand_operand ops
[1];
/* Remember the insn stream position so a failed attempt can be undone.  */
2324 rtx_insn
*last
= get_last_insn ();
2325 tree orig_arg
= arg
;
2327 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2328 need to expand the argument again. This way, we will not perform
2329 side-effects more than once. */
2330 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2332 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2334 if (mode
!= GET_MODE (op0
))
2335 op0
= convert_to_mode (mode
, op0
, 0);
2337 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2338 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2339 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2340 return ops
[0].value
;
/* Insn emission failed: roll back and restore the original argument.  */
2342 delete_insns_since (last
);
2343 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2349 /* Expand a call to the builtin sincos math function.
2350 Return NULL_RTX if a normal call should be emitted rather than expanding the
2351 function in-line. EXP is the expression that is a call to the builtin
2355 expand_builtin_sincos (tree exp
)
/* NOTE(review): lossy extraction -- braces, the MEM_REF second operands
   and the final return are missing; code left byte-identical.  */
2357 rtx op0
, op1
, op2
, target1
, target2
;
2359 tree arg
, sinp
, cosp
;
2361 location_t loc
= EXPR_LOCATION (exp
);
2362 tree alias_type
, alias_off
;
/* sincos (x, &sin_result, &cos_result): one real plus two pointers.  */
2364 if (!validate_arglist (exp
, REAL_TYPE
,
2365 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2368 arg
= CALL_EXPR_ARG (exp
, 0);
2369 sinp
= CALL_EXPR_ARG (exp
, 1);
2370 cosp
= CALL_EXPR_ARG (exp
, 2);
2372 /* Make a suitable register to place result in. */
2373 mode
= TYPE_MODE (TREE_TYPE (arg
));
2375 /* Check if sincos insn is available, otherwise emit the call. */
2376 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2379 target1
= gen_reg_rtx (mode
);
2380 target2
= gen_reg_rtx (mode
);
2382 op0
= expand_normal (arg
);
/* Build MEM_REFs for *sinp and *cosp; alias_off carries the alias set.  */
2383 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2384 alias_off
= build_int_cst (alias_type
, 0);
2385 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2387 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2390 /* Compute into target1 and target2.
2391 Set TARGET to wherever the result comes back. */
2392 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2393 gcc_assert (result
);
2395 /* Move target1 and target2 to the memory locations indicated
2397 emit_move_insn (op1
, target1
);
2398 emit_move_insn (op2
, target2
);
2403 /* Expand a call to the internal cexpi builtin to the sincos math function.
2404 EXP is the expression that is a call to the builtin function; if convenient,
2405 the result should be placed in TARGET. */
2408 expand_builtin_cexpi (tree exp
, rtx target
)
/* NOTE(review): lossy extraction -- braces, several gcc_unreachable/
   string-literal lines ("cexpf"/"cexp"/"cexpl" names) and guards are
   missing; code left byte-identical.  Strategy: (1) sincos optab,
   (2) libc sincos, (3) a cexp call built on the fly.  */
2410 tree fndecl
= get_callee_fndecl (exp
);
2414 location_t loc
= EXPR_LOCATION (exp
);
2416 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2419 arg
= CALL_EXPR_ARG (exp
, 0);
2420 type
= TREE_TYPE (arg
);
2421 mode
= TYPE_MODE (TREE_TYPE (arg
));
2423 /* Try expanding via a sincos optab, fall back to emitting a libcall
2424 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2425 is only generated from sincos, cexp or if we have either of them. */
2426 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2428 op1
= gen_reg_rtx (mode
);
2429 op2
= gen_reg_rtx (mode
);
2431 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2433 /* Compute into op1 and op2. */
2434 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2436 else if (targetm
.libc_has_function (function_sincos
))
2438 tree call
, fn
= NULL_TREE
;
/* Pick the sincos variant matching the cexpi precision.  */
2442 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2443 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2444 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2445 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2446 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2447 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
/* Stack temporaries receive the sin/cos results by address.  */
2451 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2452 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2453 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2454 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2455 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2456 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2458 /* Make sure not to fold the sincos call again. */
2459 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2460 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2461 call
, 3, arg
, top1
, top2
));
/* Final fallback: cexpi (x) == cexp (I * x).  */
2465 tree call
, fn
= NULL_TREE
, narg
;
2466 tree ctype
= build_complex_type (type
);
2468 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2469 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2470 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2471 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2472 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2473 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2477 /* If we don't have a decl for cexp create one. This is the
2478 friendliest fallback if the user calls __builtin_cexpi
2479 without full target C99 function support. */
2480 if (fn
== NULL_TREE
)
2483 const char *name
= NULL
;
/* Per-precision name assignments ("cexpf"/"cexp"/"cexpl") are on
   lines missing from this extraction -- TODO confirm upstream.  */
2485 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2487 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2489 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2492 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2493 fn
= build_fn_decl (name
, fntype
);
/* Build the purely imaginary argument 0 + arg*I.  */
2496 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2497 build_real (type
, dconst0
), arg
);
2499 /* Make sure not to fold the cexp call again. */
2500 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2501 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2502 target
, VOIDmode
, EXPAND_NORMAL
)
;
2505 /* Now build the proper return type: cos in the real part (op2),
2506 sin in the imaginary part (op1). */
2506 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2507 make_tree (TREE_TYPE (arg
), op2
),
2508 make_tree (TREE_TYPE (arg
), op1
)),
2509 target
, VOIDmode
, EXPAND_NORMAL
)
;
2512 /* Conveniently construct a function call expression. FNDECL names the
2513 function to be called, N is the number of arguments, and the "..."
2514 parameters are the argument expressions. Unlike build_call_expr
2515 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2518 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
/* NOTE(review): lossy extraction -- the va_start/va_end pair and the
   return are on missing lines; code left byte-identical.  */
2521 tree fntype
= TREE_TYPE (fndecl
);
/* Take the function's address explicitly so no folding kicks in.  */
2522 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2525 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2527 SET_EXPR_LOCATION (fn
, loc
);
2531 /* Expand a call to one of the builtin rounding functions gcc defines
2532 as an extension (lfloor and lceil). As these are gcc extensions we
2533 do not need to worry about setting errno to EDOM.
2534 If expanding via optab fails, lower expression to (int)(floor(x)).
2535 EXP is the expression that is a call to the builtin function;
2536 if convenient, the result should be placed in TARGET. */
2539 expand_builtin_int_roundingfn (tree exp
, rtx target
)
/* NOTE(review): lossy extraction -- braces, default cases, the
   name-string assignments ("ceil"/"ceilf"/... ) and the final return
   are on missing lines; code left byte-identical.  */
2541 convert_optab builtin_optab
;
2544 tree fndecl
= get_callee_fndecl (exp
);
2545 enum built_in_function fallback_fn
;
2546 tree fallback_fndecl
;
2550 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2553 arg
= CALL_EXPR_ARG (exp
, 0);
/* Choose the combined float-to-int optab and the pure-FP fallback.  */
2555 switch (DECL_FUNCTION_CODE (fndecl
))
2557 CASE_FLT_FN (BUILT_IN_ICEIL
):
2558 CASE_FLT_FN (BUILT_IN_LCEIL
):
2559 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2560 builtin_optab
= lceil_optab
;
2561 fallback_fn
= BUILT_IN_CEIL
;
2564 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2565 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2566 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2567 builtin_optab
= lfloor_optab
;
2568 fallback_fn
= BUILT_IN_FLOOR
;
2575 /* Make a suitable register to place result in. */
2576 mode
= TYPE_MODE (TREE_TYPE (exp
));
2578 target
= gen_reg_rtx (mode
);
2580 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2581 need to expand the argument again. This way, we will not perform
2582 side-effects more than once. */
2583 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2585 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2589 /* Compute into TARGET. */
2590 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2592 /* Output the entire sequence. */
2593 insns
= get_insns ();
2599 /* If we were unable to expand via the builtin, stop the sequence
2600 (without outputting the insns). */
2603 /* Fall back to floating point rounding optab. */
2604 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2606 /* For non-C99 targets we may end up without a fallback fndecl here
2607 if the user called __builtin_lfloor directly. In this case emit
2608 a call to the floor/ceil variants nevertheless. This should result
2609 in the best user experience for not full C99 targets. */
2610 if (fallback_fndecl
== NULL_TREE
)
2613 const char *name
= NULL
;
/* The literal names ("ceil", "ceilf", "ceill", "floor", ...) selected
   by each case group are on lines missing from this extraction.  */
2615 switch (DECL_FUNCTION_CODE (fndecl
))
2617 case BUILT_IN_ICEIL
:
2618 case BUILT_IN_LCEIL
:
2619 case BUILT_IN_LLCEIL
:
2622 case BUILT_IN_ICEILF
:
2623 case BUILT_IN_LCEILF
:
2624 case BUILT_IN_LLCEILF
:
2627 case BUILT_IN_ICEILL
:
2628 case BUILT_IN_LCEILL
:
2629 case BUILT_IN_LLCEILL
:
2632 case BUILT_IN_IFLOOR
:
2633 case BUILT_IN_LFLOOR
:
2634 case BUILT_IN_LLFLOOR
:
2637 case BUILT_IN_IFLOORF
:
2638 case BUILT_IN_LFLOORF
:
2639 case BUILT_IN_LLFLOORF
:
2642 case BUILT_IN_IFLOORL
:
2643 case BUILT_IN_LFLOORL
:
2644 case BUILT_IN_LLFLOORL
:
2651 fntype
= build_function_type_list (TREE_TYPE (arg
),
2652 TREE_TYPE (arg
), NULL_TREE
);
2653 fallback_fndecl
= build_fn_decl (name
, fntype
);
/* Re-issue as a plain floor/ceil call on the saved argument.  */
2656 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2658 tmp
= expand_normal (exp
);
2659 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2661 /* Truncate the result of floating point optab to integer
2662 via expand_fix (). */
2663 target
= gen_reg_rtx (mode
);
2664 expand_fix (target
, tmp
, 0);
2669 /* Expand a call to one of the builtin math functions doing integer
2671 Return 0 if a normal call should be emitted rather than expanding the
2672 function in-line. EXP is the expression that is a call to the builtin
2673 function; if convenient, the result should be placed in TARGET. */
2676 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
/* NOTE(review): lossy extraction -- braces, fallthrough markers,
   default case and sequence-finish lines are missing; code left
   byte-identical.  Handles irint/lrint/llrint and iround/lround/llround.  */
2678 convert_optab builtin_optab
;
2681 tree fndecl
= get_callee_fndecl (exp
);
2684 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2686 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2689 arg
= CALL_EXPR_ARG (exp
, 0);
/* The I* variants record a long-returning fallback and then share the
   optab with the L*/LL* variants (fallthrough implied by the missing
   lines -- TODO confirm upstream).  */
2691 switch (DECL_FUNCTION_CODE (fndecl
))
2693 CASE_FLT_FN (BUILT_IN_IRINT
):
2694 fallback_fn
= BUILT_IN_LRINT
;
2696 CASE_FLT_FN (BUILT_IN_LRINT
):
2697 CASE_FLT_FN (BUILT_IN_LLRINT
):
2698 builtin_optab
= lrint_optab
;
2701 CASE_FLT_FN (BUILT_IN_IROUND
):
2702 fallback_fn
= BUILT_IN_LROUND
;
2704 CASE_FLT_FN (BUILT_IN_LROUND
):
2705 CASE_FLT_FN (BUILT_IN_LLROUND
):
2706 builtin_optab
= lround_optab
;
2713 /* There's no easy way to detect the case we need to set EDOM. */
2714 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2717 /* Make a suitable register to place result in. */
2718 mode
= TYPE_MODE (TREE_TYPE (exp
));
2720 /* There's no easy way to detect the case we need to set EDOM. */
2721 if (!flag_errno_math
)
2723 rtx result
= gen_reg_rtx (mode
);
2725 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2726 need to expand the argument again. This way, we will not perform
2727 side-effects more than once. */
2728 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2730 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2734 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2736 /* Output the entire sequence. */
2737 insns
= get_insns ();
2743 /* If we were unable to expand via the builtin, stop the sequence
2744 (without outputting the insns) and call to the library function
2745 with the stabilized argument list. */
2749 if (fallback_fn
!= BUILT_IN_NONE
)
2751 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2752 targets, (int) round (x) should never be transformed into
2753 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2754 a call to lround in the hope that the target provides at least some
2755 C99 functions. This should result in the best user experience for
2756 not full C99 targets. */
2757 tree fallback_fndecl
= mathfn_built_in_1
2758 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2760 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2761 fallback_fndecl
, 1, arg
);
2763 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2764 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
/* Narrow the long result back to the int type the caller expects.  */
2765 return convert_to_mode (mode
, target
, 0)
;
2768 return expand_call (exp
, target
, target
== const0_rtx
)
;
2771 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2772 a normal call should be emitted rather than expanding the function
2773 in-line. EXP is the expression that is a call to the builtin
2774 function; if convenient, the result should be placed in TARGET. */
2777 expand_builtin_powi (tree exp
, rtx target
)
/* NOTE(review): lossy extraction -- declarations, braces and the final
   "return target;" are on missing lines; code left byte-identical.  */
2784 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2787 arg0
= CALL_EXPR_ARG (exp
, 0);
2788 arg1
= CALL_EXPR_ARG (exp
, 1);
2789 mode
= TYPE_MODE (TREE_TYPE (exp
));
2791 /* Emit a libcall to libgcc. */
2793 /* Mode of the 2nd argument must match that of an int. */
2794 mode2
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
2796 if (target
== NULL_RTX
)
2797 target
= gen_reg_rtx (mode
);
/* Coerce both operands into the modes the libcall ABI expects.  */
2799 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2800 if (GET_MODE (op0
) != mode
)
2801 op0
= convert_to_mode (mode
, op0
, 0);
2802 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2803 if (GET_MODE (op1
) != mode2
)
2804 op1
= convert_to_mode (mode2
, op1
, 0);
/* __powi* has no side effects, hence LCT_CONST.  */
2806 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2807 target
, LCT_CONST
, mode
,
2808 op0
, mode
, op1
, mode2
);
2813 /* Expand expression EXP which is a call to the strlen builtin. Return
2814 NULL_RTX if we failed the caller should emit a normal call, otherwise
2815 try to get the result in TARGET, if convenient. */
2818 expand_builtin_strlen (tree exp
, rtx target
,
2819 machine_mode target_mode
)
/* NOTE(review): lossy extraction -- braces, several early "return
   NULL_RTX;" lines and guards are missing; code left byte-identical.  */
2821 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2825 struct expand_operand ops
[4];
2828 tree src
= CALL_EXPR_ARG (exp
, 0);
2830 rtx_insn
*before_strlen
;
2831 machine_mode insn_mode
;
2832 enum insn_code icode
= CODE_FOR_nothing
;
2835 /* If the length can be computed at compile-time, return it. */
2836 len
= c_strlen (src
, 0);
2838 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
)
;
2840 /* If the length can be computed at compile-time and is constant
2841 integer, but there are side-effects in src, evaluate
2842 src for side-effects, then return len.
2843 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2844 can be optimized into: i++; x = 3; */
2845 len
= c_strlen (src
, 1);
2846 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2848 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2849 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
)
;
2852 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2854 /* If SRC is not a pointer type, don't do this operation inline. */
2858 /* Bail out if we can't compute strlen in the right mode. */
2859 FOR_EACH_MODE_FROM (insn_mode
, target_mode
)
2861 icode
= optab_handler (strlen_optab
, insn_mode
);
2862 if (icode
!= CODE_FOR_nothing
)
2865 if (insn_mode
== VOIDmode
)
2868 /* Make a place to hold the source address. We will not expand
2869 the actual source until we are sure that the expansion will
2870 not fail -- there are trees that cannot be expanded twice. */
2871 src_reg
= gen_reg_rtx (Pmode
);
2873 /* Mark the beginning of the strlen sequence so we can emit the
2874 source operand later. */
2875 before_strlen
= get_last_insn ();
2877 create_output_operand (&ops
[0], target
, insn_mode
);
2878 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2879 create_integer_operand (&ops
[2], 0);
2880 create_integer_operand (&ops
[3], align
);
2881 if (!maybe_expand_insn (icode
, 4, ops
))
2884 /* Check to see if the argument was declared attribute nonstring
2885 and if so, issue a warning since at this point it's not known
2886 to be nul-terminated. */
2887 maybe_warn_nonstring_arg (TREE_OPERAND (CALL_EXPR_FN (exp
), 0), exp
);
2889 /* Now that we are assured of success, expand the source. */
2891 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
2894 #ifdef POINTERS_EXTEND_UNSIGNED
2895 if (GET_MODE (pat
) != Pmode
)
2896 pat
= convert_to_mode (Pmode
, pat
,
2897 POINTERS_EXTEND_UNSIGNED
);
2899 emit_move_insn (src_reg
, pat
);
/* Splice the deferred source computation in front of the strlen insn.  */
2905 emit_insn_after (pat
, before_strlen
);
2907 emit_insn_before (pat
, get_insns ());
2909 /* Return the value in the proper mode for this function. */
2910 if (GET_MODE (ops
[0].value
) == target_mode
)
2911 target
= ops
[0].value
;
2912 else if (target
!= 0)
2913 convert_move (target
, ops
[0].value
, 0);
2915 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
2921 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2922 bytes from constant string DATA + OFFSET and return it as target
2926 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
2927 scalar_int_mode mode
)
/* NOTE(review): lossy extraction -- the return type and braces are on
   missing lines; code left byte-identical.  DATA is the source string
   passed opaquely by store_by_pieces.  */
2929 const char *str
= (const char *) data
;
/* The read must stay inside the string including its terminating nul.  */
2931 gcc_assert (offset
>= 0
2932 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
2933 <= strlen (str
) + 1));
2935 return c_readstr (str
+ offset
, mode
)
;
2938 /* LEN specifies the length of the block of the memcpy/memset operation.
2939 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2940 In some cases we can make very likely guess on max size, then we
2941 set it into PROBABLE_MAX_SIZE. */
2944 determine_block_size (tree len
, rtx len_rtx
,
2945 unsigned HOST_WIDE_INT
*min_size
,
2946 unsigned HOST_WIDE_INT
*max_size
,
2947 unsigned HOST_WIDE_INT
*probable_max_size
)
/* NOTE(review): lossy extraction -- braces, an early return, wide_int
   declarations for min/max and part of a comment are on missing lines;
   code left byte-identical.  */
2949 if (CONST_INT_P (len_rtx
))
/* Constant length: all three bounds are exact.  */
2951 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
2957 enum value_range_type range_type
= VR_UNDEFINED
;
2959 /* Determine bounds from the type. */
2960 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
2961 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
2964 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
2965 *probable_max_size
= *max_size
2966 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
2968 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
/* Refine the type-derived bounds with SSA value-range information.  */
2970 if (TREE_CODE (len
) == SSA_NAME
)
2971 range_type
= get_range_info (len
, &min
, &max
);
2972 if (range_type
== VR_RANGE
)
2974 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
2975 *min_size
= min
.to_uhwi ();
2976 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
2977 *probable_max_size
= *max_size
= max
.to_uhwi ();
2979 else if (range_type
== VR_ANTI_RANGE
)
2981 /* Anti range 0...N lets us determine minimal size to N+1. */
2984 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
2985 *min_size
= max
.to_uhwi () + 1;
2993 Produce anti range allowing negative values of N. We still
2994 can use the information and make a guess that N is not negative.
2996 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
2997 *probable_max_size
= min
.to_uhwi () - 1;
/* Sanity check: the computed max must fit the mode of LEN_RTX.  */
3000 gcc_checking_assert (*max_size
<=
3001 (unsigned HOST_WIDE_INT
)
3002 GET_MODE_MASK (GET_MODE (len_rtx
)));
3005 /* Try to verify that the sizes and lengths of the arguments to a string
3006 manipulation function given by EXP are within valid bounds and that
3007 the operation does not lead to buffer overflow or read past the end.
3008 Arguments other than EXP may be null. When non-null, the arguments
3009 have the following meaning:
3010 DST is the destination of a copy call or NULL otherwise.
3011 SRC is the source of a copy call or NULL otherwise.
3012 DSTWRITE is the number of bytes written into the destination obtained
3013 from the user-supplied size argument to the function (such as in
3014 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3015 MAXREAD is the user-supplied bound on the length of the source sequence
3016 (such as in strncat(d, s, N). It specifies the upper limit on the number
3017 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3018 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3019 expression EXP is a string function call (as opposed to a memory call
3020 like memcpy). As an exception, SRCSTR can also be an integer denoting
3021 the precomputed size of the source string or object (for functions like
3023 DSTSIZE is the size of the destination object specified by the last
3024 argument to the _chk builtins, typically resulting from the expansion
3025 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3028 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3031 If the call is successfully verified as safe return true, otherwise
3035 check_access (tree exp
, tree
, tree
, tree dstwrite
,
3036 tree maxread
, tree srcstr
, tree dstsize
)
3038 int opt
= OPT_Wstringop_overflow_
;
3040 /* The size of the largest object is half the address space, or
3041 PTRDIFF_MAX. (This is way too permissive.) */
3042 tree maxobjsize
= max_object_size ();
3044 /* Either the length of the source string for string functions or
3045 the size of the source object for raw memory functions. */
3046 tree slen
= NULL_TREE
;
3048 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3050 /* Set to true when the exact number of bytes written by a string
3051 function like strcpy is not known and the only thing that is
3052 known is that it must be at least one (for the terminating nul). */
3053 bool at_least_one
= false;
3056 /* SRCSTR is normally a pointer to string but as a special case
3057 it can be an integer denoting the length of a string. */
3058 if (POINTER_TYPE_P (TREE_TYPE (srcstr
)))
3060 /* Try to determine the range of lengths the source string
3061 refers to. If it can be determined and is less than
3062 the upper bound given by MAXREAD add one to it for
3063 the terminating nul. Otherwise, set it to one for
3064 the same reason, or to MAXREAD as appropriate. */
3065 get_range_strlen (srcstr
, range
);
3066 if (range
[0] && (!maxread
|| TREE_CODE (maxread
) == INTEGER_CST
))
3068 if (maxread
&& tree_int_cst_le (maxread
, range
[0]))
3069 range
[0] = range
[1] = maxread
;
3071 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3072 range
[0], size_one_node
);
3074 if (maxread
&& tree_int_cst_le (maxread
, range
[1]))
3076 else if (!integer_all_onesp (range
[1]))
3077 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3078 range
[1], size_one_node
);
3084 at_least_one
= true;
3085 slen
= size_one_node
;
3092 if (!dstwrite
&& !maxread
)
3094 /* When the only available piece of data is the object size
3095 there is nothing to do. */
3099 /* Otherwise, when the length of the source sequence is known
3100 (as with strlen), set DSTWRITE to it. */
3106 dstsize
= maxobjsize
;
3109 get_size_range (dstwrite
, range
);
3111 tree func
= get_callee_fndecl (exp
);
3113 /* First check the number of bytes to be written against the maximum
3115 if (range
[0] && tree_int_cst_lt (maxobjsize
, range
[0]))
3117 location_t loc
= tree_nonartificial_location (exp
);
3118 loc
= expansion_point_location_if_in_system_header (loc
);
3120 if (range
[0] == range
[1])
3121 warning_at (loc
, opt
,
3122 "%K%qD specified size %E "
3123 "exceeds maximum object size %E",
3124 exp
, func
, range
[0], maxobjsize
);
3126 warning_at (loc
, opt
,
3127 "%K%qD specified size between %E and %E "
3128 "exceeds maximum object size %E",
3130 range
[0], range
[1], maxobjsize
);
3134 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3135 constant, and in range of unsigned HOST_WIDE_INT. */
3136 bool exactwrite
= dstwrite
&& tree_fits_uhwi_p (dstwrite
);
3138 /* Next check the number of bytes to be written against the destination
3140 if (range
[0] || !exactwrite
|| integer_all_onesp (dstwrite
))
3143 && ((tree_fits_uhwi_p (dstsize
)
3144 && tree_int_cst_lt (dstsize
, range
[0]))
3145 || (tree_fits_uhwi_p (dstwrite
)
3146 && tree_int_cst_lt (dstwrite
, range
[0]))))
3148 location_t loc
= tree_nonartificial_location (exp
);
3149 loc
= expansion_point_location_if_in_system_header (loc
);
3151 if (dstwrite
== slen
&& at_least_one
)
3153 /* This is a call to strcpy with a destination of 0 size
3154 and a source of unknown length. The call will write
3155 at least one byte past the end of the destination. */
3156 warning_at (loc
, opt
,
3157 "%K%qD writing %E or more bytes into a region "
3158 "of size %E overflows the destination",
3159 exp
, func
, range
[0], dstsize
);
3161 else if (tree_int_cst_equal (range
[0], range
[1]))
3162 warning_at (loc
, opt
,
3163 (integer_onep (range
[0])
3164 ? G_("%K%qD writing %E byte into a region "
3165 "of size %E overflows the destination")
3166 : G_("%K%qD writing %E bytes into a region "
3167 "of size %E overflows the destination")),
3168 exp
, func
, range
[0], dstsize
);
3169 else if (tree_int_cst_sign_bit (range
[1]))
3171 /* Avoid printing the upper bound if it's invalid. */
3172 warning_at (loc
, opt
,
3173 "%K%qD writing %E or more bytes into a region "
3174 "of size %E overflows the destination",
3175 exp
, func
, range
[0], dstsize
);
3178 warning_at (loc
, opt
,
3179 "%K%qD writing between %E and %E bytes into "
3180 "a region of size %E overflows the destination",
3181 exp
, func
, range
[0], range
[1],
3184 /* Return error when an overflow has been detected. */
3189 /* Check the maximum length of the source sequence against the size
3190 of the destination object if known, or against the maximum size
3194 get_size_range (maxread
, range
);
3196 /* Use the lower end for MAXREAD from now on. */
3200 if (range
[0] && dstsize
&& tree_fits_uhwi_p (dstsize
))
3202 location_t loc
= tree_nonartificial_location (exp
);
3203 loc
= expansion_point_location_if_in_system_header (loc
);
3205 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3207 /* Warn about crazy big sizes first since that's more
3208 likely to be meaningful than saying that the bound
3209 is greater than the object size if both are big. */
3210 if (range
[0] == range
[1])
3211 warning_at (loc
, opt
,
3212 "%K%qD specified bound %E "
3213 "exceeds maximum object size %E",
3215 range
[0], maxobjsize
);
3217 warning_at (loc
, opt
,
3218 "%K%qD specified bound between %E and %E "
3219 "exceeds maximum object size %E",
3221 range
[0], range
[1], maxobjsize
);
3226 if (dstsize
!= maxobjsize
&& tree_int_cst_lt (dstsize
, range
[0]))
3228 if (tree_int_cst_equal (range
[0], range
[1]))
3229 warning_at (loc
, opt
,
3230 "%K%qD specified bound %E "
3231 "exceeds destination size %E",
3235 warning_at (loc
, opt
,
3236 "%K%qD specified bound between %E and %E "
3237 "exceeds destination size %E",
3239 range
[0], range
[1], dstsize
);
3245 /* Check for reading past the end of SRC. */
3248 && dstwrite
&& range
[0]
3249 && tree_int_cst_lt (slen
, range
[0]))
3251 location_t loc
= tree_nonartificial_location (exp
);
3253 if (tree_int_cst_equal (range
[0], range
[1]))
3254 warning_at (loc
, opt
,
3255 (tree_int_cst_equal (range
[0], integer_one_node
)
3256 ? G_("%K%qD reading %E byte from a region of size %E")
3257 : G_("%K%qD reading %E bytes from a region of size %E")),
3258 exp
, func
, range
[0], slen
);
3259 else if (tree_int_cst_sign_bit (range
[1]))
3261 /* Avoid printing the upper bound if it's invalid. */
3262 warning_at (loc
, opt
,
3263 "%K%qD reading %E or more bytes from a region "
3265 exp
, func
, range
[0], slen
);
3268 warning_at (loc
, opt
,
3269 "%K%qD reading between %E and %E bytes from a region "
3271 exp
, func
, range
[0], range
[1], slen
);
3278 /* Helper to compute the size of the object referenced by the DEST
3279 expression which must have pointer type, using Object Size type
3280 OSTYPE (only the least significant 2 bits are used). Return
3281 an estimate of the size of the object if successful or NULL when
3282 the size cannot be determined. When the referenced object involves
3283 a non-constant offset in some range the returned value represents
3284 the largest size given the smallest non-negative offset in the
3285 range. The function is intended for diagnostics and should not
3286 be used to influence code generation or optimization. */
/* NOTE(review): this function's text appears to have been corrupted during
   extraction -- several statements (returns, braces, declarations) are
   missing between the numbered fragments below.  Comments added here
   describe only what the visible fragments establish; confirm against the
   pristine file before relying on them.  */
3289 compute_objsize (tree dest
, int ostype
)
3291 unsigned HOST_WIDE_INT size
;
3293 /* Only the two least significant bits are meaningful. */
/* First try the object-size machinery; on success the size is returned
   as a sizetype constant.  */
3296 if (compute_builtin_object_size (dest
, ostype
, &size
))
3297 return build_int_cst (sizetype
, size
);
/* For an SSA_NAME, look through its defining assignment.  */
3299 if (TREE_CODE (dest
) == SSA_NAME
)
3301 gimple
*stmt
= SSA_NAME_DEF_STMT (dest
);
3302 if (!is_gimple_assign (stmt
))
3305 dest
= gimple_assign_rhs1 (stmt
);
3307 tree_code code
= gimple_assign_rhs_code (stmt
);
3308 if (code
== POINTER_PLUS_EXPR
)
3310 /* compute_builtin_object_size fails for addresses with
3311 non-constant offsets. Try to determine the range of
3312 such an offset here and use it to adjust the constant
3314 tree off
= gimple_assign_rhs2 (stmt
);
3315 if (TREE_CODE (off
) == SSA_NAME
3316 && INTEGRAL_TYPE_P (TREE_TYPE (off
)))
/* Query the value range of the offset; MIN/MAX are presumably declared
   in a fragment missing here -- TODO confirm.  */
3319 enum value_range_type rng
= get_range_info (off
, &min
, &max
);
3321 if (rng
== VR_RANGE
)
/* Recurse on the base pointer and shrink its size by the smallest
   non-negative offset in the range.  */
3323 if (tree size
= compute_objsize (dest
, ostype
))
3325 wide_int wisiz
= wi::to_wide (size
);
3327 /* Ignore negative offsets for now. For others,
3328 use the lower bound as the most optimistic
3329 estimate of the (remaining)size. */
3330 if (wi::sign_mask (min
))
3332 else if (wi::ltu_p (min
, wisiz
))
3333 return wide_int_to_tree (TREE_TYPE (size
),
3334 wi::sub (wisiz
, min
));
3336 return size_zero_node
;
3341 else if (code
!= ADDR_EXPR
)
3345 /* Unless computing the largest size (for memcpy and other raw memory
3346 functions), try to determine the size of the object from its type. */
3350 if (TREE_CODE (dest
) != ADDR_EXPR
)
3353 tree type
= TREE_TYPE (dest
);
3354 if (TREE_CODE (type
) == POINTER_TYPE
)
3355 type
= TREE_TYPE (type
);
3357 type
= TYPE_MAIN_VARIANT (type
);
/* A complete array not at the end of a struct can use its type size as
   the object size.  */
3359 if (TREE_CODE (type
) == ARRAY_TYPE
3360 && !array_at_struct_end_p (dest
))
3362 /* Return the constant size unless it's zero (that's a zero-length
3363 array likely at the end of a struct). */
3364 tree size
= TYPE_SIZE_UNIT (type
);
3365 if (size
&& TREE_CODE (size
) == INTEGER_CST
3366 && !integer_zerop (size
))
3373 /* Helper to determine and check the sizes of the source and the destination
3374 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3375 call expression, DEST is the destination argument, SRC is the source
3376 argument or null, and LEN is the number of bytes. Use Object Size type-0
3377 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3378 (no overflow or invalid sizes), false otherwise. */
3381 check_memop_access (tree exp
, tree dest
, tree src
, tree size
)
3383 /* For functions like memset and memcpy that operate on raw memory
3384 try to determine the size of the largest source and destination
3385 object using type-0 Object Size regardless of the object size
3386 type specified by the option. */
3387 tree srcsize
= src
? compute_objsize (src
, 0) : NULL_TREE
;
3388 tree dstsize
= compute_objsize (dest
, 0);
3390 return check_access (exp
, dest
, src
, size
, /*maxread=*/NULL_TREE
,
3394 /* Validate memchr arguments without performing any expansion.
3398 expand_builtin_memchr (tree exp
, rtx
)
3400 if (!validate_arglist (exp
,
3401 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3404 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3405 tree len
= CALL_EXPR_ARG (exp
, 2);
3407 /* Diagnose calls where the specified length exceeds the size
3409 if (warn_stringop_overflow
)
3411 tree size
= compute_objsize (arg1
, 0);
3412 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
3413 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
);
3419 /* Expand a call EXP to the memcpy builtin.
3420 Return NULL_RTX if we failed, the caller should emit a normal call,
3421 otherwise try to get the result in TARGET, if convenient (and in
3422 mode MODE if that's convenient). */
3425 expand_builtin_memcpy (tree exp
, rtx target
)
3427 if (!validate_arglist (exp
,
3428 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3431 tree dest
= CALL_EXPR_ARG (exp
, 0);
3432 tree src
= CALL_EXPR_ARG (exp
, 1);
3433 tree len
= CALL_EXPR_ARG (exp
, 2);
3435 check_memop_access (exp
, dest
, src
, len
);
3437 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3441 /* Check a call EXP to the memmove built-in for validity.
3442 Return NULL_RTX on both success and failure. */
3445 expand_builtin_memmove (tree exp
, rtx
)
3447 if (!validate_arglist (exp
,
3448 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3451 tree dest
= CALL_EXPR_ARG (exp
, 0);
3452 tree src
= CALL_EXPR_ARG (exp
, 1);
3453 tree len
= CALL_EXPR_ARG (exp
, 2);
3455 check_memop_access (exp
, dest
, src
, len
);
3460 /* Expand an instrumented call EXP to the memcpy builtin.
3461 Return NULL_RTX if we failed, the caller should emit a normal call,
3462 otherwise try to get the result in TARGET, if convenient (and in
3463 mode MODE if that's convenient). */
3466 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3468 if (!validate_arglist (exp
,
3469 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3470 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3471 INTEGER_TYPE
, VOID_TYPE
))
3475 tree dest
= CALL_EXPR_ARG (exp
, 0);
3476 tree src
= CALL_EXPR_ARG (exp
, 2);
3477 tree len
= CALL_EXPR_ARG (exp
, 4);
3478 rtx res
= expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3481 /* Return src bounds with the result. */
3484 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3485 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3486 res
= chkp_join_splitted_slot (res
, bnd
);
3492 /* Expand a call EXP to the mempcpy builtin.
3493 Return NULL_RTX if we failed; the caller should emit a normal call,
3494 otherwise try to get the result in TARGET, if convenient (and in
3495 mode MODE if that's convenient). If ENDP is 0 return the
3496 destination pointer, if ENDP is 1 return the end pointer ala
3497 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3501 expand_builtin_mempcpy (tree exp
, rtx target
)
3503 if (!validate_arglist (exp
,
3504 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3507 tree dest
= CALL_EXPR_ARG (exp
, 0);
3508 tree src
= CALL_EXPR_ARG (exp
, 1);
3509 tree len
= CALL_EXPR_ARG (exp
, 2);
3511 /* Policy does not generally allow using compute_objsize (which
3512 is used internally by check_memop_size) to change code generation
3513 or drive optimization decisions.
3515 In this instance it is safe because the code we generate has
3516 the same semantics regardless of the return value of
3517 check_memop_sizes. Exactly the same amount of data is copied
3518 and the return value is exactly the same in both cases.
3520 Furthermore, check_memop_size always uses mode 0 for the call to
3521 compute_objsize, so the imprecise nature of compute_objsize is
3524 /* Avoid expanding mempcpy into memcpy when the call is determined
3525 to overflow the buffer. This also prevents the same overflow
3526 from being diagnosed again when expanding memcpy. */
3527 if (!check_memop_access (exp
, dest
, src
, len
))
3530 return expand_builtin_mempcpy_args (dest
, src
, len
,
3531 target
, exp
, /*endp=*/ 1);
3534 /* Expand an instrumented call EXP to the mempcpy builtin.
3535 Return NULL_RTX if we failed, the caller should emit a normal call,
3536 otherwise try to get the result in TARGET, if convenient (and in
3537 mode MODE if that's convenient). */
3540 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
)
3542 if (!validate_arglist (exp
,
3543 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3544 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3545 INTEGER_TYPE
, VOID_TYPE
))
3549 tree dest
= CALL_EXPR_ARG (exp
, 0);
3550 tree src
= CALL_EXPR_ARG (exp
, 2);
3551 tree len
= CALL_EXPR_ARG (exp
, 4);
3552 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3555 /* Return src bounds with the result. */
3558 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3559 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3560 res
= chkp_join_splitted_slot (res
, bnd
);
3566 /* Helper function to do the actual work for expand of memory copy family
3567 functions (memcpy, mempcpy, stpcpy). Expanding should assign LEN bytes
3568 of memory from SRC to DEST and assign to TARGET if convenient.
3569 If ENDP is 0 return the
3570 destination pointer, if ENDP is 1 return the end pointer ala
3571 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): text corrupted during extraction -- conditions, returns and
   braces are missing between the numbered fragments below.  Comments added
   here only describe what the visible fragments establish.  */
3575 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
3576 rtx target
, tree exp
, int endp
)
3578 const char *src_str
;
3579 unsigned int src_align
= get_pointer_alignment (src
);
3580 unsigned int dest_align
= get_pointer_alignment (dest
);
3581 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3582 HOST_WIDE_INT expected_size
= -1;
3583 unsigned int expected_align
= 0;
3584 unsigned HOST_WIDE_INT min_size
;
3585 unsigned HOST_WIDE_INT max_size
;
3586 unsigned HOST_WIDE_INT probable_max_size
;
3588 /* If DEST is not a pointer type, call the normal function. */
3589 if (dest_align
== 0)
3592 /* If either SRC is not a pointer type, don't do this
3593 operation in-line. */
/* Use profiling feedback, when available, to refine the expected
   alignment and size of the block operation.  */
3597 if (currently_expanding_gimple_stmt
)
3598 stringop_block_profile (currently_expanding_gimple_stmt
,
3599 &expected_align
, &expected_size
);
3601 if (expected_align
< dest_align
)
3602 expected_align
= dest_align
;
3603 dest_mem
= get_memory_rtx (dest
, len
);
3604 set_mem_align (dest_mem
, dest_align
);
3605 len_rtx
= expand_normal (len
);
3606 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3607 &probable_max_size
);
3608 src_str
= c_getstr (src
);
3610 /* If SRC is a string constant and block move would be done
3611 by pieces, we can avoid loading the string from memory
3612 and only store the computed constants. */
3614 && CONST_INT_P (len_rtx
)
3615 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3616 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3617 CONST_CAST (char *, src_str
),
3620 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3621 builtin_memcpy_read_str
,
3622 CONST_CAST (char *, src_str
),
3623 dest_align
, false, endp
)
;
3624 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3625 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3629 src_mem
= get_memory_rtx (src
, len
);
3630 set_mem_align (src_mem
, src_align
);
3632 /* Copy word part most expediently. */
3633 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3634 CALL_EXPR_TAILCALL (exp
)
3635 && (endp
== 0 || target
== const0_rtx
)
3636 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3637 expected_align
, expected_size
,
3638 min_size
, max_size
, probable_max_size
);
3642 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3643 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
/* For mempcpy/stpcpy-style returns, adjust the returned address past
   the copied bytes (minus one for ENDP == 2).  */
3646 if (endp
&& target
!= const0_rtx
)
3648 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3649 /* stpcpy pointer to last byte. */
3651 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
3658 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3659 rtx target
, tree orig_exp
, int endp
)
3661 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, orig_exp
,
3665 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3666 we failed, the caller should emit a normal call, otherwise try to
3667 get the result in TARGET, if convenient. If ENDP is 0 return the
3668 destination pointer, if ENDP is 1 return the end pointer ala
3669 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): text corrupted during extraction -- several statements
   (declarations, returns, braces) are missing between the numbered
   fragments below.  Comments describe only the visible fragments.  */
3673 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3675 struct expand_operand ops
[3];
/* Bail out when the target provides no movstr pattern.  */
3679 if (!targetm
.have_movstr ())
3682 dest_mem
= get_memory_rtx (dest
, NULL
);
3683 src_mem
= get_memory_rtx (src
, NULL
);
3686 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3687 dest_mem
= replace_equiv_address (dest_mem
, target
);
/* Set up the three movstr operands (output end pointer, dest, src) and
   try to emit the insn.  */
3690 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3691 create_fixed_operand (&ops
[1], dest_mem
);
3692 create_fixed_operand (&ops
[2], src_mem
);
3693 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3696 if (endp
&& target
!= const0_rtx
)
3698 target
= ops
[0].value
;
3699 /* movstr is supposed to set end to the address of the NUL
3700 terminator. If the caller requested a mempcpy-like return value,
/* Adjust by one so the returned pointer is one past the NUL, as
   mempcpy-style callers expect.  */
3704 rtx tem
= plus_constant (GET_MODE (target
),
3705 gen_lowpart (GET_MODE (target
), target
), 1);
3706 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3712 /* Do some very basic size validation of a call to the strcpy builtin
3713 given by EXP. Return NULL_RTX to have the built-in expand to a call
3714 to the library function. */
3717 expand_builtin_strcat (tree exp
, rtx
)
3719 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
)
3720 || !warn_stringop_overflow
)
3723 tree dest
= CALL_EXPR_ARG (exp
, 0);
3724 tree src
= CALL_EXPR_ARG (exp
, 1);
3726 /* There is no way here to determine the length of the string in
3727 the destination to which the SRC string is being appended so
3728 just diagnose cases when the souce string is longer than
3729 the destination object. */
3731 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3733 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
, src
,
3739 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3740 NULL_RTX if we failed the caller should emit a normal call, otherwise
3741 try to get the result in TARGET, if convenient (and in mode MODE if that's
3745 expand_builtin_strcpy (tree exp
, rtx target
)
3747 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3750 tree dest
= CALL_EXPR_ARG (exp
, 0);
3751 tree src
= CALL_EXPR_ARG (exp
, 1);
3753 if (warn_stringop_overflow
)
3755 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3756 check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
3760 return expand_builtin_strcpy_args (dest
, src
, target
);
3763 /* Helper function to do the actual work for expand_builtin_strcpy. The
3764 arguments to the builtin_strcpy call DEST and SRC are broken out
3765 so that this can also be called without constructing an actual CALL_EXPR.
3766 The other arguments and return value are the same as for
3767 expand_builtin_strcpy. */
3770 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3772 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3775 /* Expand a call EXP to the stpcpy builtin.
3776 Return NULL_RTX if we failed the caller should emit a normal call,
3777 otherwise try to get the result in TARGET, if convenient (and in
3778 mode MODE if that's convenient). */
/* NOTE(review): text corrupted during extraction -- declarations (e.g. DST,
   SRC, LEN, LENP1, RET), returns and braces are missing between the numbered
   fragments below.  Comments describe only the visible fragments.  */
3781 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3784 location_t loc
= EXPR_LOCATION (exp
);
3786 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3789 dst
= CALL_EXPR_ARG (exp
, 0);
3790 src
= CALL_EXPR_ARG (exp
, 1);
/* Diagnose a source longer than the destination object.  */
3792 if (warn_stringop_overflow
)
3794 tree destsize
= compute_objsize (dst
, warn_stringop_overflow
- 1);
3795 check_access (exp
, dst
, src
, /*size=*/NULL_TREE
, /*maxread=*/NULL_TREE
,
3799 /* If return value is ignored, transform stpcpy into strcpy. */
3800 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3802 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3803 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3804 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3811 /* Ensure we get an actual string whose length can be evaluated at
3812 compile-time, not an expression containing a string. This is
3813 because the latter will potentially produce pessimized code
3814 when used to produce the return value. */
3815 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3816 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* Known source length: copy LEN+1 bytes as mempcpy with ENDP == 2 so
   the result points at the copied NUL.  */
3818 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3819 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3820 target
, exp
, /*endp=*/2);
3825 if (TREE_CODE (len
) == INTEGER_CST
)
3827 rtx len_rtx
= expand_normal (len
);
3829 if (CONST_INT_P (len_rtx
))
3831 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
/* Compute the return value as DST + LEN in the requested mode.  */
3837 if (mode
!= VOIDmode
)
3838 target
= gen_reg_rtx (mode
);
3840 target
= gen_reg_rtx (GET_MODE (ret
));
3842 if (GET_MODE (target
) != GET_MODE (ret
))
3843 ret
= gen_lowpart (GET_MODE (target
), ret
);
3845 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3846 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3854 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3858 /* Check a call EXP to the stpncpy built-in for validity.
3859 Return NULL_RTX on both success and failure. */
3862 expand_builtin_stpncpy (tree exp
, rtx
)
3864 if (!validate_arglist (exp
,
3865 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
3866 || !warn_stringop_overflow
)
3869 /* The source and destination of the call. */
3870 tree dest
= CALL_EXPR_ARG (exp
, 0);
3871 tree src
= CALL_EXPR_ARG (exp
, 1);
3873 /* The exact number of bytes to write (not the maximum). */
3874 tree len
= CALL_EXPR_ARG (exp
, 2);
3876 /* The size of the destination object. */
3877 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3879 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
, destsize
);
3884 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3885 bytes from constant string DATA + OFFSET and return it as target
3889 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3890 scalar_int_mode mode
)
3892 const char *str
= (const char *) data
;
3894 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3897 return c_readstr (str
+ offset
, mode
);
3900 /* Helper to check the sizes of sequences and the destination of calls
3901 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3902 success (no overflow or invalid sizes), false otherwise. */
/* NOTE(review): text corrupted during extraction -- declarations (e.g.
   LENRANGE), returns and braces are missing between the numbered fragments
   below.  Comments describe only the visible fragments.  */
3905 check_strncat_sizes (tree exp
, tree objsize
)
3907 tree dest
= CALL_EXPR_ARG (exp
, 0);
3908 tree src
= CALL_EXPR_ARG (exp
, 1);
3909 tree maxread
= CALL_EXPR_ARG (exp
, 2);
3911 /* Try to determine the range of lengths that the source expression
3914 get_range_strlen (src
, lenrange
);
3916 /* Try to verify that the destination is big enough for the shortest
3919 if (!objsize
&& warn_stringop_overflow
)
3921 /* If it hasn't been provided by __strncat_chk, try to determine
3922 the size of the destination object into which the source is
3924 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3927 /* Add one for the terminating nul. */
3928 tree srclen
= (lenrange
[0]
3929 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3933 /* The strncat function copies at most MAXREAD bytes and always appends
3934 the terminating nul so the specified upper bound should never be equal
3935 to (or greater than) the size of the destination. */
3936 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (objsize
)
3937 && tree_int_cst_equal (objsize
, maxread
))
3939 location_t loc
= tree_nonartificial_location (exp
);
3940 loc
= expansion_point_location_if_in_system_header (loc
);
3942 warning_at (loc
, OPT_Wstringop_overflow_
,
3943 "%K%qD specified bound %E equals destination size",
3944 exp
, get_callee_fndecl (exp
), maxread
);
3950 || (maxread
&& tree_fits_uhwi_p (maxread
)
3951 && tree_fits_uhwi_p (srclen
)
3952 && tree_int_cst_lt (maxread
, srclen
)))
3955 /* The number of bytes to write is LEN but check_access will also
3956 check SRCLEN if LEN's value isn't known. */
3957 return check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, maxread
, srclen
,
3961 /* Similar to expand_builtin_strcat, do some very basic size validation
3962 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3963 the built-in expand to a call to the library function. */
/* NOTE(review): text corrupted during extraction -- declarations (e.g.
   LENRANGE), returns and braces are missing between the numbered fragments
   below.  Comments describe only the visible fragments.  */
3966 expand_builtin_strncat (tree exp
, rtx
)
3968 if (!validate_arglist (exp
,
3969 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
3970 || !warn_stringop_overflow
)
3973 tree dest
= CALL_EXPR_ARG (exp
, 0);
3974 tree src
= CALL_EXPR_ARG (exp
, 1);
3975 /* The upper bound on the number of bytes to write. */
3976 tree maxread
= CALL_EXPR_ARG (exp
, 2);
3977 /* The length of the source sequence. */
3978 tree slen
= c_strlen (src
, 1);
3980 /* Try to determine the range of lengths that the source expression
/* Seed both range endpoints with the constant length when known.  */
3984 lenrange
[0] = lenrange
[1] = slen
;
3986 get_range_strlen (src
, lenrange
);
3988 /* Try to verify that the destination is big enough for the shortest
3989 string. First try to determine the size of the destination object
3990 into which the source is being copied. */
3991 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3993 /* Add one for the terminating nul. */
3994 tree srclen
= (lenrange
[0]
3995 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3999 /* The strncat function copies at most MAXREAD bytes and always appends
4000 the terminating nul so the specified upper bound should never be equal
4001 to (or greater than) the size of the destination. */
4002 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (destsize
)
4003 && tree_int_cst_equal (destsize
, maxread
))
4005 location_t loc
= tree_nonartificial_location (exp
);
4006 loc
= expansion_point_location_if_in_system_header (loc
);
4008 warning_at (loc
, OPT_Wstringop_overflow_
,
4009 "%K%qD specified bound %E equals destination size",
4010 exp
, get_callee_fndecl (exp
), maxread
);
4016 || (maxread
&& tree_fits_uhwi_p (maxread
)
4017 && tree_fits_uhwi_p (srclen
)
4018 && tree_int_cst_lt (maxread
, srclen
)))
4021 /* The number of bytes to write is SRCLEN. */
4022 check_access (exp
, dest
, src
, NULL_TREE
, maxread
, srclen
, destsize
);
4027 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4028 NULL_RTX if we failed the caller should emit a normal call. */
/* NOTE(review): text corrupted during extraction -- returns, braces and
   some declarations (e.g. DEST_MEM) are missing between the numbered
   fragments below.  Comments describe only the visible fragments.  */
4031 expand_builtin_strncpy (tree exp
, rtx target
)
4033 location_t loc
= EXPR_LOCATION (exp
);
4035 if (validate_arglist (exp
,
4036 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4038 tree dest
= CALL_EXPR_ARG (exp
, 0);
4039 tree src
= CALL_EXPR_ARG (exp
, 1);
4040 /* The number of bytes to write (not the maximum). */
4041 tree len
= CALL_EXPR_ARG (exp
, 2);
4042 /* The length of the source sequence. */
4043 tree slen
= c_strlen (src
, 1);
/* Diagnose writes that would overflow the destination object.  */
4045 if (warn_stringop_overflow
)
4047 tree destsize
= compute_objsize (dest
,
4048 warn_stringop_overflow
- 1);
4050 /* The number of bytes to write is LEN but check_access will also
4051 check SLEN if LEN's value isn't known. */
4052 check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
, src
,
4056 /* We must be passed a constant len and src parameter. */
4057 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
4060 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
4062 /* We're required to pad with trailing zeros if the requested
4063 len is greater than strlen(s2)+1. In that case try to
4064 use store_by_pieces, if it fails, punt. */
4065 if (tree_int_cst_lt (slen
, len
))
4067 unsigned int dest_align
= get_pointer_alignment (dest
);
4068 const char *p
= c_getstr (src
);
4071 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
4072 || !can_store_by_pieces (tree_to_uhwi (len
),
4073 builtin_strncpy_read_str
,
4074 CONST_CAST (char *, p
),
/* Emit the copy (with zero padding) by pieces and return the
   destination address in ptr_mode.  */
4078 dest_mem
= get_memory_rtx (dest
, len
);
4079 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4080 builtin_strncpy_read_str
,
4081 CONST_CAST (char *, p
), dest_align
, false, 0);
4082 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
4083 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4090 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4091 bytes from constant string DATA + OFFSET and return it as target
4095 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4096 scalar_int_mode mode
)
4098 const char *c
= (const char *) data
;
4099 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
4101 memset (p
, *c
, GET_MODE_SIZE (mode
));
4103 return c_readstr (p
, mode
);
4106 /* Callback routine for store_by_pieces. Return the RTL of a register
4107 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4108 char value given in the RTL register data. For example, if mode is
4109 4 bytes wide, return the RTL for 0x01010101*data. */
4112 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
4113 scalar_int_mode mode
)
4119 size
= GET_MODE_SIZE (mode
);
4123 p
= XALLOCAVEC (char, size
);
4124 memset (p
, 1, size
);
4125 coeff
= c_readstr (p
, mode
);
4127 target
= convert_to_mode (mode
, (rtx
) data
, 1);
4128 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
4129 return force_reg (mode
, target
);
4132 /* Expand expression EXP, which is a call to the memset builtin. Return
4133 NULL_RTX if we failed the caller should emit a normal call, otherwise
4134 try to get the result in TARGET, if convenient (and in mode MODE if that's
4138 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
4140 if (!validate_arglist (exp
,
4141 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4144 tree dest
= CALL_EXPR_ARG (exp
, 0);
4145 tree val
= CALL_EXPR_ARG (exp
, 1);
4146 tree len
= CALL_EXPR_ARG (exp
, 2);
4148 check_memop_access (exp
, dest
, NULL_TREE
, len
);
4150 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
4153 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4154 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4155 try to get the result in TARGET, if convenient (and in mode MODE if that's
4159 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
4161 if (!validate_arglist (exp
,
4162 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
4163 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4167 tree dest
= CALL_EXPR_ARG (exp
, 0);
4168 tree val
= CALL_EXPR_ARG (exp
, 2);
4169 tree len
= CALL_EXPR_ARG (exp
, 3);
4170 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
4172 /* Return src bounds with the result. */
4175 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
4176 expand_normal (CALL_EXPR_ARG (exp
, 1)));
4177 res
= chkp_join_splitted_slot (res
, bnd
);
4183 /* Helper function to do the actual work for expand_builtin_memset. The
4184 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4185 so that this can also be called without constructing an actual CALL_EXPR.
4186 The other arguments and return value are the same as for
4187 expand_builtin_memset. */
/* NOTE(review): text corrupted during extraction -- returns, braces and
   some declarations (e.g. C, VAL_RTX, FN, FNDECL) are missing between the
   numbered fragments below.  Comments describe only the visible
   fragments.  */
4190 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4191 rtx target
, machine_mode mode
, tree orig_exp
)
4194 enum built_in_function fcode
;
4195 machine_mode val_mode
;
4197 unsigned int dest_align
;
4198 rtx dest_mem
, dest_addr
, len_rtx
;
4199 HOST_WIDE_INT expected_size
= -1;
4200 unsigned int expected_align
= 0;
4201 unsigned HOST_WIDE_INT min_size
;
4202 unsigned HOST_WIDE_INT max_size
;
4203 unsigned HOST_WIDE_INT probable_max_size
;
4205 dest_align
= get_pointer_alignment (dest
);
4207 /* If DEST is not a pointer type, don't do this operation in-line. */
4208 if (dest_align
== 0)
/* Use profiling feedback, when available, for alignment/size hints.  */
4211 if (currently_expanding_gimple_stmt
)
4212 stringop_block_profile (currently_expanding_gimple_stmt
,
4213 &expected_align
, &expected_size
);
4215 if (expected_align
< dest_align
)
4216 expected_align
= dest_align
;
4218 /* If the LEN parameter is zero, return DEST. */
4219 if (integer_zerop (len
))
4221 /* Evaluate and ignore VAL in case it has side-effects. */
4222 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4223 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4226 /* Stabilize the arguments in case we fail. */
4227 dest
= builtin_save_expr (dest
);
4228 val
= builtin_save_expr (val
);
4229 len
= builtin_save_expr (len
);
4231 len_rtx
= expand_normal (len
);
4232 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4233 &probable_max_size
);
4234 dest_mem
= get_memory_rtx (dest
, len
);
4235 val_mode
= TYPE_MODE (unsigned_char_type_node
);
/* Non-constant fill value: expand it and try store-by-pieces or the
   target's setmem pattern.  */
4237 if (TREE_CODE (val
) != INTEGER_CST
)
4241 val_rtx
= expand_normal (val
);
4242 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4244 /* Assume that we can memset by pieces if we can store
4245 * the coefficients by pieces (in the required modes).
4246 * We can't pass builtin_memset_gen_str as that emits RTL. */
4248 if (tree_fits_uhwi_p (len
)
4249 && can_store_by_pieces (tree_to_uhwi (len
),
4250 builtin_memset_read_str
, &c
, dest_align
,
4253 val_rtx
= force_reg (val_mode
, val_rtx
);
4254 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4255 builtin_memset_gen_str
, val_rtx
, dest_align
,
4258 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4259 dest_align
, expected_align
,
4260 expected_size
, min_size
, max_size
,
4264 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4265 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Constant fill value: convert to the target character and use
   store-by-pieces or setmem.  */
4269 if (target_char_cast (val
, &c
))
4274 if (tree_fits_uhwi_p (len
)
4275 && can_store_by_pieces (tree_to_uhwi (len
),
4276 builtin_memset_read_str
, &c
, dest_align
,
4278 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4279 builtin_memset_read_str
, &c
, dest_align
, true, 0);
4280 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4281 gen_int_mode (c
, val_mode
),
4282 dest_align
, expected_align
,
4283 expected_size
, min_size
, max_size
,
4287 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4288 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Zero fill: use clear_storage_hints.  */
4292 set_mem_align (dest_mem
, dest_align
);
4293 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4294 CALL_EXPR_TAILCALL (orig_exp
)
4295 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4296 expected_align
, expected_size
,
4302 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4303 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
/* Fallback: rebuild a plain library call to memset or bzero and expand
   that instead.  */
4309 fndecl
= get_callee_fndecl (orig_exp
);
4310 fcode
= DECL_FUNCTION_CODE (fndecl
);
4311 if (fcode
== BUILT_IN_MEMSET
4312 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
4313 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4315 else if (fcode
== BUILT_IN_BZERO
)
4316 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4320 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4321 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4322 return expand_call (fn
, target
, target
== const0_rtx
);
4325 /* Expand expression EXP, which is a call to the bzero builtin. Return
4326 NULL_RTX if we failed the caller should emit a normal call. */
4329 expand_builtin_bzero (tree exp
)
4331 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4334 tree dest
= CALL_EXPR_ARG (exp
, 0);
4335 tree size
= CALL_EXPR_ARG (exp
, 1);
4337 check_memop_access (exp
, dest
, NULL_TREE
, size
);
4339 /* New argument list transforming bzero(ptr x, int y) to
4340 memset(ptr x, int 0, size_t y). This is done this way
4341 so that if it isn't expanded inline, we fallback to
4342 calling bzero instead of memset. */
4344 location_t loc
= EXPR_LOCATION (exp
);
4346 return expand_builtin_memset_args (dest
, integer_zero_node
,
4347 fold_convert_loc (loc
,
4348 size_type_node
, size
),
4349 const0_rtx
, VOIDmode
, exp
);
4352 /* Try to expand cmpstr operation ICODE with the given operands.
4353 Return the result rtx on success, otherwise return null. */
4356 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
4357 HOST_WIDE_INT align
)
4359 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
4361 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
4364 struct expand_operand ops
[4];
4365 create_output_operand (&ops
[0], target
, insn_mode
);
4366 create_fixed_operand (&ops
[1], arg1_rtx
);
4367 create_fixed_operand (&ops
[2], arg2_rtx
);
4368 create_integer_operand (&ops
[3], align
);
4369 if (maybe_expand_insn (icode
, 4, ops
))
4370 return ops
[0].value
;
4374 /* Expand expression EXP, which is a call to the memcmp built-in function.
4375 Return NULL_RTX if we failed and the caller should emit a normal call,
4376 otherwise try to get the result in TARGET, if convenient.
4377 RESULT_EQ is true if we can relax the returned value to be either zero
4378 or nonzero, without caring about the sign. */
4381 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4383 if (!validate_arglist (exp
,
4384 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4387 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4388 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4389 tree len
= CALL_EXPR_ARG (exp
, 2);
4391 /* Diagnose calls where the specified length exceeds the size of either
4393 if (warn_stringop_overflow
)
4395 tree size
= compute_objsize (arg1
, 0);
4396 if (check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
4397 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
))
4399 size
= compute_objsize (arg2
, 0);
4400 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
4401 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
);
4405 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4406 location_t loc
= EXPR_LOCATION (exp
);
4408 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4409 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4411 /* If we don't have POINTER_TYPE, call the function. */
4412 if (arg1_align
== 0 || arg2_align
== 0)
4415 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4416 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4417 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4419 /* Set MEM_SIZE as appropriate. */
4420 if (CONST_INT_P (len_rtx
))
4422 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4423 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4426 by_pieces_constfn constfn
= NULL
;
4428 const char *src_str
= c_getstr (arg2
);
4429 if (result_eq
&& src_str
== NULL
)
4431 src_str
= c_getstr (arg1
);
4432 if (src_str
!= NULL
)
4433 std::swap (arg1_rtx
, arg2_rtx
);
4436 /* If SRC is a string constant and block move would be done
4437 by pieces, we can avoid loading the string from memory
4438 and only stored the computed constants. */
4440 && CONST_INT_P (len_rtx
)
4441 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1)
4442 constfn
= builtin_memcpy_read_str
;
4444 rtx result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4445 TREE_TYPE (len
), target
,
4447 CONST_CAST (char *, src_str
));
4451 /* Return the value in the proper mode for this function. */
4452 if (GET_MODE (result
) == mode
)
4457 convert_move (target
, result
, 0);
4461 return convert_to_mode (mode
, result
, 0);
4467 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4468 if we failed the caller should emit a normal call, otherwise try to get
4469 the result in TARGET, if convenient. */
4472 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4474 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4477 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4478 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4479 if (cmpstr_icode
!= CODE_FOR_nothing
|| cmpstrn_icode
!= CODE_FOR_nothing
)
4481 rtx arg1_rtx
, arg2_rtx
;
4483 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4484 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4485 rtx result
= NULL_RTX
;
4487 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4488 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4490 /* If we don't have POINTER_TYPE, call the function. */
4491 if (arg1_align
== 0 || arg2_align
== 0)
4494 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4495 arg1
= builtin_save_expr (arg1
);
4496 arg2
= builtin_save_expr (arg2
);
4498 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4499 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4501 /* Try to call cmpstrsi. */
4502 if (cmpstr_icode
!= CODE_FOR_nothing
)
4503 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4504 MIN (arg1_align
, arg2_align
));
4506 /* Try to determine at least one length and call cmpstrnsi. */
4507 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4512 tree len1
= c_strlen (arg1
, 1);
4513 tree len2
= c_strlen (arg2
, 1);
4516 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4518 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4520 /* If we don't have a constant length for the first, use the length
4521 of the second, if we know it. We don't require a constant for
4522 this case; some cost analysis could be done if both are available
4523 but neither is constant. For now, assume they're equally cheap,
4524 unless one has side effects. If both strings have constant lengths,
4531 else if (TREE_SIDE_EFFECTS (len1
))
4533 else if (TREE_SIDE_EFFECTS (len2
))
4535 else if (TREE_CODE (len1
) != INTEGER_CST
)
4537 else if (TREE_CODE (len2
) != INTEGER_CST
)
4539 else if (tree_int_cst_lt (len1
, len2
))
4544 /* If both arguments have side effects, we cannot optimize. */
4545 if (len
&& !TREE_SIDE_EFFECTS (len
))
4547 arg3_rtx
= expand_normal (len
);
4548 result
= expand_cmpstrn_or_cmpmem
4549 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4550 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4556 /* Return the value in the proper mode for this function. */
4557 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4558 if (GET_MODE (result
) == mode
)
4561 return convert_to_mode (mode
, result
, 0);
4562 convert_move (target
, result
, 0);
4566 /* Expand the library call ourselves using a stabilized argument
4567 list to avoid re-evaluating the function's arguments twice. */
4568 fndecl
= get_callee_fndecl (exp
);
4569 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4570 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4571 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4572 return expand_call (fn
, target
, target
== const0_rtx
);
4577 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4578 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4579 the result in TARGET, if convenient. */
4582 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4583 ATTRIBUTE_UNUSED machine_mode mode
)
4585 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4587 if (!validate_arglist (exp
,
4588 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4591 /* If c_strlen can determine an expression for one of the string
4592 lengths, and it doesn't have side effects, then emit cmpstrnsi
4593 using length MIN(strlen(string)+1, arg3). */
4594 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4595 if (cmpstrn_icode
!= CODE_FOR_nothing
)
4597 tree len
, len1
, len2
, len3
;
4598 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4601 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4602 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4603 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4605 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4606 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4608 len1
= c_strlen (arg1
, 1);
4609 len2
= c_strlen (arg2
, 1);
4612 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4614 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4616 len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4618 /* If we don't have a constant length for the first, use the length
4619 of the second, if we know it. If neither string is constant length,
4620 use the given length argument. We don't require a constant for
4621 this case; some cost analysis could be done if both are available
4622 but neither is constant. For now, assume they're equally cheap,
4623 unless one has side effects. If both strings have constant lengths,
4632 else if (TREE_SIDE_EFFECTS (len1
))
4634 else if (TREE_SIDE_EFFECTS (len2
))
4636 else if (TREE_CODE (len1
) != INTEGER_CST
)
4638 else if (TREE_CODE (len2
) != INTEGER_CST
)
4640 else if (tree_int_cst_lt (len1
, len2
))
4645 /* If we are not using the given length, we must incorporate it here.
4646 The actual new length parameter will be MIN(len,arg3) in this case. */
4648 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
4649 arg1_rtx
= get_memory_rtx (arg1
, len
);
4650 arg2_rtx
= get_memory_rtx (arg2
, len
);
4651 arg3_rtx
= expand_normal (len
);
4652 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4653 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4654 MIN (arg1_align
, arg2_align
));
4657 /* Return the value in the proper mode for this function. */
4658 mode
= TYPE_MODE (TREE_TYPE (exp
));
4659 if (GET_MODE (result
) == mode
)
4662 return convert_to_mode (mode
, result
, 0);
4663 convert_move (target
, result
, 0);
4667 /* Expand the library call ourselves using a stabilized argument
4668 list to avoid re-evaluating the function's arguments twice. */
4669 fndecl
= get_callee_fndecl (exp
);
4670 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4672 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4673 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4674 return expand_call (fn
, target
, target
== const0_rtx
);
4679 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4680 if that's convenient. */
4683 expand_builtin_saveregs (void)
4688 /* Don't do __builtin_saveregs more than once in a function.
4689 Save the result of the first call and reuse it. */
4690 if (saveregs_value
!= 0)
4691 return saveregs_value
;
4693 /* When this function is called, it means that registers must be
4694 saved on entry to this function. So we migrate the call to the
4695 first insn of this function. */
4699 /* Do whatever the machine needs done in this case. */
4700 val
= targetm
.calls
.expand_builtin_saveregs ();
4705 saveregs_value
= val
;
4707 /* Put the insns after the NOTE that starts the function. If this
4708 is inside a start_sequence, make the outer-level insn chain current, so
4709 the code is placed at the start of the function. */
4710 push_topmost_sequence ();
4711 emit_insn_after (seq
, entry_of_function ());
4712 pop_topmost_sequence ();
4717 /* Expand a call to __builtin_next_arg. */
4720 expand_builtin_next_arg (void)
4722 /* Checking arguments is already done in fold_builtin_next_arg
4723 that must be called before this function. */
4724 return expand_binop (ptr_mode
, add_optab
,
4725 crtl
->args
.internal_arg_pointer
,
4726 crtl
->args
.arg_offset_rtx
,
4727 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4730 /* Make it easier for the backends by protecting the valist argument
4731 from multiple evaluations. */
4734 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4736 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4738 /* The current way of determining the type of valist is completely
4739 bogus. We should have the information on the va builtin instead. */
4741 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4743 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4745 if (TREE_SIDE_EFFECTS (valist
))
4746 valist
= save_expr (valist
);
4748 /* For this case, the backends will be expecting a pointer to
4749 vatype, but it's possible we've actually been given an array
4750 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4752 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4754 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4755 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4760 tree pt
= build_pointer_type (vatype
);
4764 if (! TREE_SIDE_EFFECTS (valist
))
4767 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4768 TREE_SIDE_EFFECTS (valist
) = 1;
4771 if (TREE_SIDE_EFFECTS (valist
))
4772 valist
= save_expr (valist
);
4773 valist
= fold_build2_loc (loc
, MEM_REF
,
4774 vatype
, valist
, build_int_cst (pt
, 0));
4780 /* The "standard" definition of va_list is void*. */
4783 std_build_builtin_va_list (void)
4785 return ptr_type_node
;
4788 /* The "standard" abi va_list is va_list_type_node. */
4791 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4793 return va_list_type_node
;
4796 /* The "standard" type of va_list is va_list_type_node. */
4799 std_canonical_va_list_type (tree type
)
4803 wtype
= va_list_type_node
;
4806 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4808 /* If va_list is an array type, the argument may have decayed
4809 to a pointer type, e.g. by being passed to another function.
4810 In that case, unwrap both types so that we can compare the
4811 underlying records. */
4812 if (TREE_CODE (htype
) == ARRAY_TYPE
4813 || POINTER_TYPE_P (htype
))
4815 wtype
= TREE_TYPE (wtype
);
4816 htype
= TREE_TYPE (htype
);
4819 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4820 return va_list_type_node
;
4825 /* The "standard" implementation of va_start: just assign `nextarg' to
4829 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4831 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4832 convert_move (va_r
, nextarg
, 0);
4834 /* We do not have any valid bounds for the pointer, so
4835 just store zero bounds for it. */
4836 if (chkp_function_instrumented_p (current_function_decl
))
4837 chkp_expand_bounds_reset_for_mem (valist
,
4838 make_tree (TREE_TYPE (valist
),
4842 /* Expand EXP, a call to __builtin_va_start. */
4845 expand_builtin_va_start (tree exp
)
4849 location_t loc
= EXPR_LOCATION (exp
);
4851 if (call_expr_nargs (exp
) < 2)
4853 error_at (loc
, "too few arguments to function %<va_start%>");
4857 if (fold_builtin_next_arg (exp
, true))
4860 nextarg
= expand_builtin_next_arg ();
4861 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4863 if (targetm
.expand_builtin_va_start
)
4864 targetm
.expand_builtin_va_start (valist
, nextarg
);
4866 std_expand_builtin_va_start (valist
, nextarg
);
4871 /* Expand EXP, a call to __builtin_va_end. */
4874 expand_builtin_va_end (tree exp
)
4876 tree valist
= CALL_EXPR_ARG (exp
, 0);
4878 /* Evaluate for side effects, if needed. I hate macros that don't
4880 if (TREE_SIDE_EFFECTS (valist
))
4881 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4886 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4887 builtin rather than just as an assignment in stdarg.h because of the
4888 nastiness of array-type va_list types. */
4891 expand_builtin_va_copy (tree exp
)
4894 location_t loc
= EXPR_LOCATION (exp
);
4896 dst
= CALL_EXPR_ARG (exp
, 0);
4897 src
= CALL_EXPR_ARG (exp
, 1);
4899 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4900 src
= stabilize_va_list_loc (loc
, src
, 0);
4902 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4904 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4906 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4907 TREE_SIDE_EFFECTS (t
) = 1;
4908 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4912 rtx dstb
, srcb
, size
;
4914 /* Evaluate to pointers. */
4915 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4916 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4917 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4918 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4920 dstb
= convert_memory_address (Pmode
, dstb
);
4921 srcb
= convert_memory_address (Pmode
, srcb
);
4923 /* "Dereference" to BLKmode memories. */
4924 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4925 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4926 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4927 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4928 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4929 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4932 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4938 /* Expand a call to one of the builtin functions __builtin_frame_address or
4939 __builtin_return_address. */
4942 expand_builtin_frame_address (tree fndecl
, tree exp
)
4944 /* The argument must be a nonnegative integer constant.
4945 It counts the number of frames to scan up the stack.
4946 The value is either the frame pointer value or the return
4947 address saved in that frame. */
4948 if (call_expr_nargs (exp
) == 0)
4949 /* Warning about missing arg was already issued. */
4951 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4953 error ("invalid argument to %qD", fndecl
);
4958 /* Number of frames to scan up the stack. */
4959 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4961 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4963 /* Some ports cannot access arbitrary stack frames. */
4966 warning (0, "unsupported argument to %qD", fndecl
);
4972 /* Warn since no effort is made to ensure that any frame
4973 beyond the current one exists or can be safely reached. */
4974 warning (OPT_Wframe_address
, "calling %qD with "
4975 "a nonzero argument is unsafe", fndecl
);
4978 /* For __builtin_frame_address, return what we've got. */
4979 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4983 && ! CONSTANT_P (tem
))
4984 tem
= copy_addr_to_reg (tem
);
4989 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4990 failed and the caller should emit a normal call. */
4993 expand_builtin_alloca (tree exp
)
4998 tree fndecl
= get_callee_fndecl (exp
);
4999 HOST_WIDE_INT max_size
;
5000 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5001 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
5003 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5004 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
5006 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
5007 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5008 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
5013 if ((alloca_for_var
&& !warn_vla_limit
)
5014 || (!alloca_for_var
&& !warn_alloca_limit
))
5016 /* -Walloca-larger-than and -Wvla-larger-than settings override
5017 the more general -Walloc-size-larger-than so unless either of
5018 the former options is specified check the alloca arguments for
5020 tree args
[] = { CALL_EXPR_ARG (exp
, 0), NULL_TREE
};
5021 int idx
[] = { 0, -1 };
5022 maybe_warn_alloc_args_overflow (fndecl
, exp
, args
, idx
);
5025 /* Compute the argument. */
5026 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5028 /* Compute the alignment. */
5029 align
= (fcode
== BUILT_IN_ALLOCA
5031 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
5033 /* Compute the maximum size. */
5034 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5035 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
5038 /* Allocate the desired space. If the allocation stems from the declaration
5039 of a variable-sized object, it cannot accumulate. */
5041 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
5042 result
= convert_memory_address (ptr_mode
, result
);
5047 /* Emit a call to __asan_allocas_unpoison call in EXP. Replace second argument
5048 of the call with virtual_stack_dynamic_rtx because in asan pass we emit a
5049 dummy value into second parameter relying on this function to perform the
5050 change. See motivation for this in comment to handle_builtin_stack_restore
5054 expand_asan_emit_allocas_unpoison (tree exp
)
5056 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5057 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5058 rtx bot
= convert_memory_address (ptr_mode
, virtual_stack_dynamic_rtx
);
5059 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
5060 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
5061 top
, ptr_mode
, bot
, ptr_mode
);
5065 /* Expand a call to bswap builtin in EXP.
5066 Return NULL_RTX if a normal call should be emitted rather than expanding the
5067 function in-line. If convenient, the result should be placed in TARGET.
5068 SUBTARGET may be used as the target for computing one of EXP's operands. */
5071 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
5077 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5080 arg
= CALL_EXPR_ARG (exp
, 0);
5081 op0
= expand_expr (arg
,
5082 subtarget
&& GET_MODE (subtarget
) == target_mode
5083 ? subtarget
: NULL_RTX
,
5084 target_mode
, EXPAND_NORMAL
);
5085 if (GET_MODE (op0
) != target_mode
)
5086 op0
= convert_to_mode (target_mode
, op0
, 1);
5088 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
5090 gcc_assert (target
);
5092 return convert_to_mode (target_mode
, target
, 1);
5095 /* Expand a call to a unary builtin in EXP.
5096 Return NULL_RTX if a normal call should be emitted rather than expanding the
5097 function in-line. If convenient, the result should be placed in TARGET.
5098 SUBTARGET may be used as the target for computing one of EXP's operands. */
5101 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
5102 rtx subtarget
, optab op_optab
)
5106 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5109 /* Compute the argument. */
5110 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
5112 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
5113 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
5114 VOIDmode
, EXPAND_NORMAL
);
5115 /* Compute op, into TARGET if possible.
5116 Set TARGET to wherever the result comes back. */
5117 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5118 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
5119 gcc_assert (target
);
5121 return convert_to_mode (target_mode
, target
, 0);
5124 /* Expand a call to __builtin_expect. We just return our argument
5125 as the builtin_expect semantic should've been already executed by
5126 tree branch prediction pass. */
5129 expand_builtin_expect (tree exp
, rtx target
)
5133 if (call_expr_nargs (exp
) < 2)
5135 arg
= CALL_EXPR_ARG (exp
, 0);
5137 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5138 /* When guessing was done, the hints should be already stripped away. */
5139 gcc_assert (!flag_guess_branch_prob
5140 || optimize
== 0 || seen_error ());
5144 /* Expand a call to __builtin_assume_aligned. We just return our first
5145 argument as the builtin_assume_aligned semantic should've been already
5149 expand_builtin_assume_aligned (tree exp
, rtx target
)
5151 if (call_expr_nargs (exp
) < 2)
5153 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
5155 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
5156 && (call_expr_nargs (exp
) < 3
5157 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
5162 expand_builtin_trap (void)
5164 if (targetm
.have_trap ())
5166 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
5167 /* For trap insns when not accumulating outgoing args force
5168 REG_ARGS_SIZE note to prevent crossjumping of calls with
5169 different args sizes. */
5170 if (!ACCUMULATE_OUTGOING_ARGS
)
5171 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
5175 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
5176 tree call_expr
= build_call_expr (fn
, 0);
5177 expand_call (call_expr
, NULL_RTX
, false);
5183 /* Expand a call to __builtin_unreachable. We do nothing except emit
5184 a barrier saying that control flow will not pass here.
5186 It is the responsibility of the program being compiled to ensure
5187 that control flow does never reach __builtin_unreachable. */
5189 expand_builtin_unreachable (void)
5194 /* Expand EXP, a call to fabs, fabsf or fabsl.
5195 Return NULL_RTX if a normal call should be emitted rather than expanding
5196 the function inline. If convenient, the result should be placed
5197 in TARGET. SUBTARGET may be used as the target for computing
5201 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5207 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5210 arg
= CALL_EXPR_ARG (exp
, 0);
5211 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5212 mode
= TYPE_MODE (TREE_TYPE (arg
));
5213 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5214 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
5217 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5218 Return NULL is a normal call should be emitted rather than expanding the
5219 function inline. If convenient, the result should be placed in TARGET.
5220 SUBTARGET may be used as the target for computing the operand. */
5223 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5228 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5231 arg
= CALL_EXPR_ARG (exp
, 0);
5232 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5234 arg
= CALL_EXPR_ARG (exp
, 1);
5235 op1
= expand_normal (arg
);
5237 return expand_copysign (op0
, op1
, target
);
5240 /* Expand a call to __builtin___clear_cache. */
5243 expand_builtin___clear_cache (tree exp
)
5245 if (!targetm
.code_for_clear_cache
)
5247 #ifdef CLEAR_INSN_CACHE
5248 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5249 does something. Just do the default expansion to a call to
5253 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5254 does nothing. There is no need to call it. Do nothing. */
5256 #endif /* CLEAR_INSN_CACHE */
5259 /* We have a "clear_cache" insn, and it will handle everything. */
5261 rtx begin_rtx
, end_rtx
;
5263 /* We must not expand to a library call. If we did, any
5264 fallback library function in libgcc that might contain a call to
5265 __builtin___clear_cache() would recurse infinitely. */
5266 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5268 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5272 if (targetm
.have_clear_cache ())
5274 struct expand_operand ops
[2];
5276 begin
= CALL_EXPR_ARG (exp
, 0);
5277 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5279 end
= CALL_EXPR_ARG (exp
, 1);
5280 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5282 create_address_operand (&ops
[0], begin_rtx
);
5283 create_address_operand (&ops
[1], end_rtx
);
5284 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
5290 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5293 round_trampoline_addr (rtx tramp
)
5295 rtx temp
, addend
, mask
;
5297 /* If we don't need too much alignment, we'll have been guaranteed
5298 proper alignment by get_trampoline_type. */
5299 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5302 /* Round address up to desired boundary. */
5303 temp
= gen_reg_rtx (Pmode
);
5304 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
5305 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
5307 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5308 temp
, 0, OPTAB_LIB_WIDEN
);
5309 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5310 temp
, 0, OPTAB_LIB_WIDEN
);
5316 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5318 tree t_tramp
, t_func
, t_chain
;
5319 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5321 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5322 POINTER_TYPE
, VOID_TYPE
))
5325 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5326 t_func
= CALL_EXPR_ARG (exp
, 1);
5327 t_chain
= CALL_EXPR_ARG (exp
, 2);
5329 r_tramp
= expand_normal (t_tramp
);
5330 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5331 MEM_NOTRAP_P (m_tramp
) = 1;
5333 /* If ONSTACK, the TRAMP argument should be the address of a field
5334 within the local function's FRAME decl. Either way, let's see if
5335 we can fill in the MEM_ATTRs for this memory. */
5336 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5337 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5339 /* Creator of a heap trampoline is responsible for making sure the
5340 address is aligned to at least STACK_BOUNDARY. Normally malloc
5341 will ensure this anyhow. */
5342 tmp
= round_trampoline_addr (r_tramp
);
5345 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5346 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5347 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5350 /* The FUNC argument should be the address of the nested function.
5351 Extract the actual function decl to pass to the hook. */
5352 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5353 t_func
= TREE_OPERAND (t_func
, 0);
5354 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5356 r_chain
= expand_normal (t_chain
);
5358 /* Generate insns to initialize the trampoline. */
5359 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5363 trampolines_created
= 1;
5365 if (targetm
.calls
.custom_function_descriptors
!= 0)
5366 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5367 "trampoline generated for nested function %qD", t_func
);
5374 expand_builtin_adjust_trampoline (tree exp
)
5378 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5381 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5382 tramp
= round_trampoline_addr (tramp
);
5383 if (targetm
.calls
.trampoline_adjust_address
)
5384 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
5389 /* Expand a call to the builtin descriptor initialization routine.
5390 A descriptor is made up of a couple of pointers to the static
5391 chain and the code entry in this order. */
5394 expand_builtin_init_descriptor (tree exp
)
5396 tree t_descr
, t_func
, t_chain
;
5397 rtx m_descr
, r_descr
, r_func
, r_chain
;
5399 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5403 t_descr
= CALL_EXPR_ARG (exp
, 0);
5404 t_func
= CALL_EXPR_ARG (exp
, 1);
5405 t_chain
= CALL_EXPR_ARG (exp
, 2);
5407 r_descr
= expand_normal (t_descr
);
5408 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5409 MEM_NOTRAP_P (m_descr
) = 1;
5411 r_func
= expand_normal (t_func
);
5412 r_chain
= expand_normal (t_chain
);
5414 /* Generate insns to initialize the descriptor. */
5415 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5416 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5417 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
5422 /* Expand a call to the builtin descriptor adjustment routine. */
5425 expand_builtin_adjust_descriptor (tree exp
)
5429 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5432 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5434 /* Unalign the descriptor to allow runtime identification. */
5435 tramp
= plus_constant (ptr_mode
, tramp
,
5436 targetm
.calls
.custom_function_descriptors
);
5438 return force_operand (tramp
, NULL_RTX
);
5441 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5442 function. The function first checks whether the back end provides
5443 an insn to implement signbit for the respective mode. If not, it
5444 checks whether the floating point format of the value is such that
5445 the sign bit can be extracted. If that is not the case, error out.
5446 EXP is the expression that is a call to the builtin function; if
5447 convenient, the result should be placed in TARGET. */
5449 expand_builtin_signbit (tree exp
, rtx target
)
5451 const struct real_format
*fmt
;
5452 scalar_float_mode fmode
;
5453 scalar_int_mode rmode
, imode
;
5456 enum insn_code icode
;
5458 location_t loc
= EXPR_LOCATION (exp
);
5460 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5463 arg
= CALL_EXPR_ARG (exp
, 0);
5464 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5465 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5466 fmt
= REAL_MODE_FORMAT (fmode
);
5468 arg
= builtin_save_expr (arg
);
5470 /* Expand the argument yielding a RTX expression. */
5471 temp
= expand_normal (arg
);
5473 /* Check if the back end provides an insn that handles signbit for the
5475 icode
= optab_handler (signbit_optab
, fmode
);
5476 if (icode
!= CODE_FOR_nothing
)
5478 rtx_insn
*last
= get_last_insn ();
5479 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5480 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5482 delete_insns_since (last
);
5485 /* For floating point formats without a sign bit, implement signbit
5487 bitpos
= fmt
->signbit_ro
;
5490 /* But we can't do this if the format supports signed zero. */
5491 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5493 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5494 build_real (TREE_TYPE (arg
), dconst0
));
5495 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5498 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5500 imode
= int_mode_for_mode (fmode
).require ();
5501 temp
= gen_lowpart (imode
, temp
);
5506 /* Handle targets with different FP word orders. */
5507 if (FLOAT_WORDS_BIG_ENDIAN
)
5508 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5510 word
= bitpos
/ BITS_PER_WORD
;
5511 temp
= operand_subword_force (temp
, word
, fmode
);
5512 bitpos
= bitpos
% BITS_PER_WORD
;
5515 /* Force the intermediate word_mode (or narrower) result into a
5516 register. This avoids attempting to create paradoxical SUBREGs
5517 of floating point modes below. */
5518 temp
= force_reg (imode
, temp
);
5520 /* If the bitpos is within the "result mode" lowpart, the operation
5521 can be implement with a single bitwise AND. Otherwise, we need
5522 a right shift and an AND. */
5524 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5526 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5528 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5529 temp
= gen_lowpart (rmode
, temp
);
5530 temp
= expand_binop (rmode
, and_optab
, temp
,
5531 immed_wide_int_const (mask
, rmode
),
5532 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5536 /* Perform a logical right shift to place the signbit in the least
5537 significant bit, then truncate the result to the desired mode
5538 and mask just this bit. */
5539 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5540 temp
= gen_lowpart (rmode
, temp
);
5541 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5542 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5548 /* Expand fork or exec calls. TARGET is the desired target of the
5549 call. EXP is the call. FN is the
5550 identificator of the actual function. IGNORE is nonzero if the
5551 value is to be ignored. */
5554 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5559 /* If we are not profiling, just call the function. */
5560 if (!profile_arc_flag
)
5563 /* Otherwise call the wrapper. This should be equivalent for the rest of
5564 compiler, so the code does not diverge, and the wrapper may run the
5565 code necessary for keeping the profiling sane. */
5567 switch (DECL_FUNCTION_CODE (fn
))
5570 id
= get_identifier ("__gcov_fork");
5573 case BUILT_IN_EXECL
:
5574 id
= get_identifier ("__gcov_execl");
5577 case BUILT_IN_EXECV
:
5578 id
= get_identifier ("__gcov_execv");
5581 case BUILT_IN_EXECLP
:
5582 id
= get_identifier ("__gcov_execlp");
5585 case BUILT_IN_EXECLE
:
5586 id
= get_identifier ("__gcov_execle");
5589 case BUILT_IN_EXECVP
:
5590 id
= get_identifier ("__gcov_execvp");
5593 case BUILT_IN_EXECVE
:
5594 id
= get_identifier ("__gcov_execve");
5601 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5602 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5603 DECL_EXTERNAL (decl
) = 1;
5604 TREE_PUBLIC (decl
) = 1;
5605 DECL_ARTIFICIAL (decl
) = 1;
5606 TREE_NOTHROW (decl
) = 1;
5607 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5608 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5609 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5610 return expand_call (call
, target
, ignore
);
5615 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5616 the pointer in these functions is void*, the tree optimizers may remove
5617 casts. The mode computed in expand_builtin isn't reliable either, due
5618 to __sync_bool_compare_and_swap.
5620 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5621 group of builtins. This gives us log2 of the mode size. */
5623 static inline machine_mode
5624 get_builtin_sync_mode (int fcode_diff
)
5626 /* The size is not negotiable, so ask not to get BLKmode in return
5627 if the target indicates that a smaller size would be better. */
5628 return int_mode_for_size (BITS_PER_UNIT
<< fcode_diff
, 0).require ();
5631 /* Expand the memory expression LOC and return the appropriate memory operand
5632 for the builtin_sync operations. */
5635 get_builtin_sync_mem (tree loc
, machine_mode mode
)
5639 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5640 addr
= convert_memory_address (Pmode
, addr
);
5642 /* Note that we explicitly do not want any alias information for this
5643 memory, so that we kill all other live memories. Otherwise we don't
5644 satisfy the full barrier semantics of the intrinsic. */
5645 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5647 /* The alignment needs to be at least according to that of the mode. */
5648 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5649 get_pointer_alignment (loc
)));
5650 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5651 MEM_VOLATILE_P (mem
) = 1;
5656 /* Make sure an argument is in the right mode.
5657 EXP is the tree argument.
5658 MODE is the mode it should be in. */
5661 expand_expr_force_mode (tree exp
, machine_mode mode
)
5664 machine_mode old_mode
;
5666 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5667 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5668 of CONST_INTs, where we know the old_mode only from the call argument. */
5670 old_mode
= GET_MODE (val
);
5671 if (old_mode
== VOIDmode
)
5672 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5673 val
= convert_modes (mode
, old_mode
, val
, 1);
5678 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5679 EXP is the CALL_EXPR. CODE is the rtx code
5680 that corresponds to the arithmetic or logical operation from the name;
5681 an exception here is that NOT actually means NAND. TARGET is an optional
5682 place for us to store the results; AFTER is true if this is the
5683 fetch_and_xxx form. */
5686 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
5687 enum rtx_code code
, bool after
,
5691 location_t loc
= EXPR_LOCATION (exp
);
5693 if (code
== NOT
&& warn_sync_nand
)
5695 tree fndecl
= get_callee_fndecl (exp
);
5696 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5698 static bool warned_f_a_n
, warned_n_a_f
;
5702 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5703 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5704 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5705 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5706 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5710 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5711 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5712 warned_f_a_n
= true;
5715 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5716 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5717 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5718 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5719 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5723 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5724 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5725 warned_n_a_f
= true;
5733 /* Expand the operands. */
5734 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5735 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5737 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
5741 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5742 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5743 true if this is the boolean form. TARGET is a place for us to store the
5744 results; this is NOT optional if IS_BOOL is true. */
5747 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
5748 bool is_bool
, rtx target
)
5750 rtx old_val
, new_val
, mem
;
5753 /* Expand the operands. */
5754 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5755 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5756 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5758 pbool
= poval
= NULL
;
5759 if (target
!= const0_rtx
)
5766 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5767 false, MEMMODEL_SYNC_SEQ_CST
,
5768 MEMMODEL_SYNC_SEQ_CST
))
5774 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5775 general form is actually an atomic exchange, and some targets only
5776 support a reduced form with the second argument being a constant 1.
5777 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5781 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
5786 /* Expand the operands. */
5787 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5788 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5790 return expand_sync_lock_test_and_set (target
, mem
, val
);
5793 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5796 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
5800 /* Expand the operands. */
5801 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5803 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
5806 /* Given an integer representing an ``enum memmodel'', verify its
5807 correctness and return the memory model enum. */
5809 static enum memmodel
5810 get_memmodel (tree exp
)
5813 unsigned HOST_WIDE_INT val
;
5815 = expansion_point_location_if_in_system_header (input_location
);
5817 /* If the parameter is not a constant, it's a run time value so we'll just
5818 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5819 if (TREE_CODE (exp
) != INTEGER_CST
)
5820 return MEMMODEL_SEQ_CST
;
5822 op
= expand_normal (exp
);
5825 if (targetm
.memmodel_check
)
5826 val
= targetm
.memmodel_check (val
);
5827 else if (val
& ~MEMMODEL_MASK
)
5829 warning_at (loc
, OPT_Winvalid_memory_model
,
5830 "unknown architecture specifier in memory model to builtin");
5831 return MEMMODEL_SEQ_CST
;
5834 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5835 if (memmodel_base (val
) >= MEMMODEL_LAST
)
5837 warning_at (loc
, OPT_Winvalid_memory_model
,
5838 "invalid memory model argument to builtin");
5839 return MEMMODEL_SEQ_CST
;
5842 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5843 be conservative and promote consume to acquire. */
5844 if (val
== MEMMODEL_CONSUME
)
5845 val
= MEMMODEL_ACQUIRE
;
5847 return (enum memmodel
) val
;
5850 /* Expand the __atomic_exchange intrinsic:
5851 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5852 EXP is the CALL_EXPR.
5853 TARGET is an optional place for us to store the results. */
5856 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
5859 enum memmodel model
;
5861 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5863 if (!flag_inline_atomics
)
5866 /* Expand the operands. */
5867 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5868 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5870 return expand_atomic_exchange (target
, mem
, val
, model
);
5873 /* Expand the __atomic_compare_exchange intrinsic:
5874 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5875 TYPE desired, BOOL weak,
5876 enum memmodel success,
5877 enum memmodel failure)
5878 EXP is the CALL_EXPR.
5879 TARGET is an optional place for us to store the results. */
5882 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5885 rtx expect
, desired
, mem
, oldval
;
5886 rtx_code_label
*label
;
5887 enum memmodel success
, failure
;
5891 = expansion_point_location_if_in_system_header (input_location
);
5893 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5894 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5896 if (failure
> success
)
5898 warning_at (loc
, OPT_Winvalid_memory_model
,
5899 "failure memory model cannot be stronger than success "
5900 "memory model for %<__atomic_compare_exchange%>");
5901 success
= MEMMODEL_SEQ_CST
;
5904 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5906 warning_at (loc
, OPT_Winvalid_memory_model
,
5907 "invalid failure memory model for "
5908 "%<__atomic_compare_exchange%>");
5909 failure
= MEMMODEL_SEQ_CST
;
5910 success
= MEMMODEL_SEQ_CST
;
5914 if (!flag_inline_atomics
)
5917 /* Expand the operands. */
5918 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5920 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5921 expect
= convert_memory_address (Pmode
, expect
);
5922 expect
= gen_rtx_MEM (mode
, expect
);
5923 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5925 weak
= CALL_EXPR_ARG (exp
, 3);
5927 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5930 if (target
== const0_rtx
)
5933 /* Lest the rtl backend create a race condition with an imporoper store
5934 to memory, always create a new pseudo for OLDVAL. */
5937 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5938 is_weak
, success
, failure
))
5941 /* Conditionally store back to EXPECT, lest we create a race condition
5942 with an improper store to memory. */
5943 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5944 the normal case where EXPECT is totally private, i.e. a register. At
5945 which point the store can be unconditional. */
5946 label
= gen_label_rtx ();
5947 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
5948 GET_MODE (target
), 1, label
);
5949 emit_move_insn (expect
, oldval
);
5955 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5956 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5957 call. The weak parameter must be dropped to match the expected parameter
5958 list and the expected argument changed from value to pointer to memory
5962 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
5965 vec
<tree
, va_gc
> *vec
;
5968 vec
->quick_push (gimple_call_arg (call
, 0));
5969 tree expected
= gimple_call_arg (call
, 1);
5970 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
5971 TREE_TYPE (expected
));
5972 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
5974 emit_move_insn (x
, expd
);
5975 tree v
= make_tree (TREE_TYPE (expected
), x
);
5976 vec
->quick_push (build1 (ADDR_EXPR
,
5977 build_pointer_type (TREE_TYPE (expected
)), v
));
5978 vec
->quick_push (gimple_call_arg (call
, 2));
5979 /* Skip the boolean weak parameter. */
5980 for (z
= 4; z
< 6; z
++)
5981 vec
->quick_push (gimple_call_arg (call
, z
));
5982 built_in_function fncode
5983 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5984 + exact_log2 (GET_MODE_SIZE (mode
)));
5985 tree fndecl
= builtin_decl_explicit (fncode
);
5986 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
5988 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
5989 tree lhs
= gimple_call_lhs (call
);
5990 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
5993 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5994 if (GET_MODE (boolret
) != mode
)
5995 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
5996 x
= force_reg (mode
, x
);
5997 write_complex_part (target
, boolret
, true);
5998 write_complex_part (target
, x
, false);
6002 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6005 expand_ifn_atomic_compare_exchange (gcall
*call
)
6007 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
6008 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6009 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
6010 rtx expect
, desired
, mem
, oldval
, boolret
;
6011 enum memmodel success
, failure
;
6015 = expansion_point_location_if_in_system_header (gimple_location (call
));
6017 success
= get_memmodel (gimple_call_arg (call
, 4));
6018 failure
= get_memmodel (gimple_call_arg (call
, 5));
6020 if (failure
> success
)
6022 warning_at (loc
, OPT_Winvalid_memory_model
,
6023 "failure memory model cannot be stronger than success "
6024 "memory model for %<__atomic_compare_exchange%>");
6025 success
= MEMMODEL_SEQ_CST
;
6028 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6030 warning_at (loc
, OPT_Winvalid_memory_model
,
6031 "invalid failure memory model for "
6032 "%<__atomic_compare_exchange%>");
6033 failure
= MEMMODEL_SEQ_CST
;
6034 success
= MEMMODEL_SEQ_CST
;
6037 if (!flag_inline_atomics
)
6039 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6043 /* Expand the operands. */
6044 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
6046 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
6047 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
6049 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
6054 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
6055 is_weak
, success
, failure
))
6057 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6061 lhs
= gimple_call_lhs (call
);
6064 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6065 if (GET_MODE (boolret
) != mode
)
6066 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6067 write_complex_part (target
, boolret
, true);
6068 write_complex_part (target
, oldval
, false);
6072 /* Expand the __atomic_load intrinsic:
6073 TYPE __atomic_load (TYPE *object, enum memmodel)
6074 EXP is the CALL_EXPR.
6075 TARGET is an optional place for us to store the results. */
6078 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
6081 enum memmodel model
;
6083 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6084 if (is_mm_release (model
) || is_mm_acq_rel (model
))
6087 = expansion_point_location_if_in_system_header (input_location
);
6088 warning_at (loc
, OPT_Winvalid_memory_model
,
6089 "invalid memory model for %<__atomic_load%>");
6090 model
= MEMMODEL_SEQ_CST
;
6093 if (!flag_inline_atomics
)
6096 /* Expand the operand. */
6097 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6099 return expand_atomic_load (target
, mem
, model
);
6103 /* Expand the __atomic_store intrinsic:
6104 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6105 EXP is the CALL_EXPR.
6106 TARGET is an optional place for us to store the results. */
6109 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
6112 enum memmodel model
;
6114 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6115 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
6116 || is_mm_release (model
)))
6119 = expansion_point_location_if_in_system_header (input_location
);
6120 warning_at (loc
, OPT_Winvalid_memory_model
,
6121 "invalid memory model for %<__atomic_store%>");
6122 model
= MEMMODEL_SEQ_CST
;
6125 if (!flag_inline_atomics
)
6128 /* Expand the operands. */
6129 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6130 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6132 return expand_atomic_store (mem
, val
, model
, false);
6135 /* Expand the __atomic_fetch_XXX intrinsic:
6136 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6137 EXP is the CALL_EXPR.
6138 TARGET is an optional place for us to store the results.
6139 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6140 FETCH_AFTER is true if returning the result of the operation.
6141 FETCH_AFTER is false if returning the value before the operation.
6142 IGNORE is true if the result is not used.
6143 EXT_CALL is the correct builtin for an external call if this cannot be
6144 resolved to an instruction sequence. */
6147 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
6148 enum rtx_code code
, bool fetch_after
,
6149 bool ignore
, enum built_in_function ext_call
)
6152 enum memmodel model
;
6156 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6158 /* Expand the operands. */
6159 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6160 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6162 /* Only try generating instructions if inlining is turned on. */
6163 if (flag_inline_atomics
)
6165 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
6170 /* Return if a different routine isn't needed for the library call. */
6171 if (ext_call
== BUILT_IN_NONE
)
6174 /* Change the call to the specified function. */
6175 fndecl
= get_callee_fndecl (exp
);
6176 addr
= CALL_EXPR_FN (exp
);
6179 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
6180 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
6182 /* If we will emit code after the call, the call can not be a tail call.
6183 If it is emitted as a tail call, a barrier is emitted after it, and
6184 then all trailing code is removed. */
6186 CALL_EXPR_TAILCALL (exp
) = 0;
6188 /* Expand the call here so we can emit trailing code. */
6189 ret
= expand_call (exp
, target
, ignore
);
6191 /* Replace the original function just in case it matters. */
6192 TREE_OPERAND (addr
, 0) = fndecl
;
6194 /* Then issue the arithmetic correction to return the right result. */
6199 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
6201 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
6204 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
6210 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6213 expand_ifn_atomic_bit_test_and (gcall
*call
)
6215 tree ptr
= gimple_call_arg (call
, 0);
6216 tree bit
= gimple_call_arg (call
, 1);
6217 tree flag
= gimple_call_arg (call
, 2);
6218 tree lhs
= gimple_call_lhs (call
);
6219 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6220 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
6223 struct expand_operand ops
[5];
6225 gcc_assert (flag_inline_atomics
);
6227 if (gimple_call_num_args (call
) == 4)
6228 model
= get_memmodel (gimple_call_arg (call
, 3));
6230 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6231 rtx val
= expand_expr_force_mode (bit
, mode
);
6233 switch (gimple_call_internal_fn (call
))
6235 case IFN_ATOMIC_BIT_TEST_AND_SET
:
6237 optab
= atomic_bit_test_and_set_optab
;
6239 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
6241 optab
= atomic_bit_test_and_complement_optab
;
6243 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
6245 optab
= atomic_bit_test_and_reset_optab
;
6251 if (lhs
== NULL_TREE
)
6253 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6254 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6256 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6257 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
6261 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6262 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6263 gcc_assert (icode
!= CODE_FOR_nothing
);
6264 create_output_operand (&ops
[0], target
, mode
);
6265 create_fixed_operand (&ops
[1], mem
);
6266 create_convert_operand_to (&ops
[2], val
, mode
, true);
6267 create_integer_operand (&ops
[3], model
);
6268 create_integer_operand (&ops
[4], integer_onep (flag
));
6269 if (maybe_expand_insn (icode
, 5, ops
))
6273 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6274 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6277 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6278 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
6279 code
, model
, false);
6280 if (integer_onep (flag
))
6282 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
6283 NULL_RTX
, true, OPTAB_DIRECT
);
6284 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
6285 true, OPTAB_DIRECT
);
6288 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
6290 if (result
!= target
)
6291 emit_move_insn (target
, result
);
6294 /* Expand an atomic clear operation.
6295 void _atomic_clear (BOOL *obj, enum memmodel)
6296 EXP is the call expression. */
6299 expand_builtin_atomic_clear (tree exp
)
6303 enum memmodel model
;
6305 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6306 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6307 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6309 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
6312 = expansion_point_location_if_in_system_header (input_location
);
6313 warning_at (loc
, OPT_Winvalid_memory_model
,
6314 "invalid memory model for %<__atomic_store%>");
6315 model
= MEMMODEL_SEQ_CST
;
6318 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6319 Failing that, a store is issued by __atomic_store. The only way this can
6320 fail is if the bool type is larger than a word size. Unlikely, but
6321 handle it anyway for completeness. Assume a single threaded model since
6322 there is no atomic support in this case, and no barriers are required. */
6323 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
6325 emit_move_insn (mem
, const0_rtx
);
6329 /* Expand an atomic test_and_set operation.
6330 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6331 EXP is the call expression. */
6334 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
6337 enum memmodel model
;
6340 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6341 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6342 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6344 return expand_atomic_test_and_set (target
, mem
, model
);
6348 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6349 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6352 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
6356 unsigned int mode_align
, type_align
;
6358 if (TREE_CODE (arg0
) != INTEGER_CST
)
6361 /* We need a corresponding integer mode for the access to be lock-free. */
6362 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
6363 if (!int_mode_for_size (size
, 0).exists (&mode
))
6364 return boolean_false_node
;
6366 mode_align
= GET_MODE_ALIGNMENT (mode
);
6368 if (TREE_CODE (arg1
) == INTEGER_CST
)
6370 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
6372 /* Either this argument is null, or it's a fake pointer encoding
6373 the alignment of the object. */
6374 val
= least_bit_hwi (val
);
6375 val
*= BITS_PER_UNIT
;
6377 if (val
== 0 || mode_align
< val
)
6378 type_align
= mode_align
;
6384 tree ttype
= TREE_TYPE (arg1
);
6386 /* This function is usually invoked and folded immediately by the front
6387 end before anything else has a chance to look at it. The pointer
6388 parameter at this point is usually cast to a void *, so check for that
6389 and look past the cast. */
6390 if (CONVERT_EXPR_P (arg1
)
6391 && POINTER_TYPE_P (ttype
)
6392 && VOID_TYPE_P (TREE_TYPE (ttype
))
6393 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
6394 arg1
= TREE_OPERAND (arg1
, 0);
6396 ttype
= TREE_TYPE (arg1
);
6397 gcc_assert (POINTER_TYPE_P (ttype
));
6399 /* Get the underlying type of the object. */
6400 ttype
= TREE_TYPE (ttype
);
6401 type_align
= TYPE_ALIGN (ttype
);
6404 /* If the object has smaller alignment, the lock free routines cannot
6406 if (type_align
< mode_align
)
6407 return boolean_false_node
;
6409 /* Check if a compare_and_swap pattern exists for the mode which represents
6410 the required size. The pattern is not allowed to fail, so the existence
6411 of the pattern indicates support is present. Also require that an
6412 atomic load exists for the required size. */
6413 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
6414 return boolean_true_node
;
6416 return boolean_false_node
;
6419 /* Return true if the parameters to call EXP represent an object which will
6420 always generate lock free instructions. The first argument represents the
6421 size of the object, and the second parameter is a pointer to the object
6422 itself. If NULL is passed for the object, then the result is based on
6423 typical alignment for an object of the specified size. Otherwise return
6427 expand_builtin_atomic_always_lock_free (tree exp
)
6430 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6431 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6433 if (TREE_CODE (arg0
) != INTEGER_CST
)
6435 error ("non-constant argument 1 to __atomic_always_lock_free");
6439 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
6440 if (size
== boolean_true_node
)
6445 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6446 is lock free on this architecture. */
6449 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
6451 if (!flag_inline_atomics
)
6454 /* If it isn't always lock free, don't generate a result. */
6455 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
6456 return boolean_true_node
;
6461 /* Return true if the parameters to call EXP represent an object which will
6462 always generate lock free instructions. The first argument represents the
6463 size of the object, and the second parameter is a pointer to the object
6464 itself. If NULL is passed for the object, then the result is based on
6465 typical alignment for an object of the specified size. Otherwise return
6469 expand_builtin_atomic_is_lock_free (tree exp
)
6472 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6473 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6475 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
6477 error ("non-integer argument 1 to __atomic_is_lock_free");
6481 if (!flag_inline_atomics
)
6484 /* If the value is known at compile time, return the RTX for it. */
6485 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
6486 if (size
== boolean_true_node
)
6492 /* Expand the __atomic_thread_fence intrinsic:
6493 void __atomic_thread_fence (enum memmodel)
6494 EXP is the CALL_EXPR. */
6497 expand_builtin_atomic_thread_fence (tree exp
)
6499 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6500 expand_mem_thread_fence (model
);
6503 /* Expand the __atomic_signal_fence intrinsic:
6504 void __atomic_signal_fence (enum memmodel)
6505 EXP is the CALL_EXPR. */
6508 expand_builtin_atomic_signal_fence (tree exp
)
6510 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6511 expand_mem_signal_fence (model
);
6514 /* Expand the __sync_synchronize intrinsic. */
6517 expand_builtin_sync_synchronize (void)
6519 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
6523 expand_builtin_thread_pointer (tree exp
, rtx target
)
6525 enum insn_code icode
;
6526 if (!validate_arglist (exp
, VOID_TYPE
))
6528 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
6529 if (icode
!= CODE_FOR_nothing
)
6531 struct expand_operand op
;
6532 /* If the target is not sutitable then create a new target. */
6533 if (target
== NULL_RTX
6535 || GET_MODE (target
) != Pmode
)
6536 target
= gen_reg_rtx (Pmode
);
6537 create_output_operand (&op
, target
, Pmode
);
6538 expand_insn (icode
, 1, &op
);
6541 error ("__builtin_thread_pointer is not supported on this target");
6546 expand_builtin_set_thread_pointer (tree exp
)
6548 enum insn_code icode
;
6549 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6551 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
6552 if (icode
!= CODE_FOR_nothing
)
6554 struct expand_operand op
;
6555 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
6556 Pmode
, EXPAND_NORMAL
);
6557 create_input_operand (&op
, val
, Pmode
);
6558 expand_insn (icode
, 1, &op
);
6561 error ("__builtin_set_thread_pointer is not supported on this target");
6565 /* Emit code to restore the current value of stack. */
6568 expand_stack_restore (tree var
)
6571 rtx sa
= expand_normal (var
);
6573 sa
= convert_memory_address (Pmode
, sa
);
6575 prev
= get_last_insn ();
6576 emit_stack_restore (SAVE_BLOCK
, sa
);
6578 record_new_stack_level ();
6580 fixup_args_size_notes (prev
, get_last_insn (), 0);
6583 /* Emit code to save the current value of stack. */
6586 expand_stack_save (void)
6590 emit_stack_save (SAVE_BLOCK
, &ret
);
6595 /* Expand an expression EXP that calls a built-in function,
6596 with result going to TARGET if that's convenient
6597 (and in mode MODE if that's convenient).
6598 SUBTARGET may be used as the target for computing one of EXP's operands.
6599 IGNORE is nonzero if the value is to be ignored. */
6602 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
6605 tree fndecl
= get_callee_fndecl (exp
);
6606 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6607 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6610 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6611 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6613 /* When ASan is enabled, we don't want to expand some memory/string
6614 builtins and rely on libsanitizer's hooks. This allows us to avoid
6615 redundant checks and be sure, that possible overflow will be detected
6618 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
6619 return expand_call (exp
, target
, ignore
);
6621 /* When not optimizing, generate calls to library functions for a certain
6624 && !called_as_built_in (fndecl
)
6625 && fcode
!= BUILT_IN_FORK
6626 && fcode
!= BUILT_IN_EXECL
6627 && fcode
!= BUILT_IN_EXECV
6628 && fcode
!= BUILT_IN_EXECLP
6629 && fcode
!= BUILT_IN_EXECLE
6630 && fcode
!= BUILT_IN_EXECVP
6631 && fcode
!= BUILT_IN_EXECVE
6632 && !ALLOCA_FUNCTION_CODE_P (fcode
)
6633 && fcode
!= BUILT_IN_FREE
6634 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
6635 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
6636 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
6637 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
6638 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6639 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
6640 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6641 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6642 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6643 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
6644 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
6645 && fcode
!= BUILT_IN_CHKP_BNDRET
)
6646 return expand_call (exp
, target
, ignore
);
6648 /* The built-in function expanders test for target == const0_rtx
6649 to determine whether the function's result will be ignored. */
6651 target
= const0_rtx
;
6653 /* If the result of a pure or const built-in function is ignored, and
6654 none of its arguments are volatile, we can avoid expanding the
6655 built-in call and just evaluate the arguments for side-effects. */
6656 if (target
== const0_rtx
6657 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
6658 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
6660 bool volatilep
= false;
6662 call_expr_arg_iterator iter
;
6664 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6665 if (TREE_THIS_VOLATILE (arg
))
6673 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6674 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6679 /* expand_builtin_with_bounds is supposed to be used for
6680 instrumented builtin calls. */
6681 gcc_assert (!CALL_WITH_BOUNDS_P (exp
));
6685 CASE_FLT_FN (BUILT_IN_FABS
):
6686 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
6687 case BUILT_IN_FABSD32
:
6688 case BUILT_IN_FABSD64
:
6689 case BUILT_IN_FABSD128
:
6690 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6695 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6696 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
6697 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6702 /* Just do a normal library call if we were unable to fold
6704 CASE_FLT_FN (BUILT_IN_CABS
):
6707 CASE_FLT_FN (BUILT_IN_FMA
):
6708 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
6709 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6714 CASE_FLT_FN (BUILT_IN_ILOGB
):
6715 if (! flag_unsafe_math_optimizations
)
6718 CASE_FLT_FN (BUILT_IN_ISINF
):
6719 CASE_FLT_FN (BUILT_IN_FINITE
):
6720 case BUILT_IN_ISFINITE
:
6721 case BUILT_IN_ISNORMAL
:
6722 target
= expand_builtin_interclass_mathfn (exp
, target
);
6727 CASE_FLT_FN (BUILT_IN_ICEIL
):
6728 CASE_FLT_FN (BUILT_IN_LCEIL
):
6729 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6730 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6731 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6732 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6733 target
= expand_builtin_int_roundingfn (exp
, target
);
6738 CASE_FLT_FN (BUILT_IN_IRINT
):
6739 CASE_FLT_FN (BUILT_IN_LRINT
):
6740 CASE_FLT_FN (BUILT_IN_LLRINT
):
6741 CASE_FLT_FN (BUILT_IN_IROUND
):
6742 CASE_FLT_FN (BUILT_IN_LROUND
):
6743 CASE_FLT_FN (BUILT_IN_LLROUND
):
6744 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6749 CASE_FLT_FN (BUILT_IN_POWI
):
6750 target
= expand_builtin_powi (exp
, target
);
6755 CASE_FLT_FN (BUILT_IN_CEXPI
):
6756 target
= expand_builtin_cexpi (exp
, target
);
6757 gcc_assert (target
);
6760 CASE_FLT_FN (BUILT_IN_SIN
):
6761 CASE_FLT_FN (BUILT_IN_COS
):
6762 if (! flag_unsafe_math_optimizations
)
6764 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6769 CASE_FLT_FN (BUILT_IN_SINCOS
):
6770 if (! flag_unsafe_math_optimizations
)
6772 target
= expand_builtin_sincos (exp
);
6777 case BUILT_IN_APPLY_ARGS
:
6778 return expand_builtin_apply_args ();
6780 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6781 FUNCTION with a copy of the parameters described by
6782 ARGUMENTS, and ARGSIZE. It returns a block of memory
6783 allocated on the stack into which is stored all the registers
6784 that might possibly be used for returning the result of a
6785 function. ARGUMENTS is the value returned by
6786 __builtin_apply_args. ARGSIZE is the number of bytes of
6787 arguments that must be copied. ??? How should this value be
6788 computed? We'll also need a safe worst case value for varargs
6790 case BUILT_IN_APPLY
:
6791 if (!validate_arglist (exp
, POINTER_TYPE
,
6792 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6793 && !validate_arglist (exp
, REFERENCE_TYPE
,
6794 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6800 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6801 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6802 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6804 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6807 /* __builtin_return (RESULT) causes the function to return the
6808 value described by RESULT. RESULT is address of the block of
6809 memory returned by __builtin_apply. */
6810 case BUILT_IN_RETURN
:
6811 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6812 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6815 case BUILT_IN_SAVEREGS
:
6816 return expand_builtin_saveregs ();
6818 case BUILT_IN_VA_ARG_PACK
:
6819 /* All valid uses of __builtin_va_arg_pack () are removed during
6821 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6824 case BUILT_IN_VA_ARG_PACK_LEN
:
6825 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6827 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6830 /* Return the address of the first anonymous stack arg. */
6831 case BUILT_IN_NEXT_ARG
:
6832 if (fold_builtin_next_arg (exp
, false))
6834 return expand_builtin_next_arg ();
6836 case BUILT_IN_CLEAR_CACHE
:
6837 target
= expand_builtin___clear_cache (exp
);
6842 case BUILT_IN_CLASSIFY_TYPE
:
6843 return expand_builtin_classify_type (exp
);
6845 case BUILT_IN_CONSTANT_P
:
6848 case BUILT_IN_FRAME_ADDRESS
:
6849 case BUILT_IN_RETURN_ADDRESS
:
6850 return expand_builtin_frame_address (fndecl
, exp
);
6852 /* Returns the address of the area where the structure is returned.
6854 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6855 if (call_expr_nargs (exp
) != 0
6856 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6857 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6860 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6862 CASE_BUILT_IN_ALLOCA
:
6863 target
= expand_builtin_alloca (exp
);
6868 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
6869 return expand_asan_emit_allocas_unpoison (exp
);
6871 case BUILT_IN_STACK_SAVE
:
6872 return expand_stack_save ();
6874 case BUILT_IN_STACK_RESTORE
:
6875 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6878 case BUILT_IN_BSWAP16
:
6879 case BUILT_IN_BSWAP32
:
6880 case BUILT_IN_BSWAP64
:
6881 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6886 CASE_INT_FN (BUILT_IN_FFS
):
6887 target
= expand_builtin_unop (target_mode
, exp
, target
,
6888 subtarget
, ffs_optab
);
6893 CASE_INT_FN (BUILT_IN_CLZ
):
6894 target
= expand_builtin_unop (target_mode
, exp
, target
,
6895 subtarget
, clz_optab
);
6900 CASE_INT_FN (BUILT_IN_CTZ
):
6901 target
= expand_builtin_unop (target_mode
, exp
, target
,
6902 subtarget
, ctz_optab
);
6907 CASE_INT_FN (BUILT_IN_CLRSB
):
6908 target
= expand_builtin_unop (target_mode
, exp
, target
,
6909 subtarget
, clrsb_optab
);
6914 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6915 target
= expand_builtin_unop (target_mode
, exp
, target
,
6916 subtarget
, popcount_optab
);
6921 CASE_INT_FN (BUILT_IN_PARITY
):
6922 target
= expand_builtin_unop (target_mode
, exp
, target
,
6923 subtarget
, parity_optab
);
6928 case BUILT_IN_STRLEN
:
6929 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6934 case BUILT_IN_STRCAT
:
6935 target
= expand_builtin_strcat (exp
, target
);
6940 case BUILT_IN_STRCPY
:
6941 target
= expand_builtin_strcpy (exp
, target
);
6946 case BUILT_IN_STRNCAT
:
6947 target
= expand_builtin_strncat (exp
, target
);
6952 case BUILT_IN_STRNCPY
:
6953 target
= expand_builtin_strncpy (exp
, target
);
6958 case BUILT_IN_STPCPY
:
6959 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6964 case BUILT_IN_STPNCPY
:
6965 target
= expand_builtin_stpncpy (exp
, target
);
6970 case BUILT_IN_MEMCHR
:
6971 target
= expand_builtin_memchr (exp
, target
);
6976 case BUILT_IN_MEMCPY
:
6977 target
= expand_builtin_memcpy (exp
, target
);
6982 case BUILT_IN_MEMMOVE
:
6983 target
= expand_builtin_memmove (exp
, target
);
6988 case BUILT_IN_MEMPCPY
:
6989 target
= expand_builtin_mempcpy (exp
, target
);
6994 case BUILT_IN_MEMSET
:
6995 target
= expand_builtin_memset (exp
, target
, mode
);
7000 case BUILT_IN_BZERO
:
7001 target
= expand_builtin_bzero (exp
);
7006 case BUILT_IN_STRCMP
:
7007 target
= expand_builtin_strcmp (exp
, target
);
7012 case BUILT_IN_STRNCMP
:
7013 target
= expand_builtin_strncmp (exp
, target
, mode
);
7019 case BUILT_IN_MEMCMP
:
7020 case BUILT_IN_MEMCMP_EQ
:
7021 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
7024 if (fcode
== BUILT_IN_MEMCMP_EQ
)
7026 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
7027 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
7031 case BUILT_IN_SETJMP
:
7032 /* This should have been lowered to the builtins below. */
7035 case BUILT_IN_SETJMP_SETUP
:
7036 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7037 and the receiver label. */
7038 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
7040 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7041 VOIDmode
, EXPAND_NORMAL
);
7042 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
7043 rtx_insn
*label_r
= label_rtx (label
);
7045 /* This is copied from the handling of non-local gotos. */
7046 expand_builtin_setjmp_setup (buf_addr
, label_r
);
7047 nonlocal_goto_handler_labels
7048 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
7049 nonlocal_goto_handler_labels
);
7050 /* ??? Do not let expand_label treat us as such since we would
7051 not want to be both on the list of non-local labels and on
7052 the list of forced labels. */
7053 FORCED_LABEL (label
) = 0;
7058 case BUILT_IN_SETJMP_RECEIVER
:
7059 /* __builtin_setjmp_receiver is passed the receiver label. */
7060 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7062 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
7063 rtx_insn
*label_r
= label_rtx (label
);
7065 expand_builtin_setjmp_receiver (label_r
);
7070 /* __builtin_longjmp is passed a pointer to an array of five words.
7071 It's similar to the C library longjmp function but works with
7072 __builtin_setjmp above. */
7073 case BUILT_IN_LONGJMP
:
7074 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7076 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7077 VOIDmode
, EXPAND_NORMAL
);
7078 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
7080 if (value
!= const1_rtx
)
7082 error ("%<__builtin_longjmp%> second argument must be 1");
7086 expand_builtin_longjmp (buf_addr
, value
);
7091 case BUILT_IN_NONLOCAL_GOTO
:
7092 target
= expand_builtin_nonlocal_goto (exp
);
7097 /* This updates the setjmp buffer that is its argument with the value
7098 of the current stack pointer. */
7099 case BUILT_IN_UPDATE_SETJMP_BUF
:
7100 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7103 = expand_normal (CALL_EXPR_ARG (exp
, 0));
7105 expand_builtin_update_setjmp_buf (buf_addr
);
7111 expand_builtin_trap ();
7114 case BUILT_IN_UNREACHABLE
:
7115 expand_builtin_unreachable ();
7118 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
7119 case BUILT_IN_SIGNBITD32
:
7120 case BUILT_IN_SIGNBITD64
:
7121 case BUILT_IN_SIGNBITD128
:
7122 target
= expand_builtin_signbit (exp
, target
);
7127 /* Various hooks for the DWARF 2 __throw routine. */
7128 case BUILT_IN_UNWIND_INIT
:
7129 expand_builtin_unwind_init ();
7131 case BUILT_IN_DWARF_CFA
:
7132 return virtual_cfa_rtx
;
7133 #ifdef DWARF2_UNWIND_INFO
7134 case BUILT_IN_DWARF_SP_COLUMN
:
7135 return expand_builtin_dwarf_sp_column ();
7136 case BUILT_IN_INIT_DWARF_REG_SIZES
:
7137 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
7140 case BUILT_IN_FROB_RETURN_ADDR
:
7141 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
7142 case BUILT_IN_EXTRACT_RETURN_ADDR
:
7143 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
7144 case BUILT_IN_EH_RETURN
:
7145 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
7146 CALL_EXPR_ARG (exp
, 1));
7148 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7149 return expand_builtin_eh_return_data_regno (exp
);
7150 case BUILT_IN_EXTEND_POINTER
:
7151 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7152 case BUILT_IN_EH_POINTER
:
7153 return expand_builtin_eh_pointer (exp
);
7154 case BUILT_IN_EH_FILTER
:
7155 return expand_builtin_eh_filter (exp
);
7156 case BUILT_IN_EH_COPY_VALUES
:
7157 return expand_builtin_eh_copy_values (exp
);
7159 case BUILT_IN_VA_START
:
7160 return expand_builtin_va_start (exp
);
7161 case BUILT_IN_VA_END
:
7162 return expand_builtin_va_end (exp
);
7163 case BUILT_IN_VA_COPY
:
7164 return expand_builtin_va_copy (exp
);
7165 case BUILT_IN_EXPECT
:
7166 return expand_builtin_expect (exp
, target
);
7167 case BUILT_IN_ASSUME_ALIGNED
:
7168 return expand_builtin_assume_aligned (exp
, target
);
7169 case BUILT_IN_PREFETCH
:
7170 expand_builtin_prefetch (exp
);
7173 case BUILT_IN_INIT_TRAMPOLINE
:
7174 return expand_builtin_init_trampoline (exp
, true);
7175 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
7176 return expand_builtin_init_trampoline (exp
, false);
7177 case BUILT_IN_ADJUST_TRAMPOLINE
:
7178 return expand_builtin_adjust_trampoline (exp
);
7180 case BUILT_IN_INIT_DESCRIPTOR
:
7181 return expand_builtin_init_descriptor (exp
);
7182 case BUILT_IN_ADJUST_DESCRIPTOR
:
7183 return expand_builtin_adjust_descriptor (exp
);
7186 case BUILT_IN_EXECL
:
7187 case BUILT_IN_EXECV
:
7188 case BUILT_IN_EXECLP
:
7189 case BUILT_IN_EXECLE
:
7190 case BUILT_IN_EXECVP
:
7191 case BUILT_IN_EXECVE
:
7192 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
7197 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
7198 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
7199 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
7200 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
7201 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
7202 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
7203 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
7208 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
7209 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
7210 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
7211 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
7212 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
7213 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
7214 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
7219 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
7220 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
7221 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
7222 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
7223 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
7224 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
7225 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
7230 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
7231 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
7232 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
7233 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
7234 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
7235 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
7236 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
7241 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
7242 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
7243 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
7244 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
7245 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
7246 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
7247 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
7252 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
7253 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
7254 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
7255 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
7256 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
7257 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
7258 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
7263 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
7264 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
7265 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
7266 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
7267 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
7268 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
7269 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
7274 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
7275 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
7276 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
7277 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
7278 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
7279 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
7280 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
7285 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
7286 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
7287 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
7288 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
7289 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
7290 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
7291 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
7296 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
7297 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
7298 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
7299 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
7300 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
7301 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
7302 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
7307 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
7308 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
7309 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
7310 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
7311 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
7312 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
7313 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
7318 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
7319 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
7320 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
7321 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
7322 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
7323 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
7324 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
7329 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
7330 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
7331 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
7332 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
7333 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
7334 if (mode
== VOIDmode
)
7335 mode
= TYPE_MODE (boolean_type_node
);
7336 if (!target
|| !register_operand (target
, mode
))
7337 target
= gen_reg_rtx (mode
);
7339 mode
= get_builtin_sync_mode
7340 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
7341 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
7346 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
7347 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
7348 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
7349 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
7350 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
7351 mode
= get_builtin_sync_mode
7352 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
7353 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
7358 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
7359 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
7360 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
7361 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
7362 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
7363 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
7364 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
7369 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
7370 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
7371 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
7372 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
7373 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
7374 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
7375 expand_builtin_sync_lock_release (mode
, exp
);
7378 case BUILT_IN_SYNC_SYNCHRONIZE
:
7379 expand_builtin_sync_synchronize ();
7382 case BUILT_IN_ATOMIC_EXCHANGE_1
:
7383 case BUILT_IN_ATOMIC_EXCHANGE_2
:
7384 case BUILT_IN_ATOMIC_EXCHANGE_4
:
7385 case BUILT_IN_ATOMIC_EXCHANGE_8
:
7386 case BUILT_IN_ATOMIC_EXCHANGE_16
:
7387 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
7388 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
7393 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
7394 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
7395 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
7396 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
7397 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
7399 unsigned int nargs
, z
;
7400 vec
<tree
, va_gc
> *vec
;
7403 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
7404 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
7408 /* If this is turned into an external library call, the weak parameter
7409 must be dropped to match the expected parameter list. */
7410 nargs
= call_expr_nargs (exp
);
7411 vec_alloc (vec
, nargs
- 1);
7412 for (z
= 0; z
< 3; z
++)
7413 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7414 /* Skip the boolean weak parameter. */
7415 for (z
= 4; z
< 6; z
++)
7416 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7417 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
7421 case BUILT_IN_ATOMIC_LOAD_1
:
7422 case BUILT_IN_ATOMIC_LOAD_2
:
7423 case BUILT_IN_ATOMIC_LOAD_4
:
7424 case BUILT_IN_ATOMIC_LOAD_8
:
7425 case BUILT_IN_ATOMIC_LOAD_16
:
7426 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
7427 target
= expand_builtin_atomic_load (mode
, exp
, target
);
7432 case BUILT_IN_ATOMIC_STORE_1
:
7433 case BUILT_IN_ATOMIC_STORE_2
:
7434 case BUILT_IN_ATOMIC_STORE_4
:
7435 case BUILT_IN_ATOMIC_STORE_8
:
7436 case BUILT_IN_ATOMIC_STORE_16
:
7437 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
7438 target
= expand_builtin_atomic_store (mode
, exp
);
7443 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
7444 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
7445 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
7446 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
7447 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
7449 enum built_in_function lib
;
7450 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
7451 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
7452 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
7453 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
7459 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
7460 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
7461 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
7462 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
7463 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
7465 enum built_in_function lib
;
7466 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
7467 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
7468 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
7469 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
7475 case BUILT_IN_ATOMIC_AND_FETCH_1
:
7476 case BUILT_IN_ATOMIC_AND_FETCH_2
:
7477 case BUILT_IN_ATOMIC_AND_FETCH_4
:
7478 case BUILT_IN_ATOMIC_AND_FETCH_8
:
7479 case BUILT_IN_ATOMIC_AND_FETCH_16
:
7481 enum built_in_function lib
;
7482 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
7483 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
7484 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
7485 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
7491 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
7492 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
7493 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
7494 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
7495 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
7497 enum built_in_function lib
;
7498 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
7499 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
7500 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
7501 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
7507 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
7508 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
7509 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
7510 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
7511 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
7513 enum built_in_function lib
;
7514 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
7515 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
7516 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
7517 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
7523 case BUILT_IN_ATOMIC_OR_FETCH_1
:
7524 case BUILT_IN_ATOMIC_OR_FETCH_2
:
7525 case BUILT_IN_ATOMIC_OR_FETCH_4
:
7526 case BUILT_IN_ATOMIC_OR_FETCH_8
:
7527 case BUILT_IN_ATOMIC_OR_FETCH_16
:
7529 enum built_in_function lib
;
7530 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
7531 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
7532 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
7533 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
7539 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
7540 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
7541 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
7542 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
7543 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
7544 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
7545 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
7546 ignore
, BUILT_IN_NONE
);
7551 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
7552 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
7553 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
7554 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
7555 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
7556 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
7557 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
7558 ignore
, BUILT_IN_NONE
);
7563 case BUILT_IN_ATOMIC_FETCH_AND_1
:
7564 case BUILT_IN_ATOMIC_FETCH_AND_2
:
7565 case BUILT_IN_ATOMIC_FETCH_AND_4
:
7566 case BUILT_IN_ATOMIC_FETCH_AND_8
:
7567 case BUILT_IN_ATOMIC_FETCH_AND_16
:
7568 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
7569 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
7570 ignore
, BUILT_IN_NONE
);
7575 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
7576 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
7577 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
7578 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
7579 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
7580 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
7581 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
7582 ignore
, BUILT_IN_NONE
);
7587 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
7588 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
7589 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
7590 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
7591 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
7592 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
7593 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
7594 ignore
, BUILT_IN_NONE
);
7599 case BUILT_IN_ATOMIC_FETCH_OR_1
:
7600 case BUILT_IN_ATOMIC_FETCH_OR_2
:
7601 case BUILT_IN_ATOMIC_FETCH_OR_4
:
7602 case BUILT_IN_ATOMIC_FETCH_OR_8
:
7603 case BUILT_IN_ATOMIC_FETCH_OR_16
:
7604 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
7605 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
7606 ignore
, BUILT_IN_NONE
);
7611 case BUILT_IN_ATOMIC_TEST_AND_SET
:
7612 return expand_builtin_atomic_test_and_set (exp
, target
);
7614 case BUILT_IN_ATOMIC_CLEAR
:
7615 return expand_builtin_atomic_clear (exp
);
7617 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
7618 return expand_builtin_atomic_always_lock_free (exp
);
7620 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
7621 target
= expand_builtin_atomic_is_lock_free (exp
);
7626 case BUILT_IN_ATOMIC_THREAD_FENCE
:
7627 expand_builtin_atomic_thread_fence (exp
);
7630 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
7631 expand_builtin_atomic_signal_fence (exp
);
7634 case BUILT_IN_OBJECT_SIZE
:
7635 return expand_builtin_object_size (exp
);
7637 case BUILT_IN_MEMCPY_CHK
:
7638 case BUILT_IN_MEMPCPY_CHK
:
7639 case BUILT_IN_MEMMOVE_CHK
:
7640 case BUILT_IN_MEMSET_CHK
:
7641 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
7646 case BUILT_IN_STRCPY_CHK
:
7647 case BUILT_IN_STPCPY_CHK
:
7648 case BUILT_IN_STRNCPY_CHK
:
7649 case BUILT_IN_STPNCPY_CHK
:
7650 case BUILT_IN_STRCAT_CHK
:
7651 case BUILT_IN_STRNCAT_CHK
:
7652 case BUILT_IN_SNPRINTF_CHK
:
7653 case BUILT_IN_VSNPRINTF_CHK
:
7654 maybe_emit_chk_warning (exp
, fcode
);
7657 case BUILT_IN_SPRINTF_CHK
:
7658 case BUILT_IN_VSPRINTF_CHK
:
7659 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7663 if (warn_free_nonheap_object
)
7664 maybe_emit_free_warning (exp
);
7667 case BUILT_IN_THREAD_POINTER
:
7668 return expand_builtin_thread_pointer (exp
, target
);
7670 case BUILT_IN_SET_THREAD_POINTER
:
7671 expand_builtin_set_thread_pointer (exp
);
7674 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
7675 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
7676 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
7677 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
7678 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
7679 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
7680 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
7681 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
7682 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
7683 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
7684 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
7685 /* We allow user CHKP builtins if Pointer Bounds
7687 if (!chkp_function_instrumented_p (current_function_decl
))
7689 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
7690 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7691 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
7692 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
7693 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
7694 return expand_normal (CALL_EXPR_ARG (exp
, 0));
7695 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
7696 return expand_normal (size_zero_node
);
7697 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
7698 return expand_normal (size_int (-1));
7704 case BUILT_IN_CHKP_BNDMK
:
7705 case BUILT_IN_CHKP_BNDSTX
:
7706 case BUILT_IN_CHKP_BNDCL
:
7707 case BUILT_IN_CHKP_BNDCU
:
7708 case BUILT_IN_CHKP_BNDLDX
:
7709 case BUILT_IN_CHKP_BNDRET
:
7710 case BUILT_IN_CHKP_INTERSECT
:
7711 case BUILT_IN_CHKP_NARROW
:
7712 case BUILT_IN_CHKP_EXTRACT_LOWER
:
7713 case BUILT_IN_CHKP_EXTRACT_UPPER
:
7714 /* Software implementation of Pointer Bounds Checker is NYI.
7715 Target support is required. */
7716 error ("Your target platform does not support -fcheck-pointer-bounds");
7719 case BUILT_IN_ACC_ON_DEVICE
:
7720 /* Do library call, if we failed to expand the builtin when
7724 default: /* just do library call, if unknown builtin */
7728 /* The switch statement above can drop through to cause the function
7729 to be called normally. */
7730 return expand_call (exp
, target
, ignore
);
7733 /* Similar to expand_builtin but is used for instrumented calls. */
7736 expand_builtin_with_bounds (tree exp
, rtx target
,
7737 rtx subtarget ATTRIBUTE_UNUSED
,
7738 machine_mode mode
, int ignore
)
7740 tree fndecl
= get_callee_fndecl (exp
);
7741 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7743 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
7745 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7746 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7748 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
7749 && fcode
< END_CHKP_BUILTINS
);
7753 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
7754 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7759 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7760 target
= expand_builtin_mempcpy_with_bounds (exp
, target
);
7765 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7766 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7771 case BUILT_IN_MEMCPY_CHKP
:
7772 case BUILT_IN_MEMMOVE_CHKP
:
7773 case BUILT_IN_MEMPCPY_CHKP
:
7774 if (call_expr_nargs (exp
) > 3)
7776 /* memcpy_chkp (void *dst, size_t dstbnd,
7777 const void *src, size_t srcbnd, size_t n)
7778 and others take a pointer bound argument just after each
7779 pointer argument. */
7780 tree dest
= CALL_EXPR_ARG (exp
, 0);
7781 tree src
= CALL_EXPR_ARG (exp
, 2);
7782 tree len
= CALL_EXPR_ARG (exp
, 4);
7784 check_memop_access (exp
, dest
, src
, len
);
7792 /* The switch statement above can drop through to cause the function
7793 to be called normally. */
7794 return expand_call (exp
, target
, ignore
);
7797 /* Determine whether a tree node represents a call to a built-in
7798 function. If the tree T is a call to a built-in function with
7799 the right number of arguments of the appropriate types, return
7800 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7801 Otherwise the return value is END_BUILTINS. */
7803 enum built_in_function
7804 builtin_mathfn_code (const_tree t
)
7806 const_tree fndecl
, arg
, parmlist
;
7807 const_tree argtype
, parmtype
;
7808 const_call_expr_arg_iterator iter
;
7810 if (TREE_CODE (t
) != CALL_EXPR
7811 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7812 return END_BUILTINS
;
7814 fndecl
= get_callee_fndecl (t
);
7815 if (fndecl
== NULL_TREE
7816 || TREE_CODE (fndecl
) != FUNCTION_DECL
7817 || ! DECL_BUILT_IN (fndecl
)
7818 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7819 return END_BUILTINS
;
7821 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7822 init_const_call_expr_arg_iterator (t
, &iter
);
7823 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7825 /* If a function doesn't take a variable number of arguments,
7826 the last element in the list will have type `void'. */
7827 parmtype
= TREE_VALUE (parmlist
);
7828 if (VOID_TYPE_P (parmtype
))
7830 if (more_const_call_expr_args_p (&iter
))
7831 return END_BUILTINS
;
7832 return DECL_FUNCTION_CODE (fndecl
);
7835 if (! more_const_call_expr_args_p (&iter
))
7836 return END_BUILTINS
;
7838 arg
= next_const_call_expr_arg (&iter
);
7839 argtype
= TREE_TYPE (arg
);
7841 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7843 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7844 return END_BUILTINS
;
7846 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7848 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7849 return END_BUILTINS
;
7851 else if (POINTER_TYPE_P (parmtype
))
7853 if (! POINTER_TYPE_P (argtype
))
7854 return END_BUILTINS
;
7856 else if (INTEGRAL_TYPE_P (parmtype
))
7858 if (! INTEGRAL_TYPE_P (argtype
))
7859 return END_BUILTINS
;
7862 return END_BUILTINS
;
7865 /* Variable-length argument list. */
7866 return DECL_FUNCTION_CODE (fndecl
);
7869 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7870 evaluate to a constant. */
7873 fold_builtin_constant_p (tree arg
)
7875 /* We return 1 for a numeric type that's known to be a constant
7876 value at compile-time or for an aggregate type that's a
7877 literal constant. */
7880 /* If we know this is a constant, emit the constant of one. */
7881 if (CONSTANT_CLASS_P (arg
)
7882 || (TREE_CODE (arg
) == CONSTRUCTOR
7883 && TREE_CONSTANT (arg
)))
7884 return integer_one_node
;
7885 if (TREE_CODE (arg
) == ADDR_EXPR
)
7887 tree op
= TREE_OPERAND (arg
, 0);
7888 if (TREE_CODE (op
) == STRING_CST
7889 || (TREE_CODE (op
) == ARRAY_REF
7890 && integer_zerop (TREE_OPERAND (op
, 1))
7891 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7892 return integer_one_node
;
7895 /* If this expression has side effects, show we don't know it to be a
7896 constant. Likewise if it's a pointer or aggregate type since in
7897 those case we only want literals, since those are only optimized
7898 when generating RTL, not later.
7899 And finally, if we are compiling an initializer, not code, we
7900 need to return a definite result now; there's not going to be any
7901 more optimization done. */
7902 if (TREE_SIDE_EFFECTS (arg
)
7903 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7904 || POINTER_TYPE_P (TREE_TYPE (arg
))
7906 || folding_initializer
7907 || force_folding_builtin_constant_p
)
7908 return integer_zero_node
;
7913 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7914 return it as a truthvalue. */
7917 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7920 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7922 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7923 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7924 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7925 pred_type
= TREE_VALUE (arg_types
);
7926 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7928 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7929 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7930 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7933 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7934 build_int_cst (ret_type
, 0));
7937 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7938 NULL_TREE if no simplification is possible. */
7941 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7943 tree inner
, fndecl
, inner_arg0
;
7944 enum tree_code code
;
7946 /* Distribute the expected value over short-circuiting operators.
7947 See through the cast from truthvalue_type_node to long. */
7949 while (CONVERT_EXPR_P (inner_arg0
)
7950 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7951 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7952 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7954 /* If this is a builtin_expect within a builtin_expect keep the
7955 inner one. See through a comparison against a constant. It
7956 might have been added to create a thruthvalue. */
7959 if (COMPARISON_CLASS_P (inner
)
7960 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7961 inner
= TREE_OPERAND (inner
, 0);
7963 if (TREE_CODE (inner
) == CALL_EXPR
7964 && (fndecl
= get_callee_fndecl (inner
))
7965 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7966 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7970 code
= TREE_CODE (inner
);
7971 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7973 tree op0
= TREE_OPERAND (inner
, 0);
7974 tree op1
= TREE_OPERAND (inner
, 1);
7976 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7977 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7978 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7980 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7983 /* If the argument isn't invariant then there's nothing else we can do. */
7984 if (!TREE_CONSTANT (inner_arg0
))
7987 /* If we expect that a comparison against the argument will fold to
7988 a constant return the constant. In practice, this means a true
7989 constant or the address of a non-weak symbol. */
7992 if (TREE_CODE (inner
) == ADDR_EXPR
)
7996 inner
= TREE_OPERAND (inner
, 0);
7998 while (TREE_CODE (inner
) == COMPONENT_REF
7999 || TREE_CODE (inner
) == ARRAY_REF
);
8000 if (VAR_OR_FUNCTION_DECL_P (inner
) && DECL_WEAK (inner
))
8004 /* Otherwise, ARG0 already has the proper type for the return value. */
8008 /* Fold a call to __builtin_classify_type with argument ARG. */
8011 fold_builtin_classify_type (tree arg
)
8014 return build_int_cst (integer_type_node
, no_type_class
);
8016 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
8019 /* Fold a call to __builtin_strlen with argument ARG. */
8022 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
8024 if (!validate_arg (arg
, POINTER_TYPE
))
8028 tree len
= c_strlen (arg
, 0);
8031 return fold_convert_loc (loc
, type
, len
);
8037 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8040 fold_builtin_inf (location_t loc
, tree type
, int warn
)
8042 REAL_VALUE_TYPE real
;
8044 /* __builtin_inff is intended to be usable to define INFINITY on all
8045 targets. If an infinity is not available, INFINITY expands "to a
8046 positive constant of type float that overflows at translation
8047 time", footnote "In this case, using INFINITY will violate the
8048 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8049 Thus we pedwarn to ensure this constraint violation is
8051 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
8052 pedwarn (loc
, 0, "target format does not support infinity");
8055 return build_real (type
, real
);
8058 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8059 NULL_TREE if no simplification can be made. */
8062 fold_builtin_sincos (location_t loc
,
8063 tree arg0
, tree arg1
, tree arg2
)
8066 tree fndecl
, call
= NULL_TREE
;
8068 if (!validate_arg (arg0
, REAL_TYPE
)
8069 || !validate_arg (arg1
, POINTER_TYPE
)
8070 || !validate_arg (arg2
, POINTER_TYPE
))
8073 type
= TREE_TYPE (arg0
);
8075 /* Calculate the result when the argument is a constant. */
8076 built_in_function fn
= mathfn_built_in_2 (type
, CFN_BUILT_IN_CEXPI
);
8077 if (fn
== END_BUILTINS
)
8080 /* Canonicalize sincos to cexpi. */
8081 if (TREE_CODE (arg0
) == REAL_CST
)
8083 tree complex_type
= build_complex_type (type
);
8084 call
= fold_const_call (as_combined_fn (fn
), complex_type
, arg0
);
8088 if (!targetm
.libc_has_function (function_c99_math_complex
)
8089 || !builtin_decl_implicit_p (fn
))
8091 fndecl
= builtin_decl_explicit (fn
);
8092 call
= build_call_expr_loc (loc
, fndecl
, 1, arg0
);
8093 call
= builtin_save_expr (call
);
8096 return build2 (COMPOUND_EXPR
, void_type_node
,
8097 build2 (MODIFY_EXPR
, void_type_node
,
8098 build_fold_indirect_ref_loc (loc
, arg1
),
8099 fold_build1_loc (loc
, IMAGPART_EXPR
, type
, call
)),
8100 build2 (MODIFY_EXPR
, void_type_node
,
8101 build_fold_indirect_ref_loc (loc
, arg2
),
8102 fold_build1_loc (loc
, REALPART_EXPR
, type
, call
)));
8105 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8106 Return NULL_TREE if no simplification can be made. */
8109 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8111 if (!validate_arg (arg1
, POINTER_TYPE
)
8112 || !validate_arg (arg2
, POINTER_TYPE
)
8113 || !validate_arg (len
, INTEGER_TYPE
))
8116 /* If the LEN parameter is zero, return zero. */
8117 if (integer_zerop (len
))
8118 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8121 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8122 if (operand_equal_p (arg1
, arg2
, 0))
8123 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8125 /* If len parameter is one, return an expression corresponding to
8126 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8127 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8129 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8130 tree cst_uchar_ptr_node
8131 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8134 = fold_convert_loc (loc
, integer_type_node
,
8135 build1 (INDIRECT_REF
, cst_uchar_node
,
8136 fold_convert_loc (loc
,
8140 = fold_convert_loc (loc
, integer_type_node
,
8141 build1 (INDIRECT_REF
, cst_uchar_node
,
8142 fold_convert_loc (loc
,
8145 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8151 /* Fold a call to builtin isascii with argument ARG. */
8154 fold_builtin_isascii (location_t loc
, tree arg
)
8156 if (!validate_arg (arg
, INTEGER_TYPE
))
8160 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8161 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
8162 build_int_cst (integer_type_node
,
8163 ~ (unsigned HOST_WIDE_INT
) 0x7f));
8164 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
8165 arg
, integer_zero_node
);
8169 /* Fold a call to builtin toascii with argument ARG. */
8172 fold_builtin_toascii (location_t loc
, tree arg
)
8174 if (!validate_arg (arg
, INTEGER_TYPE
))
8177 /* Transform toascii(c) -> (c & 0x7f). */
8178 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
8179 build_int_cst (integer_type_node
, 0x7f));
8182 /* Fold a call to builtin isdigit with argument ARG. */
8185 fold_builtin_isdigit (location_t loc
, tree arg
)
8187 if (!validate_arg (arg
, INTEGER_TYPE
))
8191 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8192 /* According to the C standard, isdigit is unaffected by locale.
8193 However, it definitely is affected by the target character set. */
8194 unsigned HOST_WIDE_INT target_digit0
8195 = lang_hooks
.to_target_charset ('0');
8197 if (target_digit0
== 0)
8200 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
8201 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
8202 build_int_cst (unsigned_type_node
, target_digit0
));
8203 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
8204 build_int_cst (unsigned_type_node
, 9));
8208 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8211 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
8213 if (!validate_arg (arg
, REAL_TYPE
))
8216 arg
= fold_convert_loc (loc
, type
, arg
);
8217 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8220 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8223 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
8225 if (!validate_arg (arg
, INTEGER_TYPE
))
8228 arg
= fold_convert_loc (loc
, type
, arg
);
8229 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8232 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8235 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
8237 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8238 if (validate_arg (arg0
, REAL_TYPE
)
8239 && validate_arg (arg1
, REAL_TYPE
)
8240 && validate_arg (arg2
, REAL_TYPE
)
8241 && optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
8242 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
8247 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8250 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
8252 if (validate_arg (arg
, COMPLEX_TYPE
)
8253 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
8255 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
8259 tree new_arg
= builtin_save_expr (arg
);
8260 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
8261 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
8262 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
8269 /* Fold a call to builtin frexp, we can assume the base is 2. */
8272 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8274 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8279 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8282 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8284 /* Proceed if a valid pointer type was passed in. */
8285 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
8287 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8293 /* For +-0, return (*exp = 0, +-0). */
8294 exp
= integer_zero_node
;
8299 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8300 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
8303 /* Since the frexp function always expects base 2, and in
8304 GCC normalized significands are already in the range
8305 [0.5, 1.0), we have exactly what frexp wants. */
8306 REAL_VALUE_TYPE frac_rvt
= *value
;
8307 SET_REAL_EXP (&frac_rvt
, 0);
8308 frac
= build_real (rettype
, frac_rvt
);
8309 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
8316 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8317 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
8318 TREE_SIDE_EFFECTS (arg1
) = 1;
8319 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
8325 /* Fold a call to builtin modf. */
8328 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8330 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8335 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8338 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8340 /* Proceed if a valid pointer type was passed in. */
8341 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
8343 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8344 REAL_VALUE_TYPE trunc
, frac
;
8350 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8351 trunc
= frac
= *value
;
8354 /* For +-Inf, return (*arg1 = arg0, +-0). */
8356 frac
.sign
= value
->sign
;
8360 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8361 real_trunc (&trunc
, VOIDmode
, value
);
8362 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
8363 /* If the original number was negative and already
8364 integral, then the fractional part is -0.0. */
8365 if (value
->sign
&& frac
.cl
== rvc_zero
)
8366 frac
.sign
= value
->sign
;
8370 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8371 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
8372 build_real (rettype
, trunc
));
8373 TREE_SIDE_EFFECTS (arg1
) = 1;
8374 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
8375 build_real (rettype
, frac
));
8381 /* Given a location LOC, an interclass builtin function decl FNDECL
8382 and its single argument ARG, return an folded expression computing
8383 the same, or NULL_TREE if we either couldn't or didn't want to fold
8384 (the latter happen if there's an RTL instruction available). */
8387 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8391 if (!validate_arg (arg
, REAL_TYPE
))
8394 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
8397 mode
= TYPE_MODE (TREE_TYPE (arg
));
8399 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
8401 /* If there is no optab, try generic code. */
8402 switch (DECL_FUNCTION_CODE (fndecl
))
8406 CASE_FLT_FN (BUILT_IN_ISINF
):
8408 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8409 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8410 tree type
= TREE_TYPE (arg
);
8414 if (is_ibm_extended
)
8416 /* NaN and Inf are encoded in the high-order double value
8417 only. The low-order value is not significant. */
8418 type
= double_type_node
;
8420 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8422 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8423 real_from_string (&r
, buf
);
8424 result
= build_call_expr (isgr_fn
, 2,
8425 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8426 build_real (type
, r
));
8429 CASE_FLT_FN (BUILT_IN_FINITE
):
8430 case BUILT_IN_ISFINITE
:
8432 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8433 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8434 tree type
= TREE_TYPE (arg
);
8438 if (is_ibm_extended
)
8440 /* NaN and Inf are encoded in the high-order double value
8441 only. The low-order value is not significant. */
8442 type
= double_type_node
;
8444 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8446 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8447 real_from_string (&r
, buf
);
8448 result
= build_call_expr (isle_fn
, 2,
8449 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8450 build_real (type
, r
));
8451 /*result = fold_build2_loc (loc, UNGT_EXPR,
8452 TREE_TYPE (TREE_TYPE (fndecl)),
8453 fold_build1_loc (loc, ABS_EXPR, type, arg),
8454 build_real (type, r));
8455 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8456 TREE_TYPE (TREE_TYPE (fndecl)),
8460 case BUILT_IN_ISNORMAL
:
8462 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8463 islessequal(fabs(x),DBL_MAX). */
8464 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8465 tree type
= TREE_TYPE (arg
);
8466 tree orig_arg
, max_exp
, min_exp
;
8467 machine_mode orig_mode
= mode
;
8468 REAL_VALUE_TYPE rmax
, rmin
;
8471 orig_arg
= arg
= builtin_save_expr (arg
);
8472 if (is_ibm_extended
)
8474 /* Use double to test the normal range of IBM extended
8475 precision. Emin for IBM extended precision is
8476 different to emin for IEEE double, being 53 higher
8477 since the low double exponent is at least 53 lower
8478 than the high double exponent. */
8479 type
= double_type_node
;
8481 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8483 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8485 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8486 real_from_string (&rmax
, buf
);
8487 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
8488 real_from_string (&rmin
, buf
);
8489 max_exp
= build_real (type
, rmax
);
8490 min_exp
= build_real (type
, rmin
);
8492 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
8493 if (is_ibm_extended
)
8495 /* Testing the high end of the range is done just using
8496 the high double, using the same test as isfinite().
8497 For the subnormal end of the range we first test the
8498 high double, then if its magnitude is equal to the
8499 limit of 0x1p-969, we test whether the low double is
8500 non-zero and opposite sign to the high double. */
8501 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
8502 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8503 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
8504 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
8506 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
8507 complex_double_type_node
, orig_arg
);
8508 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
8509 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
8510 tree zero
= build_real (type
, dconst0
);
8511 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
8512 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
8513 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
8514 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
8515 fold_build3 (COND_EXPR
,
8518 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
8520 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
8526 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
8527 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
8529 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
8540 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8541 ARG is the argument for the call. */
8544 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
8546 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8548 if (!validate_arg (arg
, REAL_TYPE
))
8551 switch (builtin_index
)
8553 case BUILT_IN_ISINF
:
8554 if (!HONOR_INFINITIES (arg
))
8555 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8559 case BUILT_IN_ISINF_SIGN
:
8561 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8562 /* In a boolean context, GCC will fold the inner COND_EXPR to
8563 1. So e.g. "if (isinf_sign(x))" would be folded to just
8564 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8565 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
8566 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
8567 tree tmp
= NULL_TREE
;
8569 arg
= builtin_save_expr (arg
);
8571 if (signbit_fn
&& isinf_fn
)
8573 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
8574 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
8576 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8577 signbit_call
, integer_zero_node
);
8578 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
8579 isinf_call
, integer_zero_node
);
8581 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
8582 integer_minus_one_node
, integer_one_node
);
8583 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8591 case BUILT_IN_ISFINITE
:
8592 if (!HONOR_NANS (arg
)
8593 && !HONOR_INFINITIES (arg
))
8594 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
8598 case BUILT_IN_ISNAN
:
8599 if (!HONOR_NANS (arg
))
8600 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8603 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
8604 if (is_ibm_extended
)
8606 /* NaN and Inf are encoded in the high-order double value
8607 only. The low-order value is not significant. */
8608 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
8611 arg
= builtin_save_expr (arg
);
8612 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
8619 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8620 This builtin will generate code to return the appropriate floating
8621 point classification depending on the value of the floating point
8622 number passed in. The possible return values must be supplied as
8623 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8624 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8625 one floating point argument which is "type generic". */
8628 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
8630 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
8631 arg
, type
, res
, tmp
;
8636 /* Verify the required arguments in the original call. */
8638 || !validate_arg (args
[0], INTEGER_TYPE
)
8639 || !validate_arg (args
[1], INTEGER_TYPE
)
8640 || !validate_arg (args
[2], INTEGER_TYPE
)
8641 || !validate_arg (args
[3], INTEGER_TYPE
)
8642 || !validate_arg (args
[4], INTEGER_TYPE
)
8643 || !validate_arg (args
[5], REAL_TYPE
))
8647 fp_infinite
= args
[1];
8648 fp_normal
= args
[2];
8649 fp_subnormal
= args
[3];
8652 type
= TREE_TYPE (arg
);
8653 mode
= TYPE_MODE (type
);
8654 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
8658 (fabs(x) == Inf ? FP_INFINITE :
8659 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8660 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8662 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8663 build_real (type
, dconst0
));
8664 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8665 tmp
, fp_zero
, fp_subnormal
);
8667 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
8668 real_from_string (&r
, buf
);
8669 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
8670 arg
, build_real (type
, r
));
8671 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
8673 if (HONOR_INFINITIES (mode
))
8676 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8677 build_real (type
, r
));
8678 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
8682 if (HONOR_NANS (mode
))
8684 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
8685 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
8691 /* Fold a call to an unordered comparison function such as
8692 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8693 being called and ARG0 and ARG1 are the arguments for the call.
8694 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8695 the opposite of the desired result. UNORDERED_CODE is used
8696 for modes that can hold NaNs and ORDERED_CODE is used for
8700 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
8701 enum tree_code unordered_code
,
8702 enum tree_code ordered_code
)
8704 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8705 enum tree_code code
;
8707 enum tree_code code0
, code1
;
8708 tree cmp_type
= NULL_TREE
;
8710 type0
= TREE_TYPE (arg0
);
8711 type1
= TREE_TYPE (arg1
);
8713 code0
= TREE_CODE (type0
);
8714 code1
= TREE_CODE (type1
);
8716 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
8717 /* Choose the wider of two real types. */
8718 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
8720 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
8722 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
8725 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
8726 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
8728 if (unordered_code
== UNORDERED_EXPR
)
8730 if (!HONOR_NANS (arg0
))
8731 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
8732 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
8735 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
8736 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
8737 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
8740 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8741 arithmetics if it can never overflow, or into internal functions that
8742 return both result of arithmetics and overflowed boolean flag in
8743 a complex integer result, or some other check for overflow.
8744 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8745 checking part of that. */
8748 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
8749 tree arg0
, tree arg1
, tree arg2
)
8751 enum internal_fn ifn
= IFN_LAST
;
8752 /* The code of the expression corresponding to the type-generic
8753 built-in, or ERROR_MARK for the type-specific ones. */
8754 enum tree_code opcode
= ERROR_MARK
;
8755 bool ovf_only
= false;
8759 case BUILT_IN_ADD_OVERFLOW_P
:
8762 case BUILT_IN_ADD_OVERFLOW
:
8765 case BUILT_IN_SADD_OVERFLOW
:
8766 case BUILT_IN_SADDL_OVERFLOW
:
8767 case BUILT_IN_SADDLL_OVERFLOW
:
8768 case BUILT_IN_UADD_OVERFLOW
:
8769 case BUILT_IN_UADDL_OVERFLOW
:
8770 case BUILT_IN_UADDLL_OVERFLOW
:
8771 ifn
= IFN_ADD_OVERFLOW
;
8773 case BUILT_IN_SUB_OVERFLOW_P
:
8776 case BUILT_IN_SUB_OVERFLOW
:
8777 opcode
= MINUS_EXPR
;
8779 case BUILT_IN_SSUB_OVERFLOW
:
8780 case BUILT_IN_SSUBL_OVERFLOW
:
8781 case BUILT_IN_SSUBLL_OVERFLOW
:
8782 case BUILT_IN_USUB_OVERFLOW
:
8783 case BUILT_IN_USUBL_OVERFLOW
:
8784 case BUILT_IN_USUBLL_OVERFLOW
:
8785 ifn
= IFN_SUB_OVERFLOW
;
8787 case BUILT_IN_MUL_OVERFLOW_P
:
8790 case BUILT_IN_MUL_OVERFLOW
:
8793 case BUILT_IN_SMUL_OVERFLOW
:
8794 case BUILT_IN_SMULL_OVERFLOW
:
8795 case BUILT_IN_SMULLL_OVERFLOW
:
8796 case BUILT_IN_UMUL_OVERFLOW
:
8797 case BUILT_IN_UMULL_OVERFLOW
:
8798 case BUILT_IN_UMULLL_OVERFLOW
:
8799 ifn
= IFN_MUL_OVERFLOW
;
8805 /* For the "generic" overloads, the first two arguments can have different
8806 types and the last argument determines the target type to use to check
8807 for overflow. The arguments of the other overloads all have the same
8809 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
8811 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8812 arguments are constant, attempt to fold the built-in call into a constant
8813 expression indicating whether or not it detected an overflow. */
8815 && TREE_CODE (arg0
) == INTEGER_CST
8816 && TREE_CODE (arg1
) == INTEGER_CST
)
8817 /* Perform the computation in the target type and check for overflow. */
8818 return omit_one_operand_loc (loc
, boolean_type_node
,
8819 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
8820 ? boolean_true_node
: boolean_false_node
,
8823 tree ctype
= build_complex_type (type
);
8824 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
8826 tree tgt
= save_expr (call
);
8827 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
8828 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
8829 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
8832 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
8834 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
8836 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
8837 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
8840 /* Fold a call to __builtin_FILE to a constant string. */
8843 fold_builtin_FILE (location_t loc
)
8845 if (const char *fname
= LOCATION_FILE (loc
))
8846 return build_string_literal (strlen (fname
) + 1, fname
);
8848 return build_string_literal (1, "");
8851 /* Fold a call to __builtin_FUNCTION to a constant string. */
8854 fold_builtin_FUNCTION ()
8856 const char *name
= "";
8858 if (current_function_decl
)
8859 name
= lang_hooks
.decl_printable_name (current_function_decl
, 0);
8861 return build_string_literal (strlen (name
) + 1, name
);
8864 /* Fold a call to __builtin_LINE to an integer constant. */
8867 fold_builtin_LINE (location_t loc
, tree type
)
8869 return build_int_cst (type
, LOCATION_LINE (loc
));
8872 /* Fold a call to built-in function FNDECL with 0 arguments.
8873 This function returns NULL_TREE if no simplification was possible. */
8876 fold_builtin_0 (location_t loc
, tree fndecl
)
8878 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8879 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8883 return fold_builtin_FILE (loc
);
8885 case BUILT_IN_FUNCTION
:
8886 return fold_builtin_FUNCTION ();
8889 return fold_builtin_LINE (loc
, type
);
8891 CASE_FLT_FN (BUILT_IN_INF
):
8892 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
8893 case BUILT_IN_INFD32
:
8894 case BUILT_IN_INFD64
:
8895 case BUILT_IN_INFD128
:
8896 return fold_builtin_inf (loc
, type
, true);
8898 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
8899 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
8900 return fold_builtin_inf (loc
, type
, false);
8902 case BUILT_IN_CLASSIFY_TYPE
:
8903 return fold_builtin_classify_type (NULL_TREE
);
8911 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8912 This function returns NULL_TREE if no simplification was possible. */
8915 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8917 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8918 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8920 if (TREE_CODE (arg0
) == ERROR_MARK
)
8923 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
8928 case BUILT_IN_CONSTANT_P
:
8930 tree val
= fold_builtin_constant_p (arg0
);
8932 /* Gimplification will pull the CALL_EXPR for the builtin out of
8933 an if condition. When not optimizing, we'll not CSE it back.
8934 To avoid link error types of regressions, return false now. */
8935 if (!val
&& !optimize
)
8936 val
= integer_zero_node
;
8941 case BUILT_IN_CLASSIFY_TYPE
:
8942 return fold_builtin_classify_type (arg0
);
8944 case BUILT_IN_STRLEN
:
8945 return fold_builtin_strlen (loc
, type
, arg0
);
8947 CASE_FLT_FN (BUILT_IN_FABS
):
8948 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8949 case BUILT_IN_FABSD32
:
8950 case BUILT_IN_FABSD64
:
8951 case BUILT_IN_FABSD128
:
8952 return fold_builtin_fabs (loc
, arg0
, type
);
8956 case BUILT_IN_LLABS
:
8957 case BUILT_IN_IMAXABS
:
8958 return fold_builtin_abs (loc
, arg0
, type
);
8960 CASE_FLT_FN (BUILT_IN_CONJ
):
8961 if (validate_arg (arg0
, COMPLEX_TYPE
)
8962 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8963 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8966 CASE_FLT_FN (BUILT_IN_CREAL
):
8967 if (validate_arg (arg0
, COMPLEX_TYPE
)
8968 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8969 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8972 CASE_FLT_FN (BUILT_IN_CIMAG
):
8973 if (validate_arg (arg0
, COMPLEX_TYPE
)
8974 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8975 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8978 CASE_FLT_FN (BUILT_IN_CARG
):
8979 return fold_builtin_carg (loc
, arg0
, type
);
8981 case BUILT_IN_ISASCII
:
8982 return fold_builtin_isascii (loc
, arg0
);
8984 case BUILT_IN_TOASCII
:
8985 return fold_builtin_toascii (loc
, arg0
);
8987 case BUILT_IN_ISDIGIT
:
8988 return fold_builtin_isdigit (loc
, arg0
);
8990 CASE_FLT_FN (BUILT_IN_FINITE
):
8991 case BUILT_IN_FINITED32
:
8992 case BUILT_IN_FINITED64
:
8993 case BUILT_IN_FINITED128
:
8994 case BUILT_IN_ISFINITE
:
8996 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
8999 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9002 CASE_FLT_FN (BUILT_IN_ISINF
):
9003 case BUILT_IN_ISINFD32
:
9004 case BUILT_IN_ISINFD64
:
9005 case BUILT_IN_ISINFD128
:
9007 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9010 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9013 case BUILT_IN_ISNORMAL
:
9014 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9016 case BUILT_IN_ISINF_SIGN
:
9017 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9019 CASE_FLT_FN (BUILT_IN_ISNAN
):
9020 case BUILT_IN_ISNAND32
:
9021 case BUILT_IN_ISNAND64
:
9022 case BUILT_IN_ISNAND128
:
9023 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9026 if (integer_zerop (arg0
))
9027 return build_empty_stmt (loc
);
9038 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9039 This function returns NULL_TREE if no simplification was possible. */
9042 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
9044 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9045 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9047 if (TREE_CODE (arg0
) == ERROR_MARK
9048 || TREE_CODE (arg1
) == ERROR_MARK
)
9051 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
9056 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9057 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9058 if (validate_arg (arg0
, REAL_TYPE
)
9059 && validate_arg (arg1
, POINTER_TYPE
))
9060 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9063 CASE_FLT_FN (BUILT_IN_FREXP
):
9064 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9066 CASE_FLT_FN (BUILT_IN_MODF
):
9067 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9069 case BUILT_IN_STRSPN
:
9070 return fold_builtin_strspn (loc
, arg0
, arg1
);
9072 case BUILT_IN_STRCSPN
:
9073 return fold_builtin_strcspn (loc
, arg0
, arg1
);
9075 case BUILT_IN_STRPBRK
:
9076 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
9078 case BUILT_IN_EXPECT
:
9079 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
9081 case BUILT_IN_ISGREATER
:
9082 return fold_builtin_unordered_cmp (loc
, fndecl
,
9083 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9084 case BUILT_IN_ISGREATEREQUAL
:
9085 return fold_builtin_unordered_cmp (loc
, fndecl
,
9086 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9087 case BUILT_IN_ISLESS
:
9088 return fold_builtin_unordered_cmp (loc
, fndecl
,
9089 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9090 case BUILT_IN_ISLESSEQUAL
:
9091 return fold_builtin_unordered_cmp (loc
, fndecl
,
9092 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9093 case BUILT_IN_ISLESSGREATER
:
9094 return fold_builtin_unordered_cmp (loc
, fndecl
,
9095 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9096 case BUILT_IN_ISUNORDERED
:
9097 return fold_builtin_unordered_cmp (loc
, fndecl
,
9098 arg0
, arg1
, UNORDERED_EXPR
,
9101 /* We do the folding for va_start in the expander. */
9102 case BUILT_IN_VA_START
:
9105 case BUILT_IN_OBJECT_SIZE
:
9106 return fold_builtin_object_size (arg0
, arg1
);
9108 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9109 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9111 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9112 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
9120 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9122 This function returns NULL_TREE if no simplification was possible. */
9125 fold_builtin_3 (location_t loc
, tree fndecl
,
9126 tree arg0
, tree arg1
, tree arg2
)
9128 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9129 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9131 if (TREE_CODE (arg0
) == ERROR_MARK
9132 || TREE_CODE (arg1
) == ERROR_MARK
9133 || TREE_CODE (arg2
) == ERROR_MARK
)
9136 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
9143 CASE_FLT_FN (BUILT_IN_SINCOS
):
9144 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9146 CASE_FLT_FN (BUILT_IN_FMA
):
9147 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
9148 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
9150 CASE_FLT_FN (BUILT_IN_REMQUO
):
9151 if (validate_arg (arg0
, REAL_TYPE
)
9152 && validate_arg (arg1
, REAL_TYPE
)
9153 && validate_arg (arg2
, POINTER_TYPE
))
9154 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9157 case BUILT_IN_MEMCMP
:
9158 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);
9160 case BUILT_IN_EXPECT
:
9161 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
9163 case BUILT_IN_ADD_OVERFLOW
:
9164 case BUILT_IN_SUB_OVERFLOW
:
9165 case BUILT_IN_MUL_OVERFLOW
:
9166 case BUILT_IN_ADD_OVERFLOW_P
:
9167 case BUILT_IN_SUB_OVERFLOW_P
:
9168 case BUILT_IN_MUL_OVERFLOW_P
:
9169 case BUILT_IN_SADD_OVERFLOW
:
9170 case BUILT_IN_SADDL_OVERFLOW
:
9171 case BUILT_IN_SADDLL_OVERFLOW
:
9172 case BUILT_IN_SSUB_OVERFLOW
:
9173 case BUILT_IN_SSUBL_OVERFLOW
:
9174 case BUILT_IN_SSUBLL_OVERFLOW
:
9175 case BUILT_IN_SMUL_OVERFLOW
:
9176 case BUILT_IN_SMULL_OVERFLOW
:
9177 case BUILT_IN_SMULLL_OVERFLOW
:
9178 case BUILT_IN_UADD_OVERFLOW
:
9179 case BUILT_IN_UADDL_OVERFLOW
:
9180 case BUILT_IN_UADDLL_OVERFLOW
:
9181 case BUILT_IN_USUB_OVERFLOW
:
9182 case BUILT_IN_USUBL_OVERFLOW
:
9183 case BUILT_IN_USUBLL_OVERFLOW
:
9184 case BUILT_IN_UMUL_OVERFLOW
:
9185 case BUILT_IN_UMULL_OVERFLOW
:
9186 case BUILT_IN_UMULLL_OVERFLOW
:
9187 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
9195 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9196 arguments. IGNORE is true if the result of the
9197 function call is ignored. This function returns NULL_TREE if no
9198 simplification was possible. */
9201 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
9203 tree ret
= NULL_TREE
;
9208 ret
= fold_builtin_0 (loc
, fndecl
);
9211 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
9214 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
9217 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
9220 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
9225 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9226 SET_EXPR_LOCATION (ret
, loc
);
9227 TREE_NO_WARNING (ret
) = 1;
9233 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9234 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9235 of arguments in ARGS to be omitted. OLDNARGS is the number of
9236 elements in ARGS. */
9239 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
9240 int skip
, tree fndecl
, int n
, va_list newargs
)
9242 int nargs
= oldnargs
- skip
+ n
;
9249 buffer
= XALLOCAVEC (tree
, nargs
);
9250 for (i
= 0; i
< n
; i
++)
9251 buffer
[i
] = va_arg (newargs
, tree
);
9252 for (j
= skip
; j
< oldnargs
; j
++, i
++)
9253 buffer
[i
] = args
[j
];
9256 buffer
= args
+ skip
;
9258 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
9261 /* Return true if FNDECL shouldn't be folded right now.
9262 If a built-in function has an inline attribute always_inline
9263 wrapper, defer folding it after always_inline functions have
9264 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9265 might not be performed. */
9268 avoid_folding_inline_builtin (tree fndecl
)
9270 return (DECL_DECLARED_INLINE_P (fndecl
)
9271 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
9273 && !cfun
->always_inline_functions_inlined
9274 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
9277 /* A wrapper function for builtin folding that prevents warnings for
9278 "statement without effect" and the like, caused by removing the
9279 call node earlier than the warning is generated. */
9282 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9284 tree ret
= NULL_TREE
;
9285 tree fndecl
= get_callee_fndecl (exp
);
9287 && TREE_CODE (fndecl
) == FUNCTION_DECL
9288 && DECL_BUILT_IN (fndecl
)
9289 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9290 yet. Defer folding until we see all the arguments
9291 (after inlining). */
9292 && !CALL_EXPR_VA_ARG_PACK (exp
))
9294 int nargs
= call_expr_nargs (exp
);
9296 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9297 instead last argument is __builtin_va_arg_pack (). Defer folding
9298 even in that case, until arguments are finalized. */
9299 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9301 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9303 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9304 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9305 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9309 if (avoid_folding_inline_builtin (fndecl
))
9312 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9313 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9314 CALL_EXPR_ARGP (exp
), ignore
);
9317 tree
*args
= CALL_EXPR_ARGP (exp
);
9318 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9326 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9327 N arguments are passed in the array ARGARRAY. Return a folded
9328 expression or NULL_TREE if no simplification was possible. */
9331 fold_builtin_call_array (location_t loc
, tree
,
9336 if (TREE_CODE (fn
) != ADDR_EXPR
)
9339 tree fndecl
= TREE_OPERAND (fn
, 0);
9340 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9341 && DECL_BUILT_IN (fndecl
))
9343 /* If last argument is __builtin_va_arg_pack (), arguments to this
9344 function are not finalized yet. Defer folding until they are. */
9345 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
9347 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
9349 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9350 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9351 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9354 if (avoid_folding_inline_builtin (fndecl
))
9356 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9357 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
9359 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
9365 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9366 along with N new arguments specified as the "..." parameters. SKIP
9367 is the number of arguments in EXP to be omitted. This function is used
9368 to do varargs-to-varargs transformations. */
9371 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
9377 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
9378 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
9384 /* Validate a single argument ARG against a tree code CODE representing
9385 a type. Return true when argument is valid. */
9388 validate_arg (const_tree arg
, enum tree_code code
)
9392 else if (code
== POINTER_TYPE
)
9393 return POINTER_TYPE_P (TREE_TYPE (arg
));
9394 else if (code
== INTEGER_TYPE
)
9395 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
9396 return code
== TREE_CODE (TREE_TYPE (arg
));
9399 /* This function validates the types of a function call argument list
9400 against a specified list of tree_codes. If the last specifier is a 0,
9401 that represents an ellipses, otherwise the last specifier must be a
9404 This is the GIMPLE version of validate_arglist. Eventually we want to
9405 completely convert builtins.c to work from GIMPLEs and the tree based
9406 validate_arglist will then be removed. */
9409 validate_gimple_arglist (const gcall
*call
, ...)
9411 enum tree_code code
;
9417 va_start (ap
, call
);
9422 code
= (enum tree_code
) va_arg (ap
, int);
9426 /* This signifies an ellipses, any further arguments are all ok. */
9430 /* This signifies an endlink, if no arguments remain, return
9431 true, otherwise return false. */
9432 res
= (i
== gimple_call_num_args (call
));
9435 /* If no parameters remain or the parameter's code does not
9436 match the specified code, return false. Otherwise continue
9437 checking any remaining arguments. */
9438 arg
= gimple_call_arg (call
, i
++);
9439 if (!validate_arg (arg
, code
))
9446 /* We need gotos here since we can only have one VA_CLOSE in a
9454 /* Default target-specific builtin expander that does nothing. */
9457 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
9458 rtx target ATTRIBUTE_UNUSED
,
9459 rtx subtarget ATTRIBUTE_UNUSED
,
9460 machine_mode mode ATTRIBUTE_UNUSED
,
9461 int ignore ATTRIBUTE_UNUSED
)
9466 /* Returns true is EXP represents data that would potentially reside
9467 in a readonly section. */
9470 readonly_data_expr (tree exp
)
9474 if (TREE_CODE (exp
) != ADDR_EXPR
)
9477 exp
= get_base_address (TREE_OPERAND (exp
, 0));
9481 /* Make sure we call decl_readonly_section only for trees it
9482 can handle (since it returns true for everything it doesn't
9484 if (TREE_CODE (exp
) == STRING_CST
9485 || TREE_CODE (exp
) == CONSTRUCTOR
9486 || (VAR_P (exp
) && TREE_STATIC (exp
)))
9487 return decl_readonly_section (exp
, 0);
9492 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9493 to the call, and TYPE is its return type.
9495 Return NULL_TREE if no simplification was possible, otherwise return the
9496 simplified form of the call as a tree.
9498 The simplified form may be a constant or other expression which
9499 computes the same value, but in a more efficient manner (including
9500 calls to other builtin functions).
9502 The call may contain arguments which need to be evaluated, but
9503 which are not useful to determine the result of the call. In
9504 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9505 COMPOUND_EXPR will be an argument which must be evaluated.
9506 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9507 COMPOUND_EXPR in the chain will contain the tree for the simplified
9508 form of the builtin function call. */
9511 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
9513 if (!validate_arg (s1
, POINTER_TYPE
)
9514 || !validate_arg (s2
, POINTER_TYPE
))
9519 const char *p1
, *p2
;
9528 const char *r
= strpbrk (p1
, p2
);
9532 return build_int_cst (TREE_TYPE (s1
), 0);
9534 /* Return an offset into the constant string argument. */
9535 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
9536 return fold_convert_loc (loc
, type
, tem
);
9540 /* strpbrk(x, "") == NULL.
9541 Evaluate and ignore s1 in case it had side-effects. */
9542 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
9545 return NULL_TREE
; /* Really call strpbrk. */
9547 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
9551 /* New argument list transforming strpbrk(s1, s2) to
9552 strchr(s1, s2[0]). */
9553 return build_call_expr_loc (loc
, fn
, 2, s1
,
9554 build_int_cst (integer_type_node
, p2
[0]));
9558 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9561 Return NULL_TREE if no simplification was possible, otherwise return the
9562 simplified form of the call as a tree.
9564 The simplified form may be a constant or other expression which
9565 computes the same value, but in a more efficient manner (including
9566 calls to other builtin functions).
9568 The call may contain arguments which need to be evaluated, but
9569 which are not useful to determine the result of the call. In
9570 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9571 COMPOUND_EXPR will be an argument which must be evaluated.
9572 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9573 COMPOUND_EXPR in the chain will contain the tree for the simplified
9574 form of the builtin function call. */
9577 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
9579 if (!validate_arg (s1
, POINTER_TYPE
)
9580 || !validate_arg (s2
, POINTER_TYPE
))
9584 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
9586 /* If either argument is "", return NULL_TREE. */
9587 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
9588 /* Evaluate and ignore both arguments in case either one has
9590 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
9596 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9599 Return NULL_TREE if no simplification was possible, otherwise return the
9600 simplified form of the call as a tree.
9602 The simplified form may be a constant or other expression which
9603 computes the same value, but in a more efficient manner (including
9604 calls to other builtin functions).
9606 The call may contain arguments which need to be evaluated, but
9607 which are not useful to determine the result of the call. In
9608 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9609 COMPOUND_EXPR will be an argument which must be evaluated.
9610 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9611 COMPOUND_EXPR in the chain will contain the tree for the simplified
9612 form of the builtin function call. */
9615 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
9617 if (!validate_arg (s1
, POINTER_TYPE
)
9618 || !validate_arg (s2
, POINTER_TYPE
))
9622 /* If the first argument is "", return NULL_TREE. */
9623 const char *p1
= c_getstr (s1
);
9624 if (p1
&& *p1
== '\0')
9626 /* Evaluate and ignore argument s2 in case it has
9628 return omit_one_operand_loc (loc
, size_type_node
,
9629 size_zero_node
, s2
);
9632 /* If the second argument is "", return __builtin_strlen(s1). */
9633 const char *p2
= c_getstr (s2
);
9634 if (p2
&& *p2
== '\0')
9636 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
9638 /* If the replacement _DECL isn't initialized, don't do the
9643 return build_call_expr_loc (loc
, fn
, 1, s1
);
9649 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9650 produced. False otherwise. This is done so that we don't output the error
9651 or warning twice or three times. */
9654 fold_builtin_next_arg (tree exp
, bool va_start_p
)
9656 tree fntype
= TREE_TYPE (current_function_decl
);
9657 int nargs
= call_expr_nargs (exp
);
9659 /* There is good chance the current input_location points inside the
9660 definition of the va_start macro (perhaps on the token for
9661 builtin) in a system header, so warnings will not be emitted.
9662 Use the location in real source code. */
9663 source_location current_location
=
9664 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
9667 if (!stdarg_p (fntype
))
9669 error ("%<va_start%> used in function with fixed args");
9675 if (va_start_p
&& (nargs
!= 2))
9677 error ("wrong number of arguments to function %<va_start%>");
9680 arg
= CALL_EXPR_ARG (exp
, 1);
9682 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9683 when we checked the arguments and if needed issued a warning. */
9688 /* Evidently an out of date version of <stdarg.h>; can't validate
9689 va_start's second argument, but can still work as intended. */
9690 warning_at (current_location
,
9692 "%<__builtin_next_arg%> called without an argument");
9697 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9700 arg
= CALL_EXPR_ARG (exp
, 0);
9703 if (TREE_CODE (arg
) == SSA_NAME
)
9704 arg
= SSA_NAME_VAR (arg
);
9706 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9707 or __builtin_next_arg (0) the first time we see it, after checking
9708 the arguments and if needed issuing a warning. */
9709 if (!integer_zerop (arg
))
9711 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
9713 /* Strip off all nops for the sake of the comparison. This
9714 is not quite the same as STRIP_NOPS. It does more.
9715 We must also strip off INDIRECT_EXPR for C++ reference
9717 while (CONVERT_EXPR_P (arg
)
9718 || TREE_CODE (arg
) == INDIRECT_REF
)
9719 arg
= TREE_OPERAND (arg
, 0);
9720 if (arg
!= last_parm
)
9722 /* FIXME: Sometimes with the tree optimizers we can get the
9723 not the last argument even though the user used the last
9724 argument. We just warn and set the arg to be the last
9725 argument so that we will get wrong-code because of
9727 warning_at (current_location
,
9729 "second parameter of %<va_start%> not last named argument");
9732 /* Undefined by C99 7.15.1.4p4 (va_start):
9733 "If the parameter parmN is declared with the register storage
9734 class, with a function or array type, or with a type that is
9735 not compatible with the type that results after application of
9736 the default argument promotions, the behavior is undefined."
9738 else if (DECL_REGISTER (arg
))
9740 warning_at (current_location
,
9742 "undefined behavior when second parameter of "
9743 "%<va_start%> is declared with %<register%> storage");
9746 /* We want to verify the second parameter just once before the tree
9747 optimizers are run and then avoid keeping it in the tree,
9748 as otherwise we could warn even for correct code like:
9749 void foo (int i, ...)
9750 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9752 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
9754 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
9760 /* Expand a call EXP to __builtin_object_size. */
9763 expand_builtin_object_size (tree exp
)
9766 int object_size_type
;
9767 tree fndecl
= get_callee_fndecl (exp
);
9769 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9771 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9773 expand_builtin_trap ();
9777 ost
= CALL_EXPR_ARG (exp
, 1);
9780 if (TREE_CODE (ost
) != INTEGER_CST
9781 || tree_int_cst_sgn (ost
) < 0
9782 || compare_tree_int (ost
, 3) > 0)
9784 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9786 expand_builtin_trap ();
9790 object_size_type
= tree_to_shwi (ost
);
9792 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
9795 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9796 FCODE is the BUILT_IN_* to use.
9797 Return NULL_RTX if we failed; the caller should emit a normal call,
9798 otherwise try to get the result in TARGET, if convenient (and in
9799 mode MODE if that's convenient). */
9802 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
9803 enum built_in_function fcode
)
9805 if (!validate_arglist (exp
,
9807 fcode
== BUILT_IN_MEMSET_CHK
9808 ? INTEGER_TYPE
: POINTER_TYPE
,
9809 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9812 tree dest
= CALL_EXPR_ARG (exp
, 0);
9813 tree src
= CALL_EXPR_ARG (exp
, 1);
9814 tree len
= CALL_EXPR_ARG (exp
, 2);
9815 tree size
= CALL_EXPR_ARG (exp
, 3);
9817 bool sizes_ok
= check_access (exp
, dest
, src
, len
, /*maxread=*/NULL_TREE
,
9818 /*str=*/NULL_TREE
, size
);
9820 if (!tree_fits_uhwi_p (size
))
9823 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
9825 /* Avoid transforming the checking call to an ordinary one when
9826 an overflow has been detected or when the call couldn't be
9827 validated because the size is not constant. */
9828 if (!sizes_ok
&& !integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
9831 tree fn
= NULL_TREE
;
9832 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9833 mem{cpy,pcpy,move,set} is available. */
9836 case BUILT_IN_MEMCPY_CHK
:
9837 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
9839 case BUILT_IN_MEMPCPY_CHK
:
9840 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
9842 case BUILT_IN_MEMMOVE_CHK
:
9843 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
9845 case BUILT_IN_MEMSET_CHK
:
9846 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
9855 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
9856 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9857 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9858 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9860 else if (fcode
== BUILT_IN_MEMSET_CHK
)
9864 unsigned int dest_align
= get_pointer_alignment (dest
);
9866 /* If DEST is not a pointer type, call the normal function. */
9867 if (dest_align
== 0)
9870 /* If SRC and DEST are the same (and not volatile), do nothing. */
9871 if (operand_equal_p (src
, dest
, 0))
9875 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
9877 /* Evaluate and ignore LEN in case it has side-effects. */
9878 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9879 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
9882 expr
= fold_build_pointer_plus (dest
, len
);
9883 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
9886 /* __memmove_chk special case. */
9887 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
9889 unsigned int src_align
= get_pointer_alignment (src
);
9894 /* If src is categorized for a readonly section we can use
9895 normal __memcpy_chk. */
9896 if (readonly_data_expr (src
))
9898 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
9901 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
9902 dest
, src
, len
, size
);
9903 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9904 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9905 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9912 /* Emit warning if a buffer overflow is detected at compile time. */
9915 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
9917 /* The source string. */
9918 tree srcstr
= NULL_TREE
;
9919 /* The size of the destination object. */
9920 tree objsize
= NULL_TREE
;
9921 /* The string that is being concatenated with (as in __strcat_chk)
9922 or null if it isn't. */
9923 tree catstr
= NULL_TREE
;
9924 /* The maximum length of the source sequence in a bounded operation
9925 (such as __strncat_chk) or null if the operation isn't bounded
9926 (such as __strcat_chk). */
9927 tree maxread
= NULL_TREE
;
9928 /* The exact size of the access (such as in __strncpy_chk). */
9929 tree size
= NULL_TREE
;
9933 case BUILT_IN_STRCPY_CHK
:
9934 case BUILT_IN_STPCPY_CHK
:
9935 srcstr
= CALL_EXPR_ARG (exp
, 1);
9936 objsize
= CALL_EXPR_ARG (exp
, 2);
9939 case BUILT_IN_STRCAT_CHK
:
9940 /* For __strcat_chk the warning will be emitted only if overflowing
9941 by at least strlen (dest) + 1 bytes. */
9942 catstr
= CALL_EXPR_ARG (exp
, 0);
9943 srcstr
= CALL_EXPR_ARG (exp
, 1);
9944 objsize
= CALL_EXPR_ARG (exp
, 2);
9947 case BUILT_IN_STRNCAT_CHK
:
9948 catstr
= CALL_EXPR_ARG (exp
, 0);
9949 srcstr
= CALL_EXPR_ARG (exp
, 1);
9950 maxread
= CALL_EXPR_ARG (exp
, 2);
9951 objsize
= CALL_EXPR_ARG (exp
, 3);
9954 case BUILT_IN_STRNCPY_CHK
:
9955 case BUILT_IN_STPNCPY_CHK
:
9956 srcstr
= CALL_EXPR_ARG (exp
, 1);
9957 size
= CALL_EXPR_ARG (exp
, 2);
9958 objsize
= CALL_EXPR_ARG (exp
, 3);
9961 case BUILT_IN_SNPRINTF_CHK
:
9962 case BUILT_IN_VSNPRINTF_CHK
:
9963 maxread
= CALL_EXPR_ARG (exp
, 1);
9964 objsize
= CALL_EXPR_ARG (exp
, 3);
9970 if (catstr
&& maxread
)
9972 /* Check __strncat_chk. There is no way to determine the length
9973 of the string to which the source string is being appended so
9974 just warn when the length of the source string is not known. */
9975 check_strncat_sizes (exp
, objsize
);
9979 /* The destination argument is the first one for all built-ins above. */
9980 tree dst
= CALL_EXPR_ARG (exp
, 0);
9982 check_access (exp
, dst
, srcstr
, size
, maxread
, srcstr
, objsize
);
9985 /* Emit warning if a buffer overflow is detected at compile time
9986 in __sprintf_chk/__vsprintf_chk calls. */
9989 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
9991 tree size
, len
, fmt
;
9992 const char *fmt_str
;
9993 int nargs
= call_expr_nargs (exp
);
9995 /* Verify the required arguments in the original call. */
9999 size
= CALL_EXPR_ARG (exp
, 2);
10000 fmt
= CALL_EXPR_ARG (exp
, 3);
10002 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
10005 /* Check whether the format is a literal string constant. */
10006 fmt_str
= c_getstr (fmt
);
10007 if (fmt_str
== NULL
)
10010 if (!init_target_chars ())
10013 /* If the format doesn't contain % args or %%, we know its size. */
10014 if (strchr (fmt_str
, target_percent
) == 0)
10015 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
10016 /* If the format is "%s" and first ... argument is a string literal,
10018 else if (fcode
== BUILT_IN_SPRINTF_CHK
10019 && strcmp (fmt_str
, target_percent_s
) == 0)
10025 arg
= CALL_EXPR_ARG (exp
, 4);
10026 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
10029 len
= c_strlen (arg
, 1);
10030 if (!len
|| ! tree_fits_uhwi_p (len
))
10036 /* Add one for the terminating nul. */
10037 len
= fold_build2 (PLUS_EXPR
, TREE_TYPE (len
), len
, size_one_node
);
10039 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, /*size=*/NULL_TREE
,
10040 /*maxread=*/NULL_TREE
, len
, size
);
10043 /* Emit warning if a free is called with address of a variable. */
10046 maybe_emit_free_warning (tree exp
)
10048 tree arg
= CALL_EXPR_ARG (exp
, 0);
10051 if (TREE_CODE (arg
) != ADDR_EXPR
)
10054 arg
= get_base_address (TREE_OPERAND (arg
, 0));
10055 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
10058 if (SSA_VAR_P (arg
))
10059 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
10060 "%Kattempt to free a non-heap object %qD", exp
, arg
);
10062 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
10063 "%Kattempt to free a non-heap object", exp
);
10066 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10070 fold_builtin_object_size (tree ptr
, tree ost
)
10072 unsigned HOST_WIDE_INT bytes
;
10073 int object_size_type
;
10075 if (!validate_arg (ptr
, POINTER_TYPE
)
10076 || !validate_arg (ost
, INTEGER_TYPE
))
10081 if (TREE_CODE (ost
) != INTEGER_CST
10082 || tree_int_cst_sgn (ost
) < 0
10083 || compare_tree_int (ost
, 3) > 0)
10086 object_size_type
= tree_to_shwi (ost
);
10088 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10089 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10090 and (size_t) 0 for types 2 and 3. */
10091 if (TREE_SIDE_EFFECTS (ptr
))
10092 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
10094 if (TREE_CODE (ptr
) == ADDR_EXPR
)
10096 compute_builtin_object_size (ptr
, object_size_type
, &bytes
);
10097 if (wi::fits_to_tree_p (bytes
, size_type_node
))
10098 return build_int_cstu (size_type_node
, bytes
);
10100 else if (TREE_CODE (ptr
) == SSA_NAME
)
10102 /* If object size is not known yet, delay folding until
10103 later. Maybe subsequent passes will help determining
10105 if (compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
10106 && wi::fits_to_tree_p (bytes
, size_type_node
))
10107 return build_int_cstu (size_type_node
, bytes
);
10113 /* Builtins with folding operations that operate on "..." arguments
10114 need special handling; we need to store the arguments in a convenient
10115 data structure before attempting any folding. Fortunately there are
10116 only a few builtins that fall into this category. FNDECL is the
10117 function, EXP is the CALL_EXPR for the call. */
10120 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
10122 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10123 tree ret
= NULL_TREE
;
10127 case BUILT_IN_FPCLASSIFY
:
10128 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
10136 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10137 SET_EXPR_LOCATION (ret
, loc
);
10138 TREE_NO_WARNING (ret
) = 1;
10144 /* Initialize format string characters in the target charset. */
10147 init_target_chars (void)
10152 target_newline
= lang_hooks
.to_target_charset ('\n');
10153 target_percent
= lang_hooks
.to_target_charset ('%');
10154 target_c
= lang_hooks
.to_target_charset ('c');
10155 target_s
= lang_hooks
.to_target_charset ('s');
10156 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
10160 target_percent_c
[0] = target_percent
;
10161 target_percent_c
[1] = target_c
;
10162 target_percent_c
[2] = '\0';
10164 target_percent_s
[0] = target_percent
;
10165 target_percent_s
[1] = target_s
;
10166 target_percent_s
[2] = '\0';
10168 target_percent_s_newline
[0] = target_percent
;
10169 target_percent_s_newline
[1] = target_s
;
10170 target_percent_s_newline
[2] = target_newline
;
10171 target_percent_s_newline
[3] = '\0';
10178 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10179 and no overflow/underflow occurred. INEXACT is true if M was not
10180 exactly calculated. TYPE is the tree type for the result. This
10181 function assumes that you cleared the MPFR flags and then
10182 calculated M to see if anything subsequently set a flag prior to
10183 entering this function. Return NULL_TREE if any checks fail. */
10186 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
10188 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10189 overflow/underflow occurred. If -frounding-math, proceed iff the
10190 result of calling FUNC was exact. */
10191 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10192 && (!flag_rounding_math
|| !inexact
))
10194 REAL_VALUE_TYPE rr
;
10196 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
10197 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10198 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10199 but the mpft_t is not, then we underflowed in the
10201 if (real_isfinite (&rr
)
10202 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
10204 REAL_VALUE_TYPE rmode
;
10206 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
10207 /* Proceed iff the specified mode can hold the value. */
10208 if (real_identical (&rmode
, &rr
))
10209 return build_real (type
, rmode
);
10215 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10216 number and no overflow/underflow occurred. INEXACT is true if M
10217 was not exactly calculated. TYPE is the tree type for the result.
10218 This function assumes that you cleared the MPFR flags and then
10219 calculated M to see if anything subsequently set a flag prior to
10220 entering this function. Return NULL_TREE if any checks fail, if
10221 FORCE_CONVERT is true, then bypass the checks. */
10224 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
10226 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10227 overflow/underflow occurred. If -frounding-math, proceed iff the
10228 result of calling FUNC was exact. */
10230 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
10231 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10232 && (!flag_rounding_math
|| !inexact
)))
10234 REAL_VALUE_TYPE re
, im
;
10236 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
10237 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
10238 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10239 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10240 but the mpft_t is not, then we underflowed in the
10243 || (real_isfinite (&re
) && real_isfinite (&im
)
10244 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
10245 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
10247 REAL_VALUE_TYPE re_mode
, im_mode
;
10249 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
10250 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
10251 /* Proceed iff the specified mode can hold the value. */
10253 || (real_identical (&re_mode
, &re
)
10254 && real_identical (&im_mode
, &im
)))
10255 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
10256 build_real (TREE_TYPE (type
), im_mode
));
10262 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10263 the pointer *(ARG_QUO) and return the result. The type is taken
10264 from the type of ARG0 and is used for setting the precision of the
10265 calculation and results. */
10268 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
10270 tree
const type
= TREE_TYPE (arg0
);
10271 tree result
= NULL_TREE
;
10276 /* To proceed, MPFR must exactly represent the target floating point
10277 format, which only happens when the target base equals two. */
10278 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
10279 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
10280 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
10282 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
10283 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
10285 if (real_isfinite (ra0
) && real_isfinite (ra1
))
10287 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
10288 const int prec
= fmt
->p
;
10289 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
10294 mpfr_inits2 (prec
, m0
, m1
, NULL
);
10295 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
10296 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
10297 mpfr_clear_flags ();
10298 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
10299 /* Remquo is independent of the rounding mode, so pass
10300 inexact=0 to do_mpfr_ckconv(). */
10301 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
10302 mpfr_clears (m0
, m1
, NULL
);
10305 /* MPFR calculates quo in the host's long so it may
10306 return more bits in quo than the target int can hold
10307 if sizeof(host long) > sizeof(target int). This can
10308 happen even for native compilers in LP64 mode. In
10309 these cases, modulo the quo value with the largest
10310 number that the target int can hold while leaving one
10311 bit for the sign. */
10312 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
10313 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
10315 /* Dereference the quo pointer argument. */
10316 arg_quo
= build_fold_indirect_ref (arg_quo
);
10317 /* Proceed iff a valid pointer type was passed in. */
10318 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
10320 /* Set the value. */
10322 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
10323 build_int_cst (TREE_TYPE (arg_quo
),
10325 TREE_SIDE_EFFECTS (result_quo
) = 1;
10326 /* Combine the quo assignment with the rem. */
10327 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
10328 result_quo
, result_rem
));
10336 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10337 resulting value as a tree with type TYPE. The mpfr precision is
10338 set to the precision of TYPE. We assume that this mpfr function
10339 returns zero if the result could be calculated exactly within the
10340 requested precision. In addition, the integer pointer represented
10341 by ARG_SG will be dereferenced and set to the appropriate signgam
10345 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
10347 tree result
= NULL_TREE
;
10351 /* To proceed, MPFR must exactly represent the target floating point
10352 format, which only happens when the target base equals two. Also
10353 verify ARG is a constant and that ARG_SG is an int pointer. */
10354 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
10355 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
10356 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
10357 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
10359 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
10361 /* In addition to NaN and Inf, the argument cannot be zero or a
10362 negative integer. */
10363 if (real_isfinite (ra
)
10364 && ra
->cl
!= rvc_zero
10365 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
10367 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
10368 const int prec
= fmt
->p
;
10369 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
10374 mpfr_init2 (m
, prec
);
10375 mpfr_from_real (m
, ra
, GMP_RNDN
);
10376 mpfr_clear_flags ();
10377 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
10378 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
10384 /* Dereference the arg_sg pointer argument. */
10385 arg_sg
= build_fold_indirect_ref (arg_sg
);
10386 /* Assign the signgam value into *arg_sg. */
10387 result_sg
= fold_build2 (MODIFY_EXPR
,
10388 TREE_TYPE (arg_sg
), arg_sg
,
10389 build_int_cst (TREE_TYPE (arg_sg
), sg
));
10390 TREE_SIDE_EFFECTS (result_sg
) = 1;
10391 /* Combine the signgam assignment with the lgamma result. */
10392 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
10393 result_sg
, result_lg
));
10401 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10402 mpc function FUNC on it and return the resulting value as a tree
10403 with type TYPE. The mpfr precision is set to the precision of
10404 TYPE. We assume that function FUNC returns zero if the result
10405 could be calculated exactly within the requested precision. If
10406 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10407 in the arguments and/or results. */
10410 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
10411 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
10413 tree result
= NULL_TREE
;
10418 /* To proceed, MPFR must exactly represent the target floating point
10419 format, which only happens when the target base equals two. */
10420 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
10421 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
10422 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
10423 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
10424 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
10426 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
10427 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
10428 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
10429 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
10432 || (real_isfinite (re0
) && real_isfinite (im0
)
10433 && real_isfinite (re1
) && real_isfinite (im1
)))
10435 const struct real_format
*const fmt
=
10436 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
10437 const int prec
= fmt
->p
;
10438 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
10439 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
10443 mpc_init2 (m0
, prec
);
10444 mpc_init2 (m1
, prec
);
10445 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
10446 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
10447 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
10448 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
10449 mpfr_clear_flags ();
10450 inexact
= func (m0
, m0
, m1
, crnd
);
10451 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
10460 /* A wrapper function for builtin folding that prevents warnings for
10461 "statement without effect" and the like, caused by removing the
10462 call node earlier than the warning is generated. */
10465 fold_call_stmt (gcall
*stmt
, bool ignore
)
10467 tree ret
= NULL_TREE
;
10468 tree fndecl
= gimple_call_fndecl (stmt
);
10469 location_t loc
= gimple_location (stmt
);
10471 && TREE_CODE (fndecl
) == FUNCTION_DECL
10472 && DECL_BUILT_IN (fndecl
)
10473 && !gimple_call_va_arg_pack_p (stmt
))
10475 int nargs
= gimple_call_num_args (stmt
);
10476 tree
*args
= (nargs
> 0
10477 ? gimple_call_arg_ptr (stmt
, 0)
10478 : &error_mark_node
);
10480 if (avoid_folding_inline_builtin (fndecl
))
10482 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10484 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
10488 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10491 /* Propagate location information from original call to
10492 expansion of builtin. Otherwise things like
10493 maybe_emit_chk_warning, that operate on the expansion
10494 of a builtin, will use the wrong location information. */
10495 if (gimple_has_location (stmt
))
10497 tree realret
= ret
;
10498 if (TREE_CODE (ret
) == NOP_EXPR
)
10499 realret
= TREE_OPERAND (ret
, 0);
10500 if (CAN_HAVE_LOCATION_P (realret
)
10501 && !EXPR_HAS_LOCATION (realret
))
10502 SET_EXPR_LOCATION (realret
, loc
);
10512 /* Look up the function in builtin_decl that corresponds to DECL
10513 and set ASMSPEC as its user assembler name. DECL must be a
10514 function decl that declares a builtin. */
10517 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
10519 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
10520 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
10523 tree builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
10524 set_user_assembler_name (builtin
, asmspec
);
10526 if (DECL_FUNCTION_CODE (decl
) == BUILT_IN_FFS
10527 && INT_TYPE_SIZE
< BITS_PER_WORD
)
10529 scalar_int_mode mode
= int_mode_for_size (INT_TYPE_SIZE
, 0).require ();
10530 set_user_assembler_libfunc ("ffs", asmspec
);
10531 set_optab_libfunc (ffs_optab
, mode
, "ffs");
10535 /* Return true if DECL is a builtin that expands to a constant or similarly
10538 is_simple_builtin (tree decl
)
10540 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
10541 switch (DECL_FUNCTION_CODE (decl
))
10543 /* Builtins that expand to constants. */
10544 case BUILT_IN_CONSTANT_P
:
10545 case BUILT_IN_EXPECT
:
10546 case BUILT_IN_OBJECT_SIZE
:
10547 case BUILT_IN_UNREACHABLE
:
10548 /* Simple register moves or loads from stack. */
10549 case BUILT_IN_ASSUME_ALIGNED
:
10550 case BUILT_IN_RETURN_ADDRESS
:
10551 case BUILT_IN_EXTRACT_RETURN_ADDR
:
10552 case BUILT_IN_FROB_RETURN_ADDR
:
10553 case BUILT_IN_RETURN
:
10554 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
10555 case BUILT_IN_FRAME_ADDRESS
:
10556 case BUILT_IN_VA_END
:
10557 case BUILT_IN_STACK_SAVE
:
10558 case BUILT_IN_STACK_RESTORE
:
10559 /* Exception state returns or moves registers around. */
10560 case BUILT_IN_EH_FILTER
:
10561 case BUILT_IN_EH_POINTER
:
10562 case BUILT_IN_EH_COPY_VALUES
:
10572 /* Return true if DECL is a builtin that is not expensive, i.e., they are
10573 most probably expanded inline into reasonably simple code. This is a
10574 superset of is_simple_builtin. */
10576 is_inexpensive_builtin (tree decl
)
10580 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
10582 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
10583 switch (DECL_FUNCTION_CODE (decl
))
10586 CASE_BUILT_IN_ALLOCA
:
10587 case BUILT_IN_BSWAP16
:
10588 case BUILT_IN_BSWAP32
:
10589 case BUILT_IN_BSWAP64
:
10591 case BUILT_IN_CLZIMAX
:
10592 case BUILT_IN_CLZL
:
10593 case BUILT_IN_CLZLL
:
10595 case BUILT_IN_CTZIMAX
:
10596 case BUILT_IN_CTZL
:
10597 case BUILT_IN_CTZLL
:
10599 case BUILT_IN_FFSIMAX
:
10600 case BUILT_IN_FFSL
:
10601 case BUILT_IN_FFSLL
:
10602 case BUILT_IN_IMAXABS
:
10603 case BUILT_IN_FINITE
:
10604 case BUILT_IN_FINITEF
:
10605 case BUILT_IN_FINITEL
:
10606 case BUILT_IN_FINITED32
:
10607 case BUILT_IN_FINITED64
:
10608 case BUILT_IN_FINITED128
:
10609 case BUILT_IN_FPCLASSIFY
:
10610 case BUILT_IN_ISFINITE
:
10611 case BUILT_IN_ISINF_SIGN
:
10612 case BUILT_IN_ISINF
:
10613 case BUILT_IN_ISINFF
:
10614 case BUILT_IN_ISINFL
:
10615 case BUILT_IN_ISINFD32
:
10616 case BUILT_IN_ISINFD64
:
10617 case BUILT_IN_ISINFD128
:
10618 case BUILT_IN_ISNAN
:
10619 case BUILT_IN_ISNANF
:
10620 case BUILT_IN_ISNANL
:
10621 case BUILT_IN_ISNAND32
:
10622 case BUILT_IN_ISNAND64
:
10623 case BUILT_IN_ISNAND128
:
10624 case BUILT_IN_ISNORMAL
:
10625 case BUILT_IN_ISGREATER
:
10626 case BUILT_IN_ISGREATEREQUAL
:
10627 case BUILT_IN_ISLESS
:
10628 case BUILT_IN_ISLESSEQUAL
:
10629 case BUILT_IN_ISLESSGREATER
:
10630 case BUILT_IN_ISUNORDERED
:
10631 case BUILT_IN_VA_ARG_PACK
:
10632 case BUILT_IN_VA_ARG_PACK_LEN
:
10633 case BUILT_IN_VA_COPY
:
10634 case BUILT_IN_TRAP
:
10635 case BUILT_IN_SAVEREGS
:
10636 case BUILT_IN_POPCOUNTL
:
10637 case BUILT_IN_POPCOUNTLL
:
10638 case BUILT_IN_POPCOUNTIMAX
:
10639 case BUILT_IN_POPCOUNT
:
10640 case BUILT_IN_PARITYL
:
10641 case BUILT_IN_PARITYLL
:
10642 case BUILT_IN_PARITYIMAX
:
10643 case BUILT_IN_PARITY
:
10644 case BUILT_IN_LABS
:
10645 case BUILT_IN_LLABS
:
10646 case BUILT_IN_PREFETCH
:
10647 case BUILT_IN_ACC_ON_DEVICE
:
10651 return is_simple_builtin (decl
);
10657 /* Return true if T is a constant and the value cast to a target char
10658 can be represented by a host char.
10659 Store the casted char constant in *P if so. */
10662 target_char_cst_p (tree t
, char *p
)
10664 if (!tree_fits_uhwi_p (t
) || CHAR_TYPE_SIZE
!= HOST_BITS_PER_CHAR
)
10667 *p
= (char)tree_to_uhwi (t
);
10671 /* Return the maximum object size. */
10674 max_object_size (void)
10676 /* To do: Make this a configurable parameter. */
10677 return TYPE_MAX_VALUE (ptrdiff_type_node
);