1 /* Expand builtin functions.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
26 #include "coretypes.h"
33 #include "fold-const.h"
34 #include "stringpool.h"
35 #include "stor-layout.h"
38 #include "tree-object-size.h"
41 #include "internal-fn.h"
45 #include "insn-config.h"
52 #include "insn-codes.h"
57 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "tree-ssanames.h"
63 #include "value-prof.h"
64 #include "diagnostic-core.h"
69 #include "tree-chkp.h"
73 static tree
do_mpc_arg1 (tree
, tree
, int (*)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
));
75 struct target_builtins default_target_builtins
;
77 struct target_builtins
*this_target_builtins
= &default_target_builtins
;
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names
[BUILT_IN_LAST
]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names
[(int) END_BUILTINS
] =
87 #include "builtins.def"
91 /* Setup an array of builtin_info_type, make sure each element decl is
92 initialized to NULL_TREE. */
93 builtin_info_type builtin_info
[(int)END_BUILTINS
];
95 /* Non-zero if __builtin_constant_p should be folded right away. */
96 bool force_folding_builtin_constant_p
;
98 static rtx
c_readstr (const char *, machine_mode
);
99 static int target_char_cast (tree
, char *);
100 static rtx
get_memory_rtx (tree
, tree
);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx
result_vector (int, rtx
);
104 static void expand_builtin_prefetch (tree
);
105 static rtx
expand_builtin_apply_args (void);
106 static rtx
expand_builtin_apply_args_1 (void);
107 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
108 static void expand_builtin_return (rtx
);
109 static enum type_class
type_to_class (tree
);
110 static rtx
expand_builtin_classify_type (tree
);
111 static void expand_errno_check (tree
, rtx
);
112 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
113 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
114 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
115 static rtx
expand_builtin_mathfn_ternary (tree
, rtx
, rtx
);
116 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
);
117 static rtx
expand_builtin_sincos (tree
);
118 static rtx
expand_builtin_cexpi (tree
, rtx
);
119 static rtx
expand_builtin_int_roundingfn (tree
, rtx
);
120 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
);
121 static rtx
expand_builtin_next_arg (void);
122 static rtx
expand_builtin_va_start (tree
);
123 static rtx
expand_builtin_va_end (tree
);
124 static rtx
expand_builtin_va_copy (tree
);
125 static rtx
expand_builtin_strcmp (tree
, rtx
);
126 static rtx
expand_builtin_strncmp (tree
, rtx
, machine_mode
);
127 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, machine_mode
);
128 static rtx
expand_builtin_memcpy (tree
, rtx
);
129 static rtx
expand_builtin_memcpy_with_bounds (tree
, rtx
);
130 static rtx
expand_builtin_memcpy_args (tree
, tree
, tree
, rtx
, tree
);
131 static rtx
expand_builtin_mempcpy (tree
, rtx
, machine_mode
);
132 static rtx
expand_builtin_mempcpy_with_bounds (tree
, rtx
, machine_mode
);
133 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, rtx
,
134 machine_mode
, int, tree
);
135 static rtx
expand_builtin_strcpy (tree
, rtx
);
136 static rtx
expand_builtin_strcpy_args (tree
, tree
, rtx
);
137 static rtx
expand_builtin_stpcpy (tree
, rtx
, machine_mode
);
138 static rtx
expand_builtin_strncpy (tree
, rtx
);
139 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, machine_mode
);
140 static rtx
expand_builtin_memset (tree
, rtx
, machine_mode
);
141 static rtx
expand_builtin_memset_with_bounds (tree
, rtx
, machine_mode
);
142 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, machine_mode
, tree
);
143 static rtx
expand_builtin_bzero (tree
);
144 static rtx
expand_builtin_strlen (tree
, rtx
, machine_mode
);
145 static rtx
expand_builtin_alloca (tree
, bool);
146 static rtx
expand_builtin_unop (machine_mode
, tree
, rtx
, rtx
, optab
);
147 static rtx
expand_builtin_frame_address (tree
, tree
);
148 static tree
stabilize_va_list_loc (location_t
, tree
, int);
149 static rtx
expand_builtin_expect (tree
, rtx
);
150 static tree
fold_builtin_constant_p (tree
);
151 static tree
fold_builtin_classify_type (tree
);
152 static tree
fold_builtin_strlen (location_t
, tree
, tree
);
153 static tree
fold_builtin_inf (location_t
, tree
, int);
154 static tree
fold_builtin_nan (tree
, tree
, int);
155 static tree
rewrite_call_expr (location_t
, tree
, int, tree
, int, ...);
156 static bool validate_arg (const_tree
, enum tree_code code
);
157 static bool integer_valued_real_p (tree
);
158 static tree
fold_trunc_transparent_mathfn (location_t
, tree
, tree
);
159 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
160 static rtx
expand_builtin_signbit (tree
, rtx
);
161 static tree
fold_builtin_pow (location_t
, tree
, tree
, tree
, tree
);
162 static tree
fold_builtin_powi (location_t
, tree
, tree
, tree
, tree
);
163 static tree
fold_builtin_trunc (location_t
, tree
, tree
);
164 static tree
fold_builtin_floor (location_t
, tree
, tree
);
165 static tree
fold_builtin_ceil (location_t
, tree
, tree
);
166 static tree
fold_builtin_round (location_t
, tree
, tree
);
167 static tree
fold_builtin_int_roundingfn (location_t
, tree
, tree
);
168 static tree
fold_builtin_bitop (tree
, tree
);
169 static tree
fold_builtin_strchr (location_t
, tree
, tree
, tree
);
170 static tree
fold_builtin_memchr (location_t
, tree
, tree
, tree
, tree
);
171 static tree
fold_builtin_memcmp (location_t
, tree
, tree
, tree
);
172 static tree
fold_builtin_strcmp (location_t
, tree
, tree
);
173 static tree
fold_builtin_strncmp (location_t
, tree
, tree
, tree
);
174 static tree
fold_builtin_signbit (location_t
, tree
, tree
);
175 static tree
fold_builtin_isascii (location_t
, tree
);
176 static tree
fold_builtin_toascii (location_t
, tree
);
177 static tree
fold_builtin_isdigit (location_t
, tree
);
178 static tree
fold_builtin_fabs (location_t
, tree
, tree
);
179 static tree
fold_builtin_abs (location_t
, tree
, tree
);
180 static tree
fold_builtin_unordered_cmp (location_t
, tree
, tree
, tree
, enum tree_code
,
182 static tree
fold_builtin_0 (location_t
, tree
);
183 static tree
fold_builtin_1 (location_t
, tree
, tree
);
184 static tree
fold_builtin_2 (location_t
, tree
, tree
, tree
);
185 static tree
fold_builtin_3 (location_t
, tree
, tree
, tree
, tree
);
186 static tree
fold_builtin_varargs (location_t
, tree
, tree
*, int);
188 static tree
fold_builtin_strpbrk (location_t
, tree
, tree
, tree
);
189 static tree
fold_builtin_strstr (location_t
, tree
, tree
, tree
);
190 static tree
fold_builtin_strrchr (location_t
, tree
, tree
, tree
);
191 static tree
fold_builtin_strspn (location_t
, tree
, tree
);
192 static tree
fold_builtin_strcspn (location_t
, tree
, tree
);
194 static rtx
expand_builtin_object_size (tree
);
195 static rtx
expand_builtin_memory_chk (tree
, rtx
, machine_mode
,
196 enum built_in_function
);
197 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
198 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
199 static void maybe_emit_free_warning (tree
);
200 static tree
fold_builtin_object_size (tree
, tree
);
202 unsigned HOST_WIDE_INT target_newline
;
203 unsigned HOST_WIDE_INT target_percent
;
204 static unsigned HOST_WIDE_INT target_c
;
205 static unsigned HOST_WIDE_INT target_s
;
206 char target_percent_c
[3];
207 char target_percent_s
[3];
208 char target_percent_s_newline
[4];
209 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
210 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
211 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
212 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
213 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
214 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
215 static tree
do_mpfr_sincos (tree
, tree
, tree
);
216 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
217 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
218 const REAL_VALUE_TYPE
*, bool);
219 static tree
do_mpfr_remquo (tree
, tree
, tree
);
220 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
221 static void expand_builtin_sync_synchronize (void);
223 /* Return true if NAME starts with __builtin_ or __sync_. */
/* NOTE(review): this extraction is missing structural lines (the return
   type, braces, and return statements were dropped); the prefix checks
   below are the surviving fragments of the original predicate.  */
226 is_builtin_name (const char *name
)
/* Each strncmp compares NAME against one reserved prefix; a zero result
   means NAME begins with that prefix.  */
228 if (strncmp (name
, "__builtin_", 10) == 0)
230 if (strncmp (name
, "__sync_", 7) == 0)
232 if (strncmp (name
, "__atomic_", 9) == 0)
/* The Cilk runtime entry points are matched by exact name rather than
   prefix; the condition this && continues (original line 234) was
   dropped by the extraction -- presumably a flag_cilkplus guard, TODO
   confirm against the full source.  */
235 && (!strcmp (name
, "__cilkrts_detach")
236 || !strcmp (name
, "__cilkrts_pop_frame")))
242 /* Return true if DECL is a function symbol representing a built-in. */
245 is_builtin_fn (tree decl
)
/* A built-in is a FUNCTION_DECL whose DECL_BUILT_IN flag is set; both
   conditions must hold.  */
247 return TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
)
;
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
255 called_as_built_in (tree node
)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
260 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
/* Delegate the actual prefix test to is_builtin_name above.  */
261 return is_builtin_name (name
);
264 /* Compute values M and N such that M divides (address of EXP - N) and such
265 that N < M. If these numbers can be determined, store M in alignp and N in
266 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
267 *alignp and any bit-offset to *bitposp.
269 Note that the address (and thus the alignment) computed here is based
270 on the address to which a symbol resolves, whereas DECL_ALIGN is based
271 on the address at which an object is actually located. These two
272 addresses are not always the same. For example, on ARM targets,
273 the address &foo of a Thumb function foo() has the lowest bit set,
274 whereas foo() itself starts on an even address.
276 If ADDR_P is true we are taking the address of the memory reference EXP
277 and thus cannot rely on the access taking place. */
280 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
281 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
283 HOST_WIDE_INT bitsize
, bitpos
;
286 int unsignedp
, volatilep
;
287 unsigned int align
= BITS_PER_UNIT
;
288 bool known_alignment
= false;
290 /* Get the innermost object and the constant (bitpos) and possibly
291 variable (offset) offset of the access. */
292 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
293 &mode
, &unsignedp
, &volatilep
, true);
295 /* Extract alignment information from the innermost object and
296 possibly adjust bitpos and offset. */
297 if (TREE_CODE (exp
) == FUNCTION_DECL
)
299 /* Function addresses can encode extra information besides their
300 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
301 allows the low bit to be used as a virtual bit, we know
302 that the address itself must be at least 2-byte aligned. */
303 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
304 align
= 2 * BITS_PER_UNIT
;
306 else if (TREE_CODE (exp
) == LABEL_DECL
)
308 else if (TREE_CODE (exp
) == CONST_DECL
)
310 /* The alignment of a CONST_DECL is determined by its initializer. */
311 exp
= DECL_INITIAL (exp
);
312 align
= TYPE_ALIGN (TREE_TYPE (exp
));
313 if (CONSTANT_CLASS_P (exp
))
314 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
316 known_alignment
= true;
318 else if (DECL_P (exp
))
320 align
= DECL_ALIGN (exp
);
321 known_alignment
= true;
323 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
)
325 align
= TYPE_ALIGN (TREE_TYPE (exp
));
327 else if (TREE_CODE (exp
) == INDIRECT_REF
328 || TREE_CODE (exp
) == MEM_REF
329 || TREE_CODE (exp
) == TARGET_MEM_REF
)
331 tree addr
= TREE_OPERAND (exp
, 0);
333 unsigned HOST_WIDE_INT ptr_bitpos
;
334 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
336 /* If the address is explicitly aligned, handle that. */
337 if (TREE_CODE (addr
) == BIT_AND_EXPR
338 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
340 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
341 ptr_bitmask
*= BITS_PER_UNIT
;
342 align
= ptr_bitmask
& -ptr_bitmask
;
343 addr
= TREE_OPERAND (addr
, 0);
347 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
348 align
= MAX (ptr_align
, align
);
350 /* Re-apply explicit alignment to the bitpos. */
351 ptr_bitpos
&= ptr_bitmask
;
353 /* The alignment of the pointer operand in a TARGET_MEM_REF
354 has to take the variable offset parts into account. */
355 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
359 unsigned HOST_WIDE_INT step
= 1;
361 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
362 align
= MIN (align
, (step
& -step
) * BITS_PER_UNIT
);
364 if (TMR_INDEX2 (exp
))
365 align
= BITS_PER_UNIT
;
366 known_alignment
= false;
369 /* When EXP is an actual memory reference then we can use
370 TYPE_ALIGN of a pointer indirection to derive alignment.
371 Do so only if get_pointer_alignment_1 did not reveal absolute
372 alignment knowledge and if using that alignment would
373 improve the situation. */
374 if (!addr_p
&& !known_alignment
375 && TYPE_ALIGN (TREE_TYPE (exp
)) > align
)
376 align
= TYPE_ALIGN (TREE_TYPE (exp
));
379 /* Else adjust bitpos accordingly. */
380 bitpos
+= ptr_bitpos
;
381 if (TREE_CODE (exp
) == MEM_REF
382 || TREE_CODE (exp
) == TARGET_MEM_REF
)
383 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
386 else if (TREE_CODE (exp
) == STRING_CST
)
388 /* STRING_CST are the only constant objects we allow to be not
389 wrapped inside a CONST_DECL. */
390 align
= TYPE_ALIGN (TREE_TYPE (exp
));
391 if (CONSTANT_CLASS_P (exp
))
392 align
= (unsigned) CONSTANT_ALIGNMENT (exp
, align
);
394 known_alignment
= true;
397 /* If there is a non-constant offset part extract the maximum
398 alignment that can prevail. */
401 unsigned int trailing_zeros
= tree_ctz (offset
);
402 if (trailing_zeros
< HOST_BITS_PER_INT
)
404 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
406 align
= MIN (align
, inner
);
411 *bitposp
= bitpos
& (*alignp
- 1);
412 return known_alignment
;
415 /* For a memory reference expression EXP compute values M and N such that M
416 divides (&EXP - N) and such that N < M. If these numbers can be determined,
417 store M in alignp and N in *BITPOSP and return true. Otherwise return false
418 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
421 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
422 unsigned HOST_WIDE_INT
*bitposp
)
424 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
427 /* Return the alignment in bits of EXP, an object. */
430 get_object_alignment (tree exp
)
432 unsigned HOST_WIDE_INT bitpos
= 0;
435 get_object_alignment_1 (exp
, &align
, &bitpos
);
437 /* align and bitpos now specify known low bits of the pointer.
438 ptr & (align - 1) == bitpos. */
441 align
= (bitpos
& -bitpos
);
445 /* For a pointer valued expression EXP compute values M and N such that M
446 divides (EXP - N) and such that N < M. If these numbers can be determined,
447 store M in alignp and N in *BITPOSP and return true. Return false if
448 the results are just a conservative approximation.
450 If EXP is not a pointer, false is returned too. */
453 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
454 unsigned HOST_WIDE_INT
*bitposp
)
458 if (TREE_CODE (exp
) == ADDR_EXPR
)
459 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
460 alignp
, bitposp
, true);
461 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
464 unsigned HOST_WIDE_INT bitpos
;
465 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
467 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
468 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
471 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
472 if (trailing_zeros
< HOST_BITS_PER_INT
)
474 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
476 align
= MIN (align
, inner
);
480 *bitposp
= bitpos
& (align
- 1);
483 else if (TREE_CODE (exp
) == SSA_NAME
484 && POINTER_TYPE_P (TREE_TYPE (exp
)))
486 unsigned int ptr_align
, ptr_misalign
;
487 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
489 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
491 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
492 *alignp
= ptr_align
* BITS_PER_UNIT
;
493 /* We cannot really tell whether this result is an approximation. */
499 *alignp
= BITS_PER_UNIT
;
503 else if (TREE_CODE (exp
) == INTEGER_CST
)
505 *alignp
= BIGGEST_ALIGNMENT
;
506 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
507 & (BIGGEST_ALIGNMENT
- 1));
512 *alignp
= BITS_PER_UNIT
;
516 /* Return the alignment in bits of EXP, a pointer valued expression.
517 The alignment returned is, by default, the alignment of the thing that
518 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
520 Otherwise, look at the expression to see if we can do better, i.e., if the
521 expression is actually pointing at an object whose alignment is tighter. */
524 get_pointer_alignment (tree exp
)
526 unsigned HOST_WIDE_INT bitpos
= 0;
529 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
531 /* align and bitpos now specify known low bits of the pointer.
532 ptr & (align - 1) == bitpos. */
535 align
= (bitpos
& -bitpos
);
540 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
541 way, because it could contain a zero byte in the middle.
542 TREE_STRING_LENGTH is the size of the character array, not the string.
544 ONLY_VALUE should be nonzero if the result is not going to be emitted
545 into the instruction stream and zero if it is going to be expanded.
546 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
547 is returned, otherwise NULL, since
548 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
549 evaluate the side-effects.
551 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
552 accesses. Note that this implies the result is not going to be emitted
553 into the instruction stream.
555 The value returned is of type `ssizetype'.
557 Unfortunately, string_constant can't access the values of const char
558 arrays with initializers, so neither can we do so here. */
561 c_strlen (tree src
, int only_value
)
564 HOST_WIDE_INT offset
;
570 if (TREE_CODE (src
) == COND_EXPR
571 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
575 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
576 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
577 if (tree_int_cst_equal (len1
, len2
))
581 if (TREE_CODE (src
) == COMPOUND_EXPR
582 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
583 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
585 loc
= EXPR_LOC_OR_LOC (src
, input_location
);
587 src
= string_constant (src
, &offset_node
);
591 max
= TREE_STRING_LENGTH (src
) - 1;
592 ptr
= TREE_STRING_POINTER (src
);
594 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
596 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
597 compute the offset to the following null if we don't know where to
598 start searching for it. */
601 for (i
= 0; i
< max
; i
++)
605 /* We don't know the starting offset, but we do know that the string
606 has no internal zero bytes. We can assume that the offset falls
607 within the bounds of the string; otherwise, the programmer deserves
608 what he gets. Subtract the offset from the length of the string,
609 and return that. This would perhaps not be valid if we were dealing
610 with named arrays in addition to literal string constants. */
612 return size_diffop_loc (loc
, size_int (max
), offset_node
);
615 /* We have a known offset into the string. Start searching there for
616 a null character if we can represent it as a single HOST_WIDE_INT. */
617 if (offset_node
== 0)
619 else if (! tree_fits_shwi_p (offset_node
))
622 offset
= tree_to_shwi (offset_node
);
624 /* If the offset is known to be out of bounds, warn, and call strlen at
626 if (offset
< 0 || offset
> max
)
628 /* Suppress multiple warnings for propagated constant strings. */
630 && !TREE_NO_WARNING (src
))
632 warning_at (loc
, 0, "offset outside bounds of constant string");
633 TREE_NO_WARNING (src
) = 1;
638 /* Use strlen to search for the first zero byte. Since any strings
639 constructed with build_string will have nulls appended, we win even
640 if we get handed something like (char[4])"abcd".
642 Since OFFSET is our starting index into the string, no further
643 calculation is needed. */
644 return ssize_int (strlen (ptr
+ offset
));
647 /* Return a char pointer for a C string if it is a string constant
648 or sum of string constant and integer constant. */
655 src
= string_constant (src
, &offset_node
);
659 if (offset_node
== 0)
660 return TREE_STRING_POINTER (src
);
661 else if (!tree_fits_uhwi_p (offset_node
)
662 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
665 return TREE_STRING_POINTER (src
) + tree_to_uhwi (offset_node
);
668 /* Return a constant integer corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
672 c_readstr (const char *str
, machine_mode mode
)
676 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
678 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
679 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
680 / HOST_BITS_PER_WIDE_INT
;
682 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
683 for (i
= 0; i
< len
; i
++)
687 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
690 if (WORDS_BIG_ENDIAN
)
691 j
= GET_MODE_SIZE (mode
) - i
- 1;
692 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
693 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
694 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
698 ch
= (unsigned char) str
[i
];
699 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
702 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
703 return immed_wide_int_const (c
, mode
);
706 /* Cast a target constant CST to target CHAR and if that value fits into
707 host char type, return zero and put that value into variable pointed to by
711 target_char_cast (tree cst
, char *p
)
713 unsigned HOST_WIDE_INT val
, hostval
;
715 if (TREE_CODE (cst
) != INTEGER_CST
716 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
719 /* Do not care if it fits or not right here. */
720 val
= TREE_INT_CST_LOW (cst
);
722 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
723 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
726 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
727 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
741 builtin_save_expr (tree exp
)
/* SSA names, and non-addressable PARM_DECLs / non-static VAR_DECLs, are
   stable under this no-intervening-code assumption, so EXP can be reused
   directly (the return on the dropped line 747 presumably returns EXP
   unchanged -- TODO confirm against the full source).  */
743 if (TREE_CODE (exp
) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp
) == 0
745 && (TREE_CODE (exp
) == PARM_DECL
746 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
)))))
/* Anything else may alias or have side effects; fall back to save_expr
   to force a single evaluation.  */
749 return save_expr (exp
);
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
757 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
760 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
763 /* For a zero count with __builtin_return_address, we don't care what
764 frame address we return, because target-specific definitions will
765 override us. Therefore frame pointer elimination is OK, and using
766 the soft frame pointer is OK.
768 For a nonzero count, or a zero count with __builtin_frame_address,
769 we require a stable offset from the current frame pointer to the
770 previous one, so we must use the hard frame pointer, and
771 we must disable frame pointer elimination. */
772 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
773 tem
= frame_pointer_rtx
;
776 tem
= hard_frame_pointer_rtx
;
778 /* Tell reload not to eliminate the frame pointer. */
779 crtl
->accesses_prior_frames
= 1;
784 SETUP_FRAME_ADDRESSES ();
786 /* On the SPARC, the return address is not in the frame, it is in a
787 register. There is no way to access it off of the current frame
788 pointer, but it can be accessed off the previous frame pointer by
789 reading the value from the register window save area. */
790 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
793 /* Scan back COUNT frames to the specified frame. */
794 for (i
= 0; i
< count
; i
++)
796 /* Assume the dynamic chain pointer is in the word that the
797 frame address points to, unless otherwise specified. */
798 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
799 tem
= memory_address (Pmode
, tem
);
800 tem
= gen_frame_mem (Pmode
, tem
);
801 tem
= copy_to_reg (tem
);
804 /* For __builtin_frame_address, return what we've got. But, on
805 the SPARC for example, we may have to add a bias. */
806 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
807 return FRAME_ADDR_RTX (tem
);
809 /* For __builtin_return_address, get the return address from that frame. */
810 #ifdef RETURN_ADDR_RTX
811 tem
= RETURN_ADDR_RTX (count
, tem
);
813 tem
= memory_address (Pmode
,
814 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
815 tem
= gen_frame_mem (Pmode
, tem
);
820 /* Alias set used for setjmp buffer. */
821 static alias_set_type setjmp_alias_set
= -1;
823 /* Construct the leading half of a __builtin_setjmp call. Control will
824 return to RECEIVER_LABEL. This is also called directly by the SJLJ
825 exception handling code. */
828 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
830 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
834 if (setjmp_alias_set
== -1)
835 setjmp_alias_set
= new_alias_set ();
837 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
839 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
841 /* We store the frame pointer and the address of receiver_label in
842 the buffer and use the rest of it for the stack save area, which
843 is machine-dependent. */
845 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
846 set_mem_alias_set (mem
, setjmp_alias_set
);
847 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
849 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
850 GET_MODE_SIZE (Pmode
))),
851 set_mem_alias_set (mem
, setjmp_alias_set
);
853 emit_move_insn (validize_mem (mem
),
854 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
856 stack_save
= gen_rtx_MEM (sa_mode
,
857 plus_constant (Pmode
, buf_addr
,
858 2 * GET_MODE_SIZE (Pmode
)));
859 set_mem_alias_set (stack_save
, setjmp_alias_set
);
860 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
862 /* If there is further processing to do, do it. */
863 if (targetm
.have_builtin_setjmp_setup ())
864 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
866 /* We have a nonlocal label. */
867 cfun
->has_nonlocal_label
= 1;
870 /* Construct the trailing part of a __builtin_setjmp call. This is
871 also called directly by the SJLJ exception handling code.
872 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
875 expand_builtin_setjmp_receiver (rtx receiver_label
)
879 /* Mark the FP as used when we get here, so we have to make sure it's
880 marked as used by this function. */
881 emit_use (hard_frame_pointer_rtx
);
883 /* Mark the static chain as clobbered here so life information
884 doesn't get messed up for it. */
885 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
886 if (chain
&& REG_P (chain
))
887 emit_clobber (chain
);
889 /* Now put in the code to restore the frame pointer, and argument
890 pointer, if needed. */
891 if (! targetm
.have_nonlocal_goto ())
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx
);
911 emit_clobber (hard_frame_pointer_rtx
);
914 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
916 #ifdef ELIMINABLE_REGS
917 /* If the argument pointer can be eliminated in favor of the
918 frame pointer, we don't need to restore it. We assume here
919 that if such an elimination is present, it can always be used.
920 This is the case on all known machines; if we don't make this
921 assumption, we do unnecessary saving on many machines. */
923 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
925 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
926 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
927 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
930 if (i
== ARRAY_SIZE (elim_regs
))
933 /* Now restore our arg pointer from the address at which it
934 was saved in our stack frame. */
935 emit_move_insn (crtl
->args
.internal_arg_pointer
,
936 copy_to_reg (get_arg_pointer_save_area ()));
940 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
941 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
942 else if (targetm
.have_nonlocal_goto_receiver ())
943 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
947 /* We must not allow the code we just generated to be reordered by
948 scheduling. Specifically, the update of the frame pointer must
949 happen immediately, not later. */
950 emit_insn (gen_blockage ());
953 /* __builtin_longjmp is passed a pointer to an array of five words (not
954 all will be used on all machines). It operates similarly to the C
955 library function of the same name, but is more efficient. Much of
956 the code below is copied from the handling of non-local gotos. */
959 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
962 rtx_insn
*insn
, *last
;
963 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
965 /* DRAP is needed for stack realign if longjmp is expanded to current
967 if (SUPPORTS_STACK_ALIGNMENT
)
968 crtl
->need_drap
= true;
970 if (setjmp_alias_set
== -1)
971 setjmp_alias_set
= new_alias_set ();
973 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
975 buf_addr
= force_reg (Pmode
, buf_addr
);
977 /* We require that the user must pass a second argument of 1, because
978 that is what builtin_setjmp will return. */
979 gcc_assert (value
== const1_rtx
);
981 last
= get_last_insn ();
982 if (targetm
.have_builtin_longjmp ())
983 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
986 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
987 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
988 GET_MODE_SIZE (Pmode
)));
990 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
991 2 * GET_MODE_SIZE (Pmode
)));
992 set_mem_alias_set (fp
, setjmp_alias_set
);
993 set_mem_alias_set (lab
, setjmp_alias_set
);
994 set_mem_alias_set (stack
, setjmp_alias_set
);
996 /* Pick up FP, label, and SP from the block and jump. This code is
997 from expand_goto in stmt.c; see there for detailed comments. */
998 if (targetm
.have_nonlocal_goto ())
999 /* We have to pass a value to the nonlocal_goto pattern that will
1000 get copied into the static_chain pointer, but it does not matter
1001 what that value is, because builtin_setjmp does not use it. */
1002 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1005 lab
= copy_to_reg (lab
);
1007 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1008 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1010 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1011 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1013 emit_use (hard_frame_pointer_rtx
);
1014 emit_use (stack_pointer_rtx
);
1015 emit_indirect_jump (lab
);
1019 /* Search backwards and mark the jump insn as a non-local goto.
1020 Note that this precludes the use of __builtin_longjmp to a
1021 __builtin_setjmp target in the same function. However, we've
1022 already cautioned the user that these functions are for
1023 internal exception handling use only. */
1024 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1026 gcc_assert (insn
!= last
);
1030 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1033 else if (CALL_P (insn
))
1039 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1041 return (iter
->i
< iter
->n
);
1044 /* This function validates the types of a function call argument list
1045 against a specified list of tree_codes. If the last specifier is a 0,
1046 that represents an ellipses, otherwise the last specifier must be a
1050 validate_arglist (const_tree callexpr
, ...)
1052 enum tree_code code
;
1055 const_call_expr_arg_iterator iter
;
1058 va_start (ap
, callexpr
);
1059 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1063 code
= (enum tree_code
) va_arg (ap
, int);
1067 /* This signifies an ellipses, any further arguments are all ok. */
1071 /* This signifies an endlink, if no arguments remain, return
1072 true, otherwise return false. */
1073 res
= !more_const_call_expr_args_p (&iter
);
1076 /* If no parameters remain or the parameter's code does not
1077 match the specified code, return false. Otherwise continue
1078 checking any remaining arguments. */
1079 arg
= next_const_call_expr_arg (&iter
);
1080 if (!validate_arg (arg
, code
))
1087 /* We need gotos here since we can only have one VA_CLOSE in a
1095 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1096 and the address of the save area. */
1099 expand_builtin_nonlocal_goto (tree exp
)
1101 tree t_label
, t_save_area
;
1102 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1105 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1108 t_label
= CALL_EXPR_ARG (exp
, 0);
1109 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1111 r_label
= expand_normal (t_label
);
1112 r_label
= convert_memory_address (Pmode
, r_label
);
1113 r_save_area
= expand_normal (t_save_area
);
1114 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1115 /* Copy the address of the save location to a register just in case it was
1116 based on the frame pointer. */
1117 r_save_area
= copy_to_reg (r_save_area
);
1118 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1119 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1120 plus_constant (Pmode
, r_save_area
,
1121 GET_MODE_SIZE (Pmode
)));
1123 crtl
->has_nonlocal_goto
= 1;
1125 /* ??? We no longer need to pass the static chain value, afaik. */
1126 if (targetm
.have_nonlocal_goto ())
1127 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1130 r_label
= copy_to_reg (r_label
);
1132 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1133 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1135 /* Restore frame pointer for containing function. */
1136 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1137 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1139 /* USE of hard_frame_pointer_rtx added for consistency;
1140 not clear if really needed. */
1141 emit_use (hard_frame_pointer_rtx
);
1142 emit_use (stack_pointer_rtx
);
1144 /* If the architecture is using a GP register, we must
1145 conservatively assume that the target function makes use of it.
1146 The prologue of functions with nonlocal gotos must therefore
1147 initialize the GP register to the appropriate value, and we
1148 must then make sure that this value is live at the point
1149 of the jump. (Note that this doesn't necessarily apply
1150 to targets with a nonlocal_goto pattern; they are free
1151 to implement it in their own way. Note also that this is
1152 a no-op if the GP register is a global invariant.) */
1153 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
1154 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
1155 emit_use (pic_offset_table_rtx
);
1157 emit_indirect_jump (r_label
);
1160 /* Search backwards to the jump insn and mark it as a
1162 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1166 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1169 else if (CALL_P (insn
))
1176 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1177 (not all will be used on all machines) that was passed to __builtin_setjmp.
1178 It updates the stack pointer in that block to the current value. This is
1179 also called directly by the SJLJ exception handling code. */
1182 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1184 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1186 = gen_rtx_MEM (sa_mode
,
1189 plus_constant (Pmode
, buf_addr
,
1190 2 * GET_MODE_SIZE (Pmode
))));
1192 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
1195 /* Expand a call to __builtin_prefetch. For a target that does not support
1196 data prefetch, evaluate the memory address argument in case it has side
1200 expand_builtin_prefetch (tree exp
)
1202 tree arg0
, arg1
, arg2
;
1206 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1209 arg0
= CALL_EXPR_ARG (exp
, 0);
1211 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1212 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1214 nargs
= call_expr_nargs (exp
);
1216 arg1
= CALL_EXPR_ARG (exp
, 1);
1218 arg1
= integer_zero_node
;
1220 arg2
= CALL_EXPR_ARG (exp
, 2);
1222 arg2
= integer_three_node
;
1224 /* Argument 0 is an address. */
1225 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1227 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1228 if (TREE_CODE (arg1
) != INTEGER_CST
)
1230 error ("second argument to %<__builtin_prefetch%> must be a constant");
1231 arg1
= integer_zero_node
;
1233 op1
= expand_normal (arg1
);
1234 /* Argument 1 must be either zero or one. */
1235 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1237 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1242 /* Argument 2 (locality) must be a compile-time constant int. */
1243 if (TREE_CODE (arg2
) != INTEGER_CST
)
1245 error ("third argument to %<__builtin_prefetch%> must be a constant");
1246 arg2
= integer_zero_node
;
1248 op2
= expand_normal (arg2
);
1249 /* Argument 2 must be 0, 1, 2, or 3. */
1250 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1252 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1256 if (targetm
.have_prefetch ())
1258 struct expand_operand ops
[3];
1260 create_address_operand (&ops
[0], op0
);
1261 create_integer_operand (&ops
[1], INTVAL (op1
));
1262 create_integer_operand (&ops
[2], INTVAL (op2
));
1263 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1267 /* Don't do anything with direct references to volatile memory, but
1268 generate code to handle other side effects. */
1269 if (!MEM_P (op0
) && side_effects_p (op0
))
1273 /* Get a MEM rtx for expression EXP which is the address of an operand
1274 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1275 the maximum length of the block of memory that might be accessed or
1279 get_memory_rtx (tree exp
, tree len
)
1281 tree orig_exp
= exp
;
1284 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1285 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1286 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1287 exp
= TREE_OPERAND (exp
, 0);
1289 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1290 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1292 /* Get an expression we can use to find the attributes to assign to MEM.
1293 First remove any nops. */
1294 while (CONVERT_EXPR_P (exp
)
1295 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1296 exp
= TREE_OPERAND (exp
, 0);
1298 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1299 (as builtin stringops may alias with anything). */
1300 exp
= fold_build2 (MEM_REF
,
1301 build_array_type (char_type_node
,
1302 build_range_type (sizetype
,
1303 size_one_node
, len
)),
1304 exp
, build_int_cst (ptr_type_node
, 0));
1306 /* If the MEM_REF has no acceptable address, try to get the base object
1307 from the original address we got, and build an all-aliasing
1308 unknown-sized access to that one. */
1309 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1310 set_mem_attributes (mem
, exp
, 0);
1311 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1312 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1315 exp
= build_fold_addr_expr (exp
);
1316 exp
= fold_build2 (MEM_REF
,
1317 build_array_type (char_type_node
,
1318 build_range_type (sizetype
,
1321 exp
, build_int_cst (ptr_type_node
, 0));
1322 set_mem_attributes (mem
, exp
, 0);
1324 set_mem_alias_set (mem
, 0);
1328 /* Built-in functions to perform an untyped call and return. */
1330 #define apply_args_mode \
1331 (this_target_builtins->x_apply_args_mode)
1332 #define apply_result_mode \
1333 (this_target_builtins->x_apply_result_mode)
1335 /* Return the size required for the block returned by __builtin_apply_args,
1336 and initialize apply_args_mode. */
1339 apply_args_size (void)
1341 static int size
= -1;
1346 /* The values computed by this function never change. */
1349 /* The first value is the incoming arg-pointer. */
1350 size
= GET_MODE_SIZE (Pmode
);
1352 /* The second value is the structure value address unless this is
1353 passed as an "invisible" first argument. */
1354 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1355 size
+= GET_MODE_SIZE (Pmode
);
1357 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1358 if (FUNCTION_ARG_REGNO_P (regno
))
1360 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1362 gcc_assert (mode
!= VOIDmode
);
1364 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1365 if (size
% align
!= 0)
1366 size
= CEIL (size
, align
) * align
;
1367 size
+= GET_MODE_SIZE (mode
);
1368 apply_args_mode
[regno
] = mode
;
1372 apply_args_mode
[regno
] = VOIDmode
;
1378 /* Return the size required for the block returned by __builtin_apply,
1379 and initialize apply_result_mode. */
1382 apply_result_size (void)
1384 static int size
= -1;
1388 /* The values computed by this function never change. */
1393 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1394 if (targetm
.calls
.function_value_regno_p (regno
))
1396 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1398 gcc_assert (mode
!= VOIDmode
);
1400 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1401 if (size
% align
!= 0)
1402 size
= CEIL (size
, align
) * align
;
1403 size
+= GET_MODE_SIZE (mode
);
1404 apply_result_mode
[regno
] = mode
;
1407 apply_result_mode
[regno
] = VOIDmode
;
1409 /* Allow targets that use untyped_call and untyped_return to override
1410 the size so that machine-specific information can be stored here. */
1411 #ifdef APPLY_RESULT_SIZE
1412 size
= APPLY_RESULT_SIZE
;
1418 /* Create a vector describing the result block RESULT. If SAVEP is true,
1419 the result block is used to save the values; otherwise it is used to
1420 restore the values. */
1423 result_vector (int savep
, rtx result
)
1425 int regno
, size
, align
, nelts
;
1428 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1431 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1432 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1434 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1435 if (size
% align
!= 0)
1436 size
= CEIL (size
, align
) * align
;
1437 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1438 mem
= adjust_address (result
, mode
, size
);
1439 savevec
[nelts
++] = (savep
1440 ? gen_rtx_SET (mem
, reg
)
1441 : gen_rtx_SET (reg
, mem
));
1442 size
+= GET_MODE_SIZE (mode
);
1444 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1447 /* Save the state required to perform an untyped call with the same
1448 arguments as were passed to the current function. */
1451 expand_builtin_apply_args_1 (void)
1454 int size
, align
, regno
;
1456 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1458 /* Create a block where the arg-pointer, structure value address,
1459 and argument registers can be saved. */
1460 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1462 /* Walk past the arg-pointer and structure value address. */
1463 size
= GET_MODE_SIZE (Pmode
);
1464 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1465 size
+= GET_MODE_SIZE (Pmode
);
1467 /* Save each register used in calling a function to the block. */
1468 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1469 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1471 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1472 if (size
% align
!= 0)
1473 size
= CEIL (size
, align
) * align
;
1475 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1477 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1478 size
+= GET_MODE_SIZE (mode
);
1481 /* Save the arg pointer to the block. */
1482 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1483 /* We need the pointer as the caller actually passed them to us, not
1484 as we might have pretended they were passed. Make sure it's a valid
1485 operand, as emit_move_insn isn't expected to handle a PLUS. */
1486 if (STACK_GROWS_DOWNWARD
)
1488 = force_operand (plus_constant (Pmode
, tem
,
1489 crtl
->args
.pretend_args_size
),
1491 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1493 size
= GET_MODE_SIZE (Pmode
);
1495 /* Save the structure value address unless this is passed as an
1496 "invisible" first argument. */
1497 if (struct_incoming_value
)
1499 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1500 copy_to_reg (struct_incoming_value
));
1501 size
+= GET_MODE_SIZE (Pmode
);
1504 /* Return the address of the block. */
1505 return copy_addr_to_reg (XEXP (registers
, 0));
1508 /* __builtin_apply_args returns block of memory allocated on
1509 the stack into which is stored the arg pointer, structure
1510 value address, static chain, and all the registers that might
1511 possibly be used in performing a function call. The code is
1512 moved to the start of the function so the incoming values are
1516 expand_builtin_apply_args (void)
1518 /* Don't do __builtin_apply_args more than once in a function.
1519 Save the result of the first call and reuse it. */
1520 if (apply_args_value
!= 0)
1521 return apply_args_value
;
1523 /* When this function is called, it means that registers must be
1524 saved on entry to this function. So we migrate the
1525 call to the first insn of this function. */
1529 temp
= expand_builtin_apply_args_1 ();
1530 rtx_insn
*seq
= get_insns ();
1533 apply_args_value
= temp
;
1535 /* Put the insns after the NOTE that starts the function.
1536 If this is inside a start_sequence, make the outer-level insn
1537 chain current, so the code is placed at the start of the
1538 function. If internal_arg_pointer is a non-virtual pseudo,
1539 it needs to be placed after the function that initializes
1541 push_topmost_sequence ();
1542 if (REG_P (crtl
->args
.internal_arg_pointer
)
1543 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1544 emit_insn_before (seq
, parm_birth_insn
);
1546 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1547 pop_topmost_sequence ();
1552 /* Perform an untyped call and save the state required to perform an
1553 untyped return of whatever value was returned by the given function. */
1556 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1558 int size
, align
, regno
;
1560 rtx incoming_args
, result
, reg
, dest
, src
;
1561 rtx_call_insn
*call_insn
;
1562 rtx old_stack_level
= 0;
1563 rtx call_fusage
= 0;
1564 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1566 arguments
= convert_memory_address (Pmode
, arguments
);
1568 /* Create a block where the return registers can be saved. */
1569 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1571 /* Fetch the arg pointer from the ARGUMENTS block. */
1572 incoming_args
= gen_reg_rtx (Pmode
);
1573 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1574 if (!STACK_GROWS_DOWNWARD
)
1575 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1576 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1578 /* Push a new argument block and copy the arguments. Do not allow
1579 the (potential) memcpy call below to interfere with our stack
1581 do_pending_stack_adjust ();
1584 /* Save the stack with nonlocal if available. */
1585 if (targetm
.have_save_stack_nonlocal ())
1586 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1588 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1590 /* Allocate a block of memory onto the stack and copy the memory
1591 arguments to the outgoing arguments address. We can pass TRUE
1592 as the 4th argument because we just saved the stack pointer
1593 and will restore it right after the call. */
1594 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1596 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1597 may have already set current_function_calls_alloca to true.
1598 current_function_calls_alloca won't be set if argsize is zero,
1599 so we have to guarantee need_drap is true here. */
1600 if (SUPPORTS_STACK_ALIGNMENT
)
1601 crtl
->need_drap
= true;
1603 dest
= virtual_outgoing_args_rtx
;
1604 if (!STACK_GROWS_DOWNWARD
)
1606 if (CONST_INT_P (argsize
))
1607 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1609 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1611 dest
= gen_rtx_MEM (BLKmode
, dest
);
1612 set_mem_align (dest
, PARM_BOUNDARY
);
1613 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1614 set_mem_align (src
, PARM_BOUNDARY
);
1615 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1617 /* Refer to the argument block. */
1619 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1620 set_mem_align (arguments
, PARM_BOUNDARY
);
1622 /* Walk past the arg-pointer and structure value address. */
1623 size
= GET_MODE_SIZE (Pmode
);
1625 size
+= GET_MODE_SIZE (Pmode
);
1627 /* Restore each of the registers previously saved. Make USE insns
1628 for each of these registers for use in making the call. */
1629 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1630 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1632 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1633 if (size
% align
!= 0)
1634 size
= CEIL (size
, align
) * align
;
1635 reg
= gen_rtx_REG (mode
, regno
);
1636 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1637 use_reg (&call_fusage
, reg
);
1638 size
+= GET_MODE_SIZE (mode
);
1641 /* Restore the structure value address unless this is passed as an
1642 "invisible" first argument. */
1643 size
= GET_MODE_SIZE (Pmode
);
1646 rtx value
= gen_reg_rtx (Pmode
);
1647 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1648 emit_move_insn (struct_value
, value
);
1649 if (REG_P (struct_value
))
1650 use_reg (&call_fusage
, struct_value
);
1651 size
+= GET_MODE_SIZE (Pmode
);
1654 /* All arguments and registers used for the call are set up by now! */
1655 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1657 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1658 and we don't want to load it into a register as an optimization,
1659 because prepare_call_address already did it if it should be done. */
1660 if (GET_CODE (function
) != SYMBOL_REF
)
1661 function
= memory_address (FUNCTION_MODE
, function
);
1663 /* Generate the actual call instruction and save the return value. */
1664 if (targetm
.have_untyped_call ())
1666 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1667 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1668 result_vector (1, result
)));
1670 else if (targetm
.have_call_value ())
1674 /* Locate the unique return register. It is not possible to
1675 express a call that sets more than one return register using
1676 call_value; use untyped_call for that. In fact, untyped_call
1677 only needs to save the return registers in the given block. */
1678 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1679 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1681 gcc_assert (!valreg
); /* have_untyped_call required. */
1683 valreg
= gen_rtx_REG (mode
, regno
);
1686 emit_insn (targetm
.gen_call_value (valreg
,
1687 gen_rtx_MEM (FUNCTION_MODE
, function
),
1688 const0_rtx
, NULL_RTX
, const0_rtx
));
1690 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1695 /* Find the CALL insn we just emitted, and attach the register usage
1697 call_insn
= last_call_insn ();
1698 add_function_usage_to (call_insn
, call_fusage
);
1700 /* Restore the stack. */
1701 if (targetm
.have_save_stack_nonlocal ())
1702 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1704 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1705 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1709 /* Return the address of the result block. */
1710 result
= copy_addr_to_reg (XEXP (result
, 0));
1711 return convert_memory_address (ptr_mode
, result
);
1714 /* Perform an untyped return. */
1717 expand_builtin_return (rtx result
)
1719 int size
, align
, regno
;
1722 rtx_insn
*call_fusage
= 0;
1724 result
= convert_memory_address (Pmode
, result
);
1726 apply_result_size ();
1727 result
= gen_rtx_MEM (BLKmode
, result
);
1729 if (targetm
.have_untyped_return ())
1731 rtx vector
= result_vector (0, result
);
1732 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1737 /* Restore the return value and note that each value is used. */
1739 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1740 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1742 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1743 if (size
% align
!= 0)
1744 size
= CEIL (size
, align
) * align
;
1745 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1746 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1748 push_to_sequence (call_fusage
);
1750 call_fusage
= get_insns ();
1752 size
+= GET_MODE_SIZE (mode
);
1755 /* Put the USE insns before the return. */
1756 emit_insn (call_fusage
);
1758 /* Return whatever values was restored by jumping directly to the end
1760 expand_naked_return ();
1763 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1765 static enum type_class
1766 type_to_class (tree type
)
1768 switch (TREE_CODE (type
))
1770 case VOID_TYPE
: return void_type_class
;
1771 case INTEGER_TYPE
: return integer_type_class
;
1772 case ENUMERAL_TYPE
: return enumeral_type_class
;
1773 case BOOLEAN_TYPE
: return boolean_type_class
;
1774 case POINTER_TYPE
: return pointer_type_class
;
1775 case REFERENCE_TYPE
: return reference_type_class
;
1776 case OFFSET_TYPE
: return offset_type_class
;
1777 case REAL_TYPE
: return real_type_class
;
1778 case COMPLEX_TYPE
: return complex_type_class
;
1779 case FUNCTION_TYPE
: return function_type_class
;
1780 case METHOD_TYPE
: return method_type_class
;
1781 case RECORD_TYPE
: return record_type_class
;
1783 case QUAL_UNION_TYPE
: return union_type_class
;
1784 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1785 ? string_type_class
: array_type_class
);
1786 case LANG_TYPE
: return lang_type_class
;
1787 default: return no_type_class
;
1791 /* Expand a call EXP to __builtin_classify_type. */
1794 expand_builtin_classify_type (tree exp
)
1796 if (call_expr_nargs (exp
))
1797 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1798 return GEN_INT (no_type_class
);
1801 /* This helper macro, meant to be used in mathfn_built_in below,
1802 determines which among a set of three builtin math functions is
1803 appropriate for a given type mode. The `F' and `L' cases are
1804 automatically generated from the `double' case. */
1805 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1806 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1807 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1808 fcodel = BUILT_IN_MATHFN##L ; break;
1809 /* Similar to above, but appends _R after any F/L suffix. */
1810 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1811 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1812 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1813 fcodel = BUILT_IN_MATHFN##L_R ; break;
1815 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1816 if available. If IMPLICIT is true use the implicit builtin declaration,
1817 otherwise use the explicit declaration. If we can't do the conversion,
1821 mathfn_built_in_1 (tree type
, enum built_in_function fn
, bool implicit_p
)
1823 enum built_in_function fcode
, fcodef
, fcodel
, fcode2
;
1827 CASE_MATHFN (BUILT_IN_ACOS
)
1828 CASE_MATHFN (BUILT_IN_ACOSH
)
1829 CASE_MATHFN (BUILT_IN_ASIN
)
1830 CASE_MATHFN (BUILT_IN_ASINH
)
1831 CASE_MATHFN (BUILT_IN_ATAN
)
1832 CASE_MATHFN (BUILT_IN_ATAN2
)
1833 CASE_MATHFN (BUILT_IN_ATANH
)
1834 CASE_MATHFN (BUILT_IN_CBRT
)
1835 CASE_MATHFN (BUILT_IN_CEIL
)
1836 CASE_MATHFN (BUILT_IN_CEXPI
)
1837 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1838 CASE_MATHFN (BUILT_IN_COS
)
1839 CASE_MATHFN (BUILT_IN_COSH
)
1840 CASE_MATHFN (BUILT_IN_DREM
)
1841 CASE_MATHFN (BUILT_IN_ERF
)
1842 CASE_MATHFN (BUILT_IN_ERFC
)
1843 CASE_MATHFN (BUILT_IN_EXP
)
1844 CASE_MATHFN (BUILT_IN_EXP10
)
1845 CASE_MATHFN (BUILT_IN_EXP2
)
1846 CASE_MATHFN (BUILT_IN_EXPM1
)
1847 CASE_MATHFN (BUILT_IN_FABS
)
1848 CASE_MATHFN (BUILT_IN_FDIM
)
1849 CASE_MATHFN (BUILT_IN_FLOOR
)
1850 CASE_MATHFN (BUILT_IN_FMA
)
1851 CASE_MATHFN (BUILT_IN_FMAX
)
1852 CASE_MATHFN (BUILT_IN_FMIN
)
1853 CASE_MATHFN (BUILT_IN_FMOD
)
1854 CASE_MATHFN (BUILT_IN_FREXP
)
1855 CASE_MATHFN (BUILT_IN_GAMMA
)
1856 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1857 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1858 CASE_MATHFN (BUILT_IN_HYPOT
)
1859 CASE_MATHFN (BUILT_IN_ILOGB
)
1860 CASE_MATHFN (BUILT_IN_ICEIL
)
1861 CASE_MATHFN (BUILT_IN_IFLOOR
)
1862 CASE_MATHFN (BUILT_IN_INF
)
1863 CASE_MATHFN (BUILT_IN_IRINT
)
1864 CASE_MATHFN (BUILT_IN_IROUND
)
1865 CASE_MATHFN (BUILT_IN_ISINF
)
1866 CASE_MATHFN (BUILT_IN_J0
)
1867 CASE_MATHFN (BUILT_IN_J1
)
1868 CASE_MATHFN (BUILT_IN_JN
)
1869 CASE_MATHFN (BUILT_IN_LCEIL
)
1870 CASE_MATHFN (BUILT_IN_LDEXP
)
1871 CASE_MATHFN (BUILT_IN_LFLOOR
)
1872 CASE_MATHFN (BUILT_IN_LGAMMA
)
1873 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1874 CASE_MATHFN (BUILT_IN_LLCEIL
)
1875 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1876 CASE_MATHFN (BUILT_IN_LLRINT
)
1877 CASE_MATHFN (BUILT_IN_LLROUND
)
1878 CASE_MATHFN (BUILT_IN_LOG
)
1879 CASE_MATHFN (BUILT_IN_LOG10
)
1880 CASE_MATHFN (BUILT_IN_LOG1P
)
1881 CASE_MATHFN (BUILT_IN_LOG2
)
1882 CASE_MATHFN (BUILT_IN_LOGB
)
1883 CASE_MATHFN (BUILT_IN_LRINT
)
1884 CASE_MATHFN (BUILT_IN_LROUND
)
1885 CASE_MATHFN (BUILT_IN_MODF
)
1886 CASE_MATHFN (BUILT_IN_NAN
)
1887 CASE_MATHFN (BUILT_IN_NANS
)
1888 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1889 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1890 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1891 CASE_MATHFN (BUILT_IN_POW
)
1892 CASE_MATHFN (BUILT_IN_POWI
)
1893 CASE_MATHFN (BUILT_IN_POW10
)
1894 CASE_MATHFN (BUILT_IN_REMAINDER
)
1895 CASE_MATHFN (BUILT_IN_REMQUO
)
1896 CASE_MATHFN (BUILT_IN_RINT
)
1897 CASE_MATHFN (BUILT_IN_ROUND
)
1898 CASE_MATHFN (BUILT_IN_SCALB
)
1899 CASE_MATHFN (BUILT_IN_SCALBLN
)
1900 CASE_MATHFN (BUILT_IN_SCALBN
)
1901 CASE_MATHFN (BUILT_IN_SIGNBIT
)
1902 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1903 CASE_MATHFN (BUILT_IN_SIN
)
1904 CASE_MATHFN (BUILT_IN_SINCOS
)
1905 CASE_MATHFN (BUILT_IN_SINH
)
1906 CASE_MATHFN (BUILT_IN_SQRT
)
1907 CASE_MATHFN (BUILT_IN_TAN
)
1908 CASE_MATHFN (BUILT_IN_TANH
)
1909 CASE_MATHFN (BUILT_IN_TGAMMA
)
1910 CASE_MATHFN (BUILT_IN_TRUNC
)
1911 CASE_MATHFN (BUILT_IN_Y0
)
1912 CASE_MATHFN (BUILT_IN_Y1
)
1913 CASE_MATHFN (BUILT_IN_YN
)
1919 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1921 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1923 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1928 if (implicit_p
&& !builtin_decl_implicit_p (fcode2
))
1931 return builtin_decl_explicit (fcode2
);
1934 /* Like mathfn_built_in_1(), but always use the implicit array. */
1937 mathfn_built_in (tree type
, enum built_in_function fn
)
1939 return mathfn_built_in_1 (type
, fn
, /*implicit=*/ 1);
1942 /* If errno must be maintained, expand the RTL to check if the result,
1943 TARGET, of a built-in function call, EXP, is NaN, and if so set
1947 expand_errno_check (tree exp
, rtx target
)
1949 rtx_code_label
*lab
= gen_label_rtx ();
1951 /* Test the result; if it is NaN, set errno=EDOM because
1952 the argument was not in the domain. */
1953 do_compare_rtx_and_jump (target
, target
, EQ
, 0, GET_MODE (target
),
1954 NULL_RTX
, NULL
, lab
,
1955 /* The jump is very likely. */
1956 REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1));
1959 /* If this built-in doesn't throw an exception, set errno directly. */
1960 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1962 #ifdef GEN_ERRNO_RTX
1963 rtx errno_rtx
= GEN_ERRNO_RTX
;
1966 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1968 emit_move_insn (errno_rtx
,
1969 gen_int_mode (TARGET_EDOM
, GET_MODE (errno_rtx
)));
1975 /* Make sure the library call isn't expanded as a tail call. */
1976 CALL_EXPR_TAILCALL (exp
) = 0;
1978 /* We can't set errno=EDOM directly; let the library call do it.
1979 Pop the arguments right away in case the call gets deleted. */
1981 expand_call (exp
, target
, 0);
1986 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1987 Return NULL_RTX if a normal call should be emitted rather than expanding
1988 the function in-line. EXP is the expression that is a call to the builtin
1989 function; if convenient, the result should be placed in TARGET.
1990 SUBTARGET may be used as the target for computing one of EXP's operands. */
1993 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1995 optab builtin_optab
;
1998 tree fndecl
= get_callee_fndecl (exp
);
2000 bool errno_set
= false;
2001 bool try_widening
= false;
2004 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2007 arg
= CALL_EXPR_ARG (exp
, 0);
2009 switch (DECL_FUNCTION_CODE (fndecl
))
2011 CASE_FLT_FN (BUILT_IN_SQRT
):
2012 errno_set
= ! tree_expr_nonnegative_p (arg
);
2013 try_widening
= true;
2014 builtin_optab
= sqrt_optab
;
2016 CASE_FLT_FN (BUILT_IN_EXP
):
2017 errno_set
= true; builtin_optab
= exp_optab
; break;
2018 CASE_FLT_FN (BUILT_IN_EXP10
):
2019 CASE_FLT_FN (BUILT_IN_POW10
):
2020 errno_set
= true; builtin_optab
= exp10_optab
; break;
2021 CASE_FLT_FN (BUILT_IN_EXP2
):
2022 errno_set
= true; builtin_optab
= exp2_optab
; break;
2023 CASE_FLT_FN (BUILT_IN_EXPM1
):
2024 errno_set
= true; builtin_optab
= expm1_optab
; break;
2025 CASE_FLT_FN (BUILT_IN_LOGB
):
2026 errno_set
= true; builtin_optab
= logb_optab
; break;
2027 CASE_FLT_FN (BUILT_IN_LOG
):
2028 errno_set
= true; builtin_optab
= log_optab
; break;
2029 CASE_FLT_FN (BUILT_IN_LOG10
):
2030 errno_set
= true; builtin_optab
= log10_optab
; break;
2031 CASE_FLT_FN (BUILT_IN_LOG2
):
2032 errno_set
= true; builtin_optab
= log2_optab
; break;
2033 CASE_FLT_FN (BUILT_IN_LOG1P
):
2034 errno_set
= true; builtin_optab
= log1p_optab
; break;
2035 CASE_FLT_FN (BUILT_IN_ASIN
):
2036 builtin_optab
= asin_optab
; break;
2037 CASE_FLT_FN (BUILT_IN_ACOS
):
2038 builtin_optab
= acos_optab
; break;
2039 CASE_FLT_FN (BUILT_IN_TAN
):
2040 builtin_optab
= tan_optab
; break;
2041 CASE_FLT_FN (BUILT_IN_ATAN
):
2042 builtin_optab
= atan_optab
; break;
2043 CASE_FLT_FN (BUILT_IN_FLOOR
):
2044 builtin_optab
= floor_optab
; break;
2045 CASE_FLT_FN (BUILT_IN_CEIL
):
2046 builtin_optab
= ceil_optab
; break;
2047 CASE_FLT_FN (BUILT_IN_TRUNC
):
2048 builtin_optab
= btrunc_optab
; break;
2049 CASE_FLT_FN (BUILT_IN_ROUND
):
2050 builtin_optab
= round_optab
; break;
2051 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
2052 builtin_optab
= nearbyint_optab
;
2053 if (flag_trapping_math
)
2055 /* Else fallthrough and expand as rint. */
2056 CASE_FLT_FN (BUILT_IN_RINT
):
2057 builtin_optab
= rint_optab
; break;
2058 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
2059 builtin_optab
= significand_optab
; break;
2064 /* Make a suitable register to place result in. */
2065 mode
= TYPE_MODE (TREE_TYPE (exp
));
2067 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2070 /* Before working hard, check whether the instruction is available, but try
2071 to widen the mode for specific operations. */
2072 if ((optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
2073 || (try_widening
&& !excess_precision_type (TREE_TYPE (exp
))))
2074 && (!errno_set
|| !optimize_insn_for_size_p ()))
2076 rtx result
= gen_reg_rtx (mode
);
2078 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2079 need to expand the argument again. This way, we will not perform
2080 side-effects more the once. */
2081 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2083 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2087 /* Compute into RESULT.
2088 Set RESULT to wherever the result comes back. */
2089 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2094 expand_errno_check (exp
, result
);
2096 /* Output the entire sequence. */
2097 insns
= get_insns ();
2103 /* If we were unable to expand via the builtin, stop the sequence
2104 (without outputting the insns) and call to the library function
2105 with the stabilized argument list. */
2109 return expand_call (exp
, target
, target
== const0_rtx
);
2112 /* Expand a call to the builtin binary math functions (pow and atan2).
2113 Return NULL_RTX if a normal call should be emitted rather than expanding the
2114 function in-line. EXP is the expression that is a call to the builtin
2115 function; if convenient, the result should be placed in TARGET.
2116 SUBTARGET may be used as the target for computing one of EXP's
2120 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
2122 optab builtin_optab
;
2123 rtx op0
, op1
, result
;
2125 int op1_type
= REAL_TYPE
;
2126 tree fndecl
= get_callee_fndecl (exp
);
2129 bool errno_set
= true;
2131 switch (DECL_FUNCTION_CODE (fndecl
))
2133 CASE_FLT_FN (BUILT_IN_SCALBN
):
2134 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2135 CASE_FLT_FN (BUILT_IN_LDEXP
):
2136 op1_type
= INTEGER_TYPE
;
2141 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2144 arg0
= CALL_EXPR_ARG (exp
, 0);
2145 arg1
= CALL_EXPR_ARG (exp
, 1);
2147 switch (DECL_FUNCTION_CODE (fndecl
))
2149 CASE_FLT_FN (BUILT_IN_POW
):
2150 builtin_optab
= pow_optab
; break;
2151 CASE_FLT_FN (BUILT_IN_ATAN2
):
2152 builtin_optab
= atan2_optab
; break;
2153 CASE_FLT_FN (BUILT_IN_SCALB
):
2154 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2156 builtin_optab
= scalb_optab
; break;
2157 CASE_FLT_FN (BUILT_IN_SCALBN
):
2158 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2159 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2161 /* Fall through... */
2162 CASE_FLT_FN (BUILT_IN_LDEXP
):
2163 builtin_optab
= ldexp_optab
; break;
2164 CASE_FLT_FN (BUILT_IN_FMOD
):
2165 builtin_optab
= fmod_optab
; break;
2166 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2167 CASE_FLT_FN (BUILT_IN_DREM
):
2168 builtin_optab
= remainder_optab
; break;
2173 /* Make a suitable register to place result in. */
2174 mode
= TYPE_MODE (TREE_TYPE (exp
));
2176 /* Before working hard, check whether the instruction is available. */
2177 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2180 result
= gen_reg_rtx (mode
);
2182 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2185 if (errno_set
&& optimize_insn_for_size_p ())
2188 /* Always stabilize the argument list. */
2189 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2190 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2192 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2193 op1
= expand_normal (arg1
);
2197 /* Compute into RESULT.
2198 Set RESULT to wherever the result comes back. */
2199 result
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2200 result
, 0, OPTAB_DIRECT
);
2202 /* If we were unable to expand via the builtin, stop the sequence
2203 (without outputting the insns) and call to the library function
2204 with the stabilized argument list. */
2208 return expand_call (exp
, target
, target
== const0_rtx
);
2212 expand_errno_check (exp
, result
);
2214 /* Output the entire sequence. */
2215 insns
= get_insns ();
2222 /* Expand a call to the builtin trinary math functions (fma).
2223 Return NULL_RTX if a normal call should be emitted rather than expanding the
2224 function in-line. EXP is the expression that is a call to the builtin
2225 function; if convenient, the result should be placed in TARGET.
2226 SUBTARGET may be used as the target for computing one of EXP's
2230 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2232 optab builtin_optab
;
2233 rtx op0
, op1
, op2
, result
;
2235 tree fndecl
= get_callee_fndecl (exp
);
2236 tree arg0
, arg1
, arg2
;
2239 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2242 arg0
= CALL_EXPR_ARG (exp
, 0);
2243 arg1
= CALL_EXPR_ARG (exp
, 1);
2244 arg2
= CALL_EXPR_ARG (exp
, 2);
2246 switch (DECL_FUNCTION_CODE (fndecl
))
2248 CASE_FLT_FN (BUILT_IN_FMA
):
2249 builtin_optab
= fma_optab
; break;
2254 /* Make a suitable register to place result in. */
2255 mode
= TYPE_MODE (TREE_TYPE (exp
));
2257 /* Before working hard, check whether the instruction is available. */
2258 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2261 result
= gen_reg_rtx (mode
);
2263 /* Always stabilize the argument list. */
2264 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2265 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2266 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2268 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2269 op1
= expand_normal (arg1
);
2270 op2
= expand_normal (arg2
);
2274 /* Compute into RESULT.
2275 Set RESULT to wherever the result comes back. */
2276 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2279 /* If we were unable to expand via the builtin, stop the sequence
2280 (without outputting the insns) and call to the library function
2281 with the stabilized argument list. */
2285 return expand_call (exp
, target
, target
== const0_rtx
);
2288 /* Output the entire sequence. */
2289 insns
= get_insns ();
2296 /* Expand a call to the builtin sin and cos math functions.
2297 Return NULL_RTX if a normal call should be emitted rather than expanding the
2298 function in-line. EXP is the expression that is a call to the builtin
2299 function; if convenient, the result should be placed in TARGET.
2300 SUBTARGET may be used as the target for computing one of EXP's
2304 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2306 optab builtin_optab
;
2309 tree fndecl
= get_callee_fndecl (exp
);
2313 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2316 arg
= CALL_EXPR_ARG (exp
, 0);
2318 switch (DECL_FUNCTION_CODE (fndecl
))
2320 CASE_FLT_FN (BUILT_IN_SIN
):
2321 CASE_FLT_FN (BUILT_IN_COS
):
2322 builtin_optab
= sincos_optab
; break;
2327 /* Make a suitable register to place result in. */
2328 mode
= TYPE_MODE (TREE_TYPE (exp
));
2330 /* Check if sincos insn is available, otherwise fallback
2331 to sin or cos insn. */
2332 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2333 switch (DECL_FUNCTION_CODE (fndecl
))
2335 CASE_FLT_FN (BUILT_IN_SIN
):
2336 builtin_optab
= sin_optab
; break;
2337 CASE_FLT_FN (BUILT_IN_COS
):
2338 builtin_optab
= cos_optab
; break;
2343 /* Before working hard, check whether the instruction is available. */
2344 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2346 rtx result
= gen_reg_rtx (mode
);
2348 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2349 need to expand the argument again. This way, we will not perform
2350 side-effects more the once. */
2351 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2353 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2357 /* Compute into RESULT.
2358 Set RESULT to wherever the result comes back. */
2359 if (builtin_optab
== sincos_optab
)
2363 switch (DECL_FUNCTION_CODE (fndecl
))
2365 CASE_FLT_FN (BUILT_IN_SIN
):
2366 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2368 CASE_FLT_FN (BUILT_IN_COS
):
2369 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2377 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2381 /* Output the entire sequence. */
2382 insns
= get_insns ();
2388 /* If we were unable to expand via the builtin, stop the sequence
2389 (without outputting the insns) and call to the library function
2390 with the stabilized argument list. */
2394 return expand_call (exp
, target
, target
== const0_rtx
);
2397 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2398 return an RTL instruction code that implements the functionality.
2399 If that isn't possible or available return CODE_FOR_nothing. */
2401 static enum insn_code
2402 interclass_mathfn_icode (tree arg
, tree fndecl
)
2404 bool errno_set
= false;
2405 optab builtin_optab
= unknown_optab
;
2408 switch (DECL_FUNCTION_CODE (fndecl
))
2410 CASE_FLT_FN (BUILT_IN_ILOGB
):
2411 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2412 CASE_FLT_FN (BUILT_IN_ISINF
):
2413 builtin_optab
= isinf_optab
; break;
2414 case BUILT_IN_ISNORMAL
:
2415 case BUILT_IN_ISFINITE
:
2416 CASE_FLT_FN (BUILT_IN_FINITE
):
2417 case BUILT_IN_FINITED32
:
2418 case BUILT_IN_FINITED64
:
2419 case BUILT_IN_FINITED128
:
2420 case BUILT_IN_ISINFD32
:
2421 case BUILT_IN_ISINFD64
:
2422 case BUILT_IN_ISINFD128
:
2423 /* These builtins have no optabs (yet). */
2429 /* There's no easy way to detect the case we need to set EDOM. */
2430 if (flag_errno_math
&& errno_set
)
2431 return CODE_FOR_nothing
;
2433 /* Optab mode depends on the mode of the input argument. */
2434 mode
= TYPE_MODE (TREE_TYPE (arg
));
2437 return optab_handler (builtin_optab
, mode
);
2438 return CODE_FOR_nothing
;
2441 /* Expand a call to one of the builtin math functions that operate on
2442 floating point argument and output an integer result (ilogb, isinf,
2444 Return 0 if a normal call should be emitted rather than expanding the
2445 function in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2449 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2451 enum insn_code icode
= CODE_FOR_nothing
;
2453 tree fndecl
= get_callee_fndecl (exp
);
2457 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2460 arg
= CALL_EXPR_ARG (exp
, 0);
2461 icode
= interclass_mathfn_icode (arg
, fndecl
);
2462 mode
= TYPE_MODE (TREE_TYPE (arg
));
2464 if (icode
!= CODE_FOR_nothing
)
2466 struct expand_operand ops
[1];
2467 rtx_insn
*last
= get_last_insn ();
2468 tree orig_arg
= arg
;
2470 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2471 need to expand the argument again. This way, we will not perform
2472 side-effects more the once. */
2473 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2475 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2477 if (mode
!= GET_MODE (op0
))
2478 op0
= convert_to_mode (mode
, op0
, 0);
2480 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2481 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2482 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2483 return ops
[0].value
;
2485 delete_insns_since (last
);
2486 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2492 /* Expand a call to the builtin sincos math function.
2493 Return NULL_RTX if a normal call should be emitted rather than expanding the
2494 function in-line. EXP is the expression that is a call to the builtin
2498 expand_builtin_sincos (tree exp
)
2500 rtx op0
, op1
, op2
, target1
, target2
;
2502 tree arg
, sinp
, cosp
;
2504 location_t loc
= EXPR_LOCATION (exp
);
2505 tree alias_type
, alias_off
;
2507 if (!validate_arglist (exp
, REAL_TYPE
,
2508 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2511 arg
= CALL_EXPR_ARG (exp
, 0);
2512 sinp
= CALL_EXPR_ARG (exp
, 1);
2513 cosp
= CALL_EXPR_ARG (exp
, 2);
2515 /* Make a suitable register to place result in. */
2516 mode
= TYPE_MODE (TREE_TYPE (arg
));
2518 /* Check if sincos insn is available, otherwise emit the call. */
2519 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2522 target1
= gen_reg_rtx (mode
);
2523 target2
= gen_reg_rtx (mode
);
2525 op0
= expand_normal (arg
);
2526 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2527 alias_off
= build_int_cst (alias_type
, 0);
2528 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2530 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2533 /* Compute into target1 and target2.
2534 Set TARGET to wherever the result comes back. */
2535 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2536 gcc_assert (result
);
2538 /* Move target1 and target2 to the memory locations indicated
2540 emit_move_insn (op1
, target1
);
2541 emit_move_insn (op2
, target2
);
2546 /* Expand a call to the internal cexpi builtin to the sincos math function.
2547 EXP is the expression that is a call to the builtin function; if convenient,
2548 the result should be placed in TARGET. */
2551 expand_builtin_cexpi (tree exp
, rtx target
)
2553 tree fndecl
= get_callee_fndecl (exp
);
2557 location_t loc
= EXPR_LOCATION (exp
);
2559 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2562 arg
= CALL_EXPR_ARG (exp
, 0);
2563 type
= TREE_TYPE (arg
);
2564 mode
= TYPE_MODE (TREE_TYPE (arg
));
2566 /* Try expanding via a sincos optab, fall back to emitting a libcall
2567 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2568 is only generated from sincos, cexp or if we have either of them. */
2569 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2571 op1
= gen_reg_rtx (mode
);
2572 op2
= gen_reg_rtx (mode
);
2574 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2576 /* Compute into op1 and op2. */
2577 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2579 else if (targetm
.libc_has_function (function_sincos
))
2581 tree call
, fn
= NULL_TREE
;
2585 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2586 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2587 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2588 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2589 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2590 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2594 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2595 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2596 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2597 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2598 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2599 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2601 /* Make sure not to fold the sincos call again. */
2602 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2603 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2604 call
, 3, arg
, top1
, top2
));
2608 tree call
, fn
= NULL_TREE
, narg
;
2609 tree ctype
= build_complex_type (type
);
2611 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2612 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2613 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2614 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2615 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2616 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2620 /* If we don't have a decl for cexp create one. This is the
2621 friendliest fallback if the user calls __builtin_cexpi
2622 without full target C99 function support. */
2623 if (fn
== NULL_TREE
)
2626 const char *name
= NULL
;
2628 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2630 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2632 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2635 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2636 fn
= build_fn_decl (name
, fntype
);
2639 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2640 build_real (type
, dconst0
), arg
);
2642 /* Make sure not to fold the cexp call again. */
2643 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2644 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2645 target
, VOIDmode
, EXPAND_NORMAL
);
2648 /* Now build the proper return type. */
2649 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2650 make_tree (TREE_TYPE (arg
), op2
),
2651 make_tree (TREE_TYPE (arg
), op1
)),
2652 target
, VOIDmode
, EXPAND_NORMAL
);
2655 /* Conveniently construct a function call expression. FNDECL names the
2656 function to be called, N is the number of arguments, and the "..."
2657 parameters are the argument expressions. Unlike build_call_exr
2658 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2661 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2664 tree fntype
= TREE_TYPE (fndecl
);
2665 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2668 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2670 SET_EXPR_LOCATION (fn
, loc
);
2674 /* Expand a call to one of the builtin rounding functions gcc defines
2675 as an extension (lfloor and lceil). As these are gcc extensions we
2676 do not need to worry about setting errno to EDOM.
2677 If expanding via optab fails, lower expression to (int)(floor(x)).
2678 EXP is the expression that is a call to the builtin function;
2679 if convenient, the result should be placed in TARGET. */
2682 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2684 convert_optab builtin_optab
;
2687 tree fndecl
= get_callee_fndecl (exp
);
2688 enum built_in_function fallback_fn
;
2689 tree fallback_fndecl
;
2693 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2696 arg
= CALL_EXPR_ARG (exp
, 0);
2698 switch (DECL_FUNCTION_CODE (fndecl
))
2700 CASE_FLT_FN (BUILT_IN_ICEIL
):
2701 CASE_FLT_FN (BUILT_IN_LCEIL
):
2702 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2703 builtin_optab
= lceil_optab
;
2704 fallback_fn
= BUILT_IN_CEIL
;
2707 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2708 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2709 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2710 builtin_optab
= lfloor_optab
;
2711 fallback_fn
= BUILT_IN_FLOOR
;
2718 /* Make a suitable register to place result in. */
2719 mode
= TYPE_MODE (TREE_TYPE (exp
));
2721 target
= gen_reg_rtx (mode
);
2723 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2724 need to expand the argument again. This way, we will not perform
2725 side-effects more the once. */
2726 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2728 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2732 /* Compute into TARGET. */
2733 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2735 /* Output the entire sequence. */
2736 insns
= get_insns ();
2742 /* If we were unable to expand via the builtin, stop the sequence
2743 (without outputting the insns). */
2746 /* Fall back to floating point rounding optab. */
2747 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2749 /* For non-C99 targets we may end up without a fallback fndecl here
2750 if the user called __builtin_lfloor directly. In this case emit
2751 a call to the floor/ceil variants nevertheless. This should result
2752 in the best user experience for not full C99 targets. */
2753 if (fallback_fndecl
== NULL_TREE
)
2756 const char *name
= NULL
;
2758 switch (DECL_FUNCTION_CODE (fndecl
))
2760 case BUILT_IN_ICEIL
:
2761 case BUILT_IN_LCEIL
:
2762 case BUILT_IN_LLCEIL
:
2765 case BUILT_IN_ICEILF
:
2766 case BUILT_IN_LCEILF
:
2767 case BUILT_IN_LLCEILF
:
2770 case BUILT_IN_ICEILL
:
2771 case BUILT_IN_LCEILL
:
2772 case BUILT_IN_LLCEILL
:
2775 case BUILT_IN_IFLOOR
:
2776 case BUILT_IN_LFLOOR
:
2777 case BUILT_IN_LLFLOOR
:
2780 case BUILT_IN_IFLOORF
:
2781 case BUILT_IN_LFLOORF
:
2782 case BUILT_IN_LLFLOORF
:
2785 case BUILT_IN_IFLOORL
:
2786 case BUILT_IN_LFLOORL
:
2787 case BUILT_IN_LLFLOORL
:
2794 fntype
= build_function_type_list (TREE_TYPE (arg
),
2795 TREE_TYPE (arg
), NULL_TREE
);
2796 fallback_fndecl
= build_fn_decl (name
, fntype
);
2799 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2801 tmp
= expand_normal (exp
);
2802 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2804 /* Truncate the result of floating point optab to integer
2805 via expand_fix (). */
2806 target
= gen_reg_rtx (mode
);
2807 expand_fix (target
, tmp
, 0);
2812 /* Expand a call to one of the builtin math functions doing integer
2814 Return 0 if a normal call should be emitted rather than expanding the
2815 function in-line. EXP is the expression that is a call to the builtin
2816 function; if convenient, the result should be placed in TARGET. */
2819 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2821 convert_optab builtin_optab
;
2824 tree fndecl
= get_callee_fndecl (exp
);
2827 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2829 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2832 arg
= CALL_EXPR_ARG (exp
, 0);
2834 switch (DECL_FUNCTION_CODE (fndecl
))
2836 CASE_FLT_FN (BUILT_IN_IRINT
):
2837 fallback_fn
= BUILT_IN_LRINT
;
2839 CASE_FLT_FN (BUILT_IN_LRINT
):
2840 CASE_FLT_FN (BUILT_IN_LLRINT
):
2841 builtin_optab
= lrint_optab
;
2844 CASE_FLT_FN (BUILT_IN_IROUND
):
2845 fallback_fn
= BUILT_IN_LROUND
;
2847 CASE_FLT_FN (BUILT_IN_LROUND
):
2848 CASE_FLT_FN (BUILT_IN_LLROUND
):
2849 builtin_optab
= lround_optab
;
2856 /* There's no easy way to detect the case we need to set EDOM. */
2857 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2860 /* Make a suitable register to place result in. */
2861 mode
= TYPE_MODE (TREE_TYPE (exp
));
2863 /* There's no easy way to detect the case we need to set EDOM. */
2864 if (!flag_errno_math
)
2866 rtx result
= gen_reg_rtx (mode
);
2868 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2869 need to expand the argument again. This way, we will not perform
2870 side-effects more the once. */
2871 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2873 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2877 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2879 /* Output the entire sequence. */
2880 insns
= get_insns ();
2886 /* If we were unable to expand via the builtin, stop the sequence
2887 (without outputting the insns) and call to the library function
2888 with the stabilized argument list. */
2892 if (fallback_fn
!= BUILT_IN_NONE
)
2894 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2895 targets, (int) round (x) should never be transformed into
2896 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2897 a call to lround in the hope that the target provides at least some
2898 C99 functions. This should result in the best user experience for
2899 not full C99 targets. */
2900 tree fallback_fndecl
= mathfn_built_in_1 (TREE_TYPE (arg
),
2903 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2904 fallback_fndecl
, 1, arg
);
2906 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2907 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2908 return convert_to_mode (mode
, target
, 0);
2911 return expand_call (exp
, target
, target
== const0_rtx
);
2914 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2915 a normal call should be emitted rather than expanding the function
2916 in-line. EXP is the expression that is a call to the builtin
2917 function; if convenient, the result should be placed in TARGET. */
2920 expand_builtin_powi (tree exp
, rtx target
)
2927 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2930 arg0
= CALL_EXPR_ARG (exp
, 0);
2931 arg1
= CALL_EXPR_ARG (exp
, 1);
2932 mode
= TYPE_MODE (TREE_TYPE (exp
));
2934 /* Emit a libcall to libgcc. */
2936 /* Mode of the 2nd argument must match that of an int. */
2937 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2939 if (target
== NULL_RTX
)
2940 target
= gen_reg_rtx (mode
);
2942 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2943 if (GET_MODE (op0
) != mode
)
2944 op0
= convert_to_mode (mode
, op0
, 0);
2945 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2946 if (GET_MODE (op1
) != mode2
)
2947 op1
= convert_to_mode (mode2
, op1
, 0);
2949 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2950 target
, LCT_CONST
, mode
, 2,
2951 op0
, mode
, op1
, mode2
);
2956 /* Expand expression EXP which is a call to the strlen builtin. Return
2957 NULL_RTX if we failed the caller should emit a normal call, otherwise
2958 try to get the result in TARGET, if convenient. */
2961 expand_builtin_strlen (tree exp
, rtx target
,
2962 machine_mode target_mode
)
2964 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2968 struct expand_operand ops
[4];
2971 tree src
= CALL_EXPR_ARG (exp
, 0);
2973 rtx_insn
*before_strlen
;
2974 machine_mode insn_mode
= target_mode
;
2975 enum insn_code icode
= CODE_FOR_nothing
;
2978 /* If the length can be computed at compile-time, return it. */
2979 len
= c_strlen (src
, 0);
2981 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2983 /* If the length can be computed at compile-time and is constant
2984 integer, but there are side-effects in src, evaluate
2985 src for side-effects, then return len.
2986 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987 can be optimized into: i++; x = 3; */
2988 len
= c_strlen (src
, 1);
2989 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2991 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2992 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2995 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2997 /* If SRC is not a pointer type, don't do this operation inline. */
3001 /* Bail out if we can't compute strlen in the right mode. */
3002 while (insn_mode
!= VOIDmode
)
3004 icode
= optab_handler (strlen_optab
, insn_mode
);
3005 if (icode
!= CODE_FOR_nothing
)
3008 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3010 if (insn_mode
== VOIDmode
)
3013 /* Make a place to hold the source address. We will not expand
3014 the actual source until we are sure that the expansion will
3015 not fail -- there are trees that cannot be expanded twice. */
3016 src_reg
= gen_reg_rtx (Pmode
);
3018 /* Mark the beginning of the strlen sequence so we can emit the
3019 source operand later. */
3020 before_strlen
= get_last_insn ();
3022 create_output_operand (&ops
[0], target
, insn_mode
);
3023 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
3024 create_integer_operand (&ops
[2], 0);
3025 create_integer_operand (&ops
[3], align
);
3026 if (!maybe_expand_insn (icode
, 4, ops
))
3029 /* Now that we are assured of success, expand the source. */
3031 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
3034 #ifdef POINTERS_EXTEND_UNSIGNED
3035 if (GET_MODE (pat
) != Pmode
)
3036 pat
= convert_to_mode (Pmode
, pat
,
3037 POINTERS_EXTEND_UNSIGNED
);
3039 emit_move_insn (src_reg
, pat
);
3045 emit_insn_after (pat
, before_strlen
);
3047 emit_insn_before (pat
, get_insns ());
3049 /* Return the value in the proper mode for this function. */
3050 if (GET_MODE (ops
[0].value
) == target_mode
)
3051 target
= ops
[0].value
;
3052 else if (target
!= 0)
3053 convert_move (target
, ops
[0].value
, 0);
3055 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
3061 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3062 bytes from constant string DATA + OFFSET and return it as target
3066 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3069 const char *str
= (const char *) data
;
3071 gcc_assert (offset
>= 0
3072 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3073 <= strlen (str
) + 1));
3075 return c_readstr (str
+ offset
, mode
);
3078 /* LEN specify length of the block of memcpy/memset operation.
3079 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3080 In some cases we can make very likely guess on max size, then we
3081 set it into PROBABLE_MAX_SIZE. */
3084 determine_block_size (tree len
, rtx len_rtx
,
3085 unsigned HOST_WIDE_INT
*min_size
,
3086 unsigned HOST_WIDE_INT
*max_size
,
3087 unsigned HOST_WIDE_INT
*probable_max_size
)
3089 if (CONST_INT_P (len_rtx
))
3091 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
3097 enum value_range_type range_type
= VR_UNDEFINED
;
3099 /* Determine bounds from the type. */
3100 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
3101 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
3104 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
3105 *probable_max_size
= *max_size
3106 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
3108 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
3110 if (TREE_CODE (len
) == SSA_NAME
)
3111 range_type
= get_range_info (len
, &min
, &max
);
3112 if (range_type
== VR_RANGE
)
3114 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
3115 *min_size
= min
.to_uhwi ();
3116 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
3117 *probable_max_size
= *max_size
= max
.to_uhwi ();
3119 else if (range_type
== VR_ANTI_RANGE
)
3121 /* Anti range 0...N lets us to determine minimal size to N+1. */
3124 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
3125 *min_size
= max
.to_uhwi () + 1;
3133 Produce anti range allowing negative values of N. We still
3134 can use the information and make a guess that N is not negative.
3136 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3137 *probable_max_size
= min
.to_uhwi () - 1;
3140 gcc_checking_assert (*max_size
<=
3141 (unsigned HOST_WIDE_INT
)
3142 GET_MODE_MASK (GET_MODE (len_rtx
)));
3145 /* Helper function to do the actual work for expand_builtin_memcpy. */
3148 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
3150 const char *src_str
;
3151 unsigned int src_align
= get_pointer_alignment (src
);
3152 unsigned int dest_align
= get_pointer_alignment (dest
);
3153 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3154 HOST_WIDE_INT expected_size
= -1;
3155 unsigned int expected_align
= 0;
3156 unsigned HOST_WIDE_INT min_size
;
3157 unsigned HOST_WIDE_INT max_size
;
3158 unsigned HOST_WIDE_INT probable_max_size
;
3160 /* If DEST is not a pointer type, call the normal function. */
3161 if (dest_align
== 0)
3164 /* If either SRC is not a pointer type, don't do this
3165 operation in-line. */
3169 if (currently_expanding_gimple_stmt
)
3170 stringop_block_profile (currently_expanding_gimple_stmt
,
3171 &expected_align
, &expected_size
);
3173 if (expected_align
< dest_align
)
3174 expected_align
= dest_align
;
3175 dest_mem
= get_memory_rtx (dest
, len
);
3176 set_mem_align (dest_mem
, dest_align
);
3177 len_rtx
= expand_normal (len
);
3178 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3179 &probable_max_size
);
3180 src_str
= c_getstr (src
);
3182 /* If SRC is a string constant and block move would be done
3183 by pieces, we can avoid loading the string from memory
3184 and only stored the computed constants. */
3186 && CONST_INT_P (len_rtx
)
3187 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3188 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3189 CONST_CAST (char *, src_str
),
3192 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3193 builtin_memcpy_read_str
,
3194 CONST_CAST (char *, src_str
),
3195 dest_align
, false, 0);
3196 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3197 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3201 src_mem
= get_memory_rtx (src
, len
);
3202 set_mem_align (src_mem
, src_align
);
3204 /* Copy word part most expediently. */
3205 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3206 CALL_EXPR_TAILCALL (exp
)
3207 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3208 expected_align
, expected_size
,
3209 min_size
, max_size
, probable_max_size
);
3213 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3214 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3220 /* Expand a call EXP to the memcpy builtin.
3221 Return NULL_RTX if we failed, the caller should emit a normal call,
3222 otherwise try to get the result in TARGET, if convenient (and in
3223 mode MODE if that's convenient). */
3226 expand_builtin_memcpy (tree exp
, rtx target
)
3228 if (!validate_arglist (exp
,
3229 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3233 tree dest
= CALL_EXPR_ARG (exp
, 0);
3234 tree src
= CALL_EXPR_ARG (exp
, 1);
3235 tree len
= CALL_EXPR_ARG (exp
, 2);
3236 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3240 /* Expand an instrumented call EXP to the memcpy builtin.
3241 Return NULL_RTX if we failed, the caller should emit a normal call,
3242 otherwise try to get the result in TARGET, if convenient (and in
3243 mode MODE if that's convenient). */
3246 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3248 if (!validate_arglist (exp
,
3249 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3250 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3251 INTEGER_TYPE
, VOID_TYPE
))
3255 tree dest
= CALL_EXPR_ARG (exp
, 0);
3256 tree src
= CALL_EXPR_ARG (exp
, 2);
3257 tree len
= CALL_EXPR_ARG (exp
, 4);
3258 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3260 /* Return src bounds with the result. */
3263 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3264 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3265 res
= chkp_join_splitted_slot (res
, bnd
);
3271 /* Expand a call EXP to the mempcpy builtin.
3272 Return NULL_RTX if we failed; the caller should emit a normal call,
3273 otherwise try to get the result in TARGET, if convenient (and in
3274 mode MODE if that's convenient). If ENDP is 0 return the
3275 destination pointer, if ENDP is 1 return the end pointer ala
3276 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3280 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3282 if (!validate_arglist (exp
,
3283 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3287 tree dest
= CALL_EXPR_ARG (exp
, 0);
3288 tree src
= CALL_EXPR_ARG (exp
, 1);
3289 tree len
= CALL_EXPR_ARG (exp
, 2);
3290 return expand_builtin_mempcpy_args (dest
, src
, len
,
3291 target
, mode
, /*endp=*/ 1,
3296 /* Expand an instrumented call EXP to the mempcpy builtin.
3297 Return NULL_RTX if we failed, the caller should emit a normal call,
3298 otherwise try to get the result in TARGET, if convenient (and in
3299 mode MODE if that's convenient). */
3302 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3304 if (!validate_arglist (exp
,
3305 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3306 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3307 INTEGER_TYPE
, VOID_TYPE
))
3311 tree dest
= CALL_EXPR_ARG (exp
, 0);
3312 tree src
= CALL_EXPR_ARG (exp
, 2);
3313 tree len
= CALL_EXPR_ARG (exp
, 4);
3314 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3317 /* Return src bounds with the result. */
3320 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3321 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3322 res
= chkp_join_splitted_slot (res
, bnd
);
3328 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3329 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3330 so that this can also be called without constructing an actual CALL_EXPR.
3331 The other arguments and return value are the same as for
3332 expand_builtin_mempcpy. */
3335 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3336 rtx target
, machine_mode mode
, int endp
,
3339 tree fndecl
= get_callee_fndecl (orig_exp
);
3341 /* If return value is ignored, transform mempcpy into memcpy. */
3342 if (target
== const0_rtx
3343 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3344 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3346 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3347 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3349 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3351 else if (target
== const0_rtx
3352 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3354 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3355 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3357 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3361 const char *src_str
;
3362 unsigned int src_align
= get_pointer_alignment (src
);
3363 unsigned int dest_align
= get_pointer_alignment (dest
);
3364 rtx dest_mem
, src_mem
, len_rtx
;
3366 /* If either SRC or DEST is not a pointer type, don't do this
3367 operation in-line. */
3368 if (dest_align
== 0 || src_align
== 0)
3371 /* If LEN is not constant, call the normal function. */
3372 if (! tree_fits_uhwi_p (len
))
3375 len_rtx
= expand_normal (len
);
3376 src_str
= c_getstr (src
);
3378 /* If SRC is a string constant and block move would be done
3379 by pieces, we can avoid loading the string from memory
3380 and only stored the computed constants. */
3382 && CONST_INT_P (len_rtx
)
3383 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3384 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3385 CONST_CAST (char *, src_str
),
3388 dest_mem
= get_memory_rtx (dest
, len
);
3389 set_mem_align (dest_mem
, dest_align
);
3390 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3391 builtin_memcpy_read_str
,
3392 CONST_CAST (char *, src_str
),
3393 dest_align
, false, endp
);
3394 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3395 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3399 if (CONST_INT_P (len_rtx
)
3400 && can_move_by_pieces (INTVAL (len_rtx
),
3401 MIN (dest_align
, src_align
)))
3403 dest_mem
= get_memory_rtx (dest
, len
);
3404 set_mem_align (dest_mem
, dest_align
);
3405 src_mem
= get_memory_rtx (src
, len
);
3406 set_mem_align (src_mem
, src_align
);
3407 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3408 MIN (dest_align
, src_align
), endp
);
3409 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3410 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3418 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3419 we failed, the caller should emit a normal call, otherwise try to
3420 get the result in TARGET, if convenient. If ENDP is 0 return the
3421 destination pointer, if ENDP is 1 return the end pointer ala
3422 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3426 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3428 struct expand_operand ops
[3];
3432 if (!targetm
.have_movstr ())
3435 dest_mem
= get_memory_rtx (dest
, NULL
);
3436 src_mem
= get_memory_rtx (src
, NULL
);
3439 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3440 dest_mem
= replace_equiv_address (dest_mem
, target
);
3443 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3444 create_fixed_operand (&ops
[1], dest_mem
);
3445 create_fixed_operand (&ops
[2], src_mem
);
3446 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3449 if (endp
&& target
!= const0_rtx
)
3451 target
= ops
[0].value
;
3452 /* movstr is supposed to set end to the address of the NUL
3453 terminator. If the caller requested a mempcpy-like return value,
3457 rtx tem
= plus_constant (GET_MODE (target
),
3458 gen_lowpart (GET_MODE (target
), target
), 1);
3459 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3465 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3466 NULL_RTX if we failed the caller should emit a normal call, otherwise
3467 try to get the result in TARGET, if convenient (and in mode MODE if that's
3471 expand_builtin_strcpy (tree exp
, rtx target
)
3473 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3475 tree dest
= CALL_EXPR_ARG (exp
, 0);
3476 tree src
= CALL_EXPR_ARG (exp
, 1);
3477 return expand_builtin_strcpy_args (dest
, src
, target
);
3482 /* Helper function to do the actual work for expand_builtin_strcpy. The
3483 arguments to the builtin_strcpy call DEST and SRC are broken out
3484 so that this can also be called without constructing an actual CALL_EXPR.
3485 The other arguments and return value are the same as for
3486 expand_builtin_strcpy. */
3489 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3491 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3494 /* Expand a call EXP to the stpcpy builtin.
3495 Return NULL_RTX if we failed the caller should emit a normal call,
3496 otherwise try to get the result in TARGET, if convenient (and in
3497 mode MODE if that's convenient). */
3500 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3503 location_t loc
= EXPR_LOCATION (exp
);
3505 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3508 dst
= CALL_EXPR_ARG (exp
, 0);
3509 src
= CALL_EXPR_ARG (exp
, 1);
3511 /* If return value is ignored, transform stpcpy into strcpy. */
3512 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3514 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3515 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3516 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3523 /* Ensure we get an actual string whose length can be evaluated at
3524 compile-time, not an expression containing a string. This is
3525 because the latter will potentially produce pessimized code
3526 when used to produce the return value. */
3527 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3528 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3530 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3531 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3532 target
, mode
, /*endp=*/2,
3538 if (TREE_CODE (len
) == INTEGER_CST
)
3540 rtx len_rtx
= expand_normal (len
);
3542 if (CONST_INT_P (len_rtx
))
3544 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3550 if (mode
!= VOIDmode
)
3551 target
= gen_reg_rtx (mode
);
3553 target
= gen_reg_rtx (GET_MODE (ret
));
3555 if (GET_MODE (target
) != GET_MODE (ret
))
3556 ret
= gen_lowpart (GET_MODE (target
), ret
);
3558 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3559 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3567 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3571 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3572 bytes from constant string DATA + OFFSET and return it as target
3576 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3579 const char *str
= (const char *) data
;
3581 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3584 return c_readstr (str
+ offset
, mode
);
3587 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3588 NULL_RTX if we failed the caller should emit a normal call. */
3591 expand_builtin_strncpy (tree exp
, rtx target
)
3593 location_t loc
= EXPR_LOCATION (exp
);
3595 if (validate_arglist (exp
,
3596 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3598 tree dest
= CALL_EXPR_ARG (exp
, 0);
3599 tree src
= CALL_EXPR_ARG (exp
, 1);
3600 tree len
= CALL_EXPR_ARG (exp
, 2);
3601 tree slen
= c_strlen (src
, 1);
3603 /* We must be passed a constant len and src parameter. */
3604 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3607 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3609 /* We're required to pad with trailing zeros if the requested
3610 len is greater than strlen(s2)+1. In that case try to
3611 use store_by_pieces, if it fails, punt. */
3612 if (tree_int_cst_lt (slen
, len
))
3614 unsigned int dest_align
= get_pointer_alignment (dest
);
3615 const char *p
= c_getstr (src
);
3618 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3619 || !can_store_by_pieces (tree_to_uhwi (len
),
3620 builtin_strncpy_read_str
,
3621 CONST_CAST (char *, p
),
3625 dest_mem
= get_memory_rtx (dest
, len
);
3626 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3627 builtin_strncpy_read_str
,
3628 CONST_CAST (char *, p
), dest_align
, false, 0);
3629 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3630 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3637 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3638 bytes from constant string DATA + OFFSET and return it as target
3642 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3645 const char *c
= (const char *) data
;
3646 char *p
= XALLOCAVEC (char, GET_MODE_SIZE (mode
));
3648 memset (p
, *c
, GET_MODE_SIZE (mode
));
3650 return c_readstr (p
, mode
);
3653 /* Callback routine for store_by_pieces. Return the RTL of a register
3654 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3655 char value given in the RTL register data. For example, if mode is
3656 4 bytes wide, return the RTL for 0x01010101*data. */
3659 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3666 size
= GET_MODE_SIZE (mode
);
3670 p
= XALLOCAVEC (char, size
);
3671 memset (p
, 1, size
);
3672 coeff
= c_readstr (p
, mode
);
3674 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3675 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3676 return force_reg (mode
, target
);
3679 /* Expand expression EXP, which is a call to the memset builtin. Return
3680 NULL_RTX if we failed the caller should emit a normal call, otherwise
3681 try to get the result in TARGET, if convenient (and in mode MODE if that's
3685 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3687 if (!validate_arglist (exp
,
3688 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3692 tree dest
= CALL_EXPR_ARG (exp
, 0);
3693 tree val
= CALL_EXPR_ARG (exp
, 1);
3694 tree len
= CALL_EXPR_ARG (exp
, 2);
3695 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3699 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3700 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3705 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3707 if (!validate_arglist (exp
,
3708 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3709 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3713 tree dest
= CALL_EXPR_ARG (exp
, 0);
3714 tree val
= CALL_EXPR_ARG (exp
, 2);
3715 tree len
= CALL_EXPR_ARG (exp
, 3);
3716 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3718 /* Return src bounds with the result. */
3721 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3722 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3723 res
= chkp_join_splitted_slot (res
, bnd
);
3729 /* Helper function to do the actual work for expand_builtin_memset. The
3730 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3731 so that this can also be called without constructing an actual CALL_EXPR.
3732 The other arguments and return value are the same as for
3733 expand_builtin_memset. */
3736 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3737 rtx target
, machine_mode mode
, tree orig_exp
)
3740 enum built_in_function fcode
;
3741 machine_mode val_mode
;
3743 unsigned int dest_align
;
3744 rtx dest_mem
, dest_addr
, len_rtx
;
3745 HOST_WIDE_INT expected_size
= -1;
3746 unsigned int expected_align
= 0;
3747 unsigned HOST_WIDE_INT min_size
;
3748 unsigned HOST_WIDE_INT max_size
;
3749 unsigned HOST_WIDE_INT probable_max_size
;
3751 dest_align
= get_pointer_alignment (dest
);
3753 /* If DEST is not a pointer type, don't do this operation in-line. */
3754 if (dest_align
== 0)
3757 if (currently_expanding_gimple_stmt
)
3758 stringop_block_profile (currently_expanding_gimple_stmt
,
3759 &expected_align
, &expected_size
);
3761 if (expected_align
< dest_align
)
3762 expected_align
= dest_align
;
3764 /* If the LEN parameter is zero, return DEST. */
3765 if (integer_zerop (len
))
3767 /* Evaluate and ignore VAL in case it has side-effects. */
3768 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3769 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3772 /* Stabilize the arguments in case we fail. */
3773 dest
= builtin_save_expr (dest
);
3774 val
= builtin_save_expr (val
);
3775 len
= builtin_save_expr (len
);
3777 len_rtx
= expand_normal (len
);
3778 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3779 &probable_max_size
);
3780 dest_mem
= get_memory_rtx (dest
, len
);
3781 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3783 if (TREE_CODE (val
) != INTEGER_CST
)
3787 val_rtx
= expand_normal (val
);
3788 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3790 /* Assume that we can memset by pieces if we can store
3791 * the coefficients by pieces (in the required modes).
3792 * We can't pass builtin_memset_gen_str as that emits RTL. */
3794 if (tree_fits_uhwi_p (len
)
3795 && can_store_by_pieces (tree_to_uhwi (len
),
3796 builtin_memset_read_str
, &c
, dest_align
,
3799 val_rtx
= force_reg (val_mode
, val_rtx
);
3800 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3801 builtin_memset_gen_str
, val_rtx
, dest_align
,
3804 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3805 dest_align
, expected_align
,
3806 expected_size
, min_size
, max_size
,
3810 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3811 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3815 if (target_char_cast (val
, &c
))
3820 if (tree_fits_uhwi_p (len
)
3821 && can_store_by_pieces (tree_to_uhwi (len
),
3822 builtin_memset_read_str
, &c
, dest_align
,
3824 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3825 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3826 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3827 gen_int_mode (c
, val_mode
),
3828 dest_align
, expected_align
,
3829 expected_size
, min_size
, max_size
,
3833 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3834 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3838 set_mem_align (dest_mem
, dest_align
);
3839 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3840 CALL_EXPR_TAILCALL (orig_exp
)
3841 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3842 expected_align
, expected_size
,
3848 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3849 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3855 fndecl
= get_callee_fndecl (orig_exp
);
3856 fcode
= DECL_FUNCTION_CODE (fndecl
);
3857 if (fcode
== BUILT_IN_MEMSET
3858 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
3859 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3861 else if (fcode
== BUILT_IN_BZERO
)
3862 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3866 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3867 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3868 return expand_call (fn
, target
, target
== const0_rtx
);
3871 /* Expand expression EXP, which is a call to the bzero builtin. Return
3872 NULL_RTX if we failed the caller should emit a normal call. */
3875 expand_builtin_bzero (tree exp
)
3878 location_t loc
= EXPR_LOCATION (exp
);
3880 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3883 dest
= CALL_EXPR_ARG (exp
, 0);
3884 size
= CALL_EXPR_ARG (exp
, 1);
3886 /* New argument list transforming bzero(ptr x, int y) to
3887 memset(ptr x, int 0, size_t y). This is done this way
3888 so that if it isn't expanded inline, we fallback to
3889 calling bzero instead of memset. */
3891 return expand_builtin_memset_args (dest
, integer_zero_node
,
3892 fold_convert_loc (loc
,
3893 size_type_node
, size
),
3894 const0_rtx
, VOIDmode
, exp
);
3897 /* Try to expand cmpstr operation ICODE with the given operands.
3898 Return the result rtx on success, otherwise return null. */
3901 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
3902 HOST_WIDE_INT align
)
3904 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
3906 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
3909 struct expand_operand ops
[4];
3910 create_output_operand (&ops
[0], target
, insn_mode
);
3911 create_fixed_operand (&ops
[1], arg1_rtx
);
3912 create_fixed_operand (&ops
[2], arg2_rtx
);
3913 create_integer_operand (&ops
[3], align
);
3914 if (maybe_expand_insn (icode
, 4, ops
))
3915 return ops
[0].value
;
3919 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3920 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3921 otherwise return null. */
3924 expand_cmpstrn_or_cmpmem (insn_code icode
, rtx target
, rtx arg1_rtx
,
3925 rtx arg2_rtx
, tree arg3_type
, rtx arg3_rtx
,
3926 HOST_WIDE_INT align
)
3928 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
3930 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
3933 struct expand_operand ops
[5];
3934 create_output_operand (&ops
[0], target
, insn_mode
);
3935 create_fixed_operand (&ops
[1], arg1_rtx
);
3936 create_fixed_operand (&ops
[2], arg2_rtx
);
3937 create_convert_operand_from (&ops
[3], arg3_rtx
, TYPE_MODE (arg3_type
),
3938 TYPE_UNSIGNED (arg3_type
));
3939 create_integer_operand (&ops
[4], align
);
3940 if (maybe_expand_insn (icode
, 5, ops
))
3941 return ops
[0].value
;
3945 /* Expand expression EXP, which is a call to the memcmp built-in function.
3946 Return NULL_RTX if we failed and the caller should emit a normal call,
3947 otherwise try to get the result in TARGET, if convenient. */
3950 expand_builtin_memcmp (tree exp
, rtx target
)
3952 if (!validate_arglist (exp
,
3953 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3956 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3957 implementing memcmp because it will stop if it encounters two
3959 insn_code icode
= direct_optab_handler (cmpmem_optab
, SImode
);
3960 if (icode
== CODE_FOR_nothing
)
3963 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3964 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3965 tree len
= CALL_EXPR_ARG (exp
, 2);
3967 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3968 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3970 /* If we don't have POINTER_TYPE, call the function. */
3971 if (arg1_align
== 0 || arg2_align
== 0)
3974 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
3975 location_t loc
= EXPR_LOCATION (exp
);
3976 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
3977 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
3978 rtx arg3_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3980 /* Set MEM_SIZE as appropriate. */
3981 if (CONST_INT_P (arg3_rtx
))
3983 set_mem_size (arg1_rtx
, INTVAL (arg3_rtx
));
3984 set_mem_size (arg2_rtx
, INTVAL (arg3_rtx
));
3987 rtx result
= expand_cmpstrn_or_cmpmem (icode
, target
, arg1_rtx
, arg2_rtx
,
3988 TREE_TYPE (len
), arg3_rtx
,
3989 MIN (arg1_align
, arg2_align
));
3992 /* Return the value in the proper mode for this function. */
3993 if (GET_MODE (result
) == mode
)
3998 convert_move (target
, result
, 0);
4002 return convert_to_mode (mode
, result
, 0);
4007 && REG_P (result
) && GET_MODE (result
) == mode
4008 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4009 result
= gen_reg_rtx (mode
);
4011 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE
,
4012 TYPE_MODE (integer_type_node
), 3,
4013 XEXP (arg1_rtx
, 0), Pmode
,
4014 XEXP (arg2_rtx
, 0), Pmode
,
4015 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4016 TYPE_UNSIGNED (sizetype
)),
4017 TYPE_MODE (sizetype
));
4021 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4022 if we failed the caller should emit a normal call, otherwise try to get
4023 the result in TARGET, if convenient. */
4026 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4028 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4031 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4032 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4033 if (cmpstr_icode
!= CODE_FOR_nothing
|| cmpstrn_icode
!= CODE_FOR_nothing
)
4035 rtx arg1_rtx
, arg2_rtx
;
4037 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4038 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4039 rtx result
= NULL_RTX
;
4041 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4042 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4044 /* If we don't have POINTER_TYPE, call the function. */
4045 if (arg1_align
== 0 || arg2_align
== 0)
4048 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4049 arg1
= builtin_save_expr (arg1
);
4050 arg2
= builtin_save_expr (arg2
);
4052 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4053 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4055 /* Try to call cmpstrsi. */
4056 if (cmpstr_icode
!= CODE_FOR_nothing
)
4057 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4058 MIN (arg1_align
, arg2_align
));
4060 /* Try to determine at least one length and call cmpstrnsi. */
4061 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4066 tree len1
= c_strlen (arg1
, 1);
4067 tree len2
= c_strlen (arg2
, 1);
4070 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4072 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4074 /* If we don't have a constant length for the first, use the length
4075 of the second, if we know it. We don't require a constant for
4076 this case; some cost analysis could be done if both are available
4077 but neither is constant. For now, assume they're equally cheap,
4078 unless one has side effects. If both strings have constant lengths,
4085 else if (TREE_SIDE_EFFECTS (len1
))
4087 else if (TREE_SIDE_EFFECTS (len2
))
4089 else if (TREE_CODE (len1
) != INTEGER_CST
)
4091 else if (TREE_CODE (len2
) != INTEGER_CST
)
4093 else if (tree_int_cst_lt (len1
, len2
))
4098 /* If both arguments have side effects, we cannot optimize. */
4099 if (len
&& !TREE_SIDE_EFFECTS (len
))
4101 arg3_rtx
= expand_normal (len
);
4102 result
= expand_cmpstrn_or_cmpmem
4103 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4104 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4110 /* Return the value in the proper mode for this function. */
4111 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4112 if (GET_MODE (result
) == mode
)
4115 return convert_to_mode (mode
, result
, 0);
4116 convert_move (target
, result
, 0);
4120 /* Expand the library call ourselves using a stabilized argument
4121 list to avoid re-evaluating the function's arguments twice. */
4122 fndecl
= get_callee_fndecl (exp
);
4123 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4124 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4125 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4126 return expand_call (fn
, target
, target
== const0_rtx
);
4131 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4132 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4133 the result in TARGET, if convenient. */
4136 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4137 ATTRIBUTE_UNUSED machine_mode mode
)
4139 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
4141 if (!validate_arglist (exp
,
4142 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4145 /* If c_strlen can determine an expression for one of the string
4146 lengths, and it doesn't have side effects, then emit cmpstrnsi
4147 using length MIN(strlen(string)+1, arg3). */
4148 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4149 if (cmpstrn_icode
!= CODE_FOR_nothing
)
4151 tree len
, len1
, len2
;
4152 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4155 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4156 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4157 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4159 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4160 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4162 len1
= c_strlen (arg1
, 1);
4163 len2
= c_strlen (arg2
, 1);
4166 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4168 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4170 /* If we don't have a constant length for the first, use the length
4171 of the second, if we know it. We don't require a constant for
4172 this case; some cost analysis could be done if both are available
4173 but neither is constant. For now, assume they're equally cheap,
4174 unless one has side effects. If both strings have constant lengths,
4181 else if (TREE_SIDE_EFFECTS (len1
))
4183 else if (TREE_SIDE_EFFECTS (len2
))
4185 else if (TREE_CODE (len1
) != INTEGER_CST
)
4187 else if (TREE_CODE (len2
) != INTEGER_CST
)
4189 else if (tree_int_cst_lt (len1
, len2
))
4194 /* If both arguments have side effects, we cannot optimize. */
4195 if (!len
|| TREE_SIDE_EFFECTS (len
))
4198 /* The actual new length parameter is MIN(len,arg3). */
4199 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
4200 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
4202 /* If we don't have POINTER_TYPE, call the function. */
4203 if (arg1_align
== 0 || arg2_align
== 0)
4206 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4207 arg1
= builtin_save_expr (arg1
);
4208 arg2
= builtin_save_expr (arg2
);
4209 len
= builtin_save_expr (len
);
4211 arg1_rtx
= get_memory_rtx (arg1
, len
);
4212 arg2_rtx
= get_memory_rtx (arg2
, len
);
4213 arg3_rtx
= expand_normal (len
);
4214 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4215 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4216 MIN (arg1_align
, arg2_align
));
4219 /* Return the value in the proper mode for this function. */
4220 mode
= TYPE_MODE (TREE_TYPE (exp
));
4221 if (GET_MODE (result
) == mode
)
4224 return convert_to_mode (mode
, result
, 0);
4225 convert_move (target
, result
, 0);
4229 /* Expand the library call ourselves using a stabilized argument
4230 list to avoid re-evaluating the function's arguments twice. */
4231 fndecl
= get_callee_fndecl (exp
);
4232 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
4234 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4235 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4236 return expand_call (fn
, target
, target
== const0_rtx
);
4241 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4242 if that's convenient. */
4245 expand_builtin_saveregs (void)
4250 /* Don't do __builtin_saveregs more than once in a function.
4251 Save the result of the first call and reuse it. */
4252 if (saveregs_value
!= 0)
4253 return saveregs_value
;
4255 /* When this function is called, it means that registers must be
4256 saved on entry to this function. So we migrate the call to the
4257 first insn of this function. */
4261 /* Do whatever the machine needs done in this case. */
4262 val
= targetm
.calls
.expand_builtin_saveregs ();
4267 saveregs_value
= val
;
4269 /* Put the insns after the NOTE that starts the function. If this
4270 is inside a start_sequence, make the outer-level insn chain current, so
4271 the code is placed at the start of the function. */
4272 push_topmost_sequence ();
4273 emit_insn_after (seq
, entry_of_function ());
4274 pop_topmost_sequence ();
4279 /* Expand a call to __builtin_next_arg. */
4282 expand_builtin_next_arg (void)
4284 /* Checking arguments is already done in fold_builtin_next_arg
4285 that must be called before this function. */
4286 return expand_binop (ptr_mode
, add_optab
,
4287 crtl
->args
.internal_arg_pointer
,
4288 crtl
->args
.arg_offset_rtx
,
4289 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4292 /* Make it easier for the backends by protecting the valist argument
4293 from multiple evaluations. */
4296 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4298 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4300 /* The current way of determining the type of valist is completely
4301 bogus. We should have the information on the va builtin instead. */
4303 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4305 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4307 if (TREE_SIDE_EFFECTS (valist
))
4308 valist
= save_expr (valist
);
4310 /* For this case, the backends will be expecting a pointer to
4311 vatype, but it's possible we've actually been given an array
4312 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4314 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4316 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4317 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4322 tree pt
= build_pointer_type (vatype
);
4326 if (! TREE_SIDE_EFFECTS (valist
))
4329 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4330 TREE_SIDE_EFFECTS (valist
) = 1;
4333 if (TREE_SIDE_EFFECTS (valist
))
4334 valist
= save_expr (valist
);
4335 valist
= fold_build2_loc (loc
, MEM_REF
,
4336 vatype
, valist
, build_int_cst (pt
, 0));
4342 /* The "standard" definition of va_list is void*. */
4345 std_build_builtin_va_list (void)
4347 return ptr_type_node
;
4350 /* The "standard" abi va_list is va_list_type_node. */
4353 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED
)
4355 return va_list_type_node
;
4358 /* The "standard" type of va_list is va_list_type_node. */
4361 std_canonical_va_list_type (tree type
)
4365 if (INDIRECT_REF_P (type
))
4366 type
= TREE_TYPE (type
);
4367 else if (POINTER_TYPE_P (type
) && POINTER_TYPE_P (TREE_TYPE (type
)))
4368 type
= TREE_TYPE (type
);
4369 wtype
= va_list_type_node
;
4371 /* Treat structure va_list types. */
4372 if (TREE_CODE (wtype
) == RECORD_TYPE
&& POINTER_TYPE_P (htype
))
4373 htype
= TREE_TYPE (htype
);
4374 else if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4376 /* If va_list is an array type, the argument may have decayed
4377 to a pointer type, e.g. by being passed to another function.
4378 In that case, unwrap both types so that we can compare the
4379 underlying records. */
4380 if (TREE_CODE (htype
) == ARRAY_TYPE
4381 || POINTER_TYPE_P (htype
))
4383 wtype
= TREE_TYPE (wtype
);
4384 htype
= TREE_TYPE (htype
);
4387 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4388 return va_list_type_node
;
4393 /* The "standard" implementation of va_start: just assign `nextarg' to
4397 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4399 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4400 convert_move (va_r
, nextarg
, 0);
4402 /* We do not have any valid bounds for the pointer, so
4403 just store zero bounds for it. */
4404 if (chkp_function_instrumented_p (current_function_decl
))
4405 chkp_expand_bounds_reset_for_mem (valist
,
4406 make_tree (TREE_TYPE (valist
),
4410 /* Expand EXP, a call to __builtin_va_start. */
4413 expand_builtin_va_start (tree exp
)
4417 location_t loc
= EXPR_LOCATION (exp
);
4419 if (call_expr_nargs (exp
) < 2)
4421 error_at (loc
, "too few arguments to function %<va_start%>");
4425 if (fold_builtin_next_arg (exp
, true))
4428 nextarg
= expand_builtin_next_arg ();
4429 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4431 if (targetm
.expand_builtin_va_start
)
4432 targetm
.expand_builtin_va_start (valist
, nextarg
);
4434 std_expand_builtin_va_start (valist
, nextarg
);
4439 /* Expand EXP, a call to __builtin_va_end. */
4442 expand_builtin_va_end (tree exp
)
4444 tree valist
= CALL_EXPR_ARG (exp
, 0);
4446 /* Evaluate for side effects, if needed. I hate macros that don't
4448 if (TREE_SIDE_EFFECTS (valist
))
4449 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4454 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4455 builtin rather than just as an assignment in stdarg.h because of the
4456 nastiness of array-type va_list types. */
4459 expand_builtin_va_copy (tree exp
)
4462 location_t loc
= EXPR_LOCATION (exp
);
4464 dst
= CALL_EXPR_ARG (exp
, 0);
4465 src
= CALL_EXPR_ARG (exp
, 1);
4467 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4468 src
= stabilize_va_list_loc (loc
, src
, 0);
4470 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4472 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4474 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4475 TREE_SIDE_EFFECTS (t
) = 1;
4476 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4480 rtx dstb
, srcb
, size
;
4482 /* Evaluate to pointers. */
4483 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4484 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4485 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4486 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4488 dstb
= convert_memory_address (Pmode
, dstb
);
4489 srcb
= convert_memory_address (Pmode
, srcb
);
4491 /* "Dereference" to BLKmode memories. */
4492 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4493 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4494 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4495 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4496 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4497 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4500 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4506 /* Expand a call to one of the builtin functions __builtin_frame_address or
4507 __builtin_return_address. */
4510 expand_builtin_frame_address (tree fndecl
, tree exp
)
4512 /* The argument must be a nonnegative integer constant.
4513 It counts the number of frames to scan up the stack.
4514 The value is either the frame pointer value or the return
4515 address saved in that frame. */
4516 if (call_expr_nargs (exp
) == 0)
4517 /* Warning about missing arg was already issued. */
4519 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4521 error ("invalid argument to %qD", fndecl
);
4526 /* Number of frames to scan up the stack. */
4527 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4529 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4531 /* Some ports cannot access arbitrary stack frames. */
4534 warning (0, "unsupported argument to %qD", fndecl
);
4540 /* Warn since no effort is made to ensure that any frame
4541 beyond the current one exists or can be safely reached. */
4542 warning (OPT_Wframe_address
, "calling %qD with "
4543 "a nonzero argument is unsafe", fndecl
);
4546 /* For __builtin_frame_address, return what we've got. */
4547 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4551 && ! CONSTANT_P (tem
))
4552 tem
= copy_addr_to_reg (tem
);
4557 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4558 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4559 is the same as for allocate_dynamic_stack_space. */
4562 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4568 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4569 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4572 = (alloca_with_align
4573 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4574 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4579 /* Compute the argument. */
4580 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4582 /* Compute the alignment. */
4583 align
= (alloca_with_align
4584 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4585 : BIGGEST_ALIGNMENT
);
4587 /* Allocate the desired space. */
4588 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4589 result
= convert_memory_address (ptr_mode
, result
);
4594 /* Expand a call to bswap builtin in EXP.
4595 Return NULL_RTX if a normal call should be emitted rather than expanding the
4596 function in-line. If convenient, the result should be placed in TARGET.
4597 SUBTARGET may be used as the target for computing one of EXP's operands. */
4600 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
4606 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4609 arg
= CALL_EXPR_ARG (exp
, 0);
4610 op0
= expand_expr (arg
,
4611 subtarget
&& GET_MODE (subtarget
) == target_mode
4612 ? subtarget
: NULL_RTX
,
4613 target_mode
, EXPAND_NORMAL
);
4614 if (GET_MODE (op0
) != target_mode
)
4615 op0
= convert_to_mode (target_mode
, op0
, 1);
4617 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4619 gcc_assert (target
);
4621 return convert_to_mode (target_mode
, target
, 1);
4624 /* Expand a call to a unary builtin in EXP.
4625 Return NULL_RTX if a normal call should be emitted rather than expanding the
4626 function in-line. If convenient, the result should be placed in TARGET.
4627 SUBTARGET may be used as the target for computing one of EXP's operands. */
4630 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
4631 rtx subtarget
, optab op_optab
)
4635 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4638 /* Compute the argument. */
4639 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4641 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4642 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4643 VOIDmode
, EXPAND_NORMAL
);
4644 /* Compute op, into TARGET if possible.
4645 Set TARGET to wherever the result comes back. */
4646 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4647 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4648 gcc_assert (target
);
4650 return convert_to_mode (target_mode
, target
, 0);
4653 /* Expand a call to __builtin_expect. We just return our argument
4654 as the builtin_expect semantic should've been already executed by
4655 tree branch prediction pass. */
4658 expand_builtin_expect (tree exp
, rtx target
)
4662 if (call_expr_nargs (exp
) < 2)
4664 arg
= CALL_EXPR_ARG (exp
, 0);
4666 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4667 /* When guessing was done, the hints should be already stripped away. */
4668 gcc_assert (!flag_guess_branch_prob
4669 || optimize
== 0 || seen_error ());
4673 /* Expand a call to __builtin_assume_aligned. We just return our first
4674 argument as the builtin_assume_aligned semantic should've been already
4678 expand_builtin_assume_aligned (tree exp
, rtx target
)
4680 if (call_expr_nargs (exp
) < 2)
4682 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4684 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4685 && (call_expr_nargs (exp
) < 3
4686 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
4691 expand_builtin_trap (void)
4693 if (targetm
.have_trap ())
4695 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
4696 /* For trap insns when not accumulating outgoing args force
4697 REG_ARGS_SIZE note to prevent crossjumping of calls with
4698 different args sizes. */
4699 if (!ACCUMULATE_OUTGOING_ARGS
)
4700 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4703 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4718 /* Expand EXP, a call to fabs, fabsf or fabsl.
4719 Return NULL_RTX if a normal call should be emitted rather than expanding
4720 the function inline. If convenient, the result should be placed
4721 in TARGET. SUBTARGET may be used as the target for computing
4725 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
4731 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4734 arg
= CALL_EXPR_ARG (exp
, 0);
4735 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
4736 mode
= TYPE_MODE (TREE_TYPE (arg
));
4737 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4738 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
4741 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4742 Return NULL is a normal call should be emitted rather than expanding the
4743 function inline. If convenient, the result should be placed in TARGET.
4744 SUBTARGET may be used as the target for computing the operand. */
4747 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
4752 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
4755 arg
= CALL_EXPR_ARG (exp
, 0);
4756 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
4758 arg
= CALL_EXPR_ARG (exp
, 1);
4759 op1
= expand_normal (arg
);
4761 return expand_copysign (op0
, op1
, target
);
4764 /* Expand a call to __builtin___clear_cache. */
4767 expand_builtin___clear_cache (tree exp
)
4769 if (!targetm
.code_for_clear_cache
)
4771 #ifdef CLEAR_INSN_CACHE
4772 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4773 does something. Just do the default expansion to a call to
4777 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4778 does nothing. There is no need to call it. Do nothing. */
4780 #endif /* CLEAR_INSN_CACHE */
4783 /* We have a "clear_cache" insn, and it will handle everything. */
4785 rtx begin_rtx
, end_rtx
;
4787 /* We must not expand to a library call. If we did, any
4788 fallback library function in libgcc that might contain a call to
4789 __builtin___clear_cache() would recurse infinitely. */
4790 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4792 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4796 if (targetm
.have_clear_cache ())
4798 struct expand_operand ops
[2];
4800 begin
= CALL_EXPR_ARG (exp
, 0);
4801 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4803 end
= CALL_EXPR_ARG (exp
, 1);
4804 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4806 create_address_operand (&ops
[0], begin_rtx
);
4807 create_address_operand (&ops
[1], end_rtx
);
4808 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
4814 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4817 round_trampoline_addr (rtx tramp
)
4819 rtx temp
, addend
, mask
;
4821 /* If we don't need too much alignment, we'll have been guaranteed
4822 proper alignment by get_trampoline_type. */
4823 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
4826 /* Round address up to desired boundary. */
4827 temp
= gen_reg_rtx (Pmode
);
4828 addend
= gen_int_mode (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1, Pmode
);
4829 mask
= gen_int_mode (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
, Pmode
);
4831 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
4832 temp
, 0, OPTAB_LIB_WIDEN
);
4833 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
4834 temp
, 0, OPTAB_LIB_WIDEN
);
4840 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4842 tree t_tramp
, t_func
, t_chain
;
4843 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4845 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4846 POINTER_TYPE
, VOID_TYPE
))
4849 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4850 t_func
= CALL_EXPR_ARG (exp
, 1);
4851 t_chain
= CALL_EXPR_ARG (exp
, 2);
4853 r_tramp
= expand_normal (t_tramp
);
4854 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4855 MEM_NOTRAP_P (m_tramp
) = 1;
4857 /* If ONSTACK, the TRAMP argument should be the address of a field
4858 within the local function's FRAME decl. Either way, let's see if
4859 we can fill in the MEM_ATTRs for this memory. */
4860 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4861 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4863 /* Creator of a heap trampoline is responsible for making sure the
4864 address is aligned to at least STACK_BOUNDARY. Normally malloc
4865 will ensure this anyhow. */
4866 tmp
= round_trampoline_addr (r_tramp
);
4869 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4870 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4871 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4874 /* The FUNC argument should be the address of the nested function.
4875 Extract the actual function decl to pass to the hook. */
4876 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4877 t_func
= TREE_OPERAND (t_func
, 0);
4878 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4880 r_chain
= expand_normal (t_chain
);
4882 /* Generate insns to initialize the trampoline. */
4883 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4887 trampolines_created
= 1;
4889 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4890 "trampoline generated for nested function %qD", t_func
);
4897 expand_builtin_adjust_trampoline (tree exp
)
4901 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4904 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4905 tramp
= round_trampoline_addr (tramp
);
4906 if (targetm
.calls
.trampoline_adjust_address
)
4907 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4912 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4913 function. The function first checks whether the back end provides
4914 an insn to implement signbit for the respective mode. If not, it
4915 checks whether the floating point format of the value is such that
4916 the sign bit can be extracted. If that is not the case, error out.
4917 EXP is the expression that is a call to the builtin function; if
4918 convenient, the result should be placed in TARGET. */
4920 expand_builtin_signbit (tree exp
, rtx target
)
4922 const struct real_format
*fmt
;
4923 machine_mode fmode
, imode
, rmode
;
4926 enum insn_code icode
;
4928 location_t loc
= EXPR_LOCATION (exp
);
4930 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4933 arg
= CALL_EXPR_ARG (exp
, 0);
4934 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4935 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4936 fmt
= REAL_MODE_FORMAT (fmode
);
4938 arg
= builtin_save_expr (arg
);
4940 /* Expand the argument yielding a RTX expression. */
4941 temp
= expand_normal (arg
);
4943 /* Check if the back end provides an insn that handles signbit for the
4945 icode
= optab_handler (signbit_optab
, fmode
);
4946 if (icode
!= CODE_FOR_nothing
)
4948 rtx_insn
*last
= get_last_insn ();
4949 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4950 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4952 delete_insns_since (last
);
4955 /* For floating point formats without a sign bit, implement signbit
4957 bitpos
= fmt
->signbit_ro
;
4960 /* But we can't do this if the format supports signed zero. */
4961 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
4963 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4964 build_real (TREE_TYPE (arg
), dconst0
));
4965 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4968 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4970 imode
= int_mode_for_mode (fmode
);
4971 gcc_assert (imode
!= BLKmode
);
4972 temp
= gen_lowpart (imode
, temp
);
4977 /* Handle targets with different FP word orders. */
4978 if (FLOAT_WORDS_BIG_ENDIAN
)
4979 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4981 word
= bitpos
/ BITS_PER_WORD
;
4982 temp
= operand_subword_force (temp
, word
, fmode
);
4983 bitpos
= bitpos
% BITS_PER_WORD
;
4986 /* Force the intermediate word_mode (or narrower) result into a
4987 register. This avoids attempting to create paradoxical SUBREGs
4988 of floating point modes below. */
4989 temp
= force_reg (imode
, temp
);
4991 /* If the bitpos is within the "result mode" lowpart, the operation
4992 can be implement with a single bitwise AND. Otherwise, we need
4993 a right shift and an AND. */
4995 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4997 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
4999 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5000 temp
= gen_lowpart (rmode
, temp
);
5001 temp
= expand_binop (rmode
, and_optab
, temp
,
5002 immed_wide_int_const (mask
, rmode
),
5003 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5007 /* Perform a logical right shift to place the signbit in the least
5008 significant bit, then truncate the result to the desired mode
5009 and mask just this bit. */
5010 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5011 temp
= gen_lowpart (rmode
, temp
);
5012 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5013 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5019 /* Expand fork or exec calls. TARGET is the desired target of the
5020 call. EXP is the call. FN is the
5021 identificator of the actual function. IGNORE is nonzero if the
5022 value is to be ignored. */
5025 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5030 /* If we are not profiling, just call the function. */
5031 if (!profile_arc_flag
)
5034 /* Otherwise call the wrapper. This should be equivalent for the rest of
5035 compiler, so the code does not diverge, and the wrapper may run the
5036 code necessary for keeping the profiling sane. */
5038 switch (DECL_FUNCTION_CODE (fn
))
5041 id
= get_identifier ("__gcov_fork");
5044 case BUILT_IN_EXECL
:
5045 id
= get_identifier ("__gcov_execl");
5048 case BUILT_IN_EXECV
:
5049 id
= get_identifier ("__gcov_execv");
5052 case BUILT_IN_EXECLP
:
5053 id
= get_identifier ("__gcov_execlp");
5056 case BUILT_IN_EXECLE
:
5057 id
= get_identifier ("__gcov_execle");
5060 case BUILT_IN_EXECVP
:
5061 id
= get_identifier ("__gcov_execvp");
5064 case BUILT_IN_EXECVE
:
5065 id
= get_identifier ("__gcov_execve");
5072 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5073 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5074 DECL_EXTERNAL (decl
) = 1;
5075 TREE_PUBLIC (decl
) = 1;
5076 DECL_ARTIFICIAL (decl
) = 1;
5077 TREE_NOTHROW (decl
) = 1;
5078 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5079 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5080 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5081 return expand_call (call
, target
, ignore
);
5086 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5087 the pointer in these functions is void*, the tree optimizers may remove
5088 casts. The mode computed in expand_builtin isn't reliable either, due
5089 to __sync_bool_compare_and_swap.
5091 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5092 group of builtins. This gives us log2 of the mode size. */
5094 static inline machine_mode
5095 get_builtin_sync_mode (int fcode_diff
)
5097 /* The size is not negotiable, so ask not to get BLKmode in return
5098 if the target indicates that a smaller size would be better. */
5099 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5102 /* Expand the memory expression LOC and return the appropriate memory operand
5103 for the builtin_sync operations. */
5106 get_builtin_sync_mem (tree loc
, machine_mode mode
)
5110 addr
= expand_expr (loc
, NULL_RTX
, ptr_mode
, EXPAND_SUM
);
5111 addr
= convert_memory_address (Pmode
, addr
);
5113 /* Note that we explicitly do not want any alias information for this
5114 memory, so that we kill all other live memories. Otherwise we don't
5115 satisfy the full barrier semantics of the intrinsic. */
5116 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5118 /* The alignment needs to be at least according to that of the mode. */
5119 set_mem_align (mem
, MAX (GET_MODE_ALIGNMENT (mode
),
5120 get_pointer_alignment (loc
)));
5121 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5122 MEM_VOLATILE_P (mem
) = 1;
5127 /* Make sure an argument is in the right mode.
5128 EXP is the tree argument.
5129 MODE is the mode it should be in. */
5132 expand_expr_force_mode (tree exp
, machine_mode mode
)
5135 machine_mode old_mode
;
5137 val
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_NORMAL
);
5138 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5139 of CONST_INTs, where we know the old_mode only from the call argument. */
5141 old_mode
= GET_MODE (val
);
5142 if (old_mode
== VOIDmode
)
5143 old_mode
= TYPE_MODE (TREE_TYPE (exp
));
5144 val
= convert_modes (mode
, old_mode
, val
, 1);
5149 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5150 EXP is the CALL_EXPR. CODE is the rtx code
5151 that corresponds to the arithmetic or logical operation from the name;
5152 an exception here is that NOT actually means NAND. TARGET is an optional
5153 place for us to store the results; AFTER is true if this is the
5154 fetch_and_xxx form. */
5157 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
5158 enum rtx_code code
, bool after
,
5162 location_t loc
= EXPR_LOCATION (exp
);
5164 if (code
== NOT
&& warn_sync_nand
)
5166 tree fndecl
= get_callee_fndecl (exp
);
5167 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5169 static bool warned_f_a_n
, warned_n_a_f
;
5173 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
5174 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
5175 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
5176 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
5177 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
5181 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
5182 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5183 warned_f_a_n
= true;
5186 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
5187 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
5188 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
5189 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
5190 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
5194 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
5195 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
5196 warned_n_a_f
= true;
5204 /* Expand the operands. */
5205 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5206 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5208 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
5212 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5213 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5214 true if this is the boolean form. TARGET is a place for us to store the
5215 results; this is NOT optional if IS_BOOL is true. */
5218 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
5219 bool is_bool
, rtx target
)
5221 rtx old_val
, new_val
, mem
;
5224 /* Expand the operands. */
5225 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5226 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5227 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5229 pbool
= poval
= NULL
;
5230 if (target
!= const0_rtx
)
5237 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
5238 false, MEMMODEL_SYNC_SEQ_CST
,
5239 MEMMODEL_SYNC_SEQ_CST
))
5245 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5246 general form is actually an atomic exchange, and some targets only
5247 support a reduced form with the second argument being a constant 1.
5248 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5252 expand_builtin_sync_lock_test_and_set (machine_mode mode
, tree exp
,
5257 /* Expand the operands. */
5258 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5259 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5261 return expand_sync_lock_test_and_set (target
, mem
, val
);
5264 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5267 expand_builtin_sync_lock_release (machine_mode mode
, tree exp
)
5271 /* Expand the operands. */
5272 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5274 expand_atomic_store (mem
, const0_rtx
, MEMMODEL_SYNC_RELEASE
, true);
5277 /* Given an integer representing an ``enum memmodel'', verify its
5278 correctness and return the memory model enum. */
5280 static enum memmodel
5281 get_memmodel (tree exp
)
5284 unsigned HOST_WIDE_INT val
;
5286 /* If the parameter is not a constant, it's a run time value so we'll just
5287 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5288 if (TREE_CODE (exp
) != INTEGER_CST
)
5289 return MEMMODEL_SEQ_CST
;
5291 op
= expand_normal (exp
);
5294 if (targetm
.memmodel_check
)
5295 val
= targetm
.memmodel_check (val
);
5296 else if (val
& ~MEMMODEL_MASK
)
5298 warning (OPT_Winvalid_memory_model
,
5299 "Unknown architecture specifier in memory model to builtin.");
5300 return MEMMODEL_SEQ_CST
;
5303 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5304 if (memmodel_base (val
) >= MEMMODEL_LAST
)
5306 warning (OPT_Winvalid_memory_model
,
5307 "invalid memory model argument to builtin");
5308 return MEMMODEL_SEQ_CST
;
5311 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5312 be conservative and promote consume to acquire. */
5313 if (val
== MEMMODEL_CONSUME
)
5314 val
= MEMMODEL_ACQUIRE
;
5316 return (enum memmodel
) val
;
5319 /* Expand the __atomic_exchange intrinsic:
5320 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5321 EXP is the CALL_EXPR.
5322 TARGET is an optional place for us to store the results. */
5325 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
5328 enum memmodel model
;
5330 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5332 if (!flag_inline_atomics
)
5335 /* Expand the operands. */
5336 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5337 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5339 return expand_atomic_exchange (target
, mem
, val
, model
);
5342 /* Expand the __atomic_compare_exchange intrinsic:
5343 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5344 TYPE desired, BOOL weak,
5345 enum memmodel success,
5346 enum memmodel failure)
5347 EXP is the CALL_EXPR.
5348 TARGET is an optional place for us to store the results. */
5351 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5354 rtx expect
, desired
, mem
, oldval
;
5355 rtx_code_label
*label
;
5356 enum memmodel success
, failure
;
5360 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5361 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5363 if (failure
> success
)
5365 warning (OPT_Winvalid_memory_model
,
5366 "failure memory model cannot be stronger than success memory "
5367 "model for %<__atomic_compare_exchange%>");
5368 success
= MEMMODEL_SEQ_CST
;
5371 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5373 warning (OPT_Winvalid_memory_model
,
5374 "invalid failure memory model for "
5375 "%<__atomic_compare_exchange%>");
5376 failure
= MEMMODEL_SEQ_CST
;
5377 success
= MEMMODEL_SEQ_CST
;
5381 if (!flag_inline_atomics
)
5384 /* Expand the operands. */
5385 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5387 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5388 expect
= convert_memory_address (Pmode
, expect
);
5389 expect
= gen_rtx_MEM (mode
, expect
);
5390 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5392 weak
= CALL_EXPR_ARG (exp
, 3);
5394 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5397 if (target
== const0_rtx
)
5400 /* Lest the rtl backend create a race condition with an imporoper store
5401 to memory, always create a new pseudo for OLDVAL. */
5404 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5405 is_weak
, success
, failure
))
5408 /* Conditionally store back to EXPECT, lest we create a race condition
5409 with an improper store to memory. */
5410 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5411 the normal case where EXPECT is totally private, i.e. a register. At
5412 which point the store can be unconditional. */
5413 label
= gen_label_rtx ();
5414 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
5415 GET_MODE (target
), 1, label
);
5416 emit_move_insn (expect
, oldval
);
5422 /* Expand the __atomic_load intrinsic:
5423 TYPE __atomic_load (TYPE *object, enum memmodel)
5424 EXP is the CALL_EXPR.
5425 TARGET is an optional place for us to store the results. */
5428 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
5431 enum memmodel model
;
5433 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5434 if (is_mm_release (model
) || is_mm_acq_rel (model
))
5436 warning (OPT_Winvalid_memory_model
,
5437 "invalid memory model for %<__atomic_load%>");
5438 model
= MEMMODEL_SEQ_CST
;
5441 if (!flag_inline_atomics
)
5444 /* Expand the operand. */
5445 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5447 return expand_atomic_load (target
, mem
, model
);
5451 /* Expand the __atomic_store intrinsic:
5452 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5453 EXP is the CALL_EXPR.
5454 TARGET is an optional place for us to store the results. */
5457 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
5460 enum memmodel model
;
5462 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5463 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
5464 || is_mm_release (model
)))
5466 warning (OPT_Winvalid_memory_model
,
5467 "invalid memory model for %<__atomic_store%>");
5468 model
= MEMMODEL_SEQ_CST
;
5471 if (!flag_inline_atomics
)
5474 /* Expand the operands. */
5475 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5476 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5478 return expand_atomic_store (mem
, val
, model
, false);
5481 /* Expand the __atomic_fetch_XXX intrinsic:
5482 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5483 EXP is the CALL_EXPR.
5484 TARGET is an optional place for us to store the results.
5485 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5486 FETCH_AFTER is true if returning the result of the operation.
5487 FETCH_AFTER is false if returning the value before the operation.
5488 IGNORE is true if the result is not used.
5489 EXT_CALL is the correct builtin for an external call if this cannot be
5490 resolved to an instruction sequence. */
5493 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
5494 enum rtx_code code
, bool fetch_after
,
5495 bool ignore
, enum built_in_function ext_call
)
5498 enum memmodel model
;
5502 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5504 /* Expand the operands. */
5505 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5506 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5508 /* Only try generating instructions if inlining is turned on. */
5509 if (flag_inline_atomics
)
5511 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5516 /* Return if a different routine isn't needed for the library call. */
5517 if (ext_call
== BUILT_IN_NONE
)
5520 /* Change the call to the specified function. */
5521 fndecl
= get_callee_fndecl (exp
);
5522 addr
= CALL_EXPR_FN (exp
);
5525 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5526 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5528 /* Expand the call here so we can emit trailing code. */
5529 ret
= expand_call (exp
, target
, ignore
);
5531 /* Replace the original function just in case it matters. */
5532 TREE_OPERAND (addr
, 0) = fndecl
;
5534 /* Then issue the arithmetic correction to return the right result. */
5539 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5541 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5544 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
5550 /* Expand an atomic clear operation.
5551 void _atomic_clear (BOOL *obj, enum memmodel)
5552 EXP is the call expression. */
5555 expand_builtin_atomic_clear (tree exp
)
5559 enum memmodel model
;
5561 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5562 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5563 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5565 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
5567 warning (OPT_Winvalid_memory_model
,
5568 "invalid memory model for %<__atomic_store%>");
5569 model
= MEMMODEL_SEQ_CST
;
5572 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5573 Failing that, a store is issued by __atomic_store. The only way this can
5574 fail is if the bool type is larger than a word size. Unlikely, but
5575 handle it anyway for completeness. Assume a single threaded model since
5576 there is no atomic support in this case, and no barriers are required. */
5577 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5579 emit_move_insn (mem
, const0_rtx
);
5583 /* Expand an atomic test_and_set operation.
5584 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5585 EXP is the call expression. */
5588 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5591 enum memmodel model
;
5594 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5595 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5596 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5598 return expand_atomic_test_and_set (target
, mem
, model
);
5602 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5603 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5606 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5610 unsigned int mode_align
, type_align
;
5612 if (TREE_CODE (arg0
) != INTEGER_CST
)
5615 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5616 mode
= mode_for_size (size
, MODE_INT
, 0);
5617 mode_align
= GET_MODE_ALIGNMENT (mode
);
5619 if (TREE_CODE (arg1
) == INTEGER_CST
)
5621 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
5623 /* Either this argument is null, or it's a fake pointer encoding
5624 the alignment of the object. */
5626 val
*= BITS_PER_UNIT
;
5628 if (val
== 0 || mode_align
< val
)
5629 type_align
= mode_align
;
5635 tree ttype
= TREE_TYPE (arg1
);
5637 /* This function is usually invoked and folded immediately by the front
5638 end before anything else has a chance to look at it. The pointer
5639 parameter at this point is usually cast to a void *, so check for that
5640 and look past the cast. */
5641 if (CONVERT_EXPR_P (arg1
) && POINTER_TYPE_P (ttype
)
5642 && VOID_TYPE_P (TREE_TYPE (ttype
)))
5643 arg1
= TREE_OPERAND (arg1
, 0);
5645 ttype
= TREE_TYPE (arg1
);
5646 gcc_assert (POINTER_TYPE_P (ttype
));
5648 /* Get the underlying type of the object. */
5649 ttype
= TREE_TYPE (ttype
);
5650 type_align
= TYPE_ALIGN (ttype
);
5653 /* If the object has smaller alignment, the lock free routines cannot
5655 if (type_align
< mode_align
)
5656 return boolean_false_node
;
5658 /* Check if a compare_and_swap pattern exists for the mode which represents
5659 the required size. The pattern is not allowed to fail, so the existence
5660 of the pattern indicates support is present. */
5661 if (can_compare_and_swap_p (mode
, true))
5662 return boolean_true_node
;
5664 return boolean_false_node
;
5667 /* Return true if the parameters to call EXP represent an object which will
5668 always generate lock free instructions. The first argument represents the
5669 size of the object, and the second parameter is a pointer to the object
5670 itself. If NULL is passed for the object, then the result is based on
5671 typical alignment for an object of the specified size. Otherwise return
5675 expand_builtin_atomic_always_lock_free (tree exp
)
5678 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5679 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5681 if (TREE_CODE (arg0
) != INTEGER_CST
)
5683 error ("non-constant argument 1 to __atomic_always_lock_free");
5687 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5688 if (size
== boolean_true_node
)
5693 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5694 is lock free on this architecture. */
5697 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5699 if (!flag_inline_atomics
)
5702 /* If it isn't always lock free, don't generate a result. */
5703 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5704 return boolean_true_node
;
5709 /* Return true if the parameters to call EXP represent an object which will
5710 always generate lock free instructions. The first argument represents the
5711 size of the object, and the second parameter is a pointer to the object
5712 itself. If NULL is passed for the object, then the result is based on
5713 typical alignment for an object of the specified size. Otherwise return
5717 expand_builtin_atomic_is_lock_free (tree exp
)
5720 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5721 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5723 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5725 error ("non-integer argument 1 to __atomic_is_lock_free");
5729 if (!flag_inline_atomics
)
5732 /* If the value is known at compile time, return the RTX for it. */
5733 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5734 if (size
== boolean_true_node
)
5740 /* Expand the __atomic_thread_fence intrinsic:
5741 void __atomic_thread_fence (enum memmodel)
5742 EXP is the CALL_EXPR. */
5745 expand_builtin_atomic_thread_fence (tree exp
)
5747 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5748 expand_mem_thread_fence (model
);
5751 /* Expand the __atomic_signal_fence intrinsic:
5752 void __atomic_signal_fence (enum memmodel)
5753 EXP is the CALL_EXPR. */
5756 expand_builtin_atomic_signal_fence (tree exp
)
5758 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
5759 expand_mem_signal_fence (model
);
5762 /* Expand the __sync_synchronize intrinsic. */
5765 expand_builtin_sync_synchronize (void)
5767 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
5771 expand_builtin_thread_pointer (tree exp
, rtx target
)
5773 enum insn_code icode
;
5774 if (!validate_arglist (exp
, VOID_TYPE
))
5776 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5777 if (icode
!= CODE_FOR_nothing
)
5779 struct expand_operand op
;
5780 /* If the target is not sutitable then create a new target. */
5781 if (target
== NULL_RTX
5783 || GET_MODE (target
) != Pmode
)
5784 target
= gen_reg_rtx (Pmode
);
5785 create_output_operand (&op
, target
, Pmode
);
5786 expand_insn (icode
, 1, &op
);
5789 error ("__builtin_thread_pointer is not supported on this target");
5794 expand_builtin_set_thread_pointer (tree exp
)
5796 enum insn_code icode
;
5797 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5799 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5800 if (icode
!= CODE_FOR_nothing
)
5802 struct expand_operand op
;
5803 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5804 Pmode
, EXPAND_NORMAL
);
5805 create_input_operand (&op
, val
, Pmode
);
5806 expand_insn (icode
, 1, &op
);
5809 error ("__builtin_set_thread_pointer is not supported on this target");
5813 /* Emit code to restore the current value of stack. */
5816 expand_stack_restore (tree var
)
5819 rtx sa
= expand_normal (var
);
5821 sa
= convert_memory_address (Pmode
, sa
);
5823 prev
= get_last_insn ();
5824 emit_stack_restore (SAVE_BLOCK
, sa
);
5826 record_new_stack_level ();
5828 fixup_args_size_notes (prev
, get_last_insn (), 0);
5831 /* Emit code to save the current value of stack. */
5834 expand_stack_save (void)
5838 emit_stack_save (SAVE_BLOCK
, &ret
);
5843 /* Expand an expression EXP that calls a built-in function,
5844 with result going to TARGET if that's convenient
5845 (and in mode MODE if that's convenient).
5846 SUBTARGET may be used as the target for computing one of EXP's operands.
5847 IGNORE is nonzero if the value is to be ignored. */
5850 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
5853 tree fndecl
= get_callee_fndecl (exp
);
5854 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5855 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5858 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5859 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5861 /* When ASan is enabled, we don't want to expand some memory/string
5862 builtins and rely on libsanitizer's hooks. This allows us to avoid
5863 redundant checks and be sure, that possible overflow will be detected
5866 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
5867 return expand_call (exp
, target
, ignore
);
5869 /* When not optimizing, generate calls to library functions for a certain
5872 && !called_as_built_in (fndecl
)
5873 && fcode
!= BUILT_IN_FORK
5874 && fcode
!= BUILT_IN_EXECL
5875 && fcode
!= BUILT_IN_EXECV
5876 && fcode
!= BUILT_IN_EXECLP
5877 && fcode
!= BUILT_IN_EXECLE
5878 && fcode
!= BUILT_IN_EXECVP
5879 && fcode
!= BUILT_IN_EXECVE
5880 && fcode
!= BUILT_IN_ALLOCA
5881 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
5882 && fcode
!= BUILT_IN_FREE
5883 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
5884 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
5885 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
5886 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
5887 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5888 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
5889 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5890 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5891 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5892 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
5893 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
5894 && fcode
!= BUILT_IN_CHKP_BNDRET
)
5895 return expand_call (exp
, target
, ignore
);
5897 /* The built-in function expanders test for target == const0_rtx
5898 to determine whether the function's result will be ignored. */
5900 target
= const0_rtx
;
5902 /* If the result of a pure or const built-in function is ignored, and
5903 none of its arguments are volatile, we can avoid expanding the
5904 built-in call and just evaluate the arguments for side-effects. */
5905 if (target
== const0_rtx
5906 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
5907 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
5909 bool volatilep
= false;
5911 call_expr_arg_iterator iter
;
5913 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5914 if (TREE_THIS_VOLATILE (arg
))
5922 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
5923 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5928 /* expand_builtin_with_bounds is supposed to be used for
5929 instrumented builtin calls. */
5930 gcc_assert (!CALL_WITH_BOUNDS_P (exp
));
5934 CASE_FLT_FN (BUILT_IN_FABS
):
5935 case BUILT_IN_FABSD32
:
5936 case BUILT_IN_FABSD64
:
5937 case BUILT_IN_FABSD128
:
5938 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5943 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5944 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5949 /* Just do a normal library call if we were unable to fold
5951 CASE_FLT_FN (BUILT_IN_CABS
):
5954 CASE_FLT_FN (BUILT_IN_EXP
):
5955 CASE_FLT_FN (BUILT_IN_EXP10
):
5956 CASE_FLT_FN (BUILT_IN_POW10
):
5957 CASE_FLT_FN (BUILT_IN_EXP2
):
5958 CASE_FLT_FN (BUILT_IN_EXPM1
):
5959 CASE_FLT_FN (BUILT_IN_LOGB
):
5960 CASE_FLT_FN (BUILT_IN_LOG
):
5961 CASE_FLT_FN (BUILT_IN_LOG10
):
5962 CASE_FLT_FN (BUILT_IN_LOG2
):
5963 CASE_FLT_FN (BUILT_IN_LOG1P
):
5964 CASE_FLT_FN (BUILT_IN_TAN
):
5965 CASE_FLT_FN (BUILT_IN_ASIN
):
5966 CASE_FLT_FN (BUILT_IN_ACOS
):
5967 CASE_FLT_FN (BUILT_IN_ATAN
):
5968 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
5969 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5970 because of possible accuracy problems. */
5971 if (! flag_unsafe_math_optimizations
)
5973 CASE_FLT_FN (BUILT_IN_SQRT
):
5974 CASE_FLT_FN (BUILT_IN_FLOOR
):
5975 CASE_FLT_FN (BUILT_IN_CEIL
):
5976 CASE_FLT_FN (BUILT_IN_TRUNC
):
5977 CASE_FLT_FN (BUILT_IN_ROUND
):
5978 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5979 CASE_FLT_FN (BUILT_IN_RINT
):
5980 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5985 CASE_FLT_FN (BUILT_IN_FMA
):
5986 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5991 CASE_FLT_FN (BUILT_IN_ILOGB
):
5992 if (! flag_unsafe_math_optimizations
)
5994 CASE_FLT_FN (BUILT_IN_ISINF
):
5995 CASE_FLT_FN (BUILT_IN_FINITE
):
5996 case BUILT_IN_ISFINITE
:
5997 case BUILT_IN_ISNORMAL
:
5998 target
= expand_builtin_interclass_mathfn (exp
, target
);
6003 CASE_FLT_FN (BUILT_IN_ICEIL
):
6004 CASE_FLT_FN (BUILT_IN_LCEIL
):
6005 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6006 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6007 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6008 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6009 target
= expand_builtin_int_roundingfn (exp
, target
);
6014 CASE_FLT_FN (BUILT_IN_IRINT
):
6015 CASE_FLT_FN (BUILT_IN_LRINT
):
6016 CASE_FLT_FN (BUILT_IN_LLRINT
):
6017 CASE_FLT_FN (BUILT_IN_IROUND
):
6018 CASE_FLT_FN (BUILT_IN_LROUND
):
6019 CASE_FLT_FN (BUILT_IN_LLROUND
):
6020 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6025 CASE_FLT_FN (BUILT_IN_POWI
):
6026 target
= expand_builtin_powi (exp
, target
);
6031 CASE_FLT_FN (BUILT_IN_ATAN2
):
6032 CASE_FLT_FN (BUILT_IN_LDEXP
):
6033 CASE_FLT_FN (BUILT_IN_SCALB
):
6034 CASE_FLT_FN (BUILT_IN_SCALBN
):
6035 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6036 if (! flag_unsafe_math_optimizations
)
6039 CASE_FLT_FN (BUILT_IN_FMOD
):
6040 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6041 CASE_FLT_FN (BUILT_IN_DREM
):
6042 CASE_FLT_FN (BUILT_IN_POW
):
6043 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6048 CASE_FLT_FN (BUILT_IN_CEXPI
):
6049 target
= expand_builtin_cexpi (exp
, target
);
6050 gcc_assert (target
);
6053 CASE_FLT_FN (BUILT_IN_SIN
):
6054 CASE_FLT_FN (BUILT_IN_COS
):
6055 if (! flag_unsafe_math_optimizations
)
6057 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6062 CASE_FLT_FN (BUILT_IN_SINCOS
):
6063 if (! flag_unsafe_math_optimizations
)
6065 target
= expand_builtin_sincos (exp
);
6070 case BUILT_IN_APPLY_ARGS
:
6071 return expand_builtin_apply_args ();
6073 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6074 FUNCTION with a copy of the parameters described by
6075 ARGUMENTS, and ARGSIZE. It returns a block of memory
6076 allocated on the stack into which is stored all the registers
6077 that might possibly be used for returning the result of a
6078 function. ARGUMENTS is the value returned by
6079 __builtin_apply_args. ARGSIZE is the number of bytes of
6080 arguments that must be copied. ??? How should this value be
6081 computed? We'll also need a safe worst case value for varargs
6083 case BUILT_IN_APPLY
:
6084 if (!validate_arglist (exp
, POINTER_TYPE
,
6085 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6086 && !validate_arglist (exp
, REFERENCE_TYPE
,
6087 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6093 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6094 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6095 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6097 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6100 /* __builtin_return (RESULT) causes the function to return the
6101 value described by RESULT. RESULT is address of the block of
6102 memory returned by __builtin_apply. */
6103 case BUILT_IN_RETURN
:
6104 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6105 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6108 case BUILT_IN_SAVEREGS
:
6109 return expand_builtin_saveregs ();
6111 case BUILT_IN_VA_ARG_PACK
:
6112 /* All valid uses of __builtin_va_arg_pack () are removed during
6114 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6117 case BUILT_IN_VA_ARG_PACK_LEN
:
6118 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6120 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6123 /* Return the address of the first anonymous stack arg. */
6124 case BUILT_IN_NEXT_ARG
:
6125 if (fold_builtin_next_arg (exp
, false))
6127 return expand_builtin_next_arg ();
6129 case BUILT_IN_CLEAR_CACHE
:
6130 target
= expand_builtin___clear_cache (exp
);
6135 case BUILT_IN_CLASSIFY_TYPE
:
6136 return expand_builtin_classify_type (exp
);
6138 case BUILT_IN_CONSTANT_P
:
6141 case BUILT_IN_FRAME_ADDRESS
:
6142 case BUILT_IN_RETURN_ADDRESS
:
6143 return expand_builtin_frame_address (fndecl
, exp
);
6145 /* Returns the address of the area where the structure is returned.
6147 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6148 if (call_expr_nargs (exp
) != 0
6149 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6150 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6153 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6155 case BUILT_IN_ALLOCA
:
6156 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6157 /* If the allocation stems from the declaration of a variable-sized
6158 object, it cannot accumulate. */
6159 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6164 case BUILT_IN_STACK_SAVE
:
6165 return expand_stack_save ();
6167 case BUILT_IN_STACK_RESTORE
:
6168 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6171 case BUILT_IN_BSWAP16
:
6172 case BUILT_IN_BSWAP32
:
6173 case BUILT_IN_BSWAP64
:
6174 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6179 CASE_INT_FN (BUILT_IN_FFS
):
6180 target
= expand_builtin_unop (target_mode
, exp
, target
,
6181 subtarget
, ffs_optab
);
6186 CASE_INT_FN (BUILT_IN_CLZ
):
6187 target
= expand_builtin_unop (target_mode
, exp
, target
,
6188 subtarget
, clz_optab
);
6193 CASE_INT_FN (BUILT_IN_CTZ
):
6194 target
= expand_builtin_unop (target_mode
, exp
, target
,
6195 subtarget
, ctz_optab
);
6200 CASE_INT_FN (BUILT_IN_CLRSB
):
6201 target
= expand_builtin_unop (target_mode
, exp
, target
,
6202 subtarget
, clrsb_optab
);
6207 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6208 target
= expand_builtin_unop (target_mode
, exp
, target
,
6209 subtarget
, popcount_optab
);
6214 CASE_INT_FN (BUILT_IN_PARITY
):
6215 target
= expand_builtin_unop (target_mode
, exp
, target
,
6216 subtarget
, parity_optab
);
6221 case BUILT_IN_STRLEN
:
6222 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6227 case BUILT_IN_STRCPY
:
6228 target
= expand_builtin_strcpy (exp
, target
);
6233 case BUILT_IN_STRNCPY
:
6234 target
= expand_builtin_strncpy (exp
, target
);
6239 case BUILT_IN_STPCPY
:
6240 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6245 case BUILT_IN_MEMCPY
:
6246 target
= expand_builtin_memcpy (exp
, target
);
6251 case BUILT_IN_MEMPCPY
:
6252 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6257 case BUILT_IN_MEMSET
:
6258 target
= expand_builtin_memset (exp
, target
, mode
);
6263 case BUILT_IN_BZERO
:
6264 target
= expand_builtin_bzero (exp
);
6269 case BUILT_IN_STRCMP
:
6270 target
= expand_builtin_strcmp (exp
, target
);
6275 case BUILT_IN_STRNCMP
:
6276 target
= expand_builtin_strncmp (exp
, target
, mode
);
6282 case BUILT_IN_MEMCMP
:
6283 target
= expand_builtin_memcmp (exp
, target
);
6288 case BUILT_IN_SETJMP
:
6289 /* This should have been lowered to the builtins below. */
6292 case BUILT_IN_SETJMP_SETUP
:
6293 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6294 and the receiver label. */
6295 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6297 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6298 VOIDmode
, EXPAND_NORMAL
);
6299 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6300 rtx_insn
*label_r
= label_rtx (label
);
6302 /* This is copied from the handling of non-local gotos. */
6303 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6304 nonlocal_goto_handler_labels
6305 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6306 nonlocal_goto_handler_labels
);
6307 /* ??? Do not let expand_label treat us as such since we would
6308 not want to be both on the list of non-local labels and on
6309 the list of forced labels. */
6310 FORCED_LABEL (label
) = 0;
6315 case BUILT_IN_SETJMP_RECEIVER
:
6316 /* __builtin_setjmp_receiver is passed the receiver label. */
6317 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6319 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6320 rtx_insn
*label_r
= label_rtx (label
);
6322 expand_builtin_setjmp_receiver (label_r
);
6327 /* __builtin_longjmp is passed a pointer to an array of five words.
6328 It's similar to the C library longjmp function but works with
6329 __builtin_setjmp above. */
6330 case BUILT_IN_LONGJMP
:
6331 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6333 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6334 VOIDmode
, EXPAND_NORMAL
);
6335 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6337 if (value
!= const1_rtx
)
6339 error ("%<__builtin_longjmp%> second argument must be 1");
6343 expand_builtin_longjmp (buf_addr
, value
);
6348 case BUILT_IN_NONLOCAL_GOTO
:
6349 target
= expand_builtin_nonlocal_goto (exp
);
6354 /* This updates the setjmp buffer that is its argument with the value
6355 of the current stack pointer. */
6356 case BUILT_IN_UPDATE_SETJMP_BUF
:
6357 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6360 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6362 expand_builtin_update_setjmp_buf (buf_addr
);
6368 expand_builtin_trap ();
6371 case BUILT_IN_UNREACHABLE
:
6372 expand_builtin_unreachable ();
6375 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6376 case BUILT_IN_SIGNBITD32
:
6377 case BUILT_IN_SIGNBITD64
:
6378 case BUILT_IN_SIGNBITD128
:
6379 target
= expand_builtin_signbit (exp
, target
);
6384 /* Various hooks for the DWARF 2 __throw routine. */
6385 case BUILT_IN_UNWIND_INIT
:
6386 expand_builtin_unwind_init ();
6388 case BUILT_IN_DWARF_CFA
:
6389 return virtual_cfa_rtx
;
6390 #ifdef DWARF2_UNWIND_INFO
6391 case BUILT_IN_DWARF_SP_COLUMN
:
6392 return expand_builtin_dwarf_sp_column ();
6393 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6394 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6397 case BUILT_IN_FROB_RETURN_ADDR
:
6398 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6399 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6400 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6401 case BUILT_IN_EH_RETURN
:
6402 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6403 CALL_EXPR_ARG (exp
, 1));
6405 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6406 return expand_builtin_eh_return_data_regno (exp
);
6407 case BUILT_IN_EXTEND_POINTER
:
6408 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6409 case BUILT_IN_EH_POINTER
:
6410 return expand_builtin_eh_pointer (exp
);
6411 case BUILT_IN_EH_FILTER
:
6412 return expand_builtin_eh_filter (exp
);
6413 case BUILT_IN_EH_COPY_VALUES
:
6414 return expand_builtin_eh_copy_values (exp
);
6416 case BUILT_IN_VA_START
:
6417 return expand_builtin_va_start (exp
);
6418 case BUILT_IN_VA_END
:
6419 return expand_builtin_va_end (exp
);
6420 case BUILT_IN_VA_COPY
:
6421 return expand_builtin_va_copy (exp
);
6422 case BUILT_IN_EXPECT
:
6423 return expand_builtin_expect (exp
, target
);
6424 case BUILT_IN_ASSUME_ALIGNED
:
6425 return expand_builtin_assume_aligned (exp
, target
);
6426 case BUILT_IN_PREFETCH
:
6427 expand_builtin_prefetch (exp
);
6430 case BUILT_IN_INIT_TRAMPOLINE
:
6431 return expand_builtin_init_trampoline (exp
, true);
6432 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6433 return expand_builtin_init_trampoline (exp
, false);
6434 case BUILT_IN_ADJUST_TRAMPOLINE
:
6435 return expand_builtin_adjust_trampoline (exp
);
6438 case BUILT_IN_EXECL
:
6439 case BUILT_IN_EXECV
:
6440 case BUILT_IN_EXECLP
:
6441 case BUILT_IN_EXECLE
:
6442 case BUILT_IN_EXECVP
:
6443 case BUILT_IN_EXECVE
:
6444 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6449 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6450 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6451 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6452 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6453 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6454 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6455 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6460 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6461 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6462 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6463 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6464 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6465 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6466 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6471 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6472 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6473 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6474 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6475 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6476 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6477 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6482 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6483 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6484 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6485 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6486 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6487 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6488 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6493 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6494 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6495 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6496 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6497 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6498 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6499 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6504 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6505 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6506 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6507 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6508 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6509 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6510 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6515 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6516 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6517 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6518 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6519 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6520 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6521 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6526 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6527 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6528 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6529 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6530 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6531 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6532 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6537 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6538 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6539 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6540 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6541 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6542 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6543 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6548 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6549 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6550 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6551 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6552 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6553 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6554 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6559 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6560 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6561 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6562 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6563 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6564 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6565 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6570 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6571 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6572 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6573 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6575 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6576 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6581 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6582 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6583 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6584 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6585 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6586 if (mode
== VOIDmode
)
6587 mode
= TYPE_MODE (boolean_type_node
);
6588 if (!target
|| !register_operand (target
, mode
))
6589 target
= gen_reg_rtx (mode
);
6591 mode
= get_builtin_sync_mode
6592 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6593 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6598 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6599 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6600 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6601 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6602 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6603 mode
= get_builtin_sync_mode
6604 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6605 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6610 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6611 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6612 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6613 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6614 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6615 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6616 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6621 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6622 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6623 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6624 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6625 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6626 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6627 expand_builtin_sync_lock_release (mode
, exp
);
6630 case BUILT_IN_SYNC_SYNCHRONIZE
:
6631 expand_builtin_sync_synchronize ();
6634 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6635 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6636 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6637 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6638 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6639 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6640 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6645 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6649 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6651 unsigned int nargs
, z
;
6652 vec
<tree
, va_gc
> *vec
;
6655 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6656 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6660 /* If this is turned into an external library call, the weak parameter
6661 must be dropped to match the expected parameter list. */
6662 nargs
= call_expr_nargs (exp
);
6663 vec_alloc (vec
, nargs
- 1);
6664 for (z
= 0; z
< 3; z
++)
6665 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6666 /* Skip the boolean weak parameter. */
6667 for (z
= 4; z
< 6; z
++)
6668 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6669 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6673 case BUILT_IN_ATOMIC_LOAD_1
:
6674 case BUILT_IN_ATOMIC_LOAD_2
:
6675 case BUILT_IN_ATOMIC_LOAD_4
:
6676 case BUILT_IN_ATOMIC_LOAD_8
:
6677 case BUILT_IN_ATOMIC_LOAD_16
:
6678 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6679 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6684 case BUILT_IN_ATOMIC_STORE_1
:
6685 case BUILT_IN_ATOMIC_STORE_2
:
6686 case BUILT_IN_ATOMIC_STORE_4
:
6687 case BUILT_IN_ATOMIC_STORE_8
:
6688 case BUILT_IN_ATOMIC_STORE_16
:
6689 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6690 target
= expand_builtin_atomic_store (mode
, exp
);
6695 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6696 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6697 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6698 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6699 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6701 enum built_in_function lib
;
6702 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6703 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6704 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6705 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6711 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6712 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6713 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6714 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6715 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6717 enum built_in_function lib
;
6718 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6719 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6720 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6721 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6727 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6728 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6729 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6730 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6731 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6733 enum built_in_function lib
;
6734 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6735 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6736 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6737 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6743 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6744 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6745 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6746 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6747 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6749 enum built_in_function lib
;
6750 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6751 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6752 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6753 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6759 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6760 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6761 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6762 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6763 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6765 enum built_in_function lib
;
6766 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6767 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6768 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6769 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6775 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6776 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6777 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6778 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6779 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6781 enum built_in_function lib
;
6782 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6783 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6784 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6785 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6791 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6792 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6793 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6794 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6795 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6796 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6797 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6798 ignore
, BUILT_IN_NONE
);
6803 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6804 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6805 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6806 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6807 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6808 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6809 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6810 ignore
, BUILT_IN_NONE
);
6815 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6816 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6817 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6818 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6819 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6820 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6821 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6822 ignore
, BUILT_IN_NONE
);
6827 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6828 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6829 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6830 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6831 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6832 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6833 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6834 ignore
, BUILT_IN_NONE
);
6839 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6840 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6841 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6842 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6843 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6844 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6845 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6846 ignore
, BUILT_IN_NONE
);
6851 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6852 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6853 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6854 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6855 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6856 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6857 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6858 ignore
, BUILT_IN_NONE
);
6863 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6864 return expand_builtin_atomic_test_and_set (exp
, target
);
6866 case BUILT_IN_ATOMIC_CLEAR
:
6867 return expand_builtin_atomic_clear (exp
);
6869 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6870 return expand_builtin_atomic_always_lock_free (exp
);
6872 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6873 target
= expand_builtin_atomic_is_lock_free (exp
);
6878 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6879 expand_builtin_atomic_thread_fence (exp
);
6882 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6883 expand_builtin_atomic_signal_fence (exp
);
6886 case BUILT_IN_OBJECT_SIZE
:
6887 return expand_builtin_object_size (exp
);
6889 case BUILT_IN_MEMCPY_CHK
:
6890 case BUILT_IN_MEMPCPY_CHK
:
6891 case BUILT_IN_MEMMOVE_CHK
:
6892 case BUILT_IN_MEMSET_CHK
:
6893 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6898 case BUILT_IN_STRCPY_CHK
:
6899 case BUILT_IN_STPCPY_CHK
:
6900 case BUILT_IN_STRNCPY_CHK
:
6901 case BUILT_IN_STPNCPY_CHK
:
6902 case BUILT_IN_STRCAT_CHK
:
6903 case BUILT_IN_STRNCAT_CHK
:
6904 case BUILT_IN_SNPRINTF_CHK
:
6905 case BUILT_IN_VSNPRINTF_CHK
:
6906 maybe_emit_chk_warning (exp
, fcode
);
6909 case BUILT_IN_SPRINTF_CHK
:
6910 case BUILT_IN_VSPRINTF_CHK
:
6911 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6915 if (warn_free_nonheap_object
)
6916 maybe_emit_free_warning (exp
);
6919 case BUILT_IN_THREAD_POINTER
:
6920 return expand_builtin_thread_pointer (exp
, target
);
6922 case BUILT_IN_SET_THREAD_POINTER
:
6923 expand_builtin_set_thread_pointer (exp
);
6926 case BUILT_IN_CILK_DETACH
:
6927 expand_builtin_cilk_detach (exp
);
6930 case BUILT_IN_CILK_POP_FRAME
:
6931 expand_builtin_cilk_pop_frame (exp
);
6934 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6935 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6936 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6937 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6938 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6939 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6940 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6941 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6942 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6943 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6944 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6945 /* We allow user CHKP builtins if Pointer Bounds
6947 if (!chkp_function_instrumented_p (current_function_decl
))
6949 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6950 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6951 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6952 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6953 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6954 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6955 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6956 return expand_normal (size_zero_node
);
6957 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6958 return expand_normal (size_int (-1));
6964 case BUILT_IN_CHKP_BNDMK
:
6965 case BUILT_IN_CHKP_BNDSTX
:
6966 case BUILT_IN_CHKP_BNDCL
:
6967 case BUILT_IN_CHKP_BNDCU
:
6968 case BUILT_IN_CHKP_BNDLDX
:
6969 case BUILT_IN_CHKP_BNDRET
:
6970 case BUILT_IN_CHKP_INTERSECT
:
6971 case BUILT_IN_CHKP_NARROW
:
6972 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6973 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6974 /* Software implementation of Pointer Bounds Checker is NYI.
6975 Target support is required. */
6976 error ("Your target platform does not support -fcheck-pointer-bounds");
6979 case BUILT_IN_ACC_ON_DEVICE
:
6980 /* Do library call, if we failed to expand the builtin when
6984 default: /* just do library call, if unknown builtin */
6988 /* The switch statement above can drop through to cause the function
6989 to be called normally. */
6990 return expand_call (exp
, target
, ignore
);
6993 /* Similar to expand_builtin but is used for instrumented calls. */
6996 expand_builtin_with_bounds (tree exp
, rtx target
,
6997 rtx subtarget ATTRIBUTE_UNUSED
,
6998 machine_mode mode
, int ignore
)
7000 tree fndecl
= get_callee_fndecl (exp
);
7001 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7003 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
7005 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7006 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7008 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
7009 && fcode
< END_CHKP_BUILTINS
);
7013 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
7014 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7019 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7020 target
= expand_builtin_mempcpy_with_bounds (exp
, target
, mode
);
7025 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7026 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7035 /* The switch statement above can drop through to cause the function
7036 to be called normally. */
7037 return expand_call (exp
, target
, ignore
);
7040 /* Determine whether a tree node represents a call to a built-in
7041 function. If the tree T is a call to a built-in function with
7042 the right number of arguments of the appropriate types, return
7043 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7044 Otherwise the return value is END_BUILTINS. */
7046 enum built_in_function
7047 builtin_mathfn_code (const_tree t
)
7049 const_tree fndecl
, arg
, parmlist
;
7050 const_tree argtype
, parmtype
;
7051 const_call_expr_arg_iterator iter
;
7053 if (TREE_CODE (t
) != CALL_EXPR
7054 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7055 return END_BUILTINS
;
7057 fndecl
= get_callee_fndecl (t
);
7058 if (fndecl
== NULL_TREE
7059 || TREE_CODE (fndecl
) != FUNCTION_DECL
7060 || ! DECL_BUILT_IN (fndecl
)
7061 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7062 return END_BUILTINS
;
7064 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7065 init_const_call_expr_arg_iterator (t
, &iter
);
7066 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7068 /* If a function doesn't take a variable number of arguments,
7069 the last element in the list will have type `void'. */
7070 parmtype
= TREE_VALUE (parmlist
);
7071 if (VOID_TYPE_P (parmtype
))
7073 if (more_const_call_expr_args_p (&iter
))
7074 return END_BUILTINS
;
7075 return DECL_FUNCTION_CODE (fndecl
);
7078 if (! more_const_call_expr_args_p (&iter
))
7079 return END_BUILTINS
;
7081 arg
= next_const_call_expr_arg (&iter
);
7082 argtype
= TREE_TYPE (arg
);
7084 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7086 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7087 return END_BUILTINS
;
7089 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7091 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7092 return END_BUILTINS
;
7094 else if (POINTER_TYPE_P (parmtype
))
7096 if (! POINTER_TYPE_P (argtype
))
7097 return END_BUILTINS
;
7099 else if (INTEGRAL_TYPE_P (parmtype
))
7101 if (! INTEGRAL_TYPE_P (argtype
))
7102 return END_BUILTINS
;
7105 return END_BUILTINS
;
7108 /* Variable-length argument list. */
7109 return DECL_FUNCTION_CODE (fndecl
);
7112 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7113 evaluate to a constant. */
7116 fold_builtin_constant_p (tree arg
)
7118 /* We return 1 for a numeric type that's known to be a constant
7119 value at compile-time or for an aggregate type that's a
7120 literal constant. */
7123 /* If we know this is a constant, emit the constant of one. */
7124 if (CONSTANT_CLASS_P (arg
)
7125 || (TREE_CODE (arg
) == CONSTRUCTOR
7126 && TREE_CONSTANT (arg
)))
7127 return integer_one_node
;
7128 if (TREE_CODE (arg
) == ADDR_EXPR
)
7130 tree op
= TREE_OPERAND (arg
, 0);
7131 if (TREE_CODE (op
) == STRING_CST
7132 || (TREE_CODE (op
) == ARRAY_REF
7133 && integer_zerop (TREE_OPERAND (op
, 1))
7134 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7135 return integer_one_node
;
7138 /* If this expression has side effects, show we don't know it to be a
7139 constant. Likewise if it's a pointer or aggregate type since in
7140 those case we only want literals, since those are only optimized
7141 when generating RTL, not later.
7142 And finally, if we are compiling an initializer, not code, we
7143 need to return a definite result now; there's not going to be any
7144 more optimization done. */
7145 if (TREE_SIDE_EFFECTS (arg
)
7146 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7147 || POINTER_TYPE_P (TREE_TYPE (arg
))
7149 || folding_initializer
7150 || force_folding_builtin_constant_p
)
7151 return integer_zero_node
;
7156 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7157 return it as a truthvalue. */
7160 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7163 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7165 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7166 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7167 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7168 pred_type
= TREE_VALUE (arg_types
);
7169 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7171 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7172 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7173 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7176 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7177 build_int_cst (ret_type
, 0));
7180 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7181 NULL_TREE if no simplification is possible. */
7184 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7186 tree inner
, fndecl
, inner_arg0
;
7187 enum tree_code code
;
7189 /* Distribute the expected value over short-circuiting operators.
7190 See through the cast from truthvalue_type_node to long. */
7192 while (CONVERT_EXPR_P (inner_arg0
)
7193 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7194 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7195 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
7197 /* If this is a builtin_expect within a builtin_expect keep the
7198 inner one. See through a comparison against a constant. It
7199 might have been added to create a thruthvalue. */
7202 if (COMPARISON_CLASS_P (inner
)
7203 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7204 inner
= TREE_OPERAND (inner
, 0);
7206 if (TREE_CODE (inner
) == CALL_EXPR
7207 && (fndecl
= get_callee_fndecl (inner
))
7208 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7209 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7213 code
= TREE_CODE (inner
);
7214 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7216 tree op0
= TREE_OPERAND (inner
, 0);
7217 tree op1
= TREE_OPERAND (inner
, 1);
7219 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7220 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7221 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7223 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
7226 /* If the argument isn't invariant then there's nothing else we can do. */
7227 if (!TREE_CONSTANT (inner_arg0
))
7230 /* If we expect that a comparison against the argument will fold to
7231 a constant return the constant. In practice, this means a true
7232 constant or the address of a non-weak symbol. */
7235 if (TREE_CODE (inner
) == ADDR_EXPR
)
7239 inner
= TREE_OPERAND (inner
, 0);
7241 while (TREE_CODE (inner
) == COMPONENT_REF
7242 || TREE_CODE (inner
) == ARRAY_REF
);
7243 if ((TREE_CODE (inner
) == VAR_DECL
7244 || TREE_CODE (inner
) == FUNCTION_DECL
)
7245 && DECL_WEAK (inner
))
7249 /* Otherwise, ARG0 already has the proper type for the return value. */
7253 /* Fold a call to __builtin_classify_type with argument ARG. */
7256 fold_builtin_classify_type (tree arg
)
7259 return build_int_cst (integer_type_node
, no_type_class
);
7261 return build_int_cst (integer_type_node
, type_to_class (TREE_TYPE (arg
)));
7264 /* Fold a call to __builtin_strlen with argument ARG. */
7267 fold_builtin_strlen (location_t loc
, tree type
, tree arg
)
7269 if (!validate_arg (arg
, POINTER_TYPE
))
7273 tree len
= c_strlen (arg
, 0);
7276 return fold_convert_loc (loc
, type
, len
);
7282 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7285 fold_builtin_inf (location_t loc
, tree type
, int warn
)
7287 REAL_VALUE_TYPE real
;
7289 /* __builtin_inff is intended to be usable to define INFINITY on all
7290 targets. If an infinity is not available, INFINITY expands "to a
7291 positive constant of type float that overflows at translation
7292 time", footnote "In this case, using INFINITY will violate the
7293 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7294 Thus we pedwarn to ensure this constraint violation is
7296 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7297 pedwarn (loc
, 0, "target format does not support infinity");
7300 return build_real (type
, real
);
7303 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7306 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7308 REAL_VALUE_TYPE real
;
7311 if (!validate_arg (arg
, POINTER_TYPE
))
7313 str
= c_getstr (arg
);
7317 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7320 return build_real (type
, real
);
7323 /* Return true if the floating point expression T has an integer value.
7324 We also allow +Inf, -Inf and NaN to be considered integer values. */
7327 integer_valued_real_p (tree t
)
7329 switch (TREE_CODE (t
))
7336 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7341 return integer_valued_real_p (TREE_OPERAND (t
, 1));
7348 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7349 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7352 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7353 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7356 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7360 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7361 if (TREE_CODE (type
) == INTEGER_TYPE
)
7363 if (TREE_CODE (type
) == REAL_TYPE
)
7364 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7369 switch (builtin_mathfn_code (t
))
7371 CASE_FLT_FN (BUILT_IN_CEIL
):
7372 CASE_FLT_FN (BUILT_IN_FLOOR
):
7373 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7374 CASE_FLT_FN (BUILT_IN_RINT
):
7375 CASE_FLT_FN (BUILT_IN_ROUND
):
7376 CASE_FLT_FN (BUILT_IN_TRUNC
):
7379 CASE_FLT_FN (BUILT_IN_FMIN
):
7380 CASE_FLT_FN (BUILT_IN_FMAX
):
7381 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7382 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7395 /* FNDECL is assumed to be a builtin where truncation can be propagated
7396 across (for instance floor((double)f) == (double)floorf (f).
7397 Do the transformation for a call with argument ARG. */
7400 fold_trunc_transparent_mathfn (location_t loc
, tree fndecl
, tree arg
)
7402 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7404 if (!validate_arg (arg
, REAL_TYPE
))
7407 /* Integer rounding functions are idempotent. */
7408 if (fcode
== builtin_mathfn_code (arg
))
7411 /* If argument is already integer valued, and we don't need to worry
7412 about setting errno, there's no need to perform rounding. */
7413 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7418 tree arg0
= strip_float_extensions (arg
);
7419 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7420 tree newtype
= TREE_TYPE (arg0
);
7423 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7424 && (decl
= mathfn_built_in (newtype
, fcode
)))
7425 return fold_convert_loc (loc
, ftype
,
7426 build_call_expr_loc (loc
, decl
, 1,
7427 fold_convert_loc (loc
,
7434 /* FNDECL is assumed to be builtin which can narrow the FP type of
7435 the argument, for instance lround((double)f) -> lroundf (f).
7436 Do the transformation for a call with argument ARG. */
7439 fold_fixed_mathfn (location_t loc
, tree fndecl
, tree arg
)
7441 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7443 if (!validate_arg (arg
, REAL_TYPE
))
7446 /* If argument is already integer valued, and we don't need to worry
7447 about setting errno, there's no need to perform rounding. */
7448 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7449 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7450 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7454 tree ftype
= TREE_TYPE (arg
);
7455 tree arg0
= strip_float_extensions (arg
);
7456 tree newtype
= TREE_TYPE (arg0
);
7459 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7460 && (decl
= mathfn_built_in (newtype
, fcode
)))
7461 return build_call_expr_loc (loc
, decl
, 1,
7462 fold_convert_loc (loc
, newtype
, arg0
));
7465 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7466 sizeof (int) == sizeof (long). */
7467 if (TYPE_PRECISION (integer_type_node
)
7468 == TYPE_PRECISION (long_integer_type_node
))
7470 tree newfn
= NULL_TREE
;
7473 CASE_FLT_FN (BUILT_IN_ICEIL
):
7474 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7477 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7478 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7481 CASE_FLT_FN (BUILT_IN_IROUND
):
7482 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7485 CASE_FLT_FN (BUILT_IN_IRINT
):
7486 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7495 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7496 return fold_convert_loc (loc
,
7497 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7501 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7502 sizeof (long long) == sizeof (long). */
7503 if (TYPE_PRECISION (long_long_integer_type_node
)
7504 == TYPE_PRECISION (long_integer_type_node
))
7506 tree newfn
= NULL_TREE
;
7509 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7510 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7513 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7514 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7517 CASE_FLT_FN (BUILT_IN_LLROUND
):
7518 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7521 CASE_FLT_FN (BUILT_IN_LLRINT
):
7522 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7531 tree newcall
= build_call_expr_loc (loc
, newfn
, 1, arg
);
7532 return fold_convert_loc (loc
,
7533 TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7540 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7541 NULL_TREE if no simplification can be made. */
7544 fold_builtin_sincos (location_t loc
,
7545 tree arg0
, tree arg1
, tree arg2
)
7550 if (!validate_arg (arg0
, REAL_TYPE
)
7551 || !validate_arg (arg1
, POINTER_TYPE
)
7552 || !validate_arg (arg2
, POINTER_TYPE
))
7555 type
= TREE_TYPE (arg0
);
7557 /* Calculate the result when the argument is a constant. */
7558 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7561 /* Canonicalize sincos to cexpi. */
7562 if (!targetm
.libc_has_function (function_c99_math_complex
))
7564 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7568 call
= build_call_expr_loc (loc
, fn
, 1, arg0
);
7569 call
= builtin_save_expr (call
);
7571 return build2 (COMPOUND_EXPR
, void_type_node
,
7572 build2 (MODIFY_EXPR
, void_type_node
,
7573 build_fold_indirect_ref_loc (loc
, arg1
),
7574 build1 (IMAGPART_EXPR
, type
, call
)),
7575 build2 (MODIFY_EXPR
, void_type_node
,
7576 build_fold_indirect_ref_loc (loc
, arg2
),
7577 build1 (REALPART_EXPR
, type
, call
)));
7580 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7581 NULL_TREE if no simplification can be made. */
7584 fold_builtin_cexp (location_t loc
, tree arg0
, tree type
)
7587 tree realp
, imagp
, ifn
;
7590 if (!validate_arg (arg0
, COMPLEX_TYPE
)
7591 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) != REAL_TYPE
)
7594 /* Calculate the result when the argument is a constant. */
7595 if ((res
= do_mpc_arg1 (arg0
, type
, mpc_exp
)))
7598 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7600 /* In case we can figure out the real part of arg0 and it is constant zero
7602 if (!targetm
.libc_has_function (function_c99_math_complex
))
7604 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7608 if ((realp
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
))
7609 && real_zerop (realp
))
7611 tree narg
= fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7612 return build_call_expr_loc (loc
, ifn
, 1, narg
);
7615 /* In case we can easily decompose real and imaginary parts split cexp
7616 to exp (r) * cexpi (i). */
7617 if (flag_unsafe_math_optimizations
7620 tree rfn
, rcall
, icall
;
7622 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7626 imagp
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
7630 icall
= build_call_expr_loc (loc
, ifn
, 1, imagp
);
7631 icall
= builtin_save_expr (icall
);
7632 rcall
= build_call_expr_loc (loc
, rfn
, 1, realp
);
7633 rcall
= builtin_save_expr (rcall
);
7634 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
7635 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7637 fold_build1_loc (loc
, REALPART_EXPR
,
7639 fold_build2_loc (loc
, MULT_EXPR
, rtype
,
7641 fold_build1_loc (loc
, IMAGPART_EXPR
,
7648 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7649 Return NULL_TREE if no simplification can be made. */
7652 fold_builtin_trunc (location_t loc
, tree fndecl
, tree arg
)
7654 if (!validate_arg (arg
, REAL_TYPE
))
7657 /* Optimize trunc of constant value. */
7658 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7660 REAL_VALUE_TYPE r
, x
;
7661 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7663 x
= TREE_REAL_CST (arg
);
7664 real_trunc (&r
, TYPE_MODE (type
), &x
);
7665 return build_real (type
, r
);
7668 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7671 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7672 Return NULL_TREE if no simplification can be made. */
7675 fold_builtin_floor (location_t loc
, tree fndecl
, tree arg
)
7677 if (!validate_arg (arg
, REAL_TYPE
))
7680 /* Optimize floor of constant value. */
7681 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7685 x
= TREE_REAL_CST (arg
);
7686 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7688 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7691 real_floor (&r
, TYPE_MODE (type
), &x
);
7692 return build_real (type
, r
);
7696 /* Fold floor (x) where x is nonnegative to trunc (x). */
7697 if (tree_expr_nonnegative_p (arg
))
7699 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7701 return build_call_expr_loc (loc
, truncfn
, 1, arg
);
7704 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7707 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7708 Return NULL_TREE if no simplification can be made. */
7711 fold_builtin_ceil (location_t loc
, tree fndecl
, tree arg
)
7713 if (!validate_arg (arg
, REAL_TYPE
))
7716 /* Optimize ceil of constant value. */
7717 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7721 x
= TREE_REAL_CST (arg
);
7722 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7724 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7727 real_ceil (&r
, TYPE_MODE (type
), &x
);
7728 return build_real (type
, r
);
7732 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7735 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7736 Return NULL_TREE if no simplification can be made. */
7739 fold_builtin_round (location_t loc
, tree fndecl
, tree arg
)
7741 if (!validate_arg (arg
, REAL_TYPE
))
7744 /* Optimize round of constant value. */
7745 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7749 x
= TREE_REAL_CST (arg
);
7750 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7752 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7755 real_round (&r
, TYPE_MODE (type
), &x
);
7756 return build_real (type
, r
);
7760 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg
);
7763 /* Fold function call to builtin lround, lroundf or lroundl (or the
7764 corresponding long long versions) and other rounding functions. ARG
7765 is the argument to the call. Return NULL_TREE if no simplification
7769 fold_builtin_int_roundingfn (location_t loc
, tree fndecl
, tree arg
)
7771 if (!validate_arg (arg
, REAL_TYPE
))
7774 /* Optimize lround of constant value. */
7775 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7777 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7779 if (real_isfinite (&x
))
7781 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7782 tree ftype
= TREE_TYPE (arg
);
7786 switch (DECL_FUNCTION_CODE (fndecl
))
7788 CASE_FLT_FN (BUILT_IN_IFLOOR
):
7789 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7790 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7791 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7794 CASE_FLT_FN (BUILT_IN_ICEIL
):
7795 CASE_FLT_FN (BUILT_IN_LCEIL
):
7796 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7797 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7800 CASE_FLT_FN (BUILT_IN_IROUND
):
7801 CASE_FLT_FN (BUILT_IN_LROUND
):
7802 CASE_FLT_FN (BUILT_IN_LLROUND
):
7803 real_round (&r
, TYPE_MODE (ftype
), &x
);
7810 wide_int val
= real_to_integer (&r
, &fail
, TYPE_PRECISION (itype
));
7812 return wide_int_to_tree (itype
, val
);
7816 switch (DECL_FUNCTION_CODE (fndecl
))
7818 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7819 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7820 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7821 if (tree_expr_nonnegative_p (arg
))
7822 return fold_build1_loc (loc
, FIX_TRUNC_EXPR
,
7823 TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7828 return fold_fixed_mathfn (loc
, fndecl
, arg
);
7831 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7832 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7833 the argument to the call. Return NULL_TREE if no simplification can
7837 fold_builtin_bitop (tree fndecl
, tree arg
)
7839 if (!validate_arg (arg
, INTEGER_TYPE
))
7842 /* Optimize for constant argument. */
7843 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7845 tree type
= TREE_TYPE (arg
);
7848 switch (DECL_FUNCTION_CODE (fndecl
))
7850 CASE_INT_FN (BUILT_IN_FFS
):
7851 result
= wi::ffs (arg
);
7854 CASE_INT_FN (BUILT_IN_CLZ
):
7855 if (wi::ne_p (arg
, 0))
7856 result
= wi::clz (arg
);
7857 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7858 result
= TYPE_PRECISION (type
);
7861 CASE_INT_FN (BUILT_IN_CTZ
):
7862 if (wi::ne_p (arg
, 0))
7863 result
= wi::ctz (arg
);
7864 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7865 result
= TYPE_PRECISION (type
);
7868 CASE_INT_FN (BUILT_IN_CLRSB
):
7869 result
= wi::clrsb (arg
);
7872 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7873 result
= wi::popcount (arg
);
7876 CASE_INT_FN (BUILT_IN_PARITY
):
7877 result
= wi::parity (arg
);
7884 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
7890 /* Fold function call to builtin_bswap and the short, long and long long
7891 variants. Return NULL_TREE if no simplification can be made. */
7893 fold_builtin_bswap (tree fndecl
, tree arg
)
7895 if (! validate_arg (arg
, INTEGER_TYPE
))
7898 /* Optimize constant value. */
7899 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7901 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7903 switch (DECL_FUNCTION_CODE (fndecl
))
7905 case BUILT_IN_BSWAP16
:
7906 case BUILT_IN_BSWAP32
:
7907 case BUILT_IN_BSWAP64
:
7909 signop sgn
= TYPE_SIGN (type
);
7911 wide_int_to_tree (type
,
7912 wide_int::from (arg
, TYPE_PRECISION (type
),
7924 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7925 NULL_TREE if no simplification can be made. */
7928 fold_builtin_hypot (location_t loc
, tree arg0
, tree arg1
, tree type
)
7932 if (!validate_arg (arg0
, REAL_TYPE
)
7933 || !validate_arg (arg1
, REAL_TYPE
))
7936 /* Calculate the result when the argument is a constant. */
7937 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
7940 /* If either argument is zero, hypot is fabs of the other. */
7941 if (real_zerop (arg0
))
7942 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
);
7943 else if (real_zerop (arg1
))
7944 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
);
7946 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7947 if (flag_unsafe_math_optimizations
7948 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
7949 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7950 fold_build1_loc (loc
, ABS_EXPR
, type
, arg0
),
7951 build_real_truncate (type
, dconst_sqrt2 ()));
7957 /* Fold a builtin function call to pow, powf, or powl. Return
7958 NULL_TREE if no simplification can be made. */
7960 fold_builtin_pow (location_t loc
, tree fndecl
, tree arg0
, tree arg1
, tree type
)
7964 if (!validate_arg (arg0
, REAL_TYPE
)
7965 || !validate_arg (arg1
, REAL_TYPE
))
7968 /* Calculate the result when the argument is a constant. */
7969 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
7972 /* Optimize pow(1.0,y) = 1.0. */
7973 if (real_onep (arg0
))
7974 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
7976 if (TREE_CODE (arg1
) == REAL_CST
7977 && !TREE_OVERFLOW (arg1
))
7979 REAL_VALUE_TYPE cint
;
7983 c
= TREE_REAL_CST (arg1
);
7985 /* Optimize pow(x,0.0) = 1.0. */
7986 if (real_equal (&c
, &dconst0
))
7987 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
7990 /* Optimize pow(x,1.0) = x. */
7991 if (real_equal (&c
, &dconst1
))
7994 /* Optimize pow(x,-1.0) = 1.0/x. */
7995 if (real_equal (&c
, &dconstm1
))
7996 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
7997 build_real (type
, dconst1
), arg0
);
7999 /* Optimize pow(x,0.5) = sqrt(x). */
8000 if (flag_unsafe_math_optimizations
8001 && real_equal (&c
, &dconsthalf
))
8003 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8005 if (sqrtfn
!= NULL_TREE
)
8006 return build_call_expr_loc (loc
, sqrtfn
, 1, arg0
);
8009 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8010 if (flag_unsafe_math_optimizations
)
8012 const REAL_VALUE_TYPE dconstroot
8013 = real_value_truncate (TYPE_MODE (type
), dconst_third ());
8015 if (real_equal (&c
, &dconstroot
))
8017 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8018 if (cbrtfn
!= NULL_TREE
)
8019 return build_call_expr_loc (loc
, cbrtfn
, 1, arg0
);
8023 /* Check for an integer exponent. */
8024 n
= real_to_integer (&c
);
8025 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
8026 if (real_identical (&c
, &cint
))
8028 /* Attempt to evaluate pow at compile-time, unless this should
8029 raise an exception. */
8030 if (TREE_CODE (arg0
) == REAL_CST
8031 && !TREE_OVERFLOW (arg0
)
8033 || (!flag_trapping_math
&& !flag_errno_math
)
8034 || !real_equal (&TREE_REAL_CST (arg0
), &dconst0
)))
8039 x
= TREE_REAL_CST (arg0
);
8040 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8041 if (flag_unsafe_math_optimizations
|| !inexact
)
8042 return build_real (type
, x
);
8047 if (flag_unsafe_math_optimizations
)
8049 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8051 /* Optimize pow(expN(x),y) = expN(x*y). */
8052 if (BUILTIN_EXPONENT_P (fcode
))
8054 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8055 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8056 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg
, arg1
);
8057 return build_call_expr_loc (loc
, expfn
, 1, arg
);
8060 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8061 if (BUILTIN_SQRT_P (fcode
))
8063 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8064 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
,
8065 build_real (type
, dconsthalf
));
8066 return build_call_expr_loc (loc
, fndecl
, 2, narg0
, narg1
);
8069 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8070 if (BUILTIN_CBRT_P (fcode
))
8072 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8073 if (tree_expr_nonnegative_p (arg
))
8075 tree c
= build_real_truncate (type
, dconst_third ());
8076 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg1
, c
);
8077 return build_call_expr_loc (loc
, fndecl
, 2, arg
, narg1
);
8081 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8082 if (fcode
== BUILT_IN_POW
8083 || fcode
== BUILT_IN_POWF
8084 || fcode
== BUILT_IN_POWL
)
8086 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8087 if (tree_expr_nonnegative_p (arg00
))
8089 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8090 tree narg1
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg01
, arg1
);
8091 return build_call_expr_loc (loc
, fndecl
, 2, arg00
, narg1
);
8099 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8100 Return NULL_TREE if no simplification can be made. */
8102 fold_builtin_powi (location_t loc
, tree fndecl ATTRIBUTE_UNUSED
,
8103 tree arg0
, tree arg1
, tree type
)
8105 if (!validate_arg (arg0
, REAL_TYPE
)
8106 || !validate_arg (arg1
, INTEGER_TYPE
))
8109 /* Optimize pow(1.0,y) = 1.0. */
8110 if (real_onep (arg0
))
8111 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
), arg1
);
8113 if (tree_fits_shwi_p (arg1
))
8115 HOST_WIDE_INT c
= tree_to_shwi (arg1
);
8117 /* Evaluate powi at compile-time. */
8118 if (TREE_CODE (arg0
) == REAL_CST
8119 && !TREE_OVERFLOW (arg0
))
8122 x
= TREE_REAL_CST (arg0
);
8123 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8124 return build_real (type
, x
);
8127 /* Optimize pow(x,0) = 1.0. */
8129 return omit_one_operand_loc (loc
, type
, build_real (type
, dconst1
),
8132 /* Optimize pow(x,1) = x. */
8136 /* Optimize pow(x,-1) = 1.0/x. */
8138 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
8139 build_real (type
, dconst1
), arg0
);
8145 /* A subroutine of fold_builtin to fold the various exponent
8146 functions. Return NULL_TREE if no simplification can be made.
8147 FUNC is the corresponding MPFR exponent function. */
8150 fold_builtin_exponent (location_t loc
, tree fndecl
, tree arg
,
8151 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8153 if (validate_arg (arg
, REAL_TYPE
))
8155 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8158 /* Calculate the result when the argument is a constant. */
8159 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8162 /* Optimize expN(logN(x)) = x. */
8163 if (flag_unsafe_math_optimizations
)
8165 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8167 if ((func
== mpfr_exp
8168 && (fcode
== BUILT_IN_LOG
8169 || fcode
== BUILT_IN_LOGF
8170 || fcode
== BUILT_IN_LOGL
))
8171 || (func
== mpfr_exp2
8172 && (fcode
== BUILT_IN_LOG2
8173 || fcode
== BUILT_IN_LOG2F
8174 || fcode
== BUILT_IN_LOG2L
))
8175 || (func
== mpfr_exp10
8176 && (fcode
== BUILT_IN_LOG10
8177 || fcode
== BUILT_IN_LOG10F
8178 || fcode
== BUILT_IN_LOG10L
)))
8179 return fold_convert_loc (loc
, type
, CALL_EXPR_ARG (arg
, 0));
8186 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8187 arguments to the call, and TYPE is its return type.
8188 Return NULL_TREE if no simplification can be made. */
8191 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
8193 if (!validate_arg (arg1
, POINTER_TYPE
)
8194 || !validate_arg (arg2
, INTEGER_TYPE
)
8195 || !validate_arg (len
, INTEGER_TYPE
))
8201 if (TREE_CODE (arg2
) != INTEGER_CST
8202 || !tree_fits_uhwi_p (len
))
8205 p1
= c_getstr (arg1
);
8206 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8212 if (target_char_cast (arg2
, &c
))
8215 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
8218 return build_int_cst (TREE_TYPE (arg1
), 0);
8220 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
8221 return fold_convert_loc (loc
, type
, tem
);
8227 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8228 Return NULL_TREE if no simplification can be made. */
8231 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8233 const char *p1
, *p2
;
8235 if (!validate_arg (arg1
, POINTER_TYPE
)
8236 || !validate_arg (arg2
, POINTER_TYPE
)
8237 || !validate_arg (len
, INTEGER_TYPE
))
8240 /* If the LEN parameter is zero, return zero. */
8241 if (integer_zerop (len
))
8242 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8245 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8246 if (operand_equal_p (arg1
, arg2
, 0))
8247 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8249 p1
= c_getstr (arg1
);
8250 p2
= c_getstr (arg2
);
8252 /* If all arguments are constant, and the value of len is not greater
8253 than the lengths of arg1 and arg2, evaluate at compile-time. */
8254 if (tree_fits_uhwi_p (len
) && p1
&& p2
8255 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8256 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8258 const int r
= memcmp (p1
, p2
, tree_to_uhwi (len
));
8261 return integer_one_node
;
8263 return integer_minus_one_node
;
8265 return integer_zero_node
;
8268 /* If len parameter is one, return an expression corresponding to
8269 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8270 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8272 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8273 tree cst_uchar_ptr_node
8274 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8277 = fold_convert_loc (loc
, integer_type_node
,
8278 build1 (INDIRECT_REF
, cst_uchar_node
,
8279 fold_convert_loc (loc
,
8283 = fold_convert_loc (loc
, integer_type_node
,
8284 build1 (INDIRECT_REF
, cst_uchar_node
,
8285 fold_convert_loc (loc
,
8288 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8294 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8295 Return NULL_TREE if no simplification can be made. */
8298 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
8300 const char *p1
, *p2
;
8302 if (!validate_arg (arg1
, POINTER_TYPE
)
8303 || !validate_arg (arg2
, POINTER_TYPE
))
8306 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8307 if (operand_equal_p (arg1
, arg2
, 0))
8308 return integer_zero_node
;
8310 p1
= c_getstr (arg1
);
8311 p2
= c_getstr (arg2
);
8315 const int i
= strcmp (p1
, p2
);
8317 return integer_minus_one_node
;
8319 return integer_one_node
;
8321 return integer_zero_node
;
8324 /* If the second arg is "", return *(const unsigned char*)arg1. */
8325 if (p2
&& *p2
== '\0')
8327 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8328 tree cst_uchar_ptr_node
8329 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8331 return fold_convert_loc (loc
, integer_type_node
,
8332 build1 (INDIRECT_REF
, cst_uchar_node
,
8333 fold_convert_loc (loc
,
8338 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8339 if (p1
&& *p1
== '\0')
8341 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8342 tree cst_uchar_ptr_node
8343 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8346 = fold_convert_loc (loc
, integer_type_node
,
8347 build1 (INDIRECT_REF
, cst_uchar_node
,
8348 fold_convert_loc (loc
,
8351 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8357 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8358 Return NULL_TREE if no simplification can be made. */
8361 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
8363 const char *p1
, *p2
;
8365 if (!validate_arg (arg1
, POINTER_TYPE
)
8366 || !validate_arg (arg2
, POINTER_TYPE
)
8367 || !validate_arg (len
, INTEGER_TYPE
))
8370 /* If the LEN parameter is zero, return zero. */
8371 if (integer_zerop (len
))
8372 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
8375 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8376 if (operand_equal_p (arg1
, arg2
, 0))
8377 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
8379 p1
= c_getstr (arg1
);
8380 p2
= c_getstr (arg2
);
8382 if (tree_fits_uhwi_p (len
) && p1
&& p2
)
8384 const int i
= strncmp (p1
, p2
, tree_to_uhwi (len
));
8386 return integer_one_node
;
8388 return integer_minus_one_node
;
8390 return integer_zero_node
;
8393 /* If the second arg is "", and the length is greater than zero,
8394 return *(const unsigned char*)arg1. */
8395 if (p2
&& *p2
== '\0'
8396 && TREE_CODE (len
) == INTEGER_CST
8397 && tree_int_cst_sgn (len
) == 1)
8399 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8400 tree cst_uchar_ptr_node
8401 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8403 return fold_convert_loc (loc
, integer_type_node
,
8404 build1 (INDIRECT_REF
, cst_uchar_node
,
8405 fold_convert_loc (loc
,
8410 /* If the first arg is "", and the length is greater than zero,
8411 return -*(const unsigned char*)arg2. */
8412 if (p1
&& *p1
== '\0'
8413 && TREE_CODE (len
) == INTEGER_CST
8414 && tree_int_cst_sgn (len
) == 1)
8416 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8417 tree cst_uchar_ptr_node
8418 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8420 tree temp
= fold_convert_loc (loc
, integer_type_node
,
8421 build1 (INDIRECT_REF
, cst_uchar_node
,
8422 fold_convert_loc (loc
,
8425 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
8428 /* If len parameter is one, return an expression corresponding to
8429 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8430 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
8432 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8433 tree cst_uchar_ptr_node
8434 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8436 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
8437 build1 (INDIRECT_REF
, cst_uchar_node
,
8438 fold_convert_loc (loc
,
8441 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
8442 build1 (INDIRECT_REF
, cst_uchar_node
,
8443 fold_convert_loc (loc
,
8446 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8452 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8453 ARG. Return NULL_TREE if no simplification can be made. */
8456 fold_builtin_signbit (location_t loc
, tree arg
, tree type
)
8458 if (!validate_arg (arg
, REAL_TYPE
))
8461 /* If ARG is a compile-time constant, determine the result. */
8462 if (TREE_CODE (arg
) == REAL_CST
8463 && !TREE_OVERFLOW (arg
))
8467 c
= TREE_REAL_CST (arg
);
8468 return (REAL_VALUE_NEGATIVE (c
)
8469 ? build_one_cst (type
)
8470 : build_zero_cst (type
));
8473 /* If ARG is non-negative, the result is always zero. */
8474 if (tree_expr_nonnegative_p (arg
))
8475 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
8477 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8478 if (!HONOR_SIGNED_ZEROS (arg
))
8479 return fold_convert (type
,
8480 fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, arg
,
8481 build_real (TREE_TYPE (arg
), dconst0
)));
8486 /* Fold function call to builtin copysign, copysignf or copysignl with
8487 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8491 fold_builtin_copysign (location_t loc
, tree arg1
, tree arg2
, tree type
)
8493 if (!validate_arg (arg1
, REAL_TYPE
)
8494 || !validate_arg (arg2
, REAL_TYPE
))
8497 /* copysign(X,X) is X. */
8498 if (operand_equal_p (arg1
, arg2
, 0))
8499 return fold_convert_loc (loc
, type
, arg1
);
8501 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8502 if (TREE_CODE (arg1
) == REAL_CST
8503 && TREE_CODE (arg2
) == REAL_CST
8504 && !TREE_OVERFLOW (arg1
)
8505 && !TREE_OVERFLOW (arg2
))
8507 REAL_VALUE_TYPE c1
, c2
;
8509 c1
= TREE_REAL_CST (arg1
);
8510 c2
= TREE_REAL_CST (arg2
);
8511 /* c1.sign := c2.sign. */
8512 real_copysign (&c1
, &c2
);
8513 return build_real (type
, c1
);
8516 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8517 Remember to evaluate Y for side-effects. */
8518 if (tree_expr_nonnegative_p (arg2
))
8519 return omit_one_operand_loc (loc
, type
,
8520 fold_build1_loc (loc
, ABS_EXPR
, type
, arg1
),
8526 /* Fold a call to builtin isascii with argument ARG. */
8529 fold_builtin_isascii (location_t loc
, tree arg
)
8531 if (!validate_arg (arg
, INTEGER_TYPE
))
8535 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8536 arg
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
8537 build_int_cst (integer_type_node
,
8538 ~ (unsigned HOST_WIDE_INT
) 0x7f));
8539 return fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
,
8540 arg
, integer_zero_node
);
8544 /* Fold a call to builtin toascii with argument ARG. */
8547 fold_builtin_toascii (location_t loc
, tree arg
)
8549 if (!validate_arg (arg
, INTEGER_TYPE
))
8552 /* Transform toascii(c) -> (c & 0x7f). */
8553 return fold_build2_loc (loc
, BIT_AND_EXPR
, integer_type_node
, arg
,
8554 build_int_cst (integer_type_node
, 0x7f));
8557 /* Fold a call to builtin isdigit with argument ARG. */
8560 fold_builtin_isdigit (location_t loc
, tree arg
)
8562 if (!validate_arg (arg
, INTEGER_TYPE
))
8566 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8567 /* According to the C standard, isdigit is unaffected by locale.
8568 However, it definitely is affected by the target character set. */
8569 unsigned HOST_WIDE_INT target_digit0
8570 = lang_hooks
.to_target_charset ('0');
8572 if (target_digit0
== 0)
8575 arg
= fold_convert_loc (loc
, unsigned_type_node
, arg
);
8576 arg
= fold_build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
8577 build_int_cst (unsigned_type_node
, target_digit0
));
8578 return fold_build2_loc (loc
, LE_EXPR
, integer_type_node
, arg
,
8579 build_int_cst (unsigned_type_node
, 9));
8583 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8586 fold_builtin_fabs (location_t loc
, tree arg
, tree type
)
8588 if (!validate_arg (arg
, REAL_TYPE
))
8591 arg
= fold_convert_loc (loc
, type
, arg
);
8592 if (TREE_CODE (arg
) == REAL_CST
)
8593 return fold_abs_const (arg
, type
);
8594 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8597 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8600 fold_builtin_abs (location_t loc
, tree arg
, tree type
)
8602 if (!validate_arg (arg
, INTEGER_TYPE
))
8605 arg
= fold_convert_loc (loc
, type
, arg
);
8606 if (TREE_CODE (arg
) == INTEGER_CST
)
8607 return fold_abs_const (arg
, type
);
8608 return fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8611 /* Fold a fma operation with arguments ARG[012]. */
8614 fold_fma (location_t loc ATTRIBUTE_UNUSED
,
8615 tree type
, tree arg0
, tree arg1
, tree arg2
)
8617 if (TREE_CODE (arg0
) == REAL_CST
8618 && TREE_CODE (arg1
) == REAL_CST
8619 && TREE_CODE (arg2
) == REAL_CST
)
8620 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
8625 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8628 fold_builtin_fma (location_t loc
, tree arg0
, tree arg1
, tree arg2
, tree type
)
8630 if (validate_arg (arg0
, REAL_TYPE
)
8631 && validate_arg (arg1
, REAL_TYPE
)
8632 && validate_arg (arg2
, REAL_TYPE
))
8634 tree tem
= fold_fma (loc
, type
, arg0
, arg1
, arg2
);
8638 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8639 if (optab_handler (fma_optab
, TYPE_MODE (type
)) != CODE_FOR_nothing
)
8640 return fold_build3_loc (loc
, FMA_EXPR
, type
, arg0
, arg1
, arg2
);
8645 /* Fold a call to builtin fmin or fmax. */
8648 fold_builtin_fmin_fmax (location_t loc
, tree arg0
, tree arg1
,
8649 tree type
, bool max
)
8651 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
8653 /* Calculate the result when the argument is a constant. */
8654 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
8659 /* If either argument is NaN, return the other one. Avoid the
8660 transformation if we get (and honor) a signalling NaN. Using
8661 omit_one_operand() ensures we create a non-lvalue. */
8662 if (TREE_CODE (arg0
) == REAL_CST
8663 && real_isnan (&TREE_REAL_CST (arg0
))
8664 && (! HONOR_SNANS (arg0
)
8665 || ! TREE_REAL_CST (arg0
).signalling
))
8666 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
8667 if (TREE_CODE (arg1
) == REAL_CST
8668 && real_isnan (&TREE_REAL_CST (arg1
))
8669 && (! HONOR_SNANS (arg1
)
8670 || ! TREE_REAL_CST (arg1
).signalling
))
8671 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8673 /* Transform fmin/fmax(x,x) -> x. */
8674 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8675 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8677 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8678 functions to return the numeric arg if the other one is NaN.
8679 These tree codes don't honor that, so only transform if
8680 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8681 handled, so we don't have to worry about it either. */
8682 if (flag_finite_math_only
)
8683 return fold_build2_loc (loc
, (max
? MAX_EXPR
: MIN_EXPR
), type
,
8684 fold_convert_loc (loc
, type
, arg0
),
8685 fold_convert_loc (loc
, type
, arg1
));
8690 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8693 fold_builtin_carg (location_t loc
, tree arg
, tree type
)
8695 if (validate_arg (arg
, COMPLEX_TYPE
)
8696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
)
8698 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
8702 tree new_arg
= builtin_save_expr (arg
);
8703 tree r_arg
= fold_build1_loc (loc
, REALPART_EXPR
, type
, new_arg
);
8704 tree i_arg
= fold_build1_loc (loc
, IMAGPART_EXPR
, type
, new_arg
);
8705 return build_call_expr_loc (loc
, atan2_fn
, 2, i_arg
, r_arg
);
8712 /* Fold a call to builtin logb/ilogb. */
8715 fold_builtin_logb (location_t loc
, tree arg
, tree rettype
)
8717 if (! validate_arg (arg
, REAL_TYPE
))
8722 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
8724 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
8730 /* If arg is Inf or NaN and we're logb, return it. */
8731 if (TREE_CODE (rettype
) == REAL_TYPE
)
8733 /* For logb(-Inf) we have to return +Inf. */
8734 if (real_isinf (value
) && real_isneg (value
))
8736 REAL_VALUE_TYPE tem
;
8738 return build_real (rettype
, tem
);
8740 return fold_convert_loc (loc
, rettype
, arg
);
8742 /* Fall through... */
8744 /* Zero may set errno and/or raise an exception for logb, also
8745 for ilogb we don't know FP_ILOGB0. */
8748 /* For normal numbers, proceed iff radix == 2. In GCC,
8749 normalized significands are in the range [0.5, 1.0). We
8750 want the exponent as if they were [1.0, 2.0) so get the
8751 exponent and subtract 1. */
8752 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
8753 return fold_convert_loc (loc
, rettype
,
8754 build_int_cst (integer_type_node
,
8755 REAL_EXP (value
)-1));
8763 /* Fold a call to builtin significand, if radix == 2. */
8766 fold_builtin_significand (location_t loc
, tree arg
, tree rettype
)
8768 if (! validate_arg (arg
, REAL_TYPE
))
8773 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
8775 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
8782 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8783 return fold_convert_loc (loc
, rettype
, arg
);
8785 /* For normal numbers, proceed iff radix == 2. */
8786 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
8788 REAL_VALUE_TYPE result
= *value
;
8789 /* In GCC, normalized significands are in the range [0.5,
8790 1.0). We want them to be [1.0, 2.0) so set the
8792 SET_REAL_EXP (&result
, 1);
8793 return build_real (rettype
, result
);
8802 /* Fold a call to builtin frexp, we can assume the base is 2. */
8805 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8807 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8812 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8815 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8817 /* Proceed if a valid pointer type was passed in. */
8818 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
8820 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8826 /* For +-0, return (*exp = 0, +-0). */
8827 exp
= integer_zero_node
;
8832 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8833 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
8836 /* Since the frexp function always expects base 2, and in
8837 GCC normalized significands are already in the range
8838 [0.5, 1.0), we have exactly what frexp wants. */
8839 REAL_VALUE_TYPE frac_rvt
= *value
;
8840 SET_REAL_EXP (&frac_rvt
, 0);
8841 frac
= build_real (rettype
, frac_rvt
);
8842 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
8849 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8850 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
8851 TREE_SIDE_EFFECTS (arg1
) = 1;
8852 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
8858 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8859 then we can assume the base is two. If it's false, then we have to
8860 check the mode of the TYPE parameter in certain cases. */
8863 fold_builtin_load_exponent (location_t loc
, tree arg0
, tree arg1
,
8864 tree type
, bool ldexp
)
8866 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
8871 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8872 if (real_zerop (arg0
) || integer_zerop (arg1
)
8873 || (TREE_CODE (arg0
) == REAL_CST
8874 && !real_isfinite (&TREE_REAL_CST (arg0
))))
8875 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
8877 /* If both arguments are constant, then try to evaluate it. */
8878 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
8879 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
8880 && tree_fits_shwi_p (arg1
))
8882 /* Bound the maximum adjustment to twice the range of the
8883 mode's valid exponents. Use abs to ensure the range is
8884 positive as a sanity check. */
8885 const long max_exp_adj
= 2 *
8886 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
8887 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
8889 /* Get the user-requested adjustment. */
8890 const HOST_WIDE_INT req_exp_adj
= tree_to_shwi (arg1
);
8892 /* The requested adjustment must be inside this range. This
8893 is a preliminary cap to avoid things like overflow, we
8894 may still fail to compute the result for other reasons. */
8895 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
8897 REAL_VALUE_TYPE initial_result
;
8899 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
8901 /* Ensure we didn't overflow. */
8902 if (! real_isinf (&initial_result
))
8904 const REAL_VALUE_TYPE trunc_result
8905 = real_value_truncate (TYPE_MODE (type
), initial_result
);
8907 /* Only proceed if the target mode can hold the
8909 if (real_equal (&initial_result
, &trunc_result
))
8910 return build_real (type
, trunc_result
);
8919 /* Fold a call to builtin modf. */
8922 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
8924 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
8929 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
8932 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
8934 /* Proceed if a valid pointer type was passed in. */
8935 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
8937 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
8938 REAL_VALUE_TYPE trunc
, frac
;
8944 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8945 trunc
= frac
= *value
;
8948 /* For +-Inf, return (*arg1 = arg0, +-0). */
8950 frac
.sign
= value
->sign
;
8954 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8955 real_trunc (&trunc
, VOIDmode
, value
);
8956 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
8957 /* If the original number was negative and already
8958 integral, then the fractional part is -0.0. */
8959 if (value
->sign
&& frac
.cl
== rvc_zero
)
8960 frac
.sign
= value
->sign
;
8964 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8965 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
8966 build_real (rettype
, trunc
));
8967 TREE_SIDE_EFFECTS (arg1
) = 1;
8968 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
8969 build_real (rettype
, frac
));
8975 /* Given a location LOC, an interclass builtin function decl FNDECL
8976 and its single argument ARG, return an folded expression computing
8977 the same, or NULL_TREE if we either couldn't or didn't want to fold
8978 (the latter happen if there's an RTL instruction available). */
8981 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8985 if (!validate_arg (arg
, REAL_TYPE
))
8988 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
8991 mode
= TYPE_MODE (TREE_TYPE (arg
));
8993 /* If there is no optab, try generic code. */
8994 switch (DECL_FUNCTION_CODE (fndecl
))
8998 CASE_FLT_FN (BUILT_IN_ISINF
):
9000 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9001 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
9002 tree
const type
= TREE_TYPE (arg
);
9006 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9007 real_from_string (&r
, buf
);
9008 result
= build_call_expr (isgr_fn
, 2,
9009 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9010 build_real (type
, r
));
9013 CASE_FLT_FN (BUILT_IN_FINITE
):
9014 case BUILT_IN_ISFINITE
:
9016 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9017 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9018 tree
const type
= TREE_TYPE (arg
);
9022 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9023 real_from_string (&r
, buf
);
9024 result
= build_call_expr (isle_fn
, 2,
9025 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
9026 build_real (type
, r
));
9027 /*result = fold_build2_loc (loc, UNGT_EXPR,
9028 TREE_TYPE (TREE_TYPE (fndecl)),
9029 fold_build1_loc (loc, ABS_EXPR, type, arg),
9030 build_real (type, r));
9031 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9032 TREE_TYPE (TREE_TYPE (fndecl)),
9036 case BUILT_IN_ISNORMAL
:
9038 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9039 islessequal(fabs(x),DBL_MAX). */
9040 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
9041 tree
const isge_fn
= builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
9042 tree
const type
= TREE_TYPE (arg
);
9043 REAL_VALUE_TYPE rmax
, rmin
;
9046 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
9047 real_from_string (&rmax
, buf
);
9048 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9049 real_from_string (&rmin
, buf
);
9050 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9051 result
= build_call_expr (isle_fn
, 2, arg
,
9052 build_real (type
, rmax
));
9053 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
9054 build_call_expr (isge_fn
, 2, arg
,
9055 build_real (type
, rmin
)));
9065 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9066 ARG is the argument for the call. */
9069 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
9071 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9074 if (!validate_arg (arg
, REAL_TYPE
))
9077 switch (builtin_index
)
9079 case BUILT_IN_ISINF
:
9080 if (!HONOR_INFINITIES (arg
))
9081 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9083 if (TREE_CODE (arg
) == REAL_CST
)
9085 r
= TREE_REAL_CST (arg
);
9086 if (real_isinf (&r
))
9087 return real_compare (GT_EXPR
, &r
, &dconst0
)
9088 ? integer_one_node
: integer_minus_one_node
;
9090 return integer_zero_node
;
9095 case BUILT_IN_ISINF_SIGN
:
9097 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9098 /* In a boolean context, GCC will fold the inner COND_EXPR to
9099 1. So e.g. "if (isinf_sign(x))" would be folded to just
9100 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9101 tree signbit_fn
= mathfn_built_in_1 (TREE_TYPE (arg
), BUILT_IN_SIGNBIT
, 0);
9102 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
9103 tree tmp
= NULL_TREE
;
9105 arg
= builtin_save_expr (arg
);
9107 if (signbit_fn
&& isinf_fn
)
9109 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
9110 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
9112 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9113 signbit_call
, integer_zero_node
);
9114 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
9115 isinf_call
, integer_zero_node
);
9117 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
9118 integer_minus_one_node
, integer_one_node
);
9119 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9127 case BUILT_IN_ISFINITE
:
9128 if (!HONOR_NANS (arg
)
9129 && !HONOR_INFINITIES (arg
))
9130 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
9132 if (TREE_CODE (arg
) == REAL_CST
)
9134 r
= TREE_REAL_CST (arg
);
9135 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9140 case BUILT_IN_ISNAN
:
9141 if (!HONOR_NANS (arg
))
9142 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
9144 if (TREE_CODE (arg
) == REAL_CST
)
9146 r
= TREE_REAL_CST (arg
);
9147 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9150 arg
= builtin_save_expr (arg
);
9151 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
9158 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9159 This builtin will generate code to return the appropriate floating
9160 point classification depending on the value of the floating point
9161 number passed in. The possible return values must be supplied as
9162 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9163 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9164 one floating point argument which is "type generic". */
9167 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
9169 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
9170 arg
, type
, res
, tmp
;
9175 /* Verify the required arguments in the original call. */
9177 || !validate_arg (args
[0], INTEGER_TYPE
)
9178 || !validate_arg (args
[1], INTEGER_TYPE
)
9179 || !validate_arg (args
[2], INTEGER_TYPE
)
9180 || !validate_arg (args
[3], INTEGER_TYPE
)
9181 || !validate_arg (args
[4], INTEGER_TYPE
)
9182 || !validate_arg (args
[5], REAL_TYPE
))
9186 fp_infinite
= args
[1];
9187 fp_normal
= args
[2];
9188 fp_subnormal
= args
[3];
9191 type
= TREE_TYPE (arg
);
9192 mode
= TYPE_MODE (type
);
9193 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
9197 (fabs(x) == Inf ? FP_INFINITE :
9198 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9199 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9201 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9202 build_real (type
, dconst0
));
9203 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
9204 tmp
, fp_zero
, fp_subnormal
);
9206 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
9207 real_from_string (&r
, buf
);
9208 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
9209 arg
, build_real (type
, r
));
9210 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
9212 if (HONOR_INFINITIES (mode
))
9215 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
9216 build_real (type
, r
));
9217 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
9221 if (HONOR_NANS (mode
))
9223 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
9224 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
9230 /* Fold a call to an unordered comparison function such as
9231 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9232 being called and ARG0 and ARG1 are the arguments for the call.
9233 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9234 the opposite of the desired result. UNORDERED_CODE is used
9235 for modes that can hold NaNs and ORDERED_CODE is used for
9239 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
9240 enum tree_code unordered_code
,
9241 enum tree_code ordered_code
)
9243 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9244 enum tree_code code
;
9246 enum tree_code code0
, code1
;
9247 tree cmp_type
= NULL_TREE
;
9249 type0
= TREE_TYPE (arg0
);
9250 type1
= TREE_TYPE (arg1
);
9252 code0
= TREE_CODE (type0
);
9253 code1
= TREE_CODE (type1
);
9255 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9256 /* Choose the wider of two real types. */
9257 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9259 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9261 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9264 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
9265 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
9267 if (unordered_code
== UNORDERED_EXPR
)
9269 if (!HONOR_NANS (arg0
))
9270 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
9271 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
9274 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
9275 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
9276 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
9279 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9280 arithmetics if it can never overflow, or into internal functions that
9281 return both result of arithmetics and overflowed boolean flag in
9282 a complex integer result, or some other check for overflow. */
9285 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
9286 tree arg0
, tree arg1
, tree arg2
)
9288 enum internal_fn ifn
= IFN_LAST
;
9289 tree type
= TREE_TYPE (TREE_TYPE (arg2
));
9290 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
9293 case BUILT_IN_ADD_OVERFLOW
:
9294 case BUILT_IN_SADD_OVERFLOW
:
9295 case BUILT_IN_SADDL_OVERFLOW
:
9296 case BUILT_IN_SADDLL_OVERFLOW
:
9297 case BUILT_IN_UADD_OVERFLOW
:
9298 case BUILT_IN_UADDL_OVERFLOW
:
9299 case BUILT_IN_UADDLL_OVERFLOW
:
9300 ifn
= IFN_ADD_OVERFLOW
;
9302 case BUILT_IN_SUB_OVERFLOW
:
9303 case BUILT_IN_SSUB_OVERFLOW
:
9304 case BUILT_IN_SSUBL_OVERFLOW
:
9305 case BUILT_IN_SSUBLL_OVERFLOW
:
9306 case BUILT_IN_USUB_OVERFLOW
:
9307 case BUILT_IN_USUBL_OVERFLOW
:
9308 case BUILT_IN_USUBLL_OVERFLOW
:
9309 ifn
= IFN_SUB_OVERFLOW
;
9311 case BUILT_IN_MUL_OVERFLOW
:
9312 case BUILT_IN_SMUL_OVERFLOW
:
9313 case BUILT_IN_SMULL_OVERFLOW
:
9314 case BUILT_IN_SMULLL_OVERFLOW
:
9315 case BUILT_IN_UMUL_OVERFLOW
:
9316 case BUILT_IN_UMULL_OVERFLOW
:
9317 case BUILT_IN_UMULLL_OVERFLOW
:
9318 ifn
= IFN_MUL_OVERFLOW
;
9323 tree ctype
= build_complex_type (type
);
9324 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
9326 tree tgt
= save_expr (call
);
9327 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
9328 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
9329 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
9331 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
9332 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
9335 /* Fold a call to built-in function FNDECL with 0 arguments.
9336 This function returns NULL_TREE if no simplification was possible. */
9339 fold_builtin_0 (location_t loc
, tree fndecl
)
9341 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9342 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9345 CASE_FLT_FN (BUILT_IN_INF
):
9346 case BUILT_IN_INFD32
:
9347 case BUILT_IN_INFD64
:
9348 case BUILT_IN_INFD128
:
9349 return fold_builtin_inf (loc
, type
, true);
9351 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9352 return fold_builtin_inf (loc
, type
, false);
9354 case BUILT_IN_CLASSIFY_TYPE
:
9355 return fold_builtin_classify_type (NULL_TREE
);
9363 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9364 This function returns NULL_TREE if no simplification was possible. */
9367 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9369 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9370 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9373 case BUILT_IN_CONSTANT_P
:
9375 tree val
= fold_builtin_constant_p (arg0
);
9377 /* Gimplification will pull the CALL_EXPR for the builtin out of
9378 an if condition. When not optimizing, we'll not CSE it back.
9379 To avoid link error types of regressions, return false now. */
9380 if (!val
&& !optimize
)
9381 val
= integer_zero_node
;
9386 case BUILT_IN_CLASSIFY_TYPE
:
9387 return fold_builtin_classify_type (arg0
);
9389 case BUILT_IN_STRLEN
:
9390 return fold_builtin_strlen (loc
, type
, arg0
);
9392 CASE_FLT_FN (BUILT_IN_FABS
):
9393 case BUILT_IN_FABSD32
:
9394 case BUILT_IN_FABSD64
:
9395 case BUILT_IN_FABSD128
:
9396 return fold_builtin_fabs (loc
, arg0
, type
);
9400 case BUILT_IN_LLABS
:
9401 case BUILT_IN_IMAXABS
:
9402 return fold_builtin_abs (loc
, arg0
, type
);
9404 CASE_FLT_FN (BUILT_IN_CONJ
):
9405 if (validate_arg (arg0
, COMPLEX_TYPE
)
9406 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9407 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
9410 CASE_FLT_FN (BUILT_IN_CREAL
):
9411 if (validate_arg (arg0
, COMPLEX_TYPE
)
9412 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9413 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
9416 CASE_FLT_FN (BUILT_IN_CIMAG
):
9417 if (validate_arg (arg0
, COMPLEX_TYPE
)
9418 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9419 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
9422 CASE_FLT_FN (BUILT_IN_CCOS
):
9423 if (validate_arg (arg0
, COMPLEX_TYPE
)
9424 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9425 return do_mpc_arg1 (arg0
, type
, mpc_cos
);
9428 CASE_FLT_FN (BUILT_IN_CCOSH
):
9429 if (validate_arg (arg0
, COMPLEX_TYPE
)
9430 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9431 return do_mpc_arg1 (arg0
, type
, mpc_cosh
);
9434 CASE_FLT_FN (BUILT_IN_CPROJ
):
9435 if (TREE_CODE (arg0
) == COMPLEX_CST
9436 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9438 const REAL_VALUE_TYPE
*real
9439 = TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
9440 const REAL_VALUE_TYPE
*imag
9441 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
9443 if (real_isinf (real
) || real_isinf (imag
))
9444 return build_complex_inf (type
, imag
->sign
);
9450 CASE_FLT_FN (BUILT_IN_CSIN
):
9451 if (validate_arg (arg0
, COMPLEX_TYPE
)
9452 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9453 return do_mpc_arg1 (arg0
, type
, mpc_sin
);
9456 CASE_FLT_FN (BUILT_IN_CSINH
):
9457 if (validate_arg (arg0
, COMPLEX_TYPE
)
9458 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9459 return do_mpc_arg1 (arg0
, type
, mpc_sinh
);
9462 CASE_FLT_FN (BUILT_IN_CTAN
):
9463 if (validate_arg (arg0
, COMPLEX_TYPE
)
9464 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9465 return do_mpc_arg1 (arg0
, type
, mpc_tan
);
9468 CASE_FLT_FN (BUILT_IN_CTANH
):
9469 if (validate_arg (arg0
, COMPLEX_TYPE
)
9470 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9471 return do_mpc_arg1 (arg0
, type
, mpc_tanh
);
9474 CASE_FLT_FN (BUILT_IN_CLOG
):
9475 if (validate_arg (arg0
, COMPLEX_TYPE
)
9476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9477 return do_mpc_arg1 (arg0
, type
, mpc_log
);
9480 CASE_FLT_FN (BUILT_IN_CSQRT
):
9481 if (validate_arg (arg0
, COMPLEX_TYPE
)
9482 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9483 return do_mpc_arg1 (arg0
, type
, mpc_sqrt
);
9486 CASE_FLT_FN (BUILT_IN_CASIN
):
9487 if (validate_arg (arg0
, COMPLEX_TYPE
)
9488 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9489 return do_mpc_arg1 (arg0
, type
, mpc_asin
);
9492 CASE_FLT_FN (BUILT_IN_CACOS
):
9493 if (validate_arg (arg0
, COMPLEX_TYPE
)
9494 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9495 return do_mpc_arg1 (arg0
, type
, mpc_acos
);
9498 CASE_FLT_FN (BUILT_IN_CATAN
):
9499 if (validate_arg (arg0
, COMPLEX_TYPE
)
9500 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9501 return do_mpc_arg1 (arg0
, type
, mpc_atan
);
9504 CASE_FLT_FN (BUILT_IN_CASINH
):
9505 if (validate_arg (arg0
, COMPLEX_TYPE
)
9506 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9507 return do_mpc_arg1 (arg0
, type
, mpc_asinh
);
9510 CASE_FLT_FN (BUILT_IN_CACOSH
):
9511 if (validate_arg (arg0
, COMPLEX_TYPE
)
9512 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9513 return do_mpc_arg1 (arg0
, type
, mpc_acosh
);
9516 CASE_FLT_FN (BUILT_IN_CATANH
):
9517 if (validate_arg (arg0
, COMPLEX_TYPE
)
9518 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9519 return do_mpc_arg1 (arg0
, type
, mpc_atanh
);
9522 CASE_FLT_FN (BUILT_IN_CABS
):
9523 if (TREE_CODE (arg0
) == COMPLEX_CST
9524 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
9525 return do_mpfr_arg2 (TREE_REALPART (arg0
), TREE_IMAGPART (arg0
),
9529 CASE_FLT_FN (BUILT_IN_CARG
):
9530 return fold_builtin_carg (loc
, arg0
, type
);
9532 CASE_FLT_FN (BUILT_IN_SQRT
):
9533 if (validate_arg (arg0
, REAL_TYPE
))
9534 return do_mpfr_arg1 (arg0
, type
, mpfr_sqrt
, &dconst0
, NULL
, true);
9537 CASE_FLT_FN (BUILT_IN_CBRT
):
9538 if (validate_arg (arg0
, REAL_TYPE
))
9539 return do_mpfr_arg1 (arg0
, type
, mpfr_cbrt
, NULL
, NULL
, 0);
9542 CASE_FLT_FN (BUILT_IN_ASIN
):
9543 if (validate_arg (arg0
, REAL_TYPE
))
9544 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9545 &dconstm1
, &dconst1
, true);
9548 CASE_FLT_FN (BUILT_IN_ACOS
):
9549 if (validate_arg (arg0
, REAL_TYPE
))
9550 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9551 &dconstm1
, &dconst1
, true);
9554 CASE_FLT_FN (BUILT_IN_ATAN
):
9555 if (validate_arg (arg0
, REAL_TYPE
))
9556 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9559 CASE_FLT_FN (BUILT_IN_ASINH
):
9560 if (validate_arg (arg0
, REAL_TYPE
))
9561 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9564 CASE_FLT_FN (BUILT_IN_ACOSH
):
9565 if (validate_arg (arg0
, REAL_TYPE
))
9566 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9567 &dconst1
, NULL
, true);
9570 CASE_FLT_FN (BUILT_IN_ATANH
):
9571 if (validate_arg (arg0
, REAL_TYPE
))
9572 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9573 &dconstm1
, &dconst1
, false);
9576 CASE_FLT_FN (BUILT_IN_SIN
):
9577 if (validate_arg (arg0
, REAL_TYPE
))
9578 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9581 CASE_FLT_FN (BUILT_IN_COS
):
9582 if (validate_arg (arg0
, REAL_TYPE
))
9583 return do_mpfr_arg1 (arg0
, type
, mpfr_cos
, NULL
, NULL
, 0);
9586 CASE_FLT_FN (BUILT_IN_TAN
):
9587 if (validate_arg (arg0
, REAL_TYPE
))
9588 return do_mpfr_arg1 (arg0
, type
, mpfr_tan
, NULL
, NULL
, 0);
9591 CASE_FLT_FN (BUILT_IN_CEXP
):
9592 return fold_builtin_cexp (loc
, arg0
, type
);
9594 CASE_FLT_FN (BUILT_IN_CEXPI
):
9595 if (validate_arg (arg0
, REAL_TYPE
))
9596 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9599 CASE_FLT_FN (BUILT_IN_SINH
):
9600 if (validate_arg (arg0
, REAL_TYPE
))
9601 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9604 CASE_FLT_FN (BUILT_IN_COSH
):
9605 if (validate_arg (arg0
, REAL_TYPE
))
9606 return do_mpfr_arg1 (arg0
, type
, mpfr_cosh
, NULL
, NULL
, 0);
9609 CASE_FLT_FN (BUILT_IN_TANH
):
9610 if (validate_arg (arg0
, REAL_TYPE
))
9611 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9614 CASE_FLT_FN (BUILT_IN_ERF
):
9615 if (validate_arg (arg0
, REAL_TYPE
))
9616 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9619 CASE_FLT_FN (BUILT_IN_ERFC
):
9620 if (validate_arg (arg0
, REAL_TYPE
))
9621 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9624 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9625 if (validate_arg (arg0
, REAL_TYPE
))
9626 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9629 CASE_FLT_FN (BUILT_IN_EXP
):
9630 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
9632 CASE_FLT_FN (BUILT_IN_EXP2
):
9633 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
9635 CASE_FLT_FN (BUILT_IN_EXP10
):
9636 CASE_FLT_FN (BUILT_IN_POW10
):
9637 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
9639 CASE_FLT_FN (BUILT_IN_EXPM1
):
9640 if (validate_arg (arg0
, REAL_TYPE
))
9641 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9644 CASE_FLT_FN (BUILT_IN_LOG
):
9645 if (validate_arg (arg0
, REAL_TYPE
))
9646 return do_mpfr_arg1 (arg0
, type
, mpfr_log
, &dconst0
, NULL
, false);
9649 CASE_FLT_FN (BUILT_IN_LOG2
):
9650 if (validate_arg (arg0
, REAL_TYPE
))
9651 return do_mpfr_arg1 (arg0
, type
, mpfr_log2
, &dconst0
, NULL
, false);
9654 CASE_FLT_FN (BUILT_IN_LOG10
):
9655 if (validate_arg (arg0
, REAL_TYPE
))
9656 return do_mpfr_arg1 (arg0
, type
, mpfr_log10
, &dconst0
, NULL
, false);
9659 CASE_FLT_FN (BUILT_IN_LOG1P
):
9660 if (validate_arg (arg0
, REAL_TYPE
))
9661 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9662 &dconstm1
, NULL
, false);
9665 CASE_FLT_FN (BUILT_IN_J0
):
9666 if (validate_arg (arg0
, REAL_TYPE
))
9667 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
9671 CASE_FLT_FN (BUILT_IN_J1
):
9672 if (validate_arg (arg0
, REAL_TYPE
))
9673 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
9677 CASE_FLT_FN (BUILT_IN_Y0
):
9678 if (validate_arg (arg0
, REAL_TYPE
))
9679 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
9680 &dconst0
, NULL
, false);
9683 CASE_FLT_FN (BUILT_IN_Y1
):
9684 if (validate_arg (arg0
, REAL_TYPE
))
9685 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
9686 &dconst0
, NULL
, false);
9689 CASE_FLT_FN (BUILT_IN_NAN
):
9690 case BUILT_IN_NAND32
:
9691 case BUILT_IN_NAND64
:
9692 case BUILT_IN_NAND128
:
9693 return fold_builtin_nan (arg0
, type
, true);
9695 CASE_FLT_FN (BUILT_IN_NANS
):
9696 return fold_builtin_nan (arg0
, type
, false);
9698 CASE_FLT_FN (BUILT_IN_FLOOR
):
9699 return fold_builtin_floor (loc
, fndecl
, arg0
);
9701 CASE_FLT_FN (BUILT_IN_CEIL
):
9702 return fold_builtin_ceil (loc
, fndecl
, arg0
);
9704 CASE_FLT_FN (BUILT_IN_TRUNC
):
9705 return fold_builtin_trunc (loc
, fndecl
, arg0
);
9707 CASE_FLT_FN (BUILT_IN_ROUND
):
9708 return fold_builtin_round (loc
, fndecl
, arg0
);
9710 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9711 CASE_FLT_FN (BUILT_IN_RINT
):
9712 return fold_trunc_transparent_mathfn (loc
, fndecl
, arg0
);
9714 CASE_FLT_FN (BUILT_IN_ICEIL
):
9715 CASE_FLT_FN (BUILT_IN_LCEIL
):
9716 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9717 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9718 CASE_FLT_FN (BUILT_IN_IFLOOR
):
9719 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9720 CASE_FLT_FN (BUILT_IN_IROUND
):
9721 CASE_FLT_FN (BUILT_IN_LROUND
):
9722 CASE_FLT_FN (BUILT_IN_LLROUND
):
9723 return fold_builtin_int_roundingfn (loc
, fndecl
, arg0
);
9725 CASE_FLT_FN (BUILT_IN_IRINT
):
9726 CASE_FLT_FN (BUILT_IN_LRINT
):
9727 CASE_FLT_FN (BUILT_IN_LLRINT
):
9728 return fold_fixed_mathfn (loc
, fndecl
, arg0
);
9730 case BUILT_IN_BSWAP16
:
9731 case BUILT_IN_BSWAP32
:
9732 case BUILT_IN_BSWAP64
:
9733 return fold_builtin_bswap (fndecl
, arg0
);
9735 CASE_INT_FN (BUILT_IN_FFS
):
9736 CASE_INT_FN (BUILT_IN_CLZ
):
9737 CASE_INT_FN (BUILT_IN_CTZ
):
9738 CASE_INT_FN (BUILT_IN_CLRSB
):
9739 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9740 CASE_INT_FN (BUILT_IN_PARITY
):
9741 return fold_builtin_bitop (fndecl
, arg0
);
9743 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9744 return fold_builtin_signbit (loc
, arg0
, type
);
9746 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9747 return fold_builtin_significand (loc
, arg0
, type
);
9749 CASE_FLT_FN (BUILT_IN_ILOGB
):
9750 CASE_FLT_FN (BUILT_IN_LOGB
):
9751 return fold_builtin_logb (loc
, arg0
, type
);
9753 case BUILT_IN_ISASCII
:
9754 return fold_builtin_isascii (loc
, arg0
);
9756 case BUILT_IN_TOASCII
:
9757 return fold_builtin_toascii (loc
, arg0
);
9759 case BUILT_IN_ISDIGIT
:
9760 return fold_builtin_isdigit (loc
, arg0
);
9762 CASE_FLT_FN (BUILT_IN_FINITE
):
9763 case BUILT_IN_FINITED32
:
9764 case BUILT_IN_FINITED64
:
9765 case BUILT_IN_FINITED128
:
9766 case BUILT_IN_ISFINITE
:
9768 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
9771 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9774 CASE_FLT_FN (BUILT_IN_ISINF
):
9775 case BUILT_IN_ISINFD32
:
9776 case BUILT_IN_ISINFD64
:
9777 case BUILT_IN_ISINFD128
:
9779 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
9782 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9785 case BUILT_IN_ISNORMAL
:
9786 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
9788 case BUILT_IN_ISINF_SIGN
:
9789 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
9791 CASE_FLT_FN (BUILT_IN_ISNAN
):
9792 case BUILT_IN_ISNAND32
:
9793 case BUILT_IN_ISNAND64
:
9794 case BUILT_IN_ISNAND128
:
9795 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
9798 if (integer_zerop (arg0
))
9799 return build_empty_stmt (loc
);
9810 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9811 This function returns NULL_TREE if no simplification was possible. */
9814 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
9816 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9817 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9821 CASE_FLT_FN (BUILT_IN_JN
):
9822 if (validate_arg (arg0
, INTEGER_TYPE
)
9823 && validate_arg (arg1
, REAL_TYPE
))
9824 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
9827 CASE_FLT_FN (BUILT_IN_YN
):
9828 if (validate_arg (arg0
, INTEGER_TYPE
)
9829 && validate_arg (arg1
, REAL_TYPE
))
9830 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
9834 CASE_FLT_FN (BUILT_IN_DREM
):
9835 CASE_FLT_FN (BUILT_IN_REMAINDER
):
9836 if (validate_arg (arg0
, REAL_TYPE
)
9837 && validate_arg (arg1
, REAL_TYPE
))
9838 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
9841 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
9842 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
9843 if (validate_arg (arg0
, REAL_TYPE
)
9844 && validate_arg (arg1
, POINTER_TYPE
))
9845 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
9848 CASE_FLT_FN (BUILT_IN_ATAN2
):
9849 if (validate_arg (arg0
, REAL_TYPE
)
9850 && validate_arg (arg1
, REAL_TYPE
))
9851 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9854 CASE_FLT_FN (BUILT_IN_FDIM
):
9855 if (validate_arg (arg0
, REAL_TYPE
)
9856 && validate_arg (arg1
, REAL_TYPE
))
9857 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9860 CASE_FLT_FN (BUILT_IN_HYPOT
):
9861 return fold_builtin_hypot (loc
, arg0
, arg1
, type
);
9863 CASE_FLT_FN (BUILT_IN_CPOW
):
9864 if (validate_arg (arg0
, COMPLEX_TYPE
)
9865 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
9866 && validate_arg (arg1
, COMPLEX_TYPE
)
9867 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
)
9868 return do_mpc_arg2 (arg0
, arg1
, type
, /*do_nonfinite=*/ 0, mpc_pow
);
9871 CASE_FLT_FN (BUILT_IN_LDEXP
):
9872 return fold_builtin_load_exponent (loc
, arg0
, arg1
, type
, /*ldexp=*/true);
9873 CASE_FLT_FN (BUILT_IN_SCALBN
):
9874 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9875 return fold_builtin_load_exponent (loc
, arg0
, arg1
,
9876 type
, /*ldexp=*/false);
9878 CASE_FLT_FN (BUILT_IN_FREXP
):
9879 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
9881 CASE_FLT_FN (BUILT_IN_MODF
):
9882 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
9884 case BUILT_IN_STRSTR
:
9885 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
9887 case BUILT_IN_STRSPN
:
9888 return fold_builtin_strspn (loc
, arg0
, arg1
);
9890 case BUILT_IN_STRCSPN
:
9891 return fold_builtin_strcspn (loc
, arg0
, arg1
);
9893 case BUILT_IN_STRCHR
:
9894 case BUILT_IN_INDEX
:
9895 return fold_builtin_strchr (loc
, arg0
, arg1
, type
);
9897 case BUILT_IN_STRRCHR
:
9898 case BUILT_IN_RINDEX
:
9899 return fold_builtin_strrchr (loc
, arg0
, arg1
, type
);
9901 case BUILT_IN_STRCMP
:
9902 return fold_builtin_strcmp (loc
, arg0
, arg1
);
9904 case BUILT_IN_STRPBRK
:
9905 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
9907 case BUILT_IN_EXPECT
:
9908 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
9910 CASE_FLT_FN (BUILT_IN_POW
):
9911 return fold_builtin_pow (loc
, fndecl
, arg0
, arg1
, type
);
9913 CASE_FLT_FN (BUILT_IN_POWI
):
9914 return fold_builtin_powi (loc
, fndecl
, arg0
, arg1
, type
);
9916 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
9917 return fold_builtin_copysign (loc
, arg0
, arg1
, type
);
9919 CASE_FLT_FN (BUILT_IN_FMIN
):
9920 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/false);
9922 CASE_FLT_FN (BUILT_IN_FMAX
):
9923 return fold_builtin_fmin_fmax (loc
, arg0
, arg1
, type
, /*max=*/true);
9925 case BUILT_IN_ISGREATER
:
9926 return fold_builtin_unordered_cmp (loc
, fndecl
,
9927 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9928 case BUILT_IN_ISGREATEREQUAL
:
9929 return fold_builtin_unordered_cmp (loc
, fndecl
,
9930 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9931 case BUILT_IN_ISLESS
:
9932 return fold_builtin_unordered_cmp (loc
, fndecl
,
9933 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9934 case BUILT_IN_ISLESSEQUAL
:
9935 return fold_builtin_unordered_cmp (loc
, fndecl
,
9936 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9937 case BUILT_IN_ISLESSGREATER
:
9938 return fold_builtin_unordered_cmp (loc
, fndecl
,
9939 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9940 case BUILT_IN_ISUNORDERED
:
9941 return fold_builtin_unordered_cmp (loc
, fndecl
,
9942 arg0
, arg1
, UNORDERED_EXPR
,
9945 /* We do the folding for va_start in the expander. */
9946 case BUILT_IN_VA_START
:
9949 case BUILT_IN_OBJECT_SIZE
:
9950 return fold_builtin_object_size (arg0
, arg1
);
9952 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
9953 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
9955 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
9956 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
9964 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9966 This function returns NULL_TREE if no simplification was possible. */
9969 fold_builtin_3 (location_t loc
, tree fndecl
,
9970 tree arg0
, tree arg1
, tree arg2
)
9972 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9973 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9977 CASE_FLT_FN (BUILT_IN_SINCOS
):
9978 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
9980 CASE_FLT_FN (BUILT_IN_FMA
):
9981 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
9984 CASE_FLT_FN (BUILT_IN_REMQUO
):
9985 if (validate_arg (arg0
, REAL_TYPE
)
9986 && validate_arg (arg1
, REAL_TYPE
)
9987 && validate_arg (arg2
, POINTER_TYPE
))
9988 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9991 case BUILT_IN_STRNCMP
:
9992 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
9994 case BUILT_IN_MEMCHR
:
9995 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
9998 case BUILT_IN_MEMCMP
:
9999 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
10001 case BUILT_IN_EXPECT
:
10002 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
10004 case BUILT_IN_ADD_OVERFLOW
:
10005 case BUILT_IN_SUB_OVERFLOW
:
10006 case BUILT_IN_MUL_OVERFLOW
:
10007 case BUILT_IN_SADD_OVERFLOW
:
10008 case BUILT_IN_SADDL_OVERFLOW
:
10009 case BUILT_IN_SADDLL_OVERFLOW
:
10010 case BUILT_IN_SSUB_OVERFLOW
:
10011 case BUILT_IN_SSUBL_OVERFLOW
:
10012 case BUILT_IN_SSUBLL_OVERFLOW
:
10013 case BUILT_IN_SMUL_OVERFLOW
:
10014 case BUILT_IN_SMULL_OVERFLOW
:
10015 case BUILT_IN_SMULLL_OVERFLOW
:
10016 case BUILT_IN_UADD_OVERFLOW
:
10017 case BUILT_IN_UADDL_OVERFLOW
:
10018 case BUILT_IN_UADDLL_OVERFLOW
:
10019 case BUILT_IN_USUB_OVERFLOW
:
10020 case BUILT_IN_USUBL_OVERFLOW
:
10021 case BUILT_IN_USUBLL_OVERFLOW
:
10022 case BUILT_IN_UMUL_OVERFLOW
:
10023 case BUILT_IN_UMULL_OVERFLOW
:
10024 case BUILT_IN_UMULLL_OVERFLOW
:
10025 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
10033 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10034 arguments. IGNORE is true if the result of the
10035 function call is ignored. This function returns NULL_TREE if no
10036 simplification was possible. */
10039 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
10041 tree ret
= NULL_TREE
;
10046 ret
= fold_builtin_0 (loc
, fndecl
);
10049 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
10052 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
10055 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
10058 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
10063 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10064 SET_EXPR_LOCATION (ret
, loc
);
10065 TREE_NO_WARNING (ret
) = 1;
10071 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10072 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10073 of arguments in ARGS to be omitted. OLDNARGS is the number of
10074 elements in ARGS. */
10077 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
10078 int skip
, tree fndecl
, int n
, va_list newargs
)
10080 int nargs
= oldnargs
- skip
+ n
;
10087 buffer
= XALLOCAVEC (tree
, nargs
);
10088 for (i
= 0; i
< n
; i
++)
10089 buffer
[i
] = va_arg (newargs
, tree
);
10090 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10091 buffer
[i
] = args
[j
];
10094 buffer
= args
+ skip
;
10096 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
10099 /* Return true if FNDECL shouldn't be folded right now.
10100 If a built-in function has an inline attribute always_inline
10101 wrapper, defer folding it after always_inline functions have
10102 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10103 might not be performed. */
10106 avoid_folding_inline_builtin (tree fndecl
)
10108 return (DECL_DECLARED_INLINE_P (fndecl
)
10109 && DECL_DISREGARD_INLINE_LIMITS (fndecl
)
10111 && !cfun
->always_inline_functions_inlined
10112 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)));
10115 /* A wrapper function for builtin folding that prevents warnings for
10116 "statement without effect" and the like, caused by removing the
10117 call node earlier than the warning is generated. */
10120 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
10122 tree ret
= NULL_TREE
;
10123 tree fndecl
= get_callee_fndecl (exp
);
10125 && TREE_CODE (fndecl
) == FUNCTION_DECL
10126 && DECL_BUILT_IN (fndecl
)
10127 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10128 yet. Defer folding until we see all the arguments
10129 (after inlining). */
10130 && !CALL_EXPR_VA_ARG_PACK (exp
))
10132 int nargs
= call_expr_nargs (exp
);
10134 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10135 instead last argument is __builtin_va_arg_pack (). Defer folding
10136 even in that case, until arguments are finalized. */
10137 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10139 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10141 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10142 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10143 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10147 if (avoid_folding_inline_builtin (fndecl
))
10150 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10151 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
10152 CALL_EXPR_ARGP (exp
), ignore
);
10155 tree
*args
= CALL_EXPR_ARGP (exp
);
10156 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
10164 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10165 N arguments are passed in the array ARGARRAY. Return a folded
10166 expression or NULL_TREE if no simplification was possible. */
10169 fold_builtin_call_array (location_t loc
, tree
,
10174 if (TREE_CODE (fn
) != ADDR_EXPR
)
10177 tree fndecl
= TREE_OPERAND (fn
, 0);
10178 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10179 && DECL_BUILT_IN (fndecl
))
10181 /* If last argument is __builtin_va_arg_pack (), arguments to this
10182 function are not finalized yet. Defer folding until they are. */
10183 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10185 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10187 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10188 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10189 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10192 if (avoid_folding_inline_builtin (fndecl
))
10194 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10195 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
10197 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
10203 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10204 along with N new arguments specified as the "..." parameters. SKIP
10205 is the number of arguments in EXP to be omitted. This function is used
10206 to do varargs-to-varargs transformations. */
10209 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
10215 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
10216 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
10222 /* Validate a single argument ARG against a tree code CODE representing
10226 validate_arg (const_tree arg
, enum tree_code code
)
10230 else if (code
== POINTER_TYPE
)
10231 return POINTER_TYPE_P (TREE_TYPE (arg
));
10232 else if (code
== INTEGER_TYPE
)
10233 return INTEGRAL_TYPE_P (TREE_TYPE (arg
));
10234 return code
== TREE_CODE (TREE_TYPE (arg
));
10237 /* This function validates the types of a function call argument list
10238 against a specified list of tree_codes. If the last specifier is a 0,
10239 that represents an ellipses, otherwise the last specifier must be a
10242 This is the GIMPLE version of validate_arglist. Eventually we want to
10243 completely convert builtins.c to work from GIMPLEs and the tree based
10244 validate_arglist will then be removed. */
10247 validate_gimple_arglist (const gcall
*call
, ...)
10249 enum tree_code code
;
10255 va_start (ap
, call
);
10260 code
= (enum tree_code
) va_arg (ap
, int);
10264 /* This signifies an ellipses, any further arguments are all ok. */
10268 /* This signifies an endlink, if no arguments remain, return
10269 true, otherwise return false. */
10270 res
= (i
== gimple_call_num_args (call
));
10273 /* If no parameters remain or the parameter's code does not
10274 match the specified code, return false. Otherwise continue
10275 checking any remaining arguments. */
10276 arg
= gimple_call_arg (call
, i
++);
10277 if (!validate_arg (arg
, code
))
10284 /* We need gotos here since we can only have one VA_CLOSE in a
10292 /* Default target-specific builtin expander that does nothing. */
10295 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10296 rtx target ATTRIBUTE_UNUSED
,
10297 rtx subtarget ATTRIBUTE_UNUSED
,
10298 machine_mode mode ATTRIBUTE_UNUSED
,
10299 int ignore ATTRIBUTE_UNUSED
)
10304 /* Returns true is EXP represents data that would potentially reside
10305 in a readonly section. */
10308 readonly_data_expr (tree exp
)
10312 if (TREE_CODE (exp
) != ADDR_EXPR
)
10315 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10319 /* Make sure we call decl_readonly_section only for trees it
10320 can handle (since it returns true for everything it doesn't
10322 if (TREE_CODE (exp
) == STRING_CST
10323 || TREE_CODE (exp
) == CONSTRUCTOR
10324 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10325 return decl_readonly_section (exp
, 0);
10330 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10331 to the call, and TYPE is its return type.
10333 Return NULL_TREE if no simplification was possible, otherwise return the
10334 simplified form of the call as a tree.
10336 The simplified form may be a constant or other expression which
10337 computes the same value, but in a more efficient manner (including
10338 calls to other builtin functions).
10340 The call may contain arguments which need to be evaluated, but
10341 which are not useful to determine the result of the call. In
10342 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10343 COMPOUND_EXPR will be an argument which must be evaluated.
10344 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10345 COMPOUND_EXPR in the chain will contain the tree for the simplified
10346 form of the builtin function call. */
10349 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
10351 if (!validate_arg (s1
, POINTER_TYPE
)
10352 || !validate_arg (s2
, POINTER_TYPE
))
10357 const char *p1
, *p2
;
10359 p2
= c_getstr (s2
);
10363 p1
= c_getstr (s1
);
10366 const char *r
= strstr (p1
, p2
);
10370 return build_int_cst (TREE_TYPE (s1
), 0);
10372 /* Return an offset into the constant string argument. */
10373 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10374 return fold_convert_loc (loc
, type
, tem
);
10377 /* The argument is const char *, and the result is char *, so we need
10378 a type conversion here to avoid a warning. */
10380 return fold_convert_loc (loc
, type
, s1
);
10385 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10389 /* New argument list transforming strstr(s1, s2) to
10390 strchr(s1, s2[0]). */
10391 return build_call_expr_loc (loc
, fn
, 2, s1
,
10392 build_int_cst (integer_type_node
, p2
[0]));
10396 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10397 the call, and TYPE is its return type.
10399 Return NULL_TREE if no simplification was possible, otherwise return the
10400 simplified form of the call as a tree.
10402 The simplified form may be a constant or other expression which
10403 computes the same value, but in a more efficient manner (including
10404 calls to other builtin functions).
10406 The call may contain arguments which need to be evaluated, but
10407 which are not useful to determine the result of the call. In
10408 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10409 COMPOUND_EXPR will be an argument which must be evaluated.
10410 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10411 COMPOUND_EXPR in the chain will contain the tree for the simplified
10412 form of the builtin function call. */
10415 fold_builtin_strchr (location_t loc
, tree s1
, tree s2
, tree type
)
10417 if (!validate_arg (s1
, POINTER_TYPE
)
10418 || !validate_arg (s2
, INTEGER_TYPE
))
10424 if (TREE_CODE (s2
) != INTEGER_CST
)
10427 p1
= c_getstr (s1
);
10434 if (target_char_cast (s2
, &c
))
10437 r
= strchr (p1
, c
);
10440 return build_int_cst (TREE_TYPE (s1
), 0);
10442 /* Return an offset into the constant string argument. */
10443 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10444 return fold_convert_loc (loc
, type
, tem
);
10450 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10451 the call, and TYPE is its return type.
10453 Return NULL_TREE if no simplification was possible, otherwise return the
10454 simplified form of the call as a tree.
10456 The simplified form may be a constant or other expression which
10457 computes the same value, but in a more efficient manner (including
10458 calls to other builtin functions).
10460 The call may contain arguments which need to be evaluated, but
10461 which are not useful to determine the result of the call. In
10462 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10463 COMPOUND_EXPR will be an argument which must be evaluated.
10464 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10465 COMPOUND_EXPR in the chain will contain the tree for the simplified
10466 form of the builtin function call. */
10469 fold_builtin_strrchr (location_t loc
, tree s1
, tree s2
, tree type
)
10471 if (!validate_arg (s1
, POINTER_TYPE
)
10472 || !validate_arg (s2
, INTEGER_TYPE
))
10479 if (TREE_CODE (s2
) != INTEGER_CST
)
10482 p1
= c_getstr (s1
);
10489 if (target_char_cast (s2
, &c
))
10492 r
= strrchr (p1
, c
);
10495 return build_int_cst (TREE_TYPE (s1
), 0);
10497 /* Return an offset into the constant string argument. */
10498 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10499 return fold_convert_loc (loc
, type
, tem
);
10502 if (! integer_zerop (s2
))
10505 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10509 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10510 return build_call_expr_loc (loc
, fn
, 2, s1
, s2
);
10514 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10515 to the call, and TYPE is its return type.
10517 Return NULL_TREE if no simplification was possible, otherwise return the
10518 simplified form of the call as a tree.
10520 The simplified form may be a constant or other expression which
10521 computes the same value, but in a more efficient manner (including
10522 calls to other builtin functions).
10524 The call may contain arguments which need to be evaluated, but
10525 which are not useful to determine the result of the call. In
10526 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10527 COMPOUND_EXPR will be an argument which must be evaluated.
10528 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10529 COMPOUND_EXPR in the chain will contain the tree for the simplified
10530 form of the builtin function call. */
10533 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
10535 if (!validate_arg (s1
, POINTER_TYPE
)
10536 || !validate_arg (s2
, POINTER_TYPE
))
10541 const char *p1
, *p2
;
10543 p2
= c_getstr (s2
);
10547 p1
= c_getstr (s1
);
10550 const char *r
= strpbrk (p1
, p2
);
10554 return build_int_cst (TREE_TYPE (s1
), 0);
10556 /* Return an offset into the constant string argument. */
10557 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
10558 return fold_convert_loc (loc
, type
, tem
);
10562 /* strpbrk(x, "") == NULL.
10563 Evaluate and ignore s1 in case it had side-effects. */
10564 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
10567 return NULL_TREE
; /* Really call strpbrk. */
10569 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
10573 /* New argument list transforming strpbrk(s1, s2) to
10574 strchr(s1, s2[0]). */
10575 return build_call_expr_loc (loc
, fn
, 2, s1
,
10576 build_int_cst (integer_type_node
, p2
[0]));
10580 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10583 Return NULL_TREE if no simplification was possible, otherwise return the
10584 simplified form of the call as a tree.
10586 The simplified form may be a constant or other expression which
10587 computes the same value, but in a more efficient manner (including
10588 calls to other builtin functions).
10590 The call may contain arguments which need to be evaluated, but
10591 which are not useful to determine the result of the call. In
10592 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10593 COMPOUND_EXPR will be an argument which must be evaluated.
10594 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10595 COMPOUND_EXPR in the chain will contain the tree for the simplified
10596 form of the builtin function call. */
10599 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
10601 if (!validate_arg (s1
, POINTER_TYPE
)
10602 || !validate_arg (s2
, POINTER_TYPE
))
10606 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10608 /* If both arguments are constants, evaluate at compile-time. */
10611 const size_t r
= strspn (p1
, p2
);
10612 return build_int_cst (size_type_node
, r
);
10615 /* If either argument is "", return NULL_TREE. */
10616 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10617 /* Evaluate and ignore both arguments in case either one has
10619 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
10625 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10628 Return NULL_TREE if no simplification was possible, otherwise return the
10629 simplified form of the call as a tree.
10631 The simplified form may be a constant or other expression which
10632 computes the same value, but in a more efficient manner (including
10633 calls to other builtin functions).
10635 The call may contain arguments which need to be evaluated, but
10636 which are not useful to determine the result of the call. In
10637 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10638 COMPOUND_EXPR will be an argument which must be evaluated.
10639 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10640 COMPOUND_EXPR in the chain will contain the tree for the simplified
10641 form of the builtin function call. */
10644 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
10646 if (!validate_arg (s1
, POINTER_TYPE
)
10647 || !validate_arg (s2
, POINTER_TYPE
))
10651 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10653 /* If both arguments are constants, evaluate at compile-time. */
10656 const size_t r
= strcspn (p1
, p2
);
10657 return build_int_cst (size_type_node
, r
);
10660 /* If the first argument is "", return NULL_TREE. */
10661 if (p1
&& *p1
== '\0')
10663 /* Evaluate and ignore argument s2 in case it has
10665 return omit_one_operand_loc (loc
, size_type_node
,
10666 size_zero_node
, s2
);
10669 /* If the second argument is "", return __builtin_strlen(s1). */
10670 if (p2
&& *p2
== '\0')
10672 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
10674 /* If the replacement _DECL isn't initialized, don't do the
10679 return build_call_expr_loc (loc
, fn
, 1, s1
);
10685 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
10686 produced. False otherwise. This is done so that we don't output the error
10687 or warning twice or three times. */
10690 fold_builtin_next_arg (tree exp
, bool va_start_p
)
10692 tree fntype
= TREE_TYPE (current_function_decl
);
10693 int nargs
= call_expr_nargs (exp
);
10695 /* There is good chance the current input_location points inside the
10696 definition of the va_start macro (perhaps on the token for
10697 builtin) in a system header, so warnings will not be emitted.
10698 Use the location in real source code. */
10699 source_location current_location
=
10700 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
10703 if (!stdarg_p (fntype
))
10705 error ("%<va_start%> used in function with fixed args");
10711 if (va_start_p
&& (nargs
!= 2))
10713 error ("wrong number of arguments to function %<va_start%>");
10716 arg
= CALL_EXPR_ARG (exp
, 1);
10718 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10719 when we checked the arguments and if needed issued a warning. */
10724 /* Evidently an out of date version of <stdarg.h>; can't validate
10725 va_start's second argument, but can still work as intended. */
10726 warning_at (current_location
,
10728 "%<__builtin_next_arg%> called without an argument");
10731 else if (nargs
> 1)
10733 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10736 arg
= CALL_EXPR_ARG (exp
, 0);
10739 if (TREE_CODE (arg
) == SSA_NAME
)
10740 arg
= SSA_NAME_VAR (arg
);
10742 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10743 or __builtin_next_arg (0) the first time we see it, after checking
10744 the arguments and if needed issuing a warning. */
10745 if (!integer_zerop (arg
))
10747 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
10749 /* Strip off all nops for the sake of the comparison. This
10750 is not quite the same as STRIP_NOPS. It does more.
10751 We must also strip off INDIRECT_EXPR for C++ reference
10753 while (CONVERT_EXPR_P (arg
)
10754 || TREE_CODE (arg
) == INDIRECT_REF
)
10755 arg
= TREE_OPERAND (arg
, 0);
10756 if (arg
!= last_parm
)
10758 /* FIXME: Sometimes with the tree optimizers we can get the
10759 not the last argument even though the user used the last
10760 argument. We just warn and set the arg to be the last
10761 argument so that we will get wrong-code because of
10763 warning_at (current_location
,
10765 "second parameter of %<va_start%> not last named argument");
10768 /* Undefined by C99 7.15.1.4p4 (va_start):
10769 "If the parameter parmN is declared with the register storage
10770 class, with a function or array type, or with a type that is
10771 not compatible with the type that results after application of
10772 the default argument promotions, the behavior is undefined."
10774 else if (DECL_REGISTER (arg
))
10776 warning_at (current_location
,
10778 "undefined behaviour when second parameter of "
10779 "%<va_start%> is declared with %<register%> storage");
10782 /* We want to verify the second parameter just once before the tree
10783 optimizers are run and then avoid keeping it in the tree,
10784 as otherwise we could warn even for correct code like:
10785 void foo (int i, ...)
10786 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10788 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
10790 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
10796 /* Expand a call EXP to __builtin_object_size. */
10799 expand_builtin_object_size (tree exp
)
10802 int object_size_type
;
10803 tree fndecl
= get_callee_fndecl (exp
);
10805 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10807 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10809 expand_builtin_trap ();
10813 ost
= CALL_EXPR_ARG (exp
, 1);
10816 if (TREE_CODE (ost
) != INTEGER_CST
10817 || tree_int_cst_sgn (ost
) < 0
10818 || compare_tree_int (ost
, 3) > 0)
10820 error ("%Klast argument of %D is not integer constant between 0 and 3",
10822 expand_builtin_trap ();
10826 object_size_type
= tree_to_shwi (ost
);
10828 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
10831 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10832 FCODE is the BUILT_IN_* to use.
10833 Return NULL_RTX if we failed; the caller should emit a normal call,
10834 otherwise try to get the result in TARGET, if convenient (and in
10835 mode MODE if that's convenient). */
10838 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
10839 enum built_in_function fcode
)
10841 tree dest
, src
, len
, size
;
10843 if (!validate_arglist (exp
,
10845 fcode
== BUILT_IN_MEMSET_CHK
10846 ? INTEGER_TYPE
: POINTER_TYPE
,
10847 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
10850 dest
= CALL_EXPR_ARG (exp
, 0);
10851 src
= CALL_EXPR_ARG (exp
, 1);
10852 len
= CALL_EXPR_ARG (exp
, 2);
10853 size
= CALL_EXPR_ARG (exp
, 3);
10855 if (! tree_fits_uhwi_p (size
))
10858 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
10862 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
10864 warning_at (tree_nonartificial_location (exp
),
10865 0, "%Kcall to %D will always overflow destination buffer",
10866 exp
, get_callee_fndecl (exp
));
10871 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10872 mem{cpy,pcpy,move,set} is available. */
10875 case BUILT_IN_MEMCPY_CHK
:
10876 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
10878 case BUILT_IN_MEMPCPY_CHK
:
10879 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
10881 case BUILT_IN_MEMMOVE_CHK
:
10882 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
10884 case BUILT_IN_MEMSET_CHK
:
10885 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
10894 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
10895 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10896 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10897 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10899 else if (fcode
== BUILT_IN_MEMSET_CHK
)
10903 unsigned int dest_align
= get_pointer_alignment (dest
);
10905 /* If DEST is not a pointer type, call the normal function. */
10906 if (dest_align
== 0)
10909 /* If SRC and DEST are the same (and not volatile), do nothing. */
10910 if (operand_equal_p (src
, dest
, 0))
10914 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
10916 /* Evaluate and ignore LEN in case it has side-effects. */
10917 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10918 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
10921 expr
= fold_build_pointer_plus (dest
, len
);
10922 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
10925 /* __memmove_chk special case. */
10926 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
10928 unsigned int src_align
= get_pointer_alignment (src
);
10930 if (src_align
== 0)
10933 /* If src is categorized for a readonly section we can use
10934 normal __memcpy_chk. */
10935 if (readonly_data_expr (src
))
10937 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
10940 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
10941 dest
, src
, len
, size
);
10942 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
10943 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
10944 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
10951 /* Emit warning if a buffer overflow is detected at compile time. */
10954 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
10958 location_t loc
= tree_nonartificial_location (exp
);
10962 case BUILT_IN_STRCPY_CHK
:
10963 case BUILT_IN_STPCPY_CHK
:
10964 /* For __strcat_chk the warning will be emitted only if overflowing
10965 by at least strlen (dest) + 1 bytes. */
10966 case BUILT_IN_STRCAT_CHK
:
10967 len
= CALL_EXPR_ARG (exp
, 1);
10968 size
= CALL_EXPR_ARG (exp
, 2);
10971 case BUILT_IN_STRNCAT_CHK
:
10972 case BUILT_IN_STRNCPY_CHK
:
10973 case BUILT_IN_STPNCPY_CHK
:
10974 len
= CALL_EXPR_ARG (exp
, 2);
10975 size
= CALL_EXPR_ARG (exp
, 3);
10977 case BUILT_IN_SNPRINTF_CHK
:
10978 case BUILT_IN_VSNPRINTF_CHK
:
10979 len
= CALL_EXPR_ARG (exp
, 1);
10980 size
= CALL_EXPR_ARG (exp
, 3);
10983 gcc_unreachable ();
10989 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
10994 len
= c_strlen (len
, 1);
10995 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
10998 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11000 tree src
= CALL_EXPR_ARG (exp
, 1);
11001 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
11003 src
= c_strlen (src
, 1);
11004 if (! src
|| ! tree_fits_uhwi_p (src
))
11006 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
11007 exp
, get_callee_fndecl (exp
));
11010 else if (tree_int_cst_lt (src
, size
))
11013 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
11016 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
11017 exp
, get_callee_fndecl (exp
));
11020 /* Emit warning if a buffer overflow is detected at compile time
11021 in __sprintf_chk/__vsprintf_chk calls. */
11024 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11026 tree size
, len
, fmt
;
11027 const char *fmt_str
;
11028 int nargs
= call_expr_nargs (exp
);
11030 /* Verify the required arguments in the original call. */
11034 size
= CALL_EXPR_ARG (exp
, 2);
11035 fmt
= CALL_EXPR_ARG (exp
, 3);
11037 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
11040 /* Check whether the format is a literal string constant. */
11041 fmt_str
= c_getstr (fmt
);
11042 if (fmt_str
== NULL
)
11045 if (!init_target_chars ())
11048 /* If the format doesn't contain % args or %%, we know its size. */
11049 if (strchr (fmt_str
, target_percent
) == 0)
11050 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11051 /* If the format is "%s" and first ... argument is a string literal,
11053 else if (fcode
== BUILT_IN_SPRINTF_CHK
11054 && strcmp (fmt_str
, target_percent_s
) == 0)
11060 arg
= CALL_EXPR_ARG (exp
, 4);
11061 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11064 len
= c_strlen (arg
, 1);
11065 if (!len
|| ! tree_fits_uhwi_p (len
))
11071 if (! tree_int_cst_lt (len
, size
))
11072 warning_at (tree_nonartificial_location (exp
),
11073 0, "%Kcall to %D will always overflow destination buffer",
11074 exp
, get_callee_fndecl (exp
));
11077 /* Emit warning if a free is called with address of a variable. */
11080 maybe_emit_free_warning (tree exp
)
11082 tree arg
= CALL_EXPR_ARG (exp
, 0);
11085 if (TREE_CODE (arg
) != ADDR_EXPR
)
11088 arg
= get_base_address (TREE_OPERAND (arg
, 0));
11089 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
11092 if (SSA_VAR_P (arg
))
11093 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11094 "%Kattempt to free a non-heap object %qD", exp
, arg
);
11096 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
11097 "%Kattempt to free a non-heap object", exp
);
11100 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11104 fold_builtin_object_size (tree ptr
, tree ost
)
11106 unsigned HOST_WIDE_INT bytes
;
11107 int object_size_type
;
11109 if (!validate_arg (ptr
, POINTER_TYPE
)
11110 || !validate_arg (ost
, INTEGER_TYPE
))
11115 if (TREE_CODE (ost
) != INTEGER_CST
11116 || tree_int_cst_sgn (ost
) < 0
11117 || compare_tree_int (ost
, 3) > 0)
11120 object_size_type
= tree_to_shwi (ost
);
11122 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11123 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11124 and (size_t) 0 for types 2 and 3. */
11125 if (TREE_SIDE_EFFECTS (ptr
))
11126 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11128 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11130 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11131 if (wi::fits_to_tree_p (bytes
, size_type_node
))
11132 return build_int_cstu (size_type_node
, bytes
);
11134 else if (TREE_CODE (ptr
) == SSA_NAME
)
11136 /* If object size is not known yet, delay folding until
11137 later. Maybe subsequent passes will help determining
11139 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11140 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2 ? -1 : 0)
11141 && wi::fits_to_tree_p (bytes
, size_type_node
))
11142 return build_int_cstu (size_type_node
, bytes
);
11148 /* Builtins with folding operations that operate on "..." arguments
11149 need special handling; we need to store the arguments in a convenient
11150 data structure before attempting any folding. Fortunately there are
11151 only a few builtins that fall into this category. FNDECL is the
11152 function, EXP is the CALL_EXPR for the call. */
11155 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
11157 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
11158 tree ret
= NULL_TREE
;
11162 case BUILT_IN_FPCLASSIFY
:
11163 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
11171 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
11172 SET_EXPR_LOCATION (ret
, loc
);
11173 TREE_NO_WARNING (ret
) = 1;
11179 /* Initialize format string characters in the target charset. */
11182 init_target_chars (void)
11187 target_newline
= lang_hooks
.to_target_charset ('\n');
11188 target_percent
= lang_hooks
.to_target_charset ('%');
11189 target_c
= lang_hooks
.to_target_charset ('c');
11190 target_s
= lang_hooks
.to_target_charset ('s');
11191 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
11195 target_percent_c
[0] = target_percent
;
11196 target_percent_c
[1] = target_c
;
11197 target_percent_c
[2] = '\0';
11199 target_percent_s
[0] = target_percent
;
11200 target_percent_s
[1] = target_s
;
11201 target_percent_s
[2] = '\0';
11203 target_percent_s_newline
[0] = target_percent
;
11204 target_percent_s_newline
[1] = target_s
;
11205 target_percent_s_newline
[2] = target_newline
;
11206 target_percent_s_newline
[3] = '\0';
11213 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11214 and no overflow/underflow occurred. INEXACT is true if M was not
11215 exactly calculated. TYPE is the tree type for the result. This
11216 function assumes that you cleared the MPFR flags and then
11217 calculated M to see if anything subsequently set a flag prior to
11218 entering this function. Return NULL_TREE if any checks fail. */
11221 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
11223 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11224 overflow/underflow occurred. If -frounding-math, proceed iff the
11225 result of calling FUNC was exact. */
11226 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11227 && (!flag_rounding_math
|| !inexact
))
11229 REAL_VALUE_TYPE rr
;
11231 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
11232 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11233 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11234 but the mpft_t is not, then we underflowed in the
11236 if (real_isfinite (&rr
)
11237 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
11239 REAL_VALUE_TYPE rmode
;
11241 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
11242 /* Proceed iff the specified mode can hold the value. */
11243 if (real_identical (&rmode
, &rr
))
11244 return build_real (type
, rmode
);
11250 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11251 number and no overflow/underflow occurred. INEXACT is true if M
11252 was not exactly calculated. TYPE is the tree type for the result.
11253 This function assumes that you cleared the MPFR flags and then
11254 calculated M to see if anything subsequently set a flag prior to
11255 entering this function. Return NULL_TREE if any checks fail, if
11256 FORCE_CONVERT is true, then bypass the checks. */
11259 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
11261 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11262 overflow/underflow occurred. If -frounding-math, proceed iff the
11263 result of calling FUNC was exact. */
11265 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
11266 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11267 && (!flag_rounding_math
|| !inexact
)))
11269 REAL_VALUE_TYPE re
, im
;
11271 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
11272 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
11273 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11274 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11275 but the mpft_t is not, then we underflowed in the
11278 || (real_isfinite (&re
) && real_isfinite (&im
)
11279 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
11280 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
11282 REAL_VALUE_TYPE re_mode
, im_mode
;
11284 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
11285 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
11286 /* Proceed iff the specified mode can hold the value. */
11288 || (real_identical (&re_mode
, &re
)
11289 && real_identical (&im_mode
, &im
)))
11290 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
11291 build_real (TREE_TYPE (type
), im_mode
));
11297 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
11298 FUNC on it and return the resulting value as a tree with type TYPE.
11299 If MIN and/or MAX are not NULL, then the supplied ARG must be
11300 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11301 acceptable values, otherwise they are not. The mpfr precision is
11302 set to the precision of TYPE. We assume that function FUNC returns
11303 zero if the result could be calculated exactly within the requested
11307 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
11308 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
11311 tree result
= NULL_TREE
;
11315 /* To proceed, MPFR must exactly represent the target floating point
11316 format, which only happens when the target base equals two. */
11317 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11318 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
11320 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11322 if (real_isfinite (ra
)
11323 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
11324 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
11326 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11327 const int prec
= fmt
->p
;
11328 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11332 mpfr_init2 (m
, prec
);
11333 mpfr_from_real (m
, ra
, GMP_RNDN
);
11334 mpfr_clear_flags ();
11335 inexact
= func (m
, m
, rnd
);
11336 result
= do_mpfr_ckconv (m
, type
, inexact
);
11344 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
11345 FUNC on it and return the resulting value as a tree with type TYPE.
11346 The mpfr precision is set to the precision of TYPE. We assume that
11347 function FUNC returns zero if the result could be calculated
11348 exactly within the requested precision. */
11351 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
11352 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11354 tree result
= NULL_TREE
;
11359 /* To proceed, MPFR must exactly represent the target floating point
11360 format, which only happens when the target base equals two. */
11361 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11362 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11363 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11365 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11366 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11368 if (real_isfinite (ra1
) && real_isfinite (ra2
))
11370 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11371 const int prec
= fmt
->p
;
11372 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11376 mpfr_inits2 (prec
, m1
, m2
, NULL
);
11377 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11378 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11379 mpfr_clear_flags ();
11380 inexact
= func (m1
, m1
, m2
, rnd
);
11381 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11382 mpfr_clears (m1
, m2
, NULL
);
11389 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
11390 FUNC on it and return the resulting value as a tree with type TYPE.
11391 The mpfr precision is set to the precision of TYPE. We assume that
11392 function FUNC returns zero if the result could be calculated
11393 exactly within the requested precision. */
11396 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
11397 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
11399 tree result
= NULL_TREE
;
11405 /* To proceed, MPFR must exactly represent the target floating point
11406 format, which only happens when the target base equals two. */
11407 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11408 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
11409 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
11410 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
11412 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
11413 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
11414 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
11416 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
11418 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11419 const int prec
= fmt
->p
;
11420 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11424 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
11425 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11426 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
11427 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
11428 mpfr_clear_flags ();
11429 inexact
= func (m1
, m1
, m2
, m3
, rnd
);
11430 result
= do_mpfr_ckconv (m1
, type
, inexact
);
11431 mpfr_clears (m1
, m2
, m3
, NULL
);
11438 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11439 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
11440 If ARG_SINP and ARG_COSP are NULL then the result is returned
11441 as a complex value.
11442 The type is taken from the type of ARG and is used for setting the
11443 precision of the calculation and results. */
11446 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
11448 tree
const type
= TREE_TYPE (arg
);
11449 tree result
= NULL_TREE
;
11453 /* To proceed, MPFR must exactly represent the target floating point
11454 format, which only happens when the target base equals two. */
11455 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11456 && TREE_CODE (arg
) == REAL_CST
11457 && !TREE_OVERFLOW (arg
))
11459 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
11461 if (real_isfinite (ra
))
11463 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11464 const int prec
= fmt
->p
;
11465 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11466 tree result_s
, result_c
;
11470 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
11471 mpfr_from_real (m
, ra
, GMP_RNDN
);
11472 mpfr_clear_flags ();
11473 inexact
= mpfr_sin_cos (ms
, mc
, m
, rnd
);
11474 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
11475 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
11476 mpfr_clears (m
, ms
, mc
, NULL
);
11477 if (result_s
&& result_c
)
11479 /* If we are to return in a complex value do so. */
11480 if (!arg_sinp
&& !arg_cosp
)
11481 return build_complex (build_complex_type (type
),
11482 result_c
, result_s
);
11484 /* Dereference the sin/cos pointer arguments. */
11485 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
11486 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
11487 /* Proceed if valid pointer type were passed in. */
11488 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
11489 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
11491 /* Set the values. */
11492 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
11494 TREE_SIDE_EFFECTS (result_s
) = 1;
11495 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
11497 TREE_SIDE_EFFECTS (result_c
) = 1;
11498 /* Combine the assignments into a compound expr. */
11499 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11500 result_s
, result_c
));
11508 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11509 two-argument mpfr order N Bessel function FUNC on them and return
11510 the resulting value as a tree with type TYPE. The mpfr precision
11511 is set to the precision of TYPE. We assume that function FUNC
11512 returns zero if the result could be calculated exactly within the
11513 requested precision. */
11515 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
11516 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
11517 const REAL_VALUE_TYPE
*min
, bool inclusive
)
11519 tree result
= NULL_TREE
;
11524 /* To proceed, MPFR must exactly represent the target floating point
11525 format, which only happens when the target base equals two. */
11526 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11527 && tree_fits_shwi_p (arg1
)
11528 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
11530 const HOST_WIDE_INT n
= tree_to_shwi (arg1
);
11531 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
11534 && real_isfinite (ra
)
11535 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
11537 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11538 const int prec
= fmt
->p
;
11539 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11543 mpfr_init2 (m
, prec
);
11544 mpfr_from_real (m
, ra
, GMP_RNDN
);
11545 mpfr_clear_flags ();
11546 inexact
= func (m
, n
, m
, rnd
);
11547 result
= do_mpfr_ckconv (m
, type
, inexact
);
11555 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11556 the pointer *(ARG_QUO) and return the result. The type is taken
11557 from the type of ARG0 and is used for setting the precision of the
11558 calculation and results. */
11561 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
11563 tree
const type
= TREE_TYPE (arg0
);
11564 tree result
= NULL_TREE
;
11569 /* To proceed, MPFR must exactly represent the target floating point
11570 format, which only happens when the target base equals two. */
11571 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11572 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
11573 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
11575 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
11576 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
11578 if (real_isfinite (ra0
) && real_isfinite (ra1
))
11580 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11581 const int prec
= fmt
->p
;
11582 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11587 mpfr_inits2 (prec
, m0
, m1
, NULL
);
11588 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
11589 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
11590 mpfr_clear_flags ();
11591 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
11592 /* Remquo is independent of the rounding mode, so pass
11593 inexact=0 to do_mpfr_ckconv(). */
11594 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
11595 mpfr_clears (m0
, m1
, NULL
);
11598 /* MPFR calculates quo in the host's long so it may
11599 return more bits in quo than the target int can hold
11600 if sizeof(host long) > sizeof(target int). This can
11601 happen even for native compilers in LP64 mode. In
11602 these cases, modulo the quo value with the largest
11603 number that the target int can hold while leaving one
11604 bit for the sign. */
11605 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
11606 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
11608 /* Dereference the quo pointer argument. */
11609 arg_quo
= build_fold_indirect_ref (arg_quo
);
11610 /* Proceed iff a valid pointer type was passed in. */
11611 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
11613 /* Set the value. */
11615 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
11616 build_int_cst (TREE_TYPE (arg_quo
),
11618 TREE_SIDE_EFFECTS (result_quo
) = 1;
11619 /* Combine the quo assignment with the rem. */
11620 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11621 result_quo
, result_rem
));
11629 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11630 resulting value as a tree with type TYPE. The mpfr precision is
11631 set to the precision of TYPE. We assume that this mpfr function
11632 returns zero if the result could be calculated exactly within the
11633 requested precision. In addition, the integer pointer represented
11634 by ARG_SG will be dereferenced and set to the appropriate signgam
11638 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
11640 tree result
= NULL_TREE
;
11644 /* To proceed, MPFR must exactly represent the target floating point
11645 format, which only happens when the target base equals two. Also
11646 verify ARG is a constant and that ARG_SG is an int pointer. */
11647 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
11648 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
11649 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
11650 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
11652 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
11654 /* In addition to NaN and Inf, the argument cannot be zero or a
11655 negative integer. */
11656 if (real_isfinite (ra
)
11657 && ra
->cl
!= rvc_zero
11658 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
11660 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
11661 const int prec
= fmt
->p
;
11662 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11667 mpfr_init2 (m
, prec
);
11668 mpfr_from_real (m
, ra
, GMP_RNDN
);
11669 mpfr_clear_flags ();
11670 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
11671 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
11677 /* Dereference the arg_sg pointer argument. */
11678 arg_sg
= build_fold_indirect_ref (arg_sg
);
11679 /* Assign the signgam value into *arg_sg. */
11680 result_sg
= fold_build2 (MODIFY_EXPR
,
11681 TREE_TYPE (arg_sg
), arg_sg
,
11682 build_int_cst (TREE_TYPE (arg_sg
), sg
));
11683 TREE_SIDE_EFFECTS (result_sg
) = 1;
11684 /* Combine the signgam assignment with the lgamma result. */
11685 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
11686 result_sg
, result_lg
));
11694 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11695 function FUNC on it and return the resulting value as a tree with
11696 type TYPE. The mpfr precision is set to the precision of TYPE. We
11697 assume that function FUNC returns zero if the result could be
11698 calculated exactly within the requested precision. */
11701 do_mpc_arg1 (tree arg
, tree type
, int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_rnd_t
))
11703 tree result
= NULL_TREE
;
11707 /* To proceed, MPFR must exactly represent the target floating point
11708 format, which only happens when the target base equals two. */
11709 if (TREE_CODE (arg
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg
)
11710 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == REAL_TYPE
11711 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg
))))->b
== 2)
11713 const REAL_VALUE_TYPE
*const re
= TREE_REAL_CST_PTR (TREE_REALPART (arg
));
11714 const REAL_VALUE_TYPE
*const im
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg
));
11716 if (real_isfinite (re
) && real_isfinite (im
))
11718 const struct real_format
*const fmt
=
11719 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11720 const int prec
= fmt
->p
;
11721 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11722 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11726 mpc_init2 (m
, prec
);
11727 mpfr_from_real (mpc_realref (m
), re
, rnd
);
11728 mpfr_from_real (mpc_imagref (m
), im
, rnd
);
11729 mpfr_clear_flags ();
11730 inexact
= func (m
, m
, crnd
);
11731 result
= do_mpc_ckconv (m
, type
, inexact
, /*force_convert=*/ 0);
11739 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11740 mpc function FUNC on it and return the resulting value as a tree
11741 with type TYPE. The mpfr precision is set to the precision of
11742 TYPE. We assume that function FUNC returns zero if the result
11743 could be calculated exactly within the requested precision. If
11744 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11745 in the arguments and/or results. */
11748 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
11749 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
11751 tree result
= NULL_TREE
;
11756 /* To proceed, MPFR must exactly represent the target floating point
11757 format, which only happens when the target base equals two. */
11758 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
11759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
11760 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
11761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
11762 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
11764 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
11765 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
11766 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
11767 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
11770 || (real_isfinite (re0
) && real_isfinite (im0
)
11771 && real_isfinite (re1
) && real_isfinite (im1
)))
11773 const struct real_format
*const fmt
=
11774 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
11775 const int prec
= fmt
->p
;
11776 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
11777 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
11781 mpc_init2 (m0
, prec
);
11782 mpc_init2 (m1
, prec
);
11783 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
11784 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
11785 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
11786 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
11787 mpfr_clear_flags ();
11788 inexact
= func (m0
, m0
, m1
, crnd
);
11789 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
11798 /* A wrapper function for builtin folding that prevents warnings for
11799 "statement without effect" and the like, caused by removing the
11800 call node earlier than the warning is generated. */
11803 fold_call_stmt (gcall
*stmt
, bool ignore
)
11805 tree ret
= NULL_TREE
;
11806 tree fndecl
= gimple_call_fndecl (stmt
);
11807 location_t loc
= gimple_location (stmt
);
11809 && TREE_CODE (fndecl
) == FUNCTION_DECL
11810 && DECL_BUILT_IN (fndecl
)
11811 && !gimple_call_va_arg_pack_p (stmt
))
11813 int nargs
= gimple_call_num_args (stmt
);
11814 tree
*args
= (nargs
> 0
11815 ? gimple_call_arg_ptr (stmt
, 0)
11816 : &error_mark_node
);
11818 if (avoid_folding_inline_builtin (fndecl
))
11820 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
11822 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
11826 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
11829 /* Propagate location information from original call to
11830 expansion of builtin. Otherwise things like
11831 maybe_emit_chk_warning, that operate on the expansion
11832 of a builtin, will use the wrong location information. */
11833 if (gimple_has_location (stmt
))
11835 tree realret
= ret
;
11836 if (TREE_CODE (ret
) == NOP_EXPR
)
11837 realret
= TREE_OPERAND (ret
, 0);
11838 if (CAN_HAVE_LOCATION_P (realret
)
11839 && !EXPR_HAS_LOCATION (realret
))
11840 SET_EXPR_LOCATION (realret
, loc
);
11850 /* Look up the function in builtin_decl that corresponds to DECL
11851 and set ASMSPEC as its user assembler name. DECL must be a
11852 function decl that declares a builtin. */
11855 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
11858 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
11859 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
11862 builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
11863 set_user_assembler_name (builtin
, asmspec
);
11864 switch (DECL_FUNCTION_CODE (decl
))
11866 case BUILT_IN_MEMCPY
:
11867 init_block_move_fn (asmspec
);
11868 memcpy_libfunc
= set_user_assembler_libfunc ("memcpy", asmspec
);
11870 case BUILT_IN_MEMSET
:
11871 init_block_clear_fn (asmspec
);
11872 memset_libfunc
= set_user_assembler_libfunc ("memset", asmspec
);
11874 case BUILT_IN_MEMMOVE
:
11875 memmove_libfunc
= set_user_assembler_libfunc ("memmove", asmspec
);
11877 case BUILT_IN_MEMCMP
:
11878 memcmp_libfunc
= set_user_assembler_libfunc ("memcmp", asmspec
);
11880 case BUILT_IN_ABORT
:
11881 abort_libfunc
= set_user_assembler_libfunc ("abort", asmspec
);
11884 if (INT_TYPE_SIZE
< BITS_PER_WORD
)
11886 set_user_assembler_libfunc ("ffs", asmspec
);
11887 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
,
11888 MODE_INT
, 0), "ffs");
11896 /* Return true if DECL is a builtin that expands to a constant or similarly
11899 is_simple_builtin (tree decl
)
11901 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11902 switch (DECL_FUNCTION_CODE (decl
))
11904 /* Builtins that expand to constants. */
11905 case BUILT_IN_CONSTANT_P
:
11906 case BUILT_IN_EXPECT
:
11907 case BUILT_IN_OBJECT_SIZE
:
11908 case BUILT_IN_UNREACHABLE
:
11909 /* Simple register moves or loads from stack. */
11910 case BUILT_IN_ASSUME_ALIGNED
:
11911 case BUILT_IN_RETURN_ADDRESS
:
11912 case BUILT_IN_EXTRACT_RETURN_ADDR
:
11913 case BUILT_IN_FROB_RETURN_ADDR
:
11914 case BUILT_IN_RETURN
:
11915 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
11916 case BUILT_IN_FRAME_ADDRESS
:
11917 case BUILT_IN_VA_END
:
11918 case BUILT_IN_STACK_SAVE
:
11919 case BUILT_IN_STACK_RESTORE
:
11920 /* Exception state returns or moves registers around. */
11921 case BUILT_IN_EH_FILTER
:
11922 case BUILT_IN_EH_POINTER
:
11923 case BUILT_IN_EH_COPY_VALUES
:
11933 /* Return true if DECL is a builtin that is not expensive, i.e., they are
11934 most probably expanded inline into reasonably simple code. This is a
11935 superset of is_simple_builtin. */
11937 is_inexpensive_builtin (tree decl
)
11941 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
11943 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
11944 switch (DECL_FUNCTION_CODE (decl
))
11947 case BUILT_IN_ALLOCA
:
11948 case BUILT_IN_ALLOCA_WITH_ALIGN
:
11949 case BUILT_IN_BSWAP16
:
11950 case BUILT_IN_BSWAP32
:
11951 case BUILT_IN_BSWAP64
:
11953 case BUILT_IN_CLZIMAX
:
11954 case BUILT_IN_CLZL
:
11955 case BUILT_IN_CLZLL
:
11957 case BUILT_IN_CTZIMAX
:
11958 case BUILT_IN_CTZL
:
11959 case BUILT_IN_CTZLL
:
11961 case BUILT_IN_FFSIMAX
:
11962 case BUILT_IN_FFSL
:
11963 case BUILT_IN_FFSLL
:
11964 case BUILT_IN_IMAXABS
:
11965 case BUILT_IN_FINITE
:
11966 case BUILT_IN_FINITEF
:
11967 case BUILT_IN_FINITEL
:
11968 case BUILT_IN_FINITED32
:
11969 case BUILT_IN_FINITED64
:
11970 case BUILT_IN_FINITED128
:
11971 case BUILT_IN_FPCLASSIFY
:
11972 case BUILT_IN_ISFINITE
:
11973 case BUILT_IN_ISINF_SIGN
:
11974 case BUILT_IN_ISINF
:
11975 case BUILT_IN_ISINFF
:
11976 case BUILT_IN_ISINFL
:
11977 case BUILT_IN_ISINFD32
:
11978 case BUILT_IN_ISINFD64
:
11979 case BUILT_IN_ISINFD128
:
11980 case BUILT_IN_ISNAN
:
11981 case BUILT_IN_ISNANF
:
11982 case BUILT_IN_ISNANL
:
11983 case BUILT_IN_ISNAND32
:
11984 case BUILT_IN_ISNAND64
:
11985 case BUILT_IN_ISNAND128
:
11986 case BUILT_IN_ISNORMAL
:
11987 case BUILT_IN_ISGREATER
:
11988 case BUILT_IN_ISGREATEREQUAL
:
11989 case BUILT_IN_ISLESS
:
11990 case BUILT_IN_ISLESSEQUAL
:
11991 case BUILT_IN_ISLESSGREATER
:
11992 case BUILT_IN_ISUNORDERED
:
11993 case BUILT_IN_VA_ARG_PACK
:
11994 case BUILT_IN_VA_ARG_PACK_LEN
:
11995 case BUILT_IN_VA_COPY
:
11996 case BUILT_IN_TRAP
:
11997 case BUILT_IN_SAVEREGS
:
11998 case BUILT_IN_POPCOUNTL
:
11999 case BUILT_IN_POPCOUNTLL
:
12000 case BUILT_IN_POPCOUNTIMAX
:
12001 case BUILT_IN_POPCOUNT
:
12002 case BUILT_IN_PARITYL
:
12003 case BUILT_IN_PARITYLL
:
12004 case BUILT_IN_PARITYIMAX
:
12005 case BUILT_IN_PARITY
:
12006 case BUILT_IN_LABS
:
12007 case BUILT_IN_LLABS
:
12008 case BUILT_IN_PREFETCH
:
12009 case BUILT_IN_ACC_ON_DEVICE
:
12013 return is_simple_builtin (decl
);