/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "insn-config.h"
#include "statistics.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "lto-streamer.h"
#include "tree-chkp.h"
#include "gomp-constants.h"
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);
static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
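
/* Worked example (for illustration): if the access is known to be at byte
   address 8*k + 4 for unknown k, then M is 64 bits and N is 32 bits,
   i.e. *ALIGNP = 64 and *BITPOSP = 32, since 64 divides (address - 32)
   when both are expressed in bits.  */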
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned int ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
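
/* Example (for illustration): if get_object_alignment_1 reports an alignment
   of 64 bits with a bit offset of 32, the expression above reduces the
   result to 32 bits -- the largest power of two known to divide the
   object's address.  */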
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
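
/* Usage sketch (for illustration): for SRC == "foo\0bar" with a constant
   offset of 0, the strlen call above yields 3; with a non-constant offset
   the internal zero byte makes the length undeterminable and NULL_TREE is
   returned instead.  */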
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
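
/* Illustrative example: for the expression "hello" + 2, string_constant
   splits it into the STRING_CST "hello" and offset 2, so the function
   returns a host pointer to "llo".  */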
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
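
/* Illustrative example (assuming a little-endian target where neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN is set): c_readstr ("abc", SImode)
   places string byte I at bit position 8*I, producing the constant
   0x00636261; once the terminating NUL is reached, the remaining bytes
   are zero-filled.  */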
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
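
/* Illustrative example: on the common configuration where CHAR_TYPE_SIZE
   and HOST_BITS_PER_CHAR are both 8, casting the integer constant 65
   succeeds (returns 0) and stores 'A' in *P on an ASCII host; a wider
   target char value that does not fit a host char makes VAL differ from
   HOSTVAL and the function returns 1.  */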
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
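
/* To summarize the layout built above (illustrative sketch): the
   __builtin_setjmp buffer holds

     word 0        frame pointer (targetm.builtin_setjmp_frame_value)
     word 1        address of the receiver label
     word 2 ...    stack save area (sa_mode)

   with any remaining words available for machine-dependent use.  */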
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
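
/* Worked example of the rounding above (for illustration): with SIZE == 4
   and an argument register whose mode needs 8-byte alignment,
   CEIL (4, 8) * 8 rounds SIZE up to 8 before the register's
   GET_MODE_SIZE is added.  */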
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1835 /* Expand a call EXP to __builtin_classify_type. */
1838 expand_builtin_classify_type (tree exp
)
1840 if (call_expr_nargs (exp
))
1841 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1842 return GEN_INT (no_type_class
);
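/* Illustration (not part of the original sources): at the user level the
   builtin simply maps its argument's type to one of the enum type_class
   values from typeclass.h, e.g.

     int i;     __builtin_classify_type (i) == integer_type_class
     double d;  __builtin_classify_type (d) == real_type_class
     int *p;    __builtin_classify_type (p) == pointer_type_class

   and a call with no arguments classifies as no_type_class.  */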
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: \
  case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
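/* For reference, a mechanical sketch of one expansion: after preprocessing,
   CASE_MATHFN (BUILT_IN_SIN) becomes

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each CASE_MATHFN line in the switch below handles the double, float
   and long double variants of one math builtin at once.  */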
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
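/* Usage sketch (inferred from callers in this file): pass the operand type
   and the double-precision function code, e.g.

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the implicit declaration of sqrtf, or NULL_TREE when the
   target provides no implicit declaration for that type.  */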
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
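/* Worked example (a sketch, not part of the original sources): the check
   relies on the IEEE property that only NaN compares unequal to itself.
   do_compare_rtx_and_jump above tests TARGET EQ TARGET and jumps past the
   errno store for any non-NaN result, so only a domain error such as
   sqrt (-1.0) reaches the errno = EDOM store (or the fallback library
   call that sets errno itself).  */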
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
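/* Expansion sketch (an assumption, not from the original sources): on a
   target whose sqrt_optab has a DFmode handler, compiling

     double y = __builtin_sqrt (x);

   with -fno-math-errno lets the function above emit the sqrt insn
   directly; with errno tracking enabled the insn is followed by the
   expand_errno_check sequence, and targets without the optab fall
   through to the ordinary library call.  */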
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
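/* Usage sketch (taken from callers later in this file): the stpcpy
   expander rewrites an ignored-result call into plain strcpy with

     tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
     tree result = build_call_nofold_loc (loc, fn, 2, dst, src);

   and then expands RESULT; because the builder never folds, the caller
   can rely on getting a genuine CALL_EXPR back.  */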
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
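/* Worked example (a sketch; the callee is whatever optab_libfunc returns
   for powi_optab, typically __powidf2 in libgcc for DFmode): a call like

     double y = __builtin_powi (x, 5);

   is never open-coded here; it is lowered to that library call after
   converting the exponent to the target's int mode.  */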
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine the minimum size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti-range allowing negative values of N.  We can
	     still use that information to guess that N is not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
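/* Worked example (a sketch, not from the original sources): for

     void f (unsigned int n, char *a, char *b)
     {
       if (n >= 8 && n <= 64)
	 memcpy (a, b, n);
     }

   value range propagation gives N the range [8, 64], so the VR_RANGE case
   above reports *min_size = 8 and *max_size = *probable_max_size = 64,
   letting the block-move expander choose a strategy without a runtime
   size dispatch.  */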
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				     CALL_EXPR_TAILCALL (exp)
				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
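/* Expansion sketch (an assumption, not from the original sources): for a
   constant source string and constant length, e.g.

     char buf[4];
     memcpy (buf, "hi", 3);

   the store_by_pieces path above materializes the three bytes via
   builtin_memcpy_read_str as immediate stores, so the string constant is
   never loaded from memory at run time.  */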
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1,
					  exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
					     mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, machine_mode mode, int endp,
			     tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
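/* ENDP semantics, restated as a sketch: strcpy-like callers pass ENDP == 0
   and get the destination pointer back; stpcpy-like callers pass ENDP == 2
   and get the address of the NUL terminator, which is exactly what movstr
   produces; mempcpy-like callers pass ENDP == 1, so the NUL address is
   bumped by one to point one past the copied NUL.  */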
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2,
					 exp);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
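/* Worked example (a sketch, not from the original sources): for a 4-byte
   mode, the 0x01-filled buffer reads back as the coefficient 0x01010101,
   so a run-time byte value is replicated by a single multiplication:

     0xAB * 0x01010101 == 0xABABABAB

   which is exactly the replication the comment above describes, without
   emitting a separate store per byte.  */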
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
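/* As an illustration (added sketch): a call bzero (p, n) is expanded exactly
   as memset (p, 0, (size_t) n) would be, except that ORIG_EXP still names
   bzero, so a failed inline expansion falls back to a library call to bzero
   rather than to memset.  */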
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
  {
    tree len, len1, len2;
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result, insn;
    tree fndecl, fn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree arg3 = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode
      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

    len1 = c_strlen (arg1, 1);
    len2 = c_strlen (arg2, 1);

    if (len1)
      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
    if (len2)
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap,
       unless one has side effects.  If both strings have constant lengths,
       use the smaller.  */

    if (!len1)
      len = len2;
    else if (!len2)
      len = len1;
    else if (TREE_SIDE_EFFECTS (len1))
      len = len2;
    else if (TREE_SIDE_EFFECTS (len2))
      len = len1;
    else if (TREE_CODE (len1) != INTEGER_CST)
      len = len2;
    else if (TREE_CODE (len2) != INTEGER_CST)
      len = len1;
    else if (tree_int_cst_lt (len1, len2))
      len = len1;
    else
      len = len2;

    /* If both arguments have side effects, we cannot optimize.  */
    if (!len || TREE_SIDE_EFFECTS (len))
      return NULL_RTX;

    /* The actual new length parameter is MIN(len,arg3).  */
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			   fold_convert_loc (loc, TREE_TYPE (len), arg3));

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
    arg1 = builtin_save_expr (arg1);
    arg2 = builtin_save_expr (arg2);
    len = builtin_save_expr (len);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (len);
    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			  GEN_INT (MIN (arg1_align, arg2_align)));
    if (insn)
      {
	emit_insn (insn);

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	if (target == 0)
	  return convert_to_mode (mode, result, 0);
	convert_move (target, result, 0);
	return target;
      }

    /* Expand the library call ourselves using a stabilized argument
       list to avoid re-evaluating the function's arguments twice.  */
    fndecl = get_callee_fndecl (exp);
    fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				arg1, arg2, len);
    gcc_assert (TREE_CODE (fn) == CALL_EXPR);
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
    return expand_call (fn, target, target == const0_rtx);
  }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
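/* As an illustration (added sketch, assuming a target whose va_list is an
   array type, as on x86-64): in

     void f (va_list ap);

   the parameter AP decays to a pointer, so the unwrapping above compares the
   element record of va_list_type_node with the pointed-to record of AP's
   decayed type before declaring the two types equivalent.  */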
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

static rtx
expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
  rtx op0;
  rtx result;
  bool valid_arglist;
  unsigned int align;
  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
			    == BUILT_IN_ALLOCA_WITH_ALIGN);

  valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
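/* As an illustration (added sketch): for

     p = __builtin_alloca_with_align (n, 256);

   the second argument is a bit count, so ALIGN above is 256 bits (32 bytes),
   while a plain __builtin_alloca (n) requests BIGGEST_ALIGNMENT instead.  */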
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
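/* As an illustration (added sketch, assuming SImode is 32 bits): expanding
   __builtin_bswap32 (0x12345678) through bswap_optab yields 0x78563412; the
   convert_to_mode above handles an argument that arrives promoted to a mode
   wider than TARGET_MODE.  */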
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
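/* As an illustration (added sketch): by expansion time a source construct
   such as

     if (__builtin_expect (x == 0, 0)) ...

   has already had its probability hint consumed by the tree-level branch
   predictor, so the code above simply evaluates and returns x == 0.  */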
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    {
      rtx_insn *insn = emit_insn (gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
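/* As an illustration (added sketch, assuming TRAMPOLINE_ALIGNMENT of 64
   bits): ADDEND is 7 and MASK is -8, so the two binops above compute
   (tramp + 7) & -8, rounding the address up to the next 8-byte boundary.  */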
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		  "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
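/* As an illustration (added sketch, assuming IEEE single precision and a
   32-bit result mode): fmt->signbit_ro is 31, which lies within RMODE, so
   signbit(x) reduces to a single AND of the value's bits with 0x80000000;
   with a narrower result mode the shift-then-AND branch would be used
   instead.  */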
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
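/* As an illustration (added sketch): for __sync_fetch_and_add_4, FCODE_DIFF
   is BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so
   we request a BITS_PER_UNIT << 2 == 32-bit integer mode (SImode on typical
   targets).  */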
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning (OPT_Winvalid_memory_model,
	       "Unknown architecture specifier in memory model to builtin.");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
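/* As an illustration (added sketch): a call such as
   __atomic_load_n (p, __ATOMIC_CONSUME) reaches here with an INTEGER_CST of
   MEMMODEL_CONSUME and is promoted to MEMMODEL_ACQUIRE, while a model
   argument that is only known at run time is conservatively treated as
   MEMMODEL_SEQ_CST.  */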
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning (OPT_Winvalid_memory_model,
	       "failure memory model cannot be stronger than success memory "
	       "model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid failure memory model for "
	       "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
#ifndef HAVE_atomic_clear
# define HAVE_atomic_clear 0
# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
#endif

/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      warning (OPT_Winvalid_memory_model,
	       "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (HAVE_atomic_clear)
    {
      emit_insn (gen_atomic_clear (mem, model));
      return const0_rtx;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
    type_align = mode_align;
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype)))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;
  else
    return boolean_false_node;
}
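/* As an illustration (added sketch, assuming a target with a 32-bit
   compare-and-swap pattern): __atomic_always_lock_free (4, 0) folds to true
   here, whereas a 4-byte object whose type is only 1-byte aligned makes
   type_align < mode_align hold and folds to false.  */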
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
/* Return a one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */

static tree
fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
{
  if (!flag_inline_atomics)
    return NULL_TREE;

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;

  return NULL_TREE;
}
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  return NULL_RTX;
}
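
/* Editorial illustration (not GCC source): unlike the "always" variant,
   __atomic_is_lock_free may defer to a runtime answer.  For

       ... __atomic_is_lock_free (sizeof (long double), 0) ...

   the fold above typically fails, NULL_RTX is returned here, and the
   caller falls back to emitting a call to the external
   __atomic_is_lock_free routine, usually provided by libatomic.  */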
/* Expand the __atomic_thread_fence intrinsic:
        void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
        void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
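
/* Editorial note (not GCC source): the legacy

       __sync_synchronize ();

   is thus emitted as a full barrier through the same
   expand_mem_thread_fence used for __atomic_thread_fence
   (__ATOMIC_SEQ_CST), but with the stronger MEMMODEL_SYNC_SEQ_CST
   model that the __sync family requires.  */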
/* Expand the __builtin_thread_pointer intrinsic.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || !REG_P (target)
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
/* Expand the __builtin_set_thread_pointer intrinsic.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
/* Expand OpenACC acc_on_device.

   This has to happen late (that is, not in early folding; expand_builtin_*,
   rather than fold_builtin_*), as we have to act differently for host and
   acceleration device (ACCEL_COMPILER conditional).  */

static rtx
expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
                              rtx target ATTRIBUTE_UNUSED)
{
#ifdef ACCEL_COMPILER
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);

  /* Return (arg == v1 || arg == v2) ? 1 : 0.  */
  machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
  rtx v = expand_normal (arg), v1, v2;
  v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
  v2 = GEN_INT (ACCEL_COMPILER_acc_device);
  machine_mode target_mode = TYPE_MODE (integer_type_node);
  if (!target || !register_operand (target, target_mode))
    target = gen_reg_rtx (target_mode);
  emit_move_insn (target, const1_rtx);
  rtx_code_label *done_label = gen_label_rtx ();
  do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
                           NULL, done_label, PROB_EVEN);
  do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
                           NULL, done_label, PROB_EVEN);
  emit_move_insn (target, const0_rtx);
  emit_label (done_label);

  return target;
#else
  return NULL;
#endif
}
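
/* Editorial illustration (not GCC source): when compiling for the
   accelerator, the expansion above turns

       if (acc_on_device (acc_device_nvidia)) ...

   into an inline comparison of the argument against
   GOMP_DEVICE_NOT_HOST and the device's own ACCEL_COMPILER_acc_device
   code, so the host/device decision can fold away in later
   optimization; the host compiler instead leaves the call to be
   resolved by the libgomp runtime.  */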
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);
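
  /* Editorial note (not GCC source): the net effect of the test above
     is that at -O0 a call such as

         n = strlen (s);

     is emitted as a real call into the C library, while the listed
     exceptions (fork, the exec family, alloca, free and the CHKP
     builtins) are still expanded inline below, because a plain library
     call would be wrong or impossible for them.  */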
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
        return target;
      break;

      /* Just do a normal library call if we were unable to fold
         the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_EXP):
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
    CASE_FLT_FN (BUILT_IN_EXP2):
    CASE_FLT_FN (BUILT_IN_EXPM1):
    CASE_FLT_FN (BUILT_IN_LOGB):
    CASE_FLT_FN (BUILT_IN_LOG):
    CASE_FLT_FN (BUILT_IN_LOG10):
    CASE_FLT_FN (BUILT_IN_LOG2):
    CASE_FLT_FN (BUILT_IN_LOG1P):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ACOS):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
         because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
        break;
    CASE_FLT_FN (BUILT_IN_SQRT):
    CASE_FLT_FN (BUILT_IN_FLOOR):
    CASE_FLT_FN (BUILT_IN_CEIL):
    CASE_FLT_FN (BUILT_IN_TRUNC):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ILOGB):
      if (! flag_unsafe_math_optimizations)
        break;
    CASE_FLT_FN (BUILT_IN_ISINF):
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
    case BUILT_IN_ISNORMAL:
      target = expand_builtin_interclass_mathfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      target = expand_builtin_int_roundingfn (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      target = expand_builtin_int_roundingfn_2 (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_POWI):
      target = expand_builtin_powi (exp, target);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
    CASE_FLT_FN (BUILT_IN_LDEXP):
    CASE_FLT_FN (BUILT_IN_SCALB):
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (! flag_unsafe_math_optimizations)
        break;

    CASE_FLT_FN (BUILT_IN_FMOD):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_POW):
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_CEXPI):
      target = expand_builtin_cexpi (exp, target);
      gcc_assert (target);
      return target;

    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_mathfn_3 (exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_FLT_FN (BUILT_IN_SINCOS):
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_sincos (exp);
      if (target)
        return target;
      break;
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
          && !validate_arglist (exp, REFERENCE_TYPE,
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        return const0_rtx;
      else
        {
          rtx ops[3];

          ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
          ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
          ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_VA_ARG_PACK:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      return const0_rtx;

    case BUILT_IN_VA_ARG_PACK_LEN:
      /* All valid uses of __builtin_va_arg_pack_len () are removed during
         inlining.  */
      error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
      return const0_rtx;

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      if (fold_builtin_next_arg (exp, false))
        return const0_rtx;
      return expand_builtin_next_arg ();

    case BUILT_IN_CLEAR_CACHE:
      target = expand_builtin___clear_cache (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (exp);

    case BUILT_IN_CONSTANT_P:
      return const0_rtx;

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, exp);

      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (call_expr_nargs (exp) != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
    case BUILT_IN_ALLOCA:
    case BUILT_IN_ALLOCA_WITH_ALIGN:
      /* If the allocation stems from the declaration of a variable-sized
         object, it cannot accumulate.  */
      target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
      if (target)
        return target;
      break;

    case BUILT_IN_STACK_SAVE:
      return expand_stack_save ();

    case BUILT_IN_STACK_RESTORE:
      expand_stack_restore (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      target = expand_builtin_bswap (target_mode, exp, target, subtarget);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_FFS):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ffs_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CTZ):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, ctz_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_CLRSB):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, clrsb_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_POPCOUNT):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, popcount_optab);
      if (target)
        return target;
      break;

    CASE_INT_FN (BUILT_IN_PARITY):
      target = expand_builtin_unop (target_mode, exp, target,
                                    subtarget, parity_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (exp, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, target, mode);
      if (target)
        return target;
      break;
    case BUILT_IN_SETJMP:
      /* This should have been lowered to the builtins below.  */
      gcc_unreachable ();

    case BUILT_IN_SETJMP_SETUP:
      /* __builtin_setjmp_setup is passed a pointer to an array of five words
         and the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
          rtx label_r = label_rtx (label);

          /* This is copied from the handling of non-local gotos.  */
          expand_builtin_setjmp_setup (buf_addr, label_r);
          nonlocal_goto_handler_labels
            = gen_rtx_INSN_LIST (VOIDmode, label_r,
                                 nonlocal_goto_handler_labels);
          /* ??? Do not let expand_label treat us as such since we would
             not want to be both on the list of non-local labels and on
             the list of forced labels.  */
          FORCED_LABEL (label) = 0;
          return const0_rtx;
        }
      break;

    case BUILT_IN_SETJMP_RECEIVER:
      /* __builtin_setjmp_receiver is passed the receiver label.  */
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
          rtx label_r = label_rtx (label);

          expand_builtin_setjmp_receiver (label_r);
          return const0_rtx;
        }
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        {
          rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
                                      VOIDmode, EXPAND_NORMAL);
          rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }
      break;

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (exp);
      if (target)
        return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
         of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr
            = expand_normal (CALL_EXPR_ARG (exp, 0));

          expand_builtin_update_setjmp_buf (buf_addr);
        }
      return const0_rtx;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_UNREACHABLE:
      expand_builtin_unreachable ();
      return const0_rtx;

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
    case BUILT_IN_SIGNBITD32:
    case BUILT_IN_SIGNBITD64:
    case BUILT_IN_SIGNBITD128:
      target = expand_builtin_signbit (exp, target);
      if (target)
        return target;
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
                                CALL_EXPR_ARG (exp, 1));
      return const0_rtx;
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (exp);
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
    case BUILT_IN_EH_POINTER:
      return expand_builtin_eh_pointer (exp);
    case BUILT_IN_EH_FILTER:
      return expand_builtin_eh_filter (exp);
    case BUILT_IN_EH_COPY_VALUES:
      return expand_builtin_eh_copy_values (exp);

    case BUILT_IN_VA_START:
      return expand_builtin_va_start (exp);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (exp);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (exp);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (exp, target);
    case BUILT_IN_ASSUME_ALIGNED:
      return expand_builtin_assume_aligned (exp, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (exp);
      return const0_rtx;

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, true);
    case BUILT_IN_INIT_HEAP_TRAMPOLINE:
      return expand_builtin_init_trampoline (exp, false);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (exp);

    case BUILT_IN_FORK:
    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
      if (target)
        return target;
      break;
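
      /* Editorial illustration (not GCC source): the _1/_2/_4/_8/_16
         variants are consecutive in the built_in_function enumeration,
         so the subtraction above encodes log2 of the access size.
         Given a 4-byte object u32,

             __sync_fetch_and_add (&u32, 1);

         arrives with fcode == BUILT_IN_SYNC_FETCH_AND_ADD_4, the offset
         fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 is 2, and
         get_builtin_sync_mode maps that to the 4-byte integer mode.  */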
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
      target = expand_builtin_sync_operation (mode, exp, AND, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, AND, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
      target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
      if (target)
        return target;
      break;
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);

      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
      mode = get_builtin_sync_mode
                (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
      target = expand_builtin_compare_and_swap (mode, exp, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
      target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
      expand_builtin_sync_lock_release (mode, exp);
      return const0_rtx;

    case BUILT_IN_SYNC_SYNCHRONIZE:
      expand_builtin_sync_synchronize ();
      return const0_rtx;

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
      target = expand_builtin_atomic_exchange (mode, exp, target);
      if (target)
        return target;
      break;
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      {
        unsigned int nargs, z;
        vec<tree, va_gc> *vec;

        mode =
          get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
        target = expand_builtin_atomic_compare_exchange (mode, exp, target);
        if (target)
          return target;

        /* If this is turned into an external library call, the weak parameter
           must be dropped to match the expected parameter list.  */
        nargs = call_expr_nargs (exp);
        vec_alloc (vec, nargs - 1);
        for (z = 0; z < 3; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        /* Skip the boolean weak parameter.  */
        for (z = 4; z < 6; z++)
          vec->quick_push (CALL_EXPR_ARG (exp, z));
        exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
        break;
      }
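
      /* Editorial illustration (not GCC source): the rebuilt CALL_EXPR
         above matches the external library prototype, which has no weak
         parameter.  E.g. the six-argument builtin call

             __atomic_compare_exchange_4 (ptr, &expected, desired,
                                          weak, success, failure);

         keeps arguments 0-2 and 4-5, so the call forwarded to the
         library (typically libatomic) is the five-argument form
         __atomic_compare_exchange_4 (ptr, &expected, desired,
         success, failure).  */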
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
      target = expand_builtin_atomic_load (mode, exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
      target = expand_builtin_atomic_store (mode, exp);
      if (target)
        return const0_rtx;
      break;
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
                                       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
                                       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
                                       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
                                       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
                                       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
      {
        enum built_in_function lib;
        mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
        lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
                                       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
        target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
                                                 ignore, lib);
        if (target)
          return target;
        break;
      }
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
      target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
                                               ignore, BUILT_IN_NONE);
      if (target)
        return target;
      break;
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
        return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;
    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;

    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
        maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;

    case BUILT_IN_CILK_DETACH:
      expand_builtin_cilk_detach (exp);
      return const0_rtx;

    case BUILT_IN_CILK_POP_FRAME:
      expand_builtin_cilk_pop_frame (exp);
      return const0_rtx;
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
         Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
        {
          if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
              || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
            return expand_normal (CALL_EXPR_ARG (exp, 0));
          else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
            return expand_normal (size_zero_node);
          else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
            return expand_normal (size_int (-1));
          else
            return const0_rtx;
        }
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
         Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;

    case BUILT_IN_ACC_ON_DEVICE:
      target = expand_builtin_acc_on_device (exp, target);
      if (target)
        return target;
      break;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
                            rtx subtarget ATTRIBUTE_UNUSED,
                            machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
              && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
        return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_const_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_const_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
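
/* Editorial illustration (not GCC source): for a call tree built from
   sqrt (x) with x of type double, the loop above checks the single
   REAL_TYPE parameter against the argument and returns BUILT_IN_SQRT
   (BUILT_IN_SQRTF for the float variant).  Any mismatch, such as a
   pointer argument against a REAL_TYPE parameter, yields END_BUILTINS,
   which callers treat as "not a recognized math builtin".  */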
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
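
/* Editorial illustration (not GCC source):

       __builtin_constant_p (42)   folds to 1 immediately;
       __builtin_constant_p (x)    returns NULL_TREE, leaving the call
                                   in place so later optimizations may
                                   still prove x constant.

   Only in initializers, or when force_folding_builtin_constant_p is
   set, must an unproven argument harden to 0 right away.  */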
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
                                tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
                                   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
                 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
         && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
           || TREE_CODE (inner) == FUNCTION_DECL)
          && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
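
/* Editorial illustration (not GCC source): for

       if (__builtin_expect (p != 0 && q != 0, 1)) ...

   the TRUTH_ANDIF_EXPR branch above rewrites the argument as

       __builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1)

   so each short-circuit arm carries its own prediction instead of the
   expectation applying only to the combined truth value.  */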
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
             && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
             && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return integer_valued_real_p (TREE_OPERAND (t, 0));
        break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
        {
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_TRUNC):
          return true;

        CASE_FLT_FN (BUILT_IN_FMIN):
        CASE_FLT_FN (BUILT_IN_FMAX):
          return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
            && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

        default:
          break;
        }
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return fold_convert_loc (loc, ftype,
                                 build_call_expr_loc (loc, decl, 1,
                                                      fold_convert_loc (loc,
                                                                        newtype,
                                                                        arg0)));
    }
  return NULL_TREE;
}
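
/* Editorial illustration (not GCC source): with FNDECL == floor and a
   float f,

       double d = floor ((double) f);

   strip_float_extensions uncovers the float operand, mathfn_built_in
   finds floorf, and the call is rewritten as (double) floorf (f),
   performing the rounding in the narrower type.  */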
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                            TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        return build_call_expr_loc (loc, decl, 1,
                                    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_ICEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_IFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_IROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_IRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_LLCEIL):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
          break;

        CASE_FLT_FN (BUILT_IN_LLFLOOR):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
          break;

        CASE_FLT_FN (BUILT_IN_LLROUND):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
          break;

        CASE_FLT_FN (BUILT_IN_LLRINT):
          newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
          break;

        default:
          break;
        }

      if (newfn)
        {
          tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
          return fold_convert_loc (loc,
                                   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
        }
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
                              type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
        return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
        return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
          && operand_equal_p (real, imag, OEP_PURE_SAME))
        {
          const REAL_VALUE_TYPE sqrt2_trunc
            = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());

          return fold_build2_loc (loc, MULT_EXPR, type,
                                  fold_build1_loc (loc, ABS_EXPR, type, real),
                                  build_real (type, sqrt2_trunc));
        }
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result;

          arg = builtin_save_expr (arg);

          rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
          ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

          rpart = builtin_save_expr (rpart);
          ipart = builtin_save_expr (ipart);

          result = fold_build2_loc (loc, PLUS_EXPR, type,
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     rpart, rpart),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     ipart, ipart));

          return build_call_expr_loc (loc, sqrtfn, 1, result);
        }
    }

  return NULL_TREE;
}
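
/* Editorial illustration (not GCC source): under
   -funsafe-math-optimizations,

       cabs (x + x * I)

   matches the COMPLEX_EXPR arm with equal real and imaginary parts and
   is rewritten as fabs (x) * sqrt (2), while the general fallback at
   the end squares the saved real and imaginary parts and computes
   sqrt (x*x + y*y), and only when optimizing for speed.  */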
/* Build a complex (inf +- 0i) for the result of cproj.  TYPE is the
   complex tree type of the result.  If NEG is true, the imaginary
   zero is negative.  */

static tree
build_complex_cproj (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
                        build_real (TREE_TYPE (type), rzero));
}
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isinf (real) || real_isinf (imag))
        return build_complex_cproj (type, imag->sign);
      else
        return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
         nonnegative, return (inf + 0i).  Remember side-effects are
         possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (real))
          && tree_expr_nonnegative_p (imag))
        return omit_one_operand_loc (loc, type,
                                     build_complex_cproj (type, false),
                                     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
         Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
          && real_isinf (TREE_REAL_CST_PTR (imag)))
        return
          omit_one_operand_loc (loc, type,
                                build_complex_cproj (type, TREE_REAL_CST_PTR
                                                     (imag)->sign), arg);
    }

  return NULL_TREE;
}

/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
                             CALL_EXPR_ARG (arg, 0),
                             build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          tree tree_root;
          /* The inner root was either sqrt or cbrt.  */
          /* This was a conditional expression but it triggered a bug
             in Sun C 5.5.  */
          REAL_VALUE_TYPE dconstroot;
          if (BUILTIN_SQRT_P (fcode))
            dconstroot = dconsthalf;
          else
            dconstroot = dconst_third ();

          /* Adjust for the outer root.  */
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
          tree_root = build_real (type, dconstroot);
          return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
        }
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
        arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                               build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
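
/* Worked examples of the unsafe-math folds above: with
   -funsafe-math-optimizations,

       sqrt (exp (x))     becomes  exp (x * 0.5)
       sqrt (cbrt (x))    becomes  pow (x, 1.0/6.0)
       sqrt (pow (x, y))  becomes  pow (fabs (x), y * 0.5)

   each of which trades a sqrt call for cheaper arithmetic on the
   exponent.  */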

/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconst_third ());
          arg = fold_build2_loc (loc, MULT_EXPR, type,
                                 CALL_EXPR_ARG (arg, 0),
                                 build_real (type, third_trunc));
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = CALL_EXPR_ARG (arg, 0);
              tree tree_root;
              REAL_VALUE_TYPE dconstroot = dconst_third ();

              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = CALL_EXPR_ARG (arg, 0);
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   dconst_third_ptr (), dconst_third_ptr ());
                  dconstroot = real_value_truncate (TYPE_MODE (type),
                                                    dconstroot);
                  tree_root = build_real (type, dconstroot);
                  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg, 0);
          tree arg01 = CALL_EXPR_ARG (arg, 1);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
                                             build_real (type, dconstroot));
              return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
            }
        }
    }

  return NULL_TREE;
}
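
/* For example, with -funsafe-math-optimizations the folds above turn

       cbrt (exp (x))   into  exp (x / 3.0)
       cbrt (sqrt (x))  into  pow (x, 1.0/6.0)
       cbrt (cbrt (x))  into  pow (x, 1.0/9.0)   (x nonnegative)

   mirroring the sqrt transformations earlier in this file.  */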

/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (location_t loc,
                  tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr_loc (loc, fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree res, narg;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
        return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
        return build_call_expr_loc (loc, fndecl, 1, narg);
    }

  return NULL_TREE;
}

/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
   argument ARG.  TYPE is the type of the return value.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
                   bool hyper)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree tmp;

      /* Calculate the result when the argument is a constant.  */
      if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
        return tmp;

      /* Optimize fn(-x) into fn(x).  */
      if ((tmp = fold_strip_sign_ops (arg)))
        return build_call_expr_loc (loc, fndecl, 1, tmp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
    return res;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
          || fcode == BUILT_IN_ATANF
          || fcode == BUILT_IN_ATANL))
    return CALL_EXPR_ARG (arg, 0);

  return NULL_TREE;
}

/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
                     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  return build2 (COMPOUND_EXPR, void_type_node,
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg1),
                         build1 (IMAGPART_EXPR, type, call)),
                 build2 (MODIFY_EXPR, void_type_node,
                         build_fold_indirect_ref_loc (loc, arg2),
                         build1 (REALPART_EXPR, type, call)));
}
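
/* The canonicalization above rewrites a source-level call such as

       sincos (x, &s, &c);

   into the equivalent of

       __complex__ double tmp = cexpi (x);
       s = __imag__ tmp, c = __real__ tmp;

   (cexpi (x) computes cos (x) + I*sin (x)), so later passes only have
   to reason about a single math function.  */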

/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
        return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
        return NULL_TREE;

      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc,
                                                                REALPART_EXPR,
                                                                rtype, icall)),
                              fold_build2_loc (loc, MULT_EXPR, rtype,
                                               rcall,
                                               fold_build1_loc (loc,
                                                                IMAGPART_EXPR,
                                                                rtype, icall)));
    }

  return NULL_TREE;
}

/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin floor, floorf or floorl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_floor (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  /* Fold floor (x) where x is nonnegative to trunc (x).  */
  if (tree_expr_nonnegative_p (arg))
    {
      tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
      if (truncfn)
        return build_call_expr_loc (loc, truncfn, 1, arg);
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_ceil (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin round, roundf or roundl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_round (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (real_isfinite (&x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg);
          REAL_VALUE_TYPE r;
          bool fail = false;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_IFLOOR):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_ICEIL):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            CASE_FLT_FN (BUILT_IN_IROUND):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_LLROUND):
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
          if (!fail)
            return wide_int_to_tree (itype, val);
        }
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
        return fold_build1_loc (loc, FIX_TRUNC_EXPR,
                                TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
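
/* For instance, a call such as

       long l = lround (2.5);

   folds at compile time: real_round computes 3.0 (halfway cases round
   away from zero) and real_to_integer narrows that to the integer
   constant 3, provided the value fits in TYPE_PRECISION (long).  */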

/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_INT_FN (BUILT_IN_FFS):
          result = wi::ffs (arg);
          break;

        CASE_INT_FN (BUILT_IN_CLZ):
          if (wi::ne_p (arg, 0))
            result = wi::clz (arg);
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = TYPE_PRECISION (type);
          break;

        CASE_INT_FN (BUILT_IN_CTZ):
          if (wi::ne_p (arg, 0))
            result = wi::ctz (arg);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = TYPE_PRECISION (type);
          break;

        CASE_INT_FN (BUILT_IN_CLRSB):
          result = wi::clrsb (arg);
          break;

        CASE_INT_FN (BUILT_IN_POPCOUNT):
          result = wi::popcount (arg);
          break;

        CASE_INT_FN (BUILT_IN_PARITY):
          result = wi::parity (arg);
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
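
/* Constant arguments fold directly through the wide-int helpers, e.g.

       __builtin_popcount (0xff)  ->  8
       __builtin_ctz (8)          ->  3

   and __builtin_clz (0) folds to whatever value the target's
   CLZ_DEFINED_VALUE_AT_ZERO macro supplies, or to the type precision
   (32 for a 32-bit int) when the target defines none.  */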

/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_BSWAP16:
        case BUILT_IN_BSWAP32:
        case BUILT_IN_BSWAP64:
          {
            signop sgn = TYPE_SIGN (type);
            tree result =
              wide_int_to_tree (type,
                                wide_int::from (arg, TYPE_PRECISION (type),
                                                sgn).bswap ());
            return result;
          }
        default:
          gcc_unreachable ();
        }
    }

  return NULL_TREE;
}

/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (location_t loc, tree fndecl,
                    tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    {
      return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
                                  narg1 ? narg1 : arg1);
    }

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1_loc (loc, ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1_loc (loc, ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
        = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
      return fold_build2_loc (loc, MULT_EXPR, type,
                              fold_build1_loc (loc, ABS_EXPR, type, arg0),
                              build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
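
/* Examples of the hypot folds above:

       hypot (-x, fabs (y))  ->  hypot (x, y)
       hypot (x, 0.0)        ->  fabs (x)
       hypot (x, x)          ->  fabs (x) * sqrt (2)   (unsafe math only)
*/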

/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
                                     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
        return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
          && REAL_VALUES_EQUAL (c, dconsthalf))
        {
          tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

          if (sqrtfn != NULL_TREE)
            return build_call_expr_loc (loc, sqrtfn, 1, arg0);
        }

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
        {
          const REAL_VALUE_TYPE dconstroot
            = real_value_truncate (TYPE_MODE (type), dconst_third ());

          if (REAL_VALUES_EQUAL (c, dconstroot))
            {
              tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
              if (cbrtfn != NULL_TREE)
                return build_call_expr_loc (loc, cbrtfn, 1, arg0);
            }
        }

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
        {
          /* Attempt to evaluate pow at compile-time, unless this should
             raise an exception.  */
          if (TREE_CODE (arg0) == REAL_CST
              && !TREE_OVERFLOW (arg0)
              && (n > 0
                  || (!flag_trapping_math && !flag_errno_math)
                  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
            {
              REAL_VALUE_TYPE x;
              bool inexact;

              x = TREE_REAL_CST (arg0);
              inexact = real_powi (&x, TYPE_MODE (type), &x, n);
              if (flag_unsafe_math_optimizations || !inexact)
                return build_real (type, x);
            }

          /* Strip sign ops from even integer powers.  */
          if ((n & 1) == 0 && flag_unsafe_math_optimizations)
            {
              tree narg0 = fold_strip_sign_ops (arg0);
              if (narg0)
                return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
            }
        }
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
          tree arg = CALL_EXPR_ARG (arg0, 0);
          arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
          return build_call_expr_loc (loc, expfn, 1, arg);
        }

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree narg0 = CALL_EXPR_ARG (arg0, 0);
          tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                        build_real (type, dconsthalf));
          return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
        }

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg))
            {
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconst_third ());
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
                                            build_real (type, dconstroot));
              return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
            }
        }

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = CALL_EXPR_ARG (arg0, 0);
          if (tree_expr_nonnegative_p (arg00))
            {
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
              return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
            }
        }
    }

  return NULL_TREE;
}

/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
                   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (tree_fits_shwi_p (arg1))
    {
      HOST_WIDE_INT c = tree_to_shwi (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
          && !TREE_OVERFLOW (arg0))
        {
          REAL_VALUE_TYPE x;
          x = TREE_REAL_CST (arg0);
          real_powi (&x, TYPE_MODE (type), &x, c);
          return build_real (type, x);
        }

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
        return omit_one_operand_loc (loc, type, build_real (type, dconst1),
                                     arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
        return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
                       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
        return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
        {
          const enum built_in_function fcode = builtin_mathfn_code (arg);

          if ((func == mpfr_exp
               && (fcode == BUILT_IN_LOG
                   || fcode == BUILT_IN_LOGF
                   || fcode == BUILT_IN_LOGL))
              || (func == mpfr_exp2
                  && (fcode == BUILT_IN_LOG2
                      || fcode == BUILT_IN_LOG2F
                      || fcode == BUILT_IN_LOG2L))
              || (func == mpfr_exp10
                  && (fcode == BUILT_IN_LOG10
                      || fcode == BUILT_IN_LOG10F
                      || fcode == BUILT_IN_LOG10L)))
            return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
        }
    }

  return NULL_TREE;
}

/* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
   arguments to the call, and TYPE is its return type.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, INTEGER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (arg2) != INTEGER_CST
          || !tree_fits_uhwi_p (len))
        return NULL_TREE;

      p1 = c_getstr (arg1);
      if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (arg2, &c))
            return NULL_TREE;

          r = (const char *) memchr (p1, c, tree_to_uhwi (len));

          if (r == NULL)
            return build_int_cst (TREE_TYPE (arg1), 0);

          tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
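
/* With constant operands this folds entirely at compile time, e.g.

       memchr ("hello", 'l', 6)

   becomes the pointer expression "hello" + 2, while a character that
   does not occur within the first LEN bytes folds to a null pointer
   of the matching type.  */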

/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                                  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node,
                                 len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      if (r > 0)
        return integer_one_node;
      else if (r < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);
      if (i < 0)
        return integer_minus_one_node;
      else if (i > 0)
        return integer_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
                               build1 (INDIRECT_REF, cst_uchar_node,
                                       fold_convert_loc (loc,
                                                         cst_uchar_ptr_node,
                                                         arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp
        = fold_convert_loc (loc, integer_type_node,
                            build1 (INDIRECT_REF, cst_uchar_node,
                                    fold_convert_loc (loc,
                                                      cst_uchar_ptr_node,
                                                      arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  return NULL_TREE;
}

/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
                                  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node,
                                 len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (tree_fits_uhwi_p (len) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_to_uhwi (len));
      if (i > 0)
        return integer_one_node;
      else if (i < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      return fold_convert_loc (loc, integer_type_node,
                               build1 (INDIRECT_REF, cst_uchar_node,
                                       fold_convert_loc (loc,
                                                         cst_uchar_ptr_node,
                                                         arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree temp = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg2)));
      return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
        = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1 = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg1)));
      tree ind2 = fold_convert_loc (loc, integer_type_node,
                                    build1 (INDIRECT_REF, cst_uchar_node,
                                            fold_convert_loc (loc,
                                                              cst_uchar_ptr_node,
                                                              arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
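
/* A length of one reduces strncmp to a single byte difference, e.g.

       strncmp (a, b, 1)

   folds to (*(const unsigned char *) a - *(const unsigned char *) b),
   matching the library requirement that bytes compare as unsigned
   chars.  */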

/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      return (REAL_VALUE_NEGATIVE (c)
              ? build_one_cst (type)
              : build_zero_cst (type));
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (arg))
    return fold_convert (type,
                         fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
                                          build_real (TREE_TYPE (arg),
                                                      dconst0)));

  return NULL_TREE;
}

/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (location_t loc, tree fndecl,
                       tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert_loc (loc, type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand_loc (loc, type,
                                 fold_build1_loc (loc, ABS_EXPR, type, arg1),
                                 arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr_loc (loc, fndecl, 2, tem, arg2);

  return NULL_TREE;
}

/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
                         build_int_cst (integer_type_node,
                                        ~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
                              arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
                          build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
         However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
        = lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
        return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
                         build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
                              build_int_cst (unsigned_type_node, 9));
    }
}
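
/* The generated expression relies on unsigned wrap-around: for any c
   below '0' the subtraction (unsigned) c - '0' wraps to a huge value,
   so the single comparison

       (unsigned) c - '0' <= 9

   covers both bounds of the usual '0' <= c && c <= '9' test.  */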

/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a fma operation with arguments ARG[012].  */

tree
fold_fma (location_t loc ATTRIBUTE_UNUSED,
          tree type, tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) == REAL_CST
      && TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST)
    return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);

  return NULL_TREE;
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE))
    {
      tree tem = fold_fma (loc, type, arg0, arg1, arg2);
      if (tem)
        return tem;

      /* ??? Only expand to FMA_EXPR if it's directly supported.  */
      if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
        return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
    }
  return NULL_TREE;
}

/* Fold a call to builtin fmin or fmax.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
                        tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
        return res;

      /* If either argument is NaN, return the other one.  Avoid the
         transformation if we get (and honor) a signalling NaN.  Using
         omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg0))
          && (! HONOR_SNANS (arg0)
              || ! TREE_REAL_CST (arg0).signalling))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg1))
          && (! HONOR_SNANS (arg1)
              || ! TREE_REAL_CST (arg1).signalling))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
         functions to return the numeric arg if the other one is NaN.
         These tree codes don't honor that, so only transform if
         -ffinite-math-only is set.  C99 doesn't require -0.0 to be
         handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
        return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
        {
          tree new_arg = builtin_save_expr (arg);
          tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
          tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
          return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
        }
    }

  return NULL_TREE;
}

/* Fold a call to builtin logb/ilogb.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
        {
        case rvc_nan:
        case rvc_inf:
          /* If arg is Inf or NaN and we're logb, return it.  */
          if (TREE_CODE (rettype) == REAL_TYPE)
            {
              /* For logb(-Inf) we have to return +Inf.  */
              if (real_isinf (value) && real_isneg (value))
                {
                  REAL_VALUE_TYPE tem;
                  real_inf (&tem);
                  return build_real (rettype, tem);
                }
              return fold_convert_loc (loc, rettype, arg);
            }
          /* Fall through... */
        case rvc_zero:
          /* Zero may set errno and/or raise an exception for logb, also
             for ilogb we don't know FP_ILOGB0.  */
          return NULL_TREE;
        case rvc_normal:
          /* For normal numbers, proceed iff radix == 2.  In GCC,
             normalized significands are in the range [0.5, 1.0).  We
             want the exponent as if they were [1.0, 2.0) so get the
             exponent and subtract 1.  */
          if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
            return fold_convert_loc (loc, rettype,
                                     build_int_cst (integer_type_node,
                                                    REAL_EXP (value)-1));
          break;
        }
    }

  return NULL_TREE;
}

/* Fold a call to builtin significand, if radix == 2.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
        {
        case rvc_zero:
        case rvc_nan:
        case rvc_inf:
          /* If arg is +-0, +-Inf or +-NaN, then return it.  */
          return fold_convert_loc (loc, rettype, arg);
        case rvc_normal:
          /* For normal numbers, proceed iff radix == 2.  */
          if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
            {
              REAL_VALUE_TYPE result = *value;
              /* In GCC, normalized significands are in the range [0.5,
                 1.0).  We want them to be [1.0, 2.0) so set the
                 exponent to 1.  */
              SET_REAL_EXP (&result, 1);
              return build_real (rettype, result);
            }
          break;
        }
    }

  return NULL_TREE;
}

/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
        {
        case rvc_zero:
          /* For +-0, return (*exp = 0, +-0).  */
          exp = integer_zero_node;
          frac = arg0;
          break;
        case rvc_nan:
        case rvc_inf:
          /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
          return omit_one_operand_loc (loc, rettype, arg0, arg1);
        case rvc_normal:
          {
            /* Since the frexp function always expects base 2, and in
               GCC normalized significands are already in the range
               [0.5, 1.0), we have exactly what frexp wants.  */
            REAL_VALUE_TYPE frac_rvt = *value;
            SET_REAL_EXP (&frac_rvt, 0);
            frac = build_real (rettype, frac_rvt);
            exp = build_int_cst (integer_type_node, REAL_EXP (value));
          }
          break;
        default:
          gcc_unreachable ();
        }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
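
/* Worked example: for frexp (8.0, &e) the constant 8.0 has a
   normalized significand of 0.5 and REAL_EXP of 4 (8.0 == 0.5 * 2^4),
   so the call folds to the pair (*e = 4, 0.5).  */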

/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
                            tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
          || (TREE_CODE (arg0) == REAL_CST
              && !real_isfinite (&TREE_REAL_CST (arg0))))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
          && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
          && tree_fits_shwi_p (arg1))
        {
          /* Bound the maximum adjustment to twice the range of the
             mode's valid exponents.  Use abs to ensure the range is
             positive as a sanity check.  */
          const long max_exp_adj = 2 *
            labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
                  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

          /* Get the user-requested adjustment.  */
          const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);

          /* The requested adjustment must be inside this range.  This
             is a preliminary cap to avoid things like overflow, we
             may still fail to compute the result for other reasons.  */
          if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
            {
              REAL_VALUE_TYPE initial_result;

              real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

              /* Ensure we didn't overflow.  */
              if (! real_isinf (&initial_result))
                {
                  const REAL_VALUE_TYPE trunc_result
                    = real_value_truncate (TYPE_MODE (type), initial_result);

                  /* Only proceed if the target mode can hold the
                     resulting value.  */
                  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
                    return build_real (type, trunc_result);
                }
            }
        }
    }

  return NULL_TREE;
}

/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
        {
        case rvc_nan:
        case rvc_zero:
          /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
          trunc = frac = *value;
          break;
        case rvc_inf:
          /* For +-Inf, return (*arg1 = arg0, +-0).  */
          frac = dconst0;
          frac.sign = value->sign;
          trunc = *value;
          break;
        case rvc_normal:
          /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
          real_trunc (&trunc, VOIDmode, value);
          real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
          /* If the original number was negative and already
             integral, then the fractional part is -0.0.  */
          if (value->sign && frac.cl == rvc_zero)
            frac.sign = value->sign;
          break;
        }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
                              build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
                              build_real (rettype, frac));
    }

  return NULL_TREE;
}

/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
        /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
        tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
        tree const type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isgr_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
        /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree const type = TREE_TYPE (arg);
        REAL_VALUE_TYPE r;
        char buf[128];

        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&r, buf);
        result = build_call_expr (isle_fn, 2,
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        /*result = fold_build2_loc (loc, UNGT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  fold_build1_loc (loc, ABS_EXPR, type, arg),
                                  build_real (type, r));
        result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                  TREE_TYPE (TREE_TYPE (fndecl)),
                                  result);*/
        return result;
      }
    case BUILT_IN_ISNORMAL:
      {
        /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
           islessequal(fabs(x),DBL_MAX).  */
        tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
        tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
        tree const type = TREE_TYPE (arg);
        REAL_VALUE_TYPE rmax, rmin;
        char buf[128];

        get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
        real_from_string (&rmax, buf);
        sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
        real_from_string (&rmin, buf);
        arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
        result = build_call_expr (isle_fn, 2, arg,
                                  build_real (type, rmax));
        result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
                              build_call_expr (isge_fn, 2, arg,
                                               build_real (type, rmin)));
        return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          if (real_isinf (&r))
            return real_compare (GT_EXPR, &r, &dconst0)
                   ? integer_one_node : integer_minus_one_node;
          else
            return integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
        /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
        /* In a boolean context, GCC will fold the inner COND_EXPR to
           1.  So e.g. "if (isinf_sign(x))" would be folded to just
           "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
        tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg),
                                             BUILT_IN_SIGNBIT, 0);
        tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
        tree tmp = NULL_TREE;

        arg = builtin_save_expr (arg);

        if (signbit_fn && isinf_fn)
          {
            tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
            tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

            signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                            signbit_call, integer_zero_node);
            isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                          isinf_call, integer_zero_node);

            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                                   signbit_call, integer_minus_one_node,
                                   integer_one_node);
            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                                   isinf_call, tmp,
                                   integer_zero_node);
          }

        return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
          && !HONOR_INFINITIES (arg))
        return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isfinite (&r) ? integer_one_node : integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isnan (&r) ? integer_one_node : integer_zero_node;
        }

      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}

/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
           (fabs(x) >= DBL_MIN ? FP_NORMAL :
             (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                         tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                         arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                         fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                             build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             res, fp_nan);
    }

  return res;
}

/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
        return omit_two_operands_loc (loc, type, integer_zero_node,
                                      arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                          fold_build2_loc (loc, code, type, arg0, arg1));
}
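
/* For example, __builtin_isgreater (x, y) reaches this function with
   UNLE_EXPR as the unordered code and LE_EXPR as the ordered code;
   both compute the opposite of "greater", hence the TRUTH_NOT_EXPR
   wrapper, so the call folds to the equivalent of ! (x <= y).  The
   point of routing it this way is that the resulting comparison is
   quiet: unlike a raw > on the operands, it does not raise FE_INVALID
   on quiet NaNs.  */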

/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
                             tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  tree type = TREE_TYPE (TREE_TYPE (arg2));
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
                                            2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
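
/* Sketch of the resulting lowering: a source-level call

       bool ovf = __builtin_add_overflow (a, b, &res);

   becomes, in tree form,

       tmp = IFN_ADD_OVERFLOW (a, b);       // complex-int result
       res = REALPART_EXPR <tmp>;           // the arithmetic result
       ovf = (bool) IMAGPART_EXPR <tmp>;    // the overflow flag

   with the store and the flag chained by a COMPOUND_EXPR.  */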

/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
9908 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9909 This function returns NULL_TREE if no simplification was possible. */
9912 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
9914 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9915 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9918 case BUILT_IN_CONSTANT_P
:
9920 tree val
= fold_builtin_constant_p (arg0
);
9922 /* Gimplification will pull the CALL_EXPR for the builtin out of
9923 an if condition. When not optimizing, we'll not CSE it back.
9924 To avoid link error types of regressions, return false now. */
9925 if (!val
&& !optimize
)
9926 val
= integer_zero_node
;
    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc,
			       fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CCOS):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);

    CASE_FLT_FN (BUILT_IN_CCOSH):
      return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);

    CASE_FLT_FN (BUILT_IN_CPROJ):
      return fold_builtin_cproj (loc, arg0, type);
    CASE_FLT_FN (BUILT_IN_CSIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sin);
      break;

    CASE_FLT_FN (BUILT_IN_CSINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sinh);
      break;

    CASE_FLT_FN (BUILT_IN_CTAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tan);
      break;

    CASE_FLT_FN (BUILT_IN_CTANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_tanh);
      break;

    CASE_FLT_FN (BUILT_IN_CLOG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_log);
      break;

    CASE_FLT_FN (BUILT_IN_CSQRT):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_sqrt);
      break;

    CASE_FLT_FN (BUILT_IN_CASIN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asin);
      break;

    CASE_FLT_FN (BUILT_IN_CACOS):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acos);
      break;

    CASE_FLT_FN (BUILT_IN_CATAN):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atan);
      break;

    CASE_FLT_FN (BUILT_IN_CASINH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_asinh);
      break;

    CASE_FLT_FN (BUILT_IN_CACOSH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_acosh);
      break;

    CASE_FLT_FN (BUILT_IN_CATANH):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return do_mpc_arg1 (arg0, type, mpc_atanh);
      break;
    CASE_FLT_FN (BUILT_IN_CABS):
      return fold_builtin_cabs (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SQRT):
      return fold_builtin_sqrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CBRT):
      return fold_builtin_cbrt (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ASIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asin,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ACOS):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acos,
			     &dconstm1, &dconst1, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ASINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_ACOSH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_acosh,
			     &dconst1, NULL, true);
      break;

    CASE_FLT_FN (BUILT_IN_ATANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_atanh,
			     &dconstm1, &dconst1, false);
      break;

    CASE_FLT_FN (BUILT_IN_SIN):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COS):
      return fold_builtin_cos (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TAN):
      return fold_builtin_tan (arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXP):
      return fold_builtin_cexp (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CEXPI):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
      break;

    CASE_FLT_FN (BUILT_IN_SINH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_COSH):
      return fold_builtin_cosh (loc, arg0, type, fndecl);

    CASE_FLT_FN (BUILT_IN_TANH):
      if (validate_arg (arg0, REAL_TYPE))
	return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
      break;
):
10127 if (validate_arg (arg0
, REAL_TYPE
))
10128 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
10131 CASE_FLT_FN (BUILT_IN_ERFC
):
10132 if (validate_arg (arg0
, REAL_TYPE
))
10133 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
10136 CASE_FLT_FN (BUILT_IN_TGAMMA
):
10137 if (validate_arg (arg0
, REAL_TYPE
))
10138 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
10141 CASE_FLT_FN (BUILT_IN_EXP
):
10142 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp
);
10144 CASE_FLT_FN (BUILT_IN_EXP2
):
10145 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp2
);
10147 CASE_FLT_FN (BUILT_IN_EXP10
):
10148 CASE_FLT_FN (BUILT_IN_POW10
):
10149 return fold_builtin_exponent (loc
, fndecl
, arg0
, mpfr_exp10
);
10151 CASE_FLT_FN (BUILT_IN_EXPM1
):
10152 if (validate_arg (arg0
, REAL_TYPE
))
10153 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
10156 CASE_FLT_FN (BUILT_IN_LOG
):
10157 if (validate_arg (arg0
, REAL_TYPE
))
10158 return do_mpfr_arg1 (arg0
, type
, mpfr_log
, &dconst0
, NULL
, false);
10161 CASE_FLT_FN (BUILT_IN_LOG2
):
10162 if (validate_arg (arg0
, REAL_TYPE
))
10163 return do_mpfr_arg1 (arg0
, type
, mpfr_log2
, &dconst0
, NULL
, false);
10166 CASE_FLT_FN (BUILT_IN_LOG10
):
10167 if (validate_arg (arg0
, REAL_TYPE
))
10168 return do_mpfr_arg1 (arg0
, type
, mpfr_log10
, &dconst0
, NULL
, false);
10171 CASE_FLT_FN (BUILT_IN_LOG1P
):
10172 if (validate_arg (arg0
, REAL_TYPE
))
10173 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
10174 &dconstm1
, NULL
, false);
10177 CASE_FLT_FN (BUILT_IN_J0
):
10178 if (validate_arg (arg0
, REAL_TYPE
))
10179 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
10183 CASE_FLT_FN (BUILT_IN_J1
):
10184 if (validate_arg (arg0
, REAL_TYPE
))
10185 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
10189 CASE_FLT_FN (BUILT_IN_Y0
):
10190 if (validate_arg (arg0
, REAL_TYPE
))
10191 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
10192 &dconst0
, NULL
, false);
10195 CASE_FLT_FN (BUILT_IN_Y1
):
10196 if (validate_arg (arg0
, REAL_TYPE
))
10197 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
10198 &dconst0
, NULL
, false);
    CASE_FLT_FN (BUILT_IN_NAN):
    case BUILT_IN_NAND32:
    case BUILT_IN_NAND64:
    case BUILT_IN_NAND128:
      return fold_builtin_nan (arg0, type, true);

    CASE_FLT_FN (BUILT_IN_NANS):
      return fold_builtin_nan (arg0, type, false);

    CASE_FLT_FN (BUILT_IN_FLOOR):
      return fold_builtin_floor (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_CEIL):
      return fold_builtin_ceil (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_TRUNC):
      return fold_builtin_trunc (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ROUND):
      return fold_builtin_round (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return fold_trunc_transparent_mathfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
    CASE_FLT_FN (BUILT_IN_IROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      return fold_builtin_int_roundingfn (loc, fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_IRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      return fold_fixed_mathfn (loc, fndecl, arg0);

    case BUILT_IN_BSWAP16:
    case BUILT_IN_BSWAP32:
    case BUILT_IN_BSWAP64:
      return fold_builtin_bswap (fndecl, arg0);

    CASE_INT_FN (BUILT_IN_FFS):
    CASE_INT_FN (BUILT_IN_CLZ):
    CASE_INT_FN (BUILT_IN_CTZ):
    CASE_INT_FN (BUILT_IN_CLRSB):
    CASE_INT_FN (BUILT_IN_POPCOUNT):
    CASE_INT_FN (BUILT_IN_PARITY):
      return fold_builtin_bitop (fndecl, arg0);

    CASE_FLT_FN (BUILT_IN_SIGNBIT):
      return fold_builtin_signbit (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      return fold_builtin_significand (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_ILOGB):
    CASE_FLT_FN (BUILT_IN_LOGB):
      return fold_builtin_logb (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
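
/* Illustrative examples (not from the original sources) of one-argument
   foldings dispatched above, assuming constant arguments:

     __builtin_strlen ("hello")  -> 5      (fold_builtin_strlen)
     __builtin_fabs (-3.0)       -> 3.0    (fold_builtin_fabs)
     __builtin_isnan (1.0)       -> 0      (fold_builtin_classify)
     __builtin_free (0)          -> empty statement  */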

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;
    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);
    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);
      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
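
/* Illustrative examples (not from the original sources) of two-argument
   foldings dispatched above, assuming constant operands where noted:

     atan2 (1.0, 1.0)     -> the REAL_CST pi/4, computed via MPFR
     fmin (2.0, 3.0)      -> 2.0, likewise via MPFR
     copysign (x, 1.0)    -> a fabs-style simplification, since the
			     sign source is a non-negative constant  */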

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_FMA):
      return fold_builtin_fma (loc, arg0, arg1, arg2, type);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (loc, arg0, arg1, arg2);

    case BUILT_IN_MEMCHR:
      return fold_builtin_memchr (loc, arg0, arg1, arg2, type);

    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
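
/* Illustrative sketch (not from the original sources): given a call EXP
   of the form f (a, b, c), something like

     rewrite_call_expr (loc, exp, 1, g_decl, 2, x, y);

   builds the new call g (x, y, b, c): the first SKIP (1) old argument is
   dropped, the N (2) new arguments are prepended, and the remaining old
   arguments are kept.  (g_decl, x and y are hypothetical.)  */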

/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      if (p2[1] != '\0')
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
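
/* Illustrative examples (not from the original sources) of the strstr
   folding above:

     strstr ("hayneedle", "needle") -> "hayneedle" + 3   (both constant)
     strstr (s, "")                 -> (char *) s
     strstr (s, "x")                -> strchr (s, 'x')   */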

/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return build_int_cst (size_type_node, r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
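
/* Illustrative examples (not from the original sources) of the strspn
   and strcspn foldings above:

     strspn ("abcdef", "abc") -> 3            (both strings constant)
     strspn (s, "")           -> 0            (s is still evaluated)
     strcspn (s, "")          -> strlen (s)   */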

/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes with the tree optimizers we can get something
	     that is not the last argument even though the user used the
	     last argument.  We just warn and set the arg to be the last
	     argument so that we will not get wrong code because of it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behaviour when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}

/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
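
/* Illustrative note (not from the original sources): if the object size
   could not be folded earlier, the expansion above yields the documented
   "unknown" answers, e.g.

     __builtin_object_size (p, 0) -> (size_t) -1
     __builtin_object_size (p, 2) -> (size_t) 0   */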

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
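
/* Illustrative sketch (not from the original sources): when the length is
   a known constant that fits in the known object size, e.g.

     char buf[64];
     __builtin___memcpy_chk (buf, src, 10, __builtin_object_size (buf, 0));

   the code above expands the call as a plain memcpy (buf, src, 10); if
   the constant length exceeded the size, it would warn and leave the
   checking call to be emitted normally instead.  */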

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
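
/* Illustrative example (not from the original sources): for

     char buf[64];
     __builtin_object_size (&buf[16], 0)

   PTR is an ADDR_EXPR, compute_builtin_object_size determines the 48
   remaining bytes, and the call above folds to the constant
   (size_t) 48.  */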

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
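
/* Illustrative note (not from the original sources): after a successful
   initialization, target_percent_c holds "%c", target_percent_s holds
   "%s" and target_percent_s_newline holds "%s\n", all expressed in the
   target character set rather than the host's.  */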

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
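
/* Illustrative example (not from the original sources): a call such as
   atan (1.0) reaches do_mpfr_arg1 with FUNC == mpfr_atan; MPFR computes
   the value at the precision of the type and, provided the checks in
   do_mpfr_ckconv pass, the call folds to the REAL_CST 0.785398...  */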

/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if
   the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
			  mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
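/* Illustrative sketch, not part of the original source: fma (x, y, z)
   folds through mpfr_fma, which computes x*y+z with a single
   rounding; `res' and the arg/type names are placeholders.  */
#if 0
  res = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
#endif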
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
		    == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
		       == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }
  return result;
}
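/* Illustrative sketch, not part of the original source: the two call
   shapes this helper serves.  cexpi folding asks for a COMPLEX_CST
   (cos is the real part, hence the swapped order above), while sincos
   folding passes the call's pointer operands; `res' and the arg names
   are placeholders.  */
#if 0
  res = do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);  /* cexpi (x)  */
  res = do_mpfr_sincos (arg0, arg1, arg2);	      /* sincos (x, &s, &c)  */
#endif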
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */

static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && tree_fits_shwi_p (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_to_shwi (arg1);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* Proceed only if the order also fits in a host long, since
	 that is what FUNC takes.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
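/* Illustrative sketch, not part of the original source: jn has no
   domain restriction, while yn requires a positive argument, so a yn
   folder passes dconst0 as an exclusive lower bound; `res' and the
   arg/type names are placeholders.  */
#if 0
  res = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
  res = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
			  &dconst0, /*inclusive=*/false);
#endif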
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
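/* Illustrative sketch, not part of the original source: folding
   remquo (x, y, &q) produces a COMPOUND_EXPR that stores the quotient
   bits through the third operand and then evaluates to the remainder;
   `res' and the arg names are placeholders.  */
#if 0
  res = do_mpfr_remquo (arg0, arg1, arg2);
#endif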
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg)))
	   == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
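/* Illustrative sketch, not part of the original source: an lgamma_r
   folder passes the call's int-pointer operand so the signgam store
   is sequenced before the lgamma value; `res' and the arg names are
   placeholders.  */
#if 0
  res = do_mpfr_lgamma_r (arg0, arg1, type);
#endif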
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re
	= TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im
	= TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd
	    = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref (m), re, rnd);
	  mpfr_from_real (mpc_imagref (m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
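/* Illustrative sketch, not part of the original source: complex
   one-argument folders pass MPC entry points such as mpc_cos or
   mpc_exp; `res' and the arg/type names are placeholders.  */
#if 0
  res = do_mpc_arg1 (arg0, type, mpc_cos);
#endif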
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0
	= TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0
	= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1
	= TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1
	= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd
	    = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
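/* Illustrative sketch, not part of the original source: a cpow folder
   enables DO_NONFINITE only when unsafe math optimizations permit
   folding Inf/NaN operands; `res' and the arg/type names are
   placeholders.  */
#if 0
  res = do_mpc_arg2 (arg0, arg1, type,
		     /*do_nonfinite=*/ flag_unsafe_math_optimizations,
		     mpc_pow);
#endif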
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
	{
	  set_user_assembler_libfunc ("ffs", asmspec);
	  set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
						       MODE_INT, 0), "ffs");
	}
      break;
    default:
      break;
    }
}
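/* Illustrative sketch, not part of the original source: the
   user-level construct that reaches this hook.  Redeclaring a
   recognized builtin with an asm label makes GCC emit the renamed
   symbol both for explicit calls and for the block moves it generates
   itself; "my_memcpy" is a hypothetical symbol name.  */
#if 0
void *memcpy (void *dst, const void *src, __SIZE_TYPE__ n)
  __asm__ ("my_memcpy");
#endif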
/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one
   that is most probably expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}